gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package liquibase.util;

import liquibase.database.Database;
import liquibase.database.core.DB2Database;
import liquibase.database.core.MySQLDatabase;
import liquibase.database.core.OracleDatabase;
import liquibase.datatype.DataTypeFactory;
import liquibase.datatype.LiquibaseDataType;
import liquibase.datatype.core.*;
import liquibase.logging.LogFactory;
import liquibase.statement.DatabaseFunction;
import liquibase.structure.core.Column;
import liquibase.structure.core.DataType;

import java.math.BigDecimal;
import java.sql.Types;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Scanner;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Utility methods for classifying JDBC type codes and for converting raw SQL
 * default-value strings (as returned by database metadata) into typed Java
 * objects or, when the value is not parseable, {@link DatabaseFunction}s.
 */
public class SqlUtil {

    /**
     * Returns true if the given JDBC type code denotes a numeric type.
     *
     * @param dataType a code from {@link java.sql.Types}
     * @return true for BIGINT, BIT, INTEGER, SMALLINT, TINYINT, DECIMAL,
     *         DOUBLE, FLOAT, NUMERIC and REAL
     */
    public static boolean isNumeric(int dataType) {
        List<Integer> numericTypes = Arrays.asList(
                Types.BIGINT,
                Types.BIT,
                Types.INTEGER,
                Types.SMALLINT,
                Types.TINYINT,
                Types.DECIMAL,
                Types.DOUBLE,
                Types.FLOAT,
                Types.NUMERIC,
                Types.REAL
        );
        return numericTypes.contains(dataType);
    }

    /**
     * Returns true if the given JDBC type code is {@link Types#BOOLEAN}.
     *
     * @param dataType a code from {@link java.sql.Types}
     */
    public static boolean isBoolean(int dataType) {
        return dataType == Types.BOOLEAN;
    }

    /**
     * Returns true if the given JDBC type code denotes a date/time type
     * (DATE, TIME or TIMESTAMP).
     *
     * @param dataType a code from {@link java.sql.Types}
     */
    public static boolean isDate(int dataType) {
        List<Integer> validTypes = Arrays.asList(
                Types.DATE,
                Types.TIME,
                Types.TIMESTAMP
        );
        return validTypes.contains(dataType);
    }

    /**
     * Converts a raw default value read from database metadata into a typed
     * Java object appropriate for the column's data type.
     *
     * <p>Non-string values are returned unchanged. String values are first
     * unwrapped (single quotes, {@code ((...))}, {@code ('...')}) and then
     * parsed according to the JDBC type id and/or the Liquibase data type.
     * Values that cannot be parsed as a literal are wrapped in a
     * {@link DatabaseFunction} so they are emitted verbatim in generated SQL.
     *
     * @param database the database the value came from (drives dialect quirks,
     *                 e.g. Oracle unquoted functions, MySQL {@code b'0'} bits)
     * @param val      the raw value; may be any object, only Strings are parsed
     * @param type     the column's reported data type
     * @return a typed object ({@code Boolean}, {@code Integer},
     *         {@code BigDecimal}, {@code java.util.Date}, {@code String}, ...),
     *         a {@link DatabaseFunction}, or {@code null} for empty non-char
     *         values and SQL NULL
     */
    public static Object parseValue(Database database, Object val, DataType type) {
        if (!(val instanceof String)) {
            return val;
        }

        int typeId = Integer.MIN_VALUE;
        if (type.getDataTypeId() != null) {
            typeId = type.getDataTypeId();
        }
        String typeName = type.getTypeName();

        LiquibaseDataType liquibaseDataType = DataTypeFactory.getInstance().from(type, database);

        String stringVal = (String) val;
        if (stringVal.isEmpty()) {
            // Empty string is a valid CHAR default; for anything else treat as "no default"
            if (liquibaseDataType instanceof CharType) {
                return "";
            } else {
                return null;
            }
        }

        if (database instanceof OracleDatabase && !stringVal.startsWith("'") && !stringVal.endsWith("'")) {
            //oracle returns functions without quotes
            Object maybeDate = null;

            if (liquibaseDataType instanceof DateType || typeId == Types.DATE) {
                if (stringVal.endsWith("'HH24:MI:SS')")) {
                    maybeDate = DataTypeFactory.getInstance().fromDescription("time", database).sqlToObject(stringVal, database);
                } else {
                    maybeDate = DataTypeFactory.getInstance().fromDescription("date", database).sqlToObject(stringVal, database);
                }
            } else if (liquibaseDataType instanceof DateTimeType || typeId == Types.TIMESTAMP) {
                maybeDate = DataTypeFactory.getInstance().fromDescription("datetime", database).sqlToObject(stringVal, database);
            } else if (!stringVal.matches("\\d+\\.?\\d*")) { //not just a number
                return new DatabaseFunction(stringVal);
            }
            if (maybeDate != null) {
                if (maybeDate instanceof java.util.Date) {
                    return maybeDate;
                } else {
                    // sqlToObject could not produce a real date; keep the raw expression
                    return new DatabaseFunction(stringVal);
                }
            }
        }

        boolean strippedSingleQuotes = false;

        if (stringVal.startsWith("'") && stringVal.endsWith("'")) {
            stringVal = stringVal.substring(1, stringVal.length() - 1);
            strippedSingleQuotes = true;
        } else if (stringVal.startsWith("((") && stringVal.endsWith("))")) {
            stringVal = stringVal.substring(2, stringVal.length() - 2);
        } else if (stringVal.startsWith("('") && stringVal.endsWith("')")) {
            stringVal = stringVal.substring(2, stringVal.length() - 2);
        } else if (stringVal.startsWith("(") && stringVal.endsWith(")")) {
            // Singly-parenthesized values are expressions, not literals
            return new DatabaseFunction(stringVal.substring(1, stringVal.length() - 1));
        }

        Scanner scanner = new Scanner(stringVal.trim());
        if (typeId == Types.ARRAY) {
            return new DatabaseFunction(stringVal);
        } else if ((liquibaseDataType instanceof BigIntType || typeId == Types.BIGINT)) {
            if (scanner.hasNextBigInteger()) {
                return scanner.nextBigInteger();
            } else {
                return new DatabaseFunction(stringVal);
            }
        } else if (typeId == Types.BINARY) {
            return new DatabaseFunction(stringVal.trim());
        } else if (typeId == Types.BIT) {
            if (stringVal.startsWith("b'")) { //mysql returns boolean values as b'0' and b'1'
                stringVal = stringVal.replaceFirst("b'", "").replaceFirst("'$", "");
            }
            stringVal = stringVal.trim();
            // BUGFIX: re-scan the cleaned value. The outer scanner was created
            // before the b'...' wrapper was stripped, so it would inspect the
            // stale string (e.g. b'true' previously fell through to
            // new Integer("true") and threw NumberFormatException).
            Scanner bitScanner = new Scanner(stringVal);
            if (bitScanner.hasNextBoolean()) {
                return bitScanner.nextBoolean();
            } else {
                // Integer.valueOf instead of the deprecated new Integer(String)
                return Integer.valueOf(stringVal);
            }
        } else if (liquibaseDataType instanceof BlobType || typeId == Types.BLOB) {
            if (strippedSingleQuotes) {
                return stringVal;
            } else {
                return new DatabaseFunction(stringVal);
            }
        } else if ((liquibaseDataType instanceof BooleanType || typeId == Types.BOOLEAN)) {
            if (scanner.hasNextBoolean()) {
                return scanner.nextBoolean();
            } else {
                return new DatabaseFunction(stringVal);
            }
        } else if (liquibaseDataType instanceof CharType || typeId == Types.CHAR) {
            return stringVal;
        } else if (liquibaseDataType instanceof ClobType || typeId == Types.CLOB) {
            return stringVal;
        } else if (typeId == Types.DATALINK) {
            return new DatabaseFunction(stringVal);
        } else if (liquibaseDataType instanceof DateType || typeId == Types.DATE) {
            if (typeName.equalsIgnoreCase("year")) {
                return stringVal.trim();
            }
            return DataTypeFactory.getInstance().fromDescription("date", database).sqlToObject(stringVal, database);
        } else if ((liquibaseDataType instanceof DecimalType || typeId == Types.DECIMAL)) {
            if (scanner.hasNextBigDecimal()) {
                return scanner.nextBigDecimal();
            } else {
                return new DatabaseFunction(stringVal);
            }
        } else if (typeId == Types.DISTINCT) {
            return new DatabaseFunction(stringVal);
        } else if ((liquibaseDataType instanceof DoubleType || typeId == Types.DOUBLE)) {
            if (scanner.hasNextDouble()) {
                return scanner.nextDouble();
            } else {
                return new DatabaseFunction(stringVal);
            }
        } else if ((liquibaseDataType instanceof FloatType || typeId == Types.FLOAT)) {
            if (scanner.hasNextFloat()) {
                return scanner.nextFloat();
            } else {
                return new DatabaseFunction(stringVal);
            }
        } else if ((liquibaseDataType instanceof IntType || typeId == Types.INTEGER)) {
            if (scanner.hasNextInt()) {
                return scanner.nextInt();
            } else {
                return new DatabaseFunction(stringVal);
            }
        } else if (typeId == Types.JAVA_OBJECT) {
            return new DatabaseFunction(stringVal);
        } else if (typeId == Types.LONGNVARCHAR) {
            return stringVal;
        } else if (typeId == Types.LONGVARBINARY) {
            return new DatabaseFunction(stringVal);
        } else if (typeId == Types.LONGVARCHAR) {
            return stringVal;
        } else if (liquibaseDataType instanceof NCharType || typeId == Types.NCHAR
                || liquibaseDataType.getName().equalsIgnoreCase("NCLOB")) {
            return stringVal;
        } else if (typeId == Types.NCLOB) {
            return stringVal;
        } else if (typeId == Types.NULL) {
            return null;
        } else if ((liquibaseDataType instanceof NumberType || typeId == Types.NUMERIC)) {
            if (scanner.hasNextBigDecimal()) {
                return scanner.nextBigDecimal();
            } else {
                return new DatabaseFunction(stringVal);
            }
        } else if (liquibaseDataType instanceof NVarcharType || typeId == Types.NVARCHAR) {
            return stringVal;
        } else if (typeId == Types.OTHER) {
            if (database instanceof DB2Database && typeName.equalsIgnoreCase("DECFLOAT")) {
                return new BigDecimal(stringVal);
            }
            return new DatabaseFunction(stringVal);
        } else if (typeId == Types.REAL) {
            return new BigDecimal(stringVal.trim());
        } else if (typeId == Types.REF) {
            return new DatabaseFunction(stringVal);
        } else if (typeId == Types.ROWID) {
            return new DatabaseFunction(stringVal);
        } else if ((liquibaseDataType instanceof SmallIntType || typeId == Types.SMALLINT)) {
            if (scanner.hasNextInt()) {
                return scanner.nextInt();
            } else {
                return new DatabaseFunction(stringVal);
            }
        } else if (typeId == Types.SQLXML) {
            return new DatabaseFunction(stringVal);
        } else if (typeId == Types.STRUCT) {
            return new DatabaseFunction(stringVal);
        } else if (liquibaseDataType instanceof TimeType || typeId == Types.TIME) {
            return DataTypeFactory.getInstance().fromDescription("time", database).sqlToObject(stringVal, database);
        } else if (liquibaseDataType instanceof DateTimeType || liquibaseDataType instanceof TimestampType
                || typeId == Types.TIMESTAMP) {
            return DataTypeFactory.getInstance().fromDescription("datetime", database).sqlToObject(stringVal, database);
        } else if ((liquibaseDataType instanceof TinyIntType || typeId == Types.TINYINT)) {
            if (scanner.hasNextInt()) {
                return scanner.nextInt();
            } else {
                return new DatabaseFunction(stringVal);
            }
        } else if (typeId == Types.VARBINARY) {
            return new DatabaseFunction(stringVal);
        } else if (liquibaseDataType instanceof VarcharType || typeId == Types.VARCHAR) {
            return stringVal;
        } else if (database instanceof MySQLDatabase && typeName.toLowerCase().startsWith("enum")) {
            return stringVal;
        } else {
            LogFactory.getLogger().info("Unknown default value: value '" + stringVal + "' type " + typeName
                    + " (" + type + "), assuming it is a function");
            return new DatabaseFunction(stringVal);
        }
    }

    /**
     * Replaces {@code :name}, {@code :value} and {@code ?} placeholders in the
     * given predicate: {@code :name} is substituted with the next non-null
     * escaped column name, the others with the SQL rendering of the next
     * parameter object.
     *
     * @param database    database used for escaping and value-to-SQL conversion
     * @param predicate   predicate text containing placeholders
     * @param columnNames column names consumed by {@code :name} placeholders
     * @param parameters  values consumed by {@code ?}/{@code :value} placeholders
     * @return the predicate with placeholders replaced
     */
    public static String replacePredicatePlaceholders(Database database, String predicate,
                                                      List<String> columnNames, List<Object> parameters) {
        Matcher matcher = Pattern.compile(":name|\\?|:value").matcher(predicate.trim());
        StringBuffer sb = new StringBuffer();
        Iterator<String> columnNameIter = columnNames.iterator();
        Iterator<Object> paramIter = parameters.iterator();

        while (matcher.find()) {
            if (matcher.group().equals(":name")) {
                while (columnNameIter.hasNext()) {
                    String columnName = columnNameIter.next();
                    if (columnName == null) {
                        continue; // skip null column names; keep looking for a usable one
                    }
                    matcher.appendReplacement(sb,
                            Matcher.quoteReplacement(database.escapeObjectName(columnName, Column.class)));
                    break;
                }
            } else if (paramIter.hasNext()) {
                Object param = paramIter.next();
                matcher.appendReplacement(sb, Matcher.quoteReplacement(
                        DataTypeFactory.getInstance().fromObject(param, database).objectToSql(param, database)));
            }
        }
        matcher.appendTail(sb);
        return sb.toString();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.datasketches.pig.kll;

import java.io.IOException;

import org.apache.datasketches.kll.KllFloatsSketch;
import org.apache.datasketches.memory.Memory;
import org.apache.pig.Accumulator;
import org.apache.pig.Algebraic;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;

/**
 * Pig UDF that builds a KllFloatsSketch from a bag of Datum Tuples, each
 * holding a single Float in field 0, and returns the sketch serialized as a
 * DataByteArray.
 *
 * <p>Implements both the <i>Accumulator</i> and the <i>Algebraic</i>
 * interfaces so Pig can stream input and push work into combiners.
 */
public class DataToSketch extends EvalFunc<DataByteArray>
    implements Accumulator<DataByteArray>, Algebraic {

  private static final TupleFactory TUPLE_FACTORY = TupleFactory.getInstance();

  // UDFs are stateless except for the Accumulator interface; everything at
  // class level is final apart from the accumulator's working sketch.
  private final int sketchK;
  private KllFloatsSketch workingSketch;

  // TOP LEVEL API

  /**
   * Default constructor. Uses the sketch library's default k.
   */
  public DataToSketch() {
    this(KllFloatsSketch.DEFAULT_K);
  }

  /**
   * String constructor, called by Pig with the UDF's declared argument.
   *
   * @param kStr string representation of k
   */
  public DataToSketch(final String kStr) {
    this(Integer.parseInt(kStr));
  }

  /**
   * Base constructor.
   *
   * @param k parameter that determines the accuracy and size of the sketch.
   */
  private DataToSketch(final int k) {
    super();
    sketchK = k;
  }

  /**
   * Stateless top-level exec: consumes an input Tuple whose single field is a
   * DataBag of Datum Tuples (field 0: Float) and returns the updated sketch
   * serialized as a DataByteArray.
   *
   * @param inputTuple a tuple containing a single bag of Datum Tuples
   * @return serialized sketch
   * @throws IOException from Pig
   * @see "org.apache.pig.EvalFunc.exec(org.apache.pig.data.Tuple)"
   */
  @Override // TOP LEVEL EXEC
  public DataByteArray exec(final Tuple inputTuple) throws IOException {
    final KllFloatsSketch sketch = new KllFloatsSketch(sketchK);
    if ((inputTuple == null) || (inputTuple.size() == 0)) {
      return new DataByteArray(sketch.toByteArray());
    }
    final DataBag datumBag = (DataBag) inputTuple.get(0);
    for (final Tuple datum : datumBag) {
      sketch.update((Float) datum.get(0));
    }
    return new DataByteArray(sketch.toByteArray());
  }

  // ACCUMULATOR INTERFACE

  /**
   * Accumulator version of exec(): folds another bag of Datum Tuples into the
   * working sketch without serializing it; may be called repeatedly.
   *
   * @param inputTuple a tuple containing a single bag of Datum Tuples
   * @throws IOException by Pig
   * @see #exec
   * @see "org.apache.pig.Accumulator.accumulate(org.apache.pig.data.Tuple)"
   */
  @Override
  public void accumulate(final Tuple inputTuple) throws IOException {
    if ((inputTuple == null) || (inputTuple.size() == 0)) {
      return;
    }
    final DataBag datumBag = (DataBag) inputTuple.get(0);
    if (datumBag == null) {
      return;
    }
    if (workingSketch == null) {
      workingSketch = new KllFloatsSketch(sketchK);
    }
    for (final Tuple datum : datumBag) {
      workingSketch.update((Float) datum.get(0));
    }
  }

  /**
   * Returns the result accumulated by prior {@link #accumulate} calls, or an
   * empty sketch if none were made.
   *
   * @return serialized sketch
   * @see "org.apache.pig.Accumulator.getValue()"
   */
  @Override
  public DataByteArray getValue() {
    final KllFloatsSketch result =
        (workingSketch != null) ? workingSketch : new KllFloatsSketch(sketchK);
    return new DataByteArray(result.toByteArray());
  }

  /**
   * Resets the Accumulator state.
   *
   * @see "org.apache.pig.Accumulator.cleanup()"
   */
  @Override
  public void cleanup() {
    workingSketch = null;
  }

  // ALGEBRAIC INTERFACE

  @Override
  public String getInitial() {
    return Initial.class.getName();
  }

  @Override
  public String getIntermed() {
    return Intermediate.class.getName();
  }

  @Override
  public String getFinal() {
    return Final.class.getName();
  }

  // STATIC Initial Class only called by Pig

  /**
   * Initial pass of the Algebraic operation: passes every record through
   * unchanged so the Intermediate stage can process it.
   */
  public static class Initial extends EvalFunc<Tuple> {
    // The Algebraic worker classes are static and stateless; constructors
    // must mirror the parent UDF since Pig links them only by name.

    /**
     * Default constructor.
     */
    public Initial() {}

    /**
     * Constructor with explicit k as string (ignored; pass-through stage).
     *
     * @param kStr string representation of k
     */
    public Initial(final String kStr) {}

    @Override
    public Tuple exec(final Tuple inputTuple) throws IOException {
      return inputTuple;
    }
  }

  // STATIC Intermediate Class only called by Pig

  /**
   * Intermediate pass of the Algebraic operation: folds bags coming from
   * either the Initial or a prior Intermediate stage into a partial sketch,
   * returned wrapped in a Tuple.
   */
  public static class Intermediate extends EvalFunc<Tuple> {
    // Static and stateless; constructors mirror the parent UDF.

    private final int sketchK;

    /**
     * Default constructor. Uses the sketch library's default k.
     */
    public Intermediate() {
      this(KllFloatsSketch.DEFAULT_K);
    }

    /**
     * Constructor with explicit k as string; Pig passes the same arguments
     * as given to the base UDF.
     *
     * @param kStr string representation of k
     */
    public Intermediate(final String kStr) {
      this(Integer.parseInt(kStr));
    }

    /**
     * Constructor with primitive k.
     *
     * @param k parameter that determines the accuracy and size of the sketch.
     */
    private Intermediate(final int k) {
      sketchK = k;
    }

    @Override
    public Tuple exec(final Tuple inputTuple) throws IOException { //throws is in API
      return TUPLE_FACTORY.newTuple(foldInput(inputTuple, sketchK));
    }
  }

  // STATIC Final Class only called by Pig

  /**
   * Final pass of the Algebraic operation: folds bags coming from either the
   * Initial or the Intermediate stage into the finished serialized sketch.
   */
  public static class Final extends EvalFunc<DataByteArray> {
    // Static and stateless; constructors mirror the parent UDF.

    private final int sketchK;

    /**
     * Default constructor. Uses the sketch library's default k.
     */
    public Final() {
      this(KllFloatsSketch.DEFAULT_K);
    }

    /**
     * Constructor with explicit k as string; Pig passes the same arguments
     * as given to the base UDF.
     *
     * @param kStr string representation of k
     */
    public Final(final String kStr) {
      this(Integer.parseInt(kStr));
    }

    /**
     * Constructor with primitive k.
     *
     * @param k parameter that determines the accuracy and size of the sketch.
     */
    private Final(final int k) {
      sketchK = k;
    }

    @Override
    public DataByteArray exec(final Tuple inputTuple) throws IOException {
      return foldInput(inputTuple, sketchK);
    }
  }

  /**
   * Shared reducer for the Intermediate and Final stages. Field 0 of each
   * inner tuple is either a DataBag of raw Datum Tuples (Initial output,
   * Intermediate bypassed) or a DataByteArray holding a serialized sketch
   * (Intermediate output) to be merged.
   */
  private static DataByteArray foldInput(final Tuple inputTuple, final int k) throws IOException {
    final KllFloatsSketch sketch = new KllFloatsSketch(k);
    if ((inputTuple != null) && (inputTuple.size() > 0)) {
      for (final Tuple dataTuple : (DataBag) inputTuple.get(0)) {
        final Object field0 = dataTuple.get(0);
        if (field0 == null) {
          continue;
        }
        if (field0 instanceof DataBag) {
          // Raw datum tuples from mapper Initial functions; update directly.
          for (final Tuple datum : (DataBag) field0) {
            sketch.update((Float) datum.get(0));
          }
        } else if (field0 instanceof DataByteArray) {
          // Serialized sketches from mapper Intermediate functions; merge.
          final byte[] serialized = ((DataByteArray) field0).get();
          sketch.merge(KllFloatsSketch.heapify(Memory.wrap(serialized)));
        } else {
          throw new IllegalArgumentException("dataTuple.Field0: Is not a DataByteArray: "
              + field0.getClass().getName());
        }
      }
    }
    return new DataByteArray(sketch.toByteArray());
  }
}
package de.bht.mmi.iot.service;

import de.bht.mmi.iot.constants.CacheConstants;
import de.bht.mmi.iot.exception.EntityNotFoundException;
import de.bht.mmi.iot.exception.NotAuthorizedException;
import de.bht.mmi.iot.model.Cluster;
import de.bht.mmi.iot.model.Gateway;
import de.bht.mmi.iot.model.Sensor;
import de.bht.mmi.iot.model.User;
import de.bht.mmi.iot.repository.SensorRepository;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.stereotype.Service;

import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static de.bht.mmi.iot.constants.RoleConstants.*;

/**
 * Service implementation for reading, saving and deleting {@link Sensor}
 * entities, with role-/ownership-based authorization on the overloads that
 * take an authenticated user.
 */
@Service
public class SensorServiceImpl implements SensorService {

    @Autowired
    private SensorRepository sensorRepository;
    @Autowired
    private UserService userService;
    @Autowired
    private GatewayService gatewayService;
    @Autowired
    private ClusterService clusterService;
    @Autowired
    private CacheService cacheService;

    /** Returns every sensor, without authorization checks. */
    @Override
    public Iterable<Sensor> getAll() {
        return sensorRepository.findAll();
    }

    /** Returns the sensors with the given ids. */
    @Override
    public Iterable<Sensor> getAllForIds(Iterable<String> ids) {
        return sensorRepository.findAll(ids);
    }

    /**
     * Returns all sensors visible to the authenticated user: everything for
     * admins/ROLE_GET_ALL_SENSOR, otherwise the union of owned sensors,
     * sensors released for the user, and sensors in released clusters.
     *
     * @throws EntityNotFoundException if the user cannot be loaded
     */
    @Override
    public Iterable<Sensor> getAll(UserDetails authenticatedUser) throws EntityNotFoundException {
        if (userService.isAnyRolePresent(authenticatedUser, ROLE_ADMIN, ROLE_GET_ALL_SENSOR)) {
            return getAll();
        }
        final String username = authenticatedUser.getUsername();
        final User user = userService.loadUserByUsername(username);
        // Set deduplicates sensors reachable via more than one grant
        final Set<Sensor> result = new HashSet<>();
        CollectionUtils.addAll(result, getAllByOwner(username));
        CollectionUtils.addAll(result, getAllReleasedForUser(username));
        for (String clusterId : user.getReleasedForClusters()) {
            CollectionUtils.addAll(result, getAllByClusterId(clusterId));
        }
        return result;
    }

    /**
     * Returns the sensor with the given id.
     *
     * @throws EntityNotFoundException if no sensor with that id exists
     */
    @Override
    public Sensor getOne(String sensorId) throws EntityNotFoundException {
        Sensor sensor = sensorRepository.findOne(sensorId);
        if (sensor != null) {
            return sensor;
        } else {
            throw new EntityNotFoundException(String.format("Sensor with id '%s' not found!", sensorId));
        }
    }

    /**
     * Returns the sensors explicitly released for the given user.
     *
     * @throws EntityNotFoundException if the user cannot be loaded
     */
    @Override
    public Iterable<Sensor> getAllReleasedForUser(String username) throws EntityNotFoundException {
        final User user = userService.loadUserByUsername(username);
        final Iterable<Sensor> userSensors = sensorRepository.findAll(user.getReleasedForSensors());
        return userSensors != null ? userSensors : Collections.emptyList();
    }

    /**
     * Returns all sensors owned by the given user.
     *
     * @throws EntityNotFoundException if the user cannot be loaded
     */
    @Override
    public Iterable<Sensor> getAllByOwner(String username) throws EntityNotFoundException {
        userService.loadUserByUsername(username); // existence check only
        return sensorRepository.findByOwner(username);
    }

    /**
     * Authorized variant of {@link #getAllByOwner(String)}: allowed for
     * admins, ROLE_GET_ALL_SENSOR, or the owner themselves.
     *
     * @throws NotAuthorizedException if the caller may not list this owner's sensors
     */
    @Override
    public Iterable<Sensor> getAllByOwner(String username, UserDetails authenticatedUser)
            throws EntityNotFoundException, NotAuthorizedException {
        userService.loadUserByUsername(username); // existence check only
        if (userService.isAnyRolePresent(authenticatedUser, ROLE_ADMIN, ROLE_GET_ALL_SENSOR)
                || username.equals(authenticatedUser.getUsername())) {
            return getAllByOwner(username);
        }
        throw new NotAuthorizedException(
                String.format("You are not authorized to get all sensors for owner '%s'", username));
    }

    /**
     * Returns all sensors attached to the given gateway.
     *
     * @throws EntityNotFoundException if the gateway does not exist
     */
    @Override
    public Iterable<Sensor> getAllByGatewayId(String gatewayId) throws EntityNotFoundException {
        gatewayService.getOne(gatewayId); // existence check only
        return sensorRepository.findByAttachedGateway(gatewayId);
    }

    /**
     * Authorized variant of {@link #getAllByGatewayId(String)}: the gateway must
     * be accessible to the caller (admin, gateway role, owner, or released), and
     * the result is further restricted to sensors the caller may see unless the
     * caller has ROLE_GET_ALL_SENSOR or owns the gateway.
     *
     * @throws NotAuthorizedException if the caller may not access the gateway
     */
    @Override
    public Iterable<Sensor> getAllByGatewayId(String gatewayId, UserDetails authenticatedUser)
            throws EntityNotFoundException, NotAuthorizedException {
        final Gateway gateway = gatewayService.getOne(gatewayId);
        final User user = userService.loadUserByUsername(authenticatedUser.getUsername());
        final List<String> releasedForGatewayIds = user.getReleasedForGateways();
        final boolean isOwner = gateway.getOwner().equals(authenticatedUser.getUsername());
        if (userService.isAnyRolePresent(authenticatedUser, ROLE_ADMIN, ROLE_GET_ALL_GATEWAY)
                || isOwner || releasedForGatewayIds.contains(gatewayId)) {
            final Iterable<Sensor> allSensorsFromGateway = getAllByGatewayId(gatewayId);
            if (userService.isRolePresent(authenticatedUser, ROLE_GET_ALL_SENSOR) || isOwner) {
                return allSensorsFromGateway;
            }
            final Iterable<Sensor> allSensorsReleasedForUser = getAll(authenticatedUser);
            return CollectionUtils.intersection(allSensorsFromGateway, allSensorsReleasedForUser);
        } else {
            // BUGFIX: message previously had an unbalanced quote ("...id: '%s")
            throw new NotAuthorizedException(
                    String.format("You are not authorized to access gateway with id: '%s'", gatewayId));
        }
    }

    /**
     * Returns all sensors attached to the given cluster.
     *
     * @throws EntityNotFoundException if the cluster does not exist
     */
    @Override
    public Iterable<Sensor> getAllByClusterId(String clusterId) throws EntityNotFoundException {
        clusterService.getOne(clusterId); // existence check only
        return sensorRepository.findByAttachedCluster(clusterId);
    }

    /**
     * Authorized variant of {@link #getAllByClusterId(String)}; mirrors the
     * gateway logic for clusters.
     *
     * @throws NotAuthorizedException if the caller may not access the cluster
     */
    @Override
    public Iterable<Sensor> getAllByClusterId(String clusterId, UserDetails authenticatedUser)
            throws EntityNotFoundException, NotAuthorizedException {
        final Cluster cluster = clusterService.getOne(clusterId);
        final User user = userService.loadUserByUsername(authenticatedUser.getUsername());
        final List<String> releasedForClusters = user.getReleasedForClusters();
        final boolean isOwner = cluster.getOwner().equals(authenticatedUser.getUsername());
        if (userService.isAnyRolePresent(authenticatedUser, ROLE_ADMIN, ROLE_GET_ALL_CLUSTER)
                || isOwner || releasedForClusters.contains(clusterId)) {
            final Iterable<Sensor> allSensorsFromCluster = getAllByClusterId(clusterId);
            if (userService.isRolePresent(authenticatedUser, ROLE_GET_ALL_SENSOR) || isOwner) {
                return allSensorsFromCluster;
            }
            final Iterable<Sensor> allSensorsReleasedForUser = getAll(authenticatedUser);
            return CollectionUtils.intersection(allSensorsFromCluster, allSensorsReleasedForUser);
        } else {
            // BUGFIX: message previously had an unbalanced quote ("...id: '%s")
            throw new NotAuthorizedException(
                    String.format("You are not authorized to access cluster with id: '%s'", clusterId));
        }
    }

    /**
     * Validates the sensor's owner, attached cluster and attached gateway, then
     * persists it.
     *
     * @throws EntityNotFoundException if owner, cluster or gateway do not exist
     */
    @Override
    public Sensor save(Sensor sensor) throws EntityNotFoundException {
        userService.loadUserByUsername(sensor.getOwner()); // existence check only
        if (sensor.getAttachedCluster() != null) {
            clusterService.getOne(sensor.getAttachedCluster());
        }
        if (sensor.getAttachedGateway() != null) {
            gatewayService.getOne(sensor.getAttachedGateway());
        }
        return sensorRepository.save(sensor);
    }

    /**
     * Authorized save: create requires admin/ROLE_CREATE_SENSOR, update requires
     * admin/ROLE_UPDATE_SENSOR or ownership of the existing sensor. Updates also
     * evict the sensor's cached isActive() value.
     *
     * @throws NotAuthorizedException if the caller may not save/update the sensor
     */
    @Override
    public Sensor save(Sensor sensor, UserDetails authenticatedUser)
            throws EntityNotFoundException, NotAuthorizedException {
        Sensor oldSensor = null;
        if (sensor.getId() != null) {
            oldSensor = sensorRepository.findOne(sensor.getId());
        }
        if (oldSensor == null) {
            // Creating a new sensor
            if (userService.isAnyRolePresent(authenticatedUser, ROLE_ADMIN, ROLE_CREATE_SENSOR)) {
                return save(sensor);
            }
        } else {
            // Updating an existing sensor
            if (userService.isAnyRolePresent(authenticatedUser, ROLE_ADMIN, ROLE_UPDATE_SENSOR)
                    || oldSensor.getOwner().equals(authenticatedUser.getUsername())) {
                final Sensor savedSensor = save(sensor);
                // isActive() is @Cacheable; drop the stale entry for this sensor
                cacheService.getOneByCacheName(CacheConstants.CACHE_SENSOR_ACTIVE).evict(savedSensor.getId());
                return savedSensor;
            }
        }
        throw new NotAuthorizedException("You are not authorized to save/update sensors");
    }

    /**
     * Deletes the sensor with the given id.
     *
     * @throws EntityNotFoundException if the sensor does not exist
     */
    @Override
    public void delete(String sensorId) throws EntityNotFoundException {
        getOne(sensorId); // existence check only
        sensorRepository.delete(sensorId);
    }

    /**
     * Authorized delete: allowed for admins, ROLE_DELETE_SENSOR, or the owner.
     *
     * @throws NotAuthorizedException if the caller may not delete the sensor
     */
    @Override
    public void delete(String sensorId, UserDetails authenticatedUser)
            throws EntityNotFoundException, NotAuthorizedException {
        final Sensor sensor = getOne(sensorId);
        if (userService.isAnyRolePresent(authenticatedUser, ROLE_ADMIN, ROLE_DELETE_SENSOR)
                || sensor.getOwner().equals(authenticatedUser.getUsername())) {
            sensorRepository.delete(sensor);
            return;
        }
        throw new NotAuthorizedException(
                String.format("You are not authorized to delete sensor with id '%s'", sensor.getId()));
    }

    /**
     * Returns whether the sensor is active; results are cached per sensor id.
     *
     * @throws EntityNotFoundException if the sensor does not exist
     */
    @Override
    @Cacheable(CacheConstants.CACHE_SENSOR_ACTIVE)
    public boolean isActive(String id) throws EntityNotFoundException {
        return getOne(id).isActive();
    }
}
package gov.nih.nci.cagrid.wsenum;

import gov.nih.nci.cagrid.common.Utils;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;

import javax.xml.namespace.QName;
import javax.xml.soap.SOAPElement;

import junit.framework.TestCase;

import org.apache.axis.AxisEngine;
import org.apache.axis.EngineConfiguration;
import org.apache.axis.MessageContext;
import org.apache.axis.configuration.FileProvider;
import org.apache.axis.server.AxisServer;
import org.apache.axis.types.Duration;
import org.globus.ws.enumeration.EnumIterator;
import org.globus.ws.enumeration.IterationConstraints;
import org.globus.ws.enumeration.IterationResult;
import org.globus.ws.enumeration.TimeoutException;
import org.projectmobius.bookstore.Book;

/**
 * CompleteEnumIteratorBaseTest
 * Test case for complete enumeration iterator implementations
 *
 * @author David Ervin
 *
 * @created Apr 10, 2007 12:16:58 PM
 * @version $Id: CompleteEnumIteratorBaseTest.java,v 1.4 2008-11-04 15:27:15 dervin Exp $
 */
public abstract class CompleteEnumIteratorBaseTest extends TestCase {
    // Fixture: the source objects and the iterator under test, rebuilt per test
    private List<Object> objectList;
    private EnumIterator enumIterator;
    private String iteratorClassName;

    /**
     * @param iteratorClassName fully-qualified name of the EnumIterator
     *        implementation to test; must expose a static
     *        createIterator(List, QName) factory method
     */
    public CompleteEnumIteratorBaseTest(String iteratorClassName) {
        super("EnumIter testing for " + iteratorClassName);
        this.iteratorClassName = iteratorClassName;
    }

    protected List<Object> getObjectList() {
        return objectList;
    }

    protected EnumIterator getEnumIterator() {
        return enumIterator;
    }

    /**
     * Builds ten Book instances and reflectively invokes the implementation's
     * static createIterator(List, QName) factory to create the iterator.
     */
    public void setUp() {
        // need a list of data objects
        objectList = new ArrayList<Object>();
        for (int i = 0; i < 10; i++) {
            Book b = new Book();
            b.setAuthor("caGrid Testing Book " + i);
            b.setISBN(String.valueOf(i));
            objectList.add(b);
        }
        // set up the enum iterator
        try {
            Class iterClass = Class.forName(iteratorClassName);
            Method createIteratorMethod = iterClass.getDeclaredMethod(
                "createIterator", new Class[] {List.class, QName.class});
            Object[] args = {objectList, TestingConstants.BOOK_QNAME};
            enumIterator = (EnumIterator) createIteratorMethod.invoke(null, args);
        } catch (Exception ex) {
            ex.printStackTrace();
            fail("Error initializing the Concurrent Iterator: " + ex.getMessage());
        }
    }

    /**
     * Releases the iterator and verifies next() then throws
     * NoSuchElementException.
     */
    public void tearDown() {
        enumIterator.release();
        try {
            enumIterator.next(new IterationConstraints());
            fail("Enumeration released, but did not throw exception on next() call");
        } catch (Exception ex) {
            assertEquals("Unexpected exception thrown",
                NoSuchElementException.class.getName(), ex.getClass().getName());
            enumIterator = null;
        }
    }

    /** Asks for exactly one result and checks it came from the source list. */
    public void testRetrieveSingleResult() {
        IterationConstraints cons = new IterationConstraints(1, -1, null);
        IterationResult result = enumIterator.next(cons);
        SOAPElement[] rawElements = result.getItems();
        assertTrue("No elements were returned", rawElements != null);
        assertEquals("Unexpected number of results returned", 1, rawElements.length);
        // deserialize the result
        try {
            String xml = rawElements[0].toString();
            Book b = (Book) deserializeDocumentString(xml, Book.class);
            boolean found = bookInOriginalList(b);
            assertTrue("Returned book was not found in original object list", found);
        } catch (Exception ex) {
            ex.printStackTrace();
            fail("Error deserializing result: " + ex.getMessage());
        }
    }

    /** Asks for three results and checks each came from the source list. */
    public void testRetrieveMultipleResults() {
        IterationConstraints cons = new IterationConstraints(3, -1, null);
        IterationResult result = enumIterator.next(cons);
        SOAPElement[] rawElements = result.getItems();
        assertTrue("No elements were returned", rawElements != null);
        assertEquals("Unexpected number of results returned", 3, rawElements.length);
        for (int i = 0; i < rawElements.length; i++) {
            // deserialize the result
            try {
                Book b = (Book) deserializeDocumentString(
                    rawElements[i].toString(), Book.class);
                boolean found = bookInOriginalList(b);
                assertTrue("Returned book not found in original object list", found);
            } catch (Exception ex) {
                ex.printStackTrace();
                fail("Error deserializing result: " + ex.getMessage());
            }
        }
    }

    /**
     * Asks for more results than exist; expects all items plus the
     * end-of-sequence flag.
     */
    public void testRetrieveAllResults() {
        // ask for more results than we actually have
        IterationConstraints cons = new IterationConstraints(objectList.size() + 1, -1, null);
        IterationResult result = enumIterator.next(cons);
        SOAPElement[] rawElements = result.getItems();
        assertTrue("No elements were returned", rawElements != null);
        assertEquals("Unexpected number of results returned",
            objectList.size(), rawElements.length);
        // BUGFIX: message was garbled ("End of was not sequence reached")
        assertTrue("End of sequence was not reached", result.isEndOfSequence());
        for (int i = 0; i < rawElements.length; i++) {
            // deserialize the result
            try {
                Book b = (Book) deserializeDocumentString(
                    rawElements[i].toString(), Book.class);
                boolean found = bookInOriginalList(b);
                assertTrue("Returned book not found in original object list", found);
            } catch (Exception ex) {
                ex.printStackTrace();
                fail("Error deserializing result: " + ex.getMessage());
            }
        }
    }

    /**
     * Slows the iterator's underlying reader, then requests everything with a
     * one-second max wait; expects a TimeoutException.
     */
    public void testResultsTimeout() {
        slowDownIterator();
        // this duration (1 sec) should time out
        Duration maxWait = new Duration(false, 0, 0, 0, 0, 0, 1);
        // ask for all the results
        IterationConstraints cons = new IterationConstraints(
            objectList.size(), -1, maxWait);
        try {
            // this should timeout
            enumIterator.next(cons);
            fail("Query did not time out");
        } catch (TimeoutException ex) {
            // expected
        } catch (Exception ex) {
            ex.printStackTrace();
            // BUGFIX: "occured" -> "occurred"
            fail("Unexpected exception of type " + ex.getClass().getName()
                + " occurred: " + ex.getMessage());
        }
    }

    /**
     * Requests all results but with a character budget only large enough for
     * one serialized element; expects exactly one item back and verifies it.
     * (Method name keeps the historical "Exceded" spelling; renaming would
     * change the public interface external suites may reference.)
     */
    public void testCharLimitExceded() {
        // ask for all the results, but only enough chars for the first element
        int charCount = -1;
        StringWriter writer = new StringWriter();
        try {
            Utils.serializeObject(objectList.get(0), TestingConstants.BOOK_QNAME, writer);
            // trim() because serializeObject() appends a newline to the writer
            String text = writer.getBuffer().toString().trim();
            // just under two elements' worth of characters
            charCount = (text.length() * 2) - 1;
        } catch (Exception ex) {
            ex.printStackTrace();
            fail("Error determining object char count: " + ex.getMessage());
        }
        IterationConstraints cons = new IterationConstraints(
            objectList.size(), charCount, null);
        IterationResult result = enumIterator.next(cons);
        SOAPElement[] rawResults = result.getItems();
        assertTrue("Enumeration did not return results", rawResults != null);
        assertFalse("Enumeration mistakenly returned all results",
            rawResults.length == objectList.size());
        assertEquals("Unexpected number of results returned", 1, rawResults.length);
        // verify content
        Book original = null;
        Book returned = null;
        try {
            original = (Book) deserializeDocumentString(
                writer.getBuffer().toString(), Book.class);
            returned = (Book) deserializeDocumentString(
                rawResults[0].toString(), Book.class);
        } catch (Exception ex) {
            fail("Error deserializing objects: " + ex.getMessage());
        }
        boolean equal = original.getAuthor().equals(returned.getAuthor())
            && original.getISBN().equals(returned.getISBN());
        assertTrue("Expected book and returned book do not match", equal);
    }

    /** Returns true if a book with the same author and ISBN is in the fixture list. */
    protected boolean bookInOriginalList(Book g) {
        // verify the book is part of the original object list
        for (int i = 0; i < objectList.size(); i++) {
            Book current = (Book) objectList.get(i);
            if (current.getAuthor().equals(g.getAuthor())
                && current.getISBN().equals(g.getISBN())) {
                return true;
            }
        }
        return false;
    }

    /** Deserializes an XML document string into an instance of the given class. */
    protected Object deserializeDocumentString(String xmlDocument, Class objectClass) throws Exception {
        return Utils.deserializeObject(new StringReader(xmlDocument), objectClass);
    }

    /**
     * Builds an Axis MessageContext from an engine configuration stream,
     * configured to keep xsd type attributes out of the serialized XML.
     */
    protected MessageContext createMessageContext(InputStream configStream) {
        EngineConfiguration engineConfig = new FileProvider(configStream);
        AxisEngine engine = new AxisServer(engineConfig);
        MessageContext context = new MessageContext(engine);
        context.setEncodingStyle("");
        context.setProperty(AxisEngine.PROP_DOMULTIREFS, Boolean.FALSE);
        // the following two properties prevent xsd types from appearing in
        // every single element in the serialized XML
        context.setProperty(AxisEngine.PROP_EMIT_ALL_TYPES, Boolean.FALSE);
        context.setProperty(AxisEngine.PROP_SEND_XSI, Boolean.FALSE);
        return context;
    }

    /**
     * "Fixes" the reader for xml data to wait 500ms every time it reads a line
     * from disk. This effectively slows down the calls to next() inside
     * the iterator to the point that timeouts are a real possibility
     */
    protected void slowDownIterator() {
        // parent class has the file reader
        Class iterClass = enumIterator.getClass().getSuperclass();
        try {
            Field readerField = iterClass.getDeclaredField("fileReader");
            readerField.setAccessible(true);
            BufferedReader originalReader = (BufferedReader) readerField.get(enumIterator);
            BufferedReader slowReader = new BufferedReader(originalReader) {
                public String readLine() throws IOException {
                    try {
                        Thread.sleep(500);
                    } catch (Exception ex) {
                        // whatever
                    }
                    return super.readLine();
                }
            };
            readerField.set(enumIterator, slowReader);
        } catch (Exception ex) {
            ex.printStackTrace();
            fail("Error slowing down the iterator: " + ex.getMessage());
        }
    }
}
package io.github.marcelbraghetto.dailydeviations.features.home.logic;

import android.content.Context;
import android.content.Intent;
import android.databinding.ObservableField;
import android.net.Uri;
import android.support.annotation.IdRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;

import com.google.android.gms.appinvite.AppInviteInvitation;

import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;

import javax.inject.Inject;

import io.github.marcelbraghetto.dailydeviations.R;
import io.github.marcelbraghetto.dailydeviations.features.about.ui.AboutFragment;
import io.github.marcelbraghetto.dailydeviations.features.collection.logic.CollectionArguments;
import io.github.marcelbraghetto.dailydeviations.features.collection.logic.providers.contracts.CollectionProvider;
import io.github.marcelbraghetto.dailydeviations.features.collection.ui.CollectionFragment;
import io.github.marcelbraghetto.dailydeviations.features.detail.ui.DetailActivity;
import io.github.marcelbraghetto.dailydeviations.features.settings.ui.SettingsFragment;
import io.github.marcelbraghetto.dailydeviations.framework.artworks.contracts.ArtworksProvider;
import io.github.marcelbraghetto.dailydeviations.framework.artworks.models.CollectionFilterMode;
import io.github.marcelbraghetto.dailydeviations.framework.foundation.analytics.contracts.Analytics;
import io.github.marcelbraghetto.dailydeviations.framework.foundation.analytics.contracts.AnalyticsProvider;
import io.github.marcelbraghetto.dailydeviations.framework.foundation.core.BaseViewModel;
import io.github.marcelbraghetto.dailydeviations.framework.foundation.eventbus.contracts.EventBusProvider;
import io.github.marcelbraghetto.dailydeviations.framework.foundation.eventbus.contracts.EventBusSubscriber;
import io.github.marcelbraghetto.dailydeviations.framework.foundation.eventbus.events.CollectionFilterModeToggleEvent;
import io.github.marcelbraghetto.dailydeviations.framework.foundation.strings.contracts.StringsProvider;

/**
 * Created by Marcel Braghetto on 1/12/15.
 *
 * View model for the home activity which is responsible for the nav menu and content population.
 */
public class HomeViewModel extends BaseViewModel<HomeViewModel.Actions> implements EventBusSubscriber {
    //region Data binding
    // "Glue" is the observable state consumed directly by the data-binding layout;
    // the view model mutates it only through the private setters below.
    public final Glue glue = new Glue();

    public static class Glue {
        // Title displayed for the current content screen.
        public ObservableField<String> navigationTitle = new ObservableField<>("");
        // Currently highlighted nav menu item; defaults to the browse screen.
        public ObservableField<Integer> selectedMenuId = new ObservableField<>(R.id.nav_menu_browse);
    }
    //endregion

    //region Private fields
    private static final String SCREEN_NAME = "HomeScreen";
    // Request code used when launching the app-invite flow via startActivityForResult.
    private static final int INVITE_REQUEST_CODE = 666;

    private final Context mApplicationContext;
    private final ArtworksProvider mArtworksProvider;
    private final EventBusProvider mEventBusProvider;
    private final StringsProvider mStringsProvider;
    private final CollectionProvider mCollectionProvider;
    private final AnalyticsProvider mAnalyticsProvider;
    //endregion

    //region Public methods
    @Inject
    public HomeViewModel(@NonNull Context applicationContext,
                         @NonNull ArtworksProvider artworksProvider,
                         @NonNull EventBusProvider eventBusProvider,
                         @NonNull StringsProvider stringsProvider,
                         @NonNull CollectionProvider collectionProvider,
                         @NonNull AnalyticsProvider analyticsProvider) {
        super(Actions.class);
        mApplicationContext = applicationContext;
        mArtworksProvider = artworksProvider;
        mEventBusProvider = eventBusProvider;
        mStringsProvider = stringsProvider;
        mCollectionProvider = collectionProvider;
        mAnalyticsProvider = analyticsProvider;
    }

    /**
     * Initialize the view model with the given callback delegate and saved state if there is any.
     * @param actionDelegate to callback to.
     */
    public void begin(@Nullable Actions actionDelegate) {
        setActionDelegate(actionDelegate);
        // Show the default (browse) content without animation on first entry.
        showCurrentContentScreen(false);
        mActionDelegate.showToggleButtons();
    }

    /**
     * User has selected a menu item from the navigation drawer with the given item id.
     * @param itemId for the selected menu item.
     */
    public void menuItemSelected(@IdRes int itemId) {
        mActionDelegate.closeNavigationMenu();

        switch (itemId) {
            // Main browse screen: restore the collection content and its toggle buttons.
            case R.id.nav_menu_browse: {
                setSelectedMenuItemId(itemId);
                mAnalyticsProvider.trackEvent(Analytics.CONTENT_TYPE_MENU_ITEM_SELECTED, SCREEN_NAME, "Browse");
                showCurrentContentScreen(true);
                mActionDelegate.showToggleButtons();
                break;
            }

            // Google app-invite flow; note the selected menu id is deliberately NOT
            // changed because this launches an external activity, not a content screen.
            case R.id.nav_menu_tell_friends: {
                mAnalyticsProvider.trackEvent(Analytics.CONTENT_TYPE_MENU_ITEM_SELECTED, SCREEN_NAME, "TellFriends");
                Intent intent = new AppInviteInvitation.IntentBuilder(mStringsProvider.getString(R.string.app_invite_title))
                        .setMessage(mStringsProvider.getString(R.string.app_invite_message))
                        .setCallToActionText(mStringsProvider.getString(R.string.app_invite_call_to_action))
                        .build();
                mActionDelegate.startActivityForResult(intent, INVITE_REQUEST_CODE);
                break;
            }

            // Open the Play Store listing; also an external screen, so no menu id change.
            case R.id.nav_menu_rate_app: {
                mAnalyticsProvider.trackEvent(Analytics.CONTENT_TYPE_MENU_ITEM_SELECTED, SCREEN_NAME, "RateApp");
                Intent intent = new Intent(Intent.ACTION_VIEW);
                intent.setData(Uri.parse("https://play.google.com/store/apps/details?id=io.github.marcelbraghetto.dailydeviations"));
                mActionDelegate.startActivity(intent);
                break;
            }

            // Settings screen replaces the content and hides the collection toggles.
            case R.id.nav_menu_settings: {
                setSelectedMenuItemId(itemId);
                mAnalyticsProvider.trackEvent(Analytics.CONTENT_TYPE_MENU_ITEM_SELECTED, SCREEN_NAME, "Settings");
                setNavigationTitle(mStringsProvider.getString(R.string.nav_menu_settings));
                mActionDelegate.hideToggleButtons();
                mActionDelegate.replaceContent(SettingsFragment.newInstance(), true);
                break;
            }

            // About screen, same treatment as settings.
            case R.id.nav_menu_about: {
                setSelectedMenuItemId(itemId);
                mAnalyticsProvider.trackEvent(Analytics.CONTENT_TYPE_MENU_ITEM_SELECTED, SCREEN_NAME, "About");
                setNavigationTitle(mStringsProvider.getString(R.string.nav_menu_about));
                mActionDelegate.hideToggleButtons();
                mActionDelegate.replaceContent(AboutFragment.newInstance(), true);
                break;
            }
        }
    }

    /**
     * Back press handling: exit when already on the browse screen, otherwise
     * return to the browse screen instead of leaving the app.
     */
    public void backPressed() {
        // If we are already looking at the main content, then exit the app.
        if(getSelectedMenuItemId() == R.id.nav_menu_browse) {
            mActionDelegate.finishActivity();
            return;
        }

        // Otherwise default back to the main content.
        showCurrentContentScreen(true);
        mActionDelegate.showToggleButtons();
    }

    /**
     * The screen was started.
     */
    public void screenStarted() {
        mAnalyticsProvider.trackScreenView(SCREEN_NAME);
        subscribeToEventBus();

        // If we have no saved artworks, then we probably have never fetched any so
        // proceed with triggering a fetch operation. Over time though, the alarm
        // manager driven intent service should periodically attempt to update the
        // data set.
        if(!mArtworksProvider.hasSavedArtworks()) {
            mArtworksProvider.refreshData();
        }
    }

    /**
     * The screen was stopped.
     */
    public void screenStopped() {
        unsubscribeFromEventBus();
    }
    //endregion

    //region Event bus
    /**
     * Broadcast received for a 'favourites' event indicating that the user had toggled
     * between the main collection view and their favourites view.
     * @param event that was broadcast.
     */
    @SuppressWarnings("unused")
    @Subscribe(threadMode = ThreadMode.MAIN)
    public void onEvent(@NonNull CollectionFilterModeToggleEvent event) {
        showCurrentContentScreen(true);
    }

    // Nav header was tapped: open the detail screen for the artwork carried by the event.
    @SuppressWarnings("unused")
    @Subscribe(threadMode = ThreadMode.MAIN)
    public void onEvent(@NonNull HomeNavHeaderDetailsEvent event) {
        Intent intent = new Intent(mApplicationContext, DetailActivity.class);
        event.getArtwork().putInto(intent);
        mActionDelegate.closeNavigationMenu();
        mActionDelegate.startActivity(intent);
    }

    // Generic snackbar request broadcast from elsewhere in the feature.
    @SuppressWarnings("unused")
    @Subscribe(threadMode = ThreadMode.MAIN)
    public void onEvent(@NonNull HomeSnackbarEvent event) {
        mActionDelegate.showSnackbar(event.getMessage());
    }

    @Override
    public void subscribeToEventBus() {
        mEventBusProvider.subscribe(this);
    }

    @Override
    public void unsubscribeFromEventBus() {
        mEventBusProvider.unsubscribe(this);
    }
    //endregion

    //region Private methods
    private int getSelectedMenuItemId() {
        return glue.selectedMenuId.get();
    }

    private void setSelectedMenuItemId(@IdRes int selectedMenuItemId) {
        glue.selectedMenuId.set(selectedMenuItemId);
    }

    private void setNavigationTitle(String value) {
        glue.navigationTitle.set(value);
    }

    /**
     * Show the collection content for the user's current filter mode (all artworks
     * or favourites), updating the nav title and selected menu item to match.
     */
    private void showCurrentContentScreen(boolean animated) {
        setSelectedMenuItemId(R.id.nav_menu_browse);

        CollectionFilterMode filterMode = mCollectionProvider.getCollectionFilterMode();

        switch (filterMode) {
            case Favourites: {
                setNavigationTitle(mStringsProvider.getString(R.string.nav_menu_favourites));
                mActionDelegate.replaceContent(CollectionFragment.newInstance(new CollectionArguments(CollectionFilterMode.Favourites)), animated);
                break;
            }
            default: {
                setNavigationTitle(mStringsProvider.getString(R.string.nav_menu_browse));
                mActionDelegate.replaceContent(CollectionFragment.newInstance(new CollectionArguments(CollectionFilterMode.All)), animated);
                break;
            }
        }
    }
    //endregion

    //region Actions delegate contract
    public interface Actions {
        /**
         * Close the navigation drawer programmatically.
         */
        void closeNavigationMenu();

        /**
         * Replace the currently displayed content
         * with the given fragment.
         * @param fragment to display
         */
        void replaceContent(@NonNull Fragment fragment, boolean animated);

        /**
         * Start the given activity intent.
         * @param intent to start.
         */
        void startActivity(@NonNull Intent intent);

        /**
         * Start the given activity intent for result.
         * @param intent to start.
         */
        void startActivityForResult(@NonNull Intent intent, int requestCode);

        /**
         * Finish the current activity.
         */
        void finishActivity();

        /**
         * Show the toggle buttons for navigating the collection views.
         */
        void showToggleButtons();

        /**
         * Hide the toggle buttons for navigating the collection views.
         */
        void hideToggleButtons();

        /**
         * Display a snack bar message.
         * @param message to display.
         */
        void showSnackbar(@NonNull String message);
    }
    //endregion
}
/**
 * Copyright 2007-2016, Kaazing Corporation. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kaazing.gateway.transport.sse;

import static java.lang.String.format;

import java.io.IOException;
import java.util.concurrent.Callable;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import javax.annotation.Resource;

import org.apache.mina.core.filterchain.IoFilter;
import org.apache.mina.core.filterchain.IoFilterChain;
import org.apache.mina.core.future.ConnectFuture;
import org.apache.mina.core.future.DefaultConnectFuture;
import org.apache.mina.core.future.IoFuture;
import org.apache.mina.core.future.IoFutureListener;
import org.apache.mina.core.service.IoHandler;
import org.apache.mina.core.service.TransportMetadata;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.core.session.IoSessionInitializer;
import org.kaazing.gateway.resource.address.Protocol;
import org.kaazing.gateway.resource.address.ResourceAddress;
import org.kaazing.gateway.resource.address.ResourceAddressFactory;
import org.kaazing.gateway.transport.AbstractBridgeConnector;
import org.kaazing.gateway.transport.BridgeConnector;
import org.kaazing.gateway.transport.BridgeServiceFactory;
import org.kaazing.gateway.transport.DefaultTransportMetadata;
import org.kaazing.gateway.transport.IoHandlerAdapter;
import org.kaazing.gateway.transport.TypedAttributeKey;
import org.kaazing.gateway.transport.http.HttpProtocol;
import org.kaazing.gateway.transport.http.HttpSession;
import org.kaazing.gateway.transport.sse.bridge.SseMessage;
import org.kaazing.gateway.transport.sse.bridge.filter.SseBuffer;
import org.kaazing.gateway.transport.sse.bridge.filter.SseBufferAllocator;
import org.kaazing.gateway.transport.sse.bridge.filter.SseConnectCodecFilter;
import org.kaazing.gateway.util.scheduler.SchedulerProvider;
import org.kaazing.mina.core.buffer.IoBufferAllocatorEx;
import org.kaazing.mina.core.buffer.IoBufferEx;

/**
 * Bridge connector that establishes Server-Sent Events (SSE) sessions on top of
 * an HTTP transport session. Handles codec filter installation, session
 * creation, redirect-following, and retry/reconnect behaviour driven by the
 * SSE "retry" and reconnect fields.
 */
public class SseConnector extends AbstractBridgeConnector<SseSession> {

    // Attributes stashed on the underlying HTTP session:
    // - factory that lazily creates the bridge SseSession once the HTTP session opens
    private static final TypedAttributeKey<Callable<SseSession>> SSE_SESSION_FACTORY_KEY = new TypedAttributeKey<>(SseConnector.class, "sseSessionFactory");
    // - the pending connect future to fail if the HTTP connect attempt errors out
    private static final TypedAttributeKey<ConnectFuture> SSE_CONNECT_FUTURE_KEY = new TypedAttributeKey<>(SseConnector.class, "sseConnectFuture");
    // - the live bridge session associated with the HTTP session
    private static final TypedAttributeKey<SseSession> SSE_SESSION_KEY = new TypedAttributeKey<>(SseConnector.class, "sseSession");

    private static final String CODEC_FILTER = SseProtocol.NAME + "#codec";

    // Scheduler used to delay reconnect attempts by the SSE "retry" interval.
    private ScheduledExecutorService scheduler;
    private BridgeServiceFactory bridgeServiceFactory;
    private ResourceAddressFactory resourceAddressFactory;

    @Resource(name = "bridgeServiceFactory")
    public void setBridgeServiceFactory(BridgeServiceFactory bridgeServiceFactory) {
        this.bridgeServiceFactory = bridgeServiceFactory;
    }

    private IoFilter sseCodec;

    public SseConnector() {
        super(new DefaultSseSessionConfig());
    }

    @Resource(name = "schedulerProvider")
    public void setSchedulerProvider(SchedulerProvider provider) {
        this.scheduler = provider.getScheduler("SseConnector_reconnect", false);
    }

    @Resource(name = "resourceAddressFactory")
    public void setResourceAddressFactory(ResourceAddressFactory factory) {
        this.resourceAddressFactory = factory;
    }

    @Override
    public TransportMetadata getTransportMetadata() {
        return new DefaultTransportMetadata(SseProtocol.NAME, SseSessionConfig.class);
    }

    @Override
    public void init() {
        super.init();
        sseCodec = new SseConnectCodecFilter();
    }

    @Override
    public void addBridgeFilters(IoFilterChain filterChain) {
        filterChain.addLast(CODEC_FILTER, sseCodec);
    }

    @Override
    public void removeBridgeFilters(IoFilterChain filterChain) {
        removeFilter(filterChain, CODEC_FILTER);
    }

    @Override
    protected boolean canConnect(String transportName) {
        return transportName.equals("sse");
    }

    /**
     * Connects by delegating to the HTTP transport named in the address, wiring
     * the returned HTTP connect future so that a transport-level failure fails
     * the SSE-level future too.
     */
    @Override
    protected <T extends ConnectFuture> ConnectFuture connectInternal(ResourceAddress connectAddress, IoHandler handler,
            final IoSessionInitializer<T> initializer) {
        final DefaultConnectFuture sseConnectFuture = new DefaultConnectFuture();

        // propagate connection failure, if necessary
        IoFutureListener<ConnectFuture> parentConnectListener = new IoFutureListener<ConnectFuture>() {
            @Override
            public void operationComplete(ConnectFuture future) {
                // fail bridge connect future if parent connect fails
                if (!future.isConnected()) {
                    sseConnectFuture.setException(future.getException());
                }
            }
        };

        IoSessionInitializer<ConnectFuture> parentInitializer =
                createParentInitializer(connectAddress, handler, initializer, sseConnectFuture);

        final ResourceAddress transportAddress = connectAddress.getTransport();
        if (transportAddress == null) {
            throw new RuntimeException("Cannot find transport for resource address "+connectAddress);
        }
        BridgeConnector connector = bridgeServiceFactory.newBridgeConnector(transportAddress);
        connector.connect(transportAddress, selectConnectHandler(transportAddress),
                parentInitializer).addListener(parentConnectListener);

        return sseConnectFuture;
    }

    // Only an HTTP transport is supported underneath SSE.
    private IoHandler selectConnectHandler(ResourceAddress address) {
        Protocol protocol = bridgeServiceFactory.getTransportFactory().getProtocol(address.getResource());
        if ( protocol instanceof HttpProtocol ) {
            return httpHandler;
        }
        throw new RuntimeException(getClass()+
                ": Cannot select a connect handler for address "+address);
    }

    @Override
    protected IoFuture dispose0() throws Exception {
        // stop any pending reconnect tasks before disposing the connector
        scheduler.shutdownNow();
        return super.dispose0();
    }

    /**
     * Builds the initializer run on the parent HTTP session before the connect
     * attempt: it stashes a factory that will create the bridge SseSession (and
     * run the caller's initializer) once the HTTP session is actually opened.
     */
    private <T extends ConnectFuture> IoSessionInitializer<ConnectFuture> createParentInitializer(
            final ResourceAddress connectAddress, final IoHandler handler, final IoSessionInitializer<T> initializer,
            final DefaultConnectFuture sseConnectFuture) {

        // initialize parent session before connection attempt
        return new IoSessionInitializer<ConnectFuture>() {
            @Override
            public void initializeSession(final IoSession parent, ConnectFuture future) {
                // initializer for bridge session to specify bridge handler,
                // and call user-defined bridge session initializer if present
                final IoSessionInitializer<T> sseSessionInitializer = new IoSessionInitializer<T>() {
                    @Override
                    public void initializeSession(IoSession session, T future) {
                        SseSession sseSession = (SseSession) session;
                        sseSession.setHandler(handler);

                        if (initializer != null) {
                            initializer.initializeSession(session, future);
                        }
                    }
                };

                final HttpSession httpSession = (HttpSession) parent;
                final IoBufferAllocatorEx<SseBuffer> allocator = new SseBufferAllocator(httpSession.getBufferAllocator());

                // factory to create a new bridge session
                Callable<SseSession> createSession = new Callable<SseSession>() {
                    @Override
                    public SseSession call() throws Exception {

                        Callable<SseSession> sseSessionFactory = new Callable<SseSession>() {
                            @Override
                            public SseSession call() throws Exception {
                                return new SseSession(SseConnector.this, getProcessor(), connectAddress, connectAddress,
                                        httpSession, allocator);
                            }
                        };

                        return newSession(sseSessionInitializer, sseConnectFuture, sseSessionFactory);
                    }
                };

                SSE_SESSION_FACTORY_KEY.set(httpSession, createSession);
                SSE_CONNECT_FUTURE_KEY.set(httpSession, sseConnectFuture);
            }
        };
    }

    /**
     * Called when the HTTP session closes. Reconnects when the server asked us
     * to (retry interval or explicit reconnect flag), otherwise resets the SSE
     * session with an early-termination error.
     */
    private void reconnectOrClose(final SseSession sseSession) {
        SseSessionConfig config = sseSession.getConfig();
        int retry = config.getRetry();
        if (retry > 0 || config.isReconnecting()) {
            logger.debug("Reconnecting: {}", sseSession.getRemoteAddress());
            config.setReconnecting(false);
            if (retry <= 0) {
                // reconnect immediately
                ResourceAddress connectAddress = sseSession.getRemoteAddress();
                ReconnectListener connectListener = new ReconnectListener(sseSession);
                final ResourceAddress transportAddress = connectAddress.getTransport();
                BridgeConnector connector = bridgeServiceFactory.newBridgeConnector(transportAddress);
                connector.connect(connectAddress, httpHandler, null).addListener(connectListener);
            } else {
                // reconnect after "retry" milliseconds
                scheduler.schedule(new ReconnectCommand(sseSession), retry, TimeUnit.MILLISECONDS);
            }
        } else {
            sseSession.reset(new IOException("Early termination of IO session").fillInStackTrace());
        }
    }

    // Handler installed on the underlying HTTP session; translates HTTP/SSE
    // events into bridge-session events.
    private IoHandler httpHandler = new IoHandlerAdapter<HttpSession>() {

        @Override
        protected void doSessionOpened(HttpSession session) throws Exception {
            // TODO session.get[Ready]Future().addListener(...) to check
            // response status / headers
            IoFilterChain filterChain = session.getFilterChain();
            addBridgeFilters(filterChain);

            // On a first open there is no session yet, so invoke the stashed
            // factory; on a reconnect the key was pre-populated by ReconnectListener.
            SseSession sseSession = SSE_SESSION_KEY.get(session);
            if (sseSession == null) {
                Callable<SseSession> sessionFactory = SSE_SESSION_FACTORY_KEY.remove(session);
                SseSession newSseSession = sessionFactory.call();
                SSE_SESSION_KEY.set(session, newSseSession);
            }
        }

        @Override
        protected void doMessageReceived(final HttpSession session, Object message) throws Exception {
            SseMessage sseMessage = (SseMessage) message;
            String type = sseMessage.getType();
            IoBufferEx data = sseMessage.getData();
            String id = sseMessage.getId();
            boolean reconnect = sseMessage.isReconnect();
            int retry = sseMessage.getRetry();

            SseSession sseSession = SSE_SESSION_KEY.get(session);
            SseSessionConfig config = sseSession.getConfig();
            config.setReconnecting(reconnect);
            if (retry >= 0) {
                config.setRetry(retry);
            }
            if (id != null) {
                config.setLastId(id);
            }

            // Only default/"message" events with a non-empty payload are
            // propagated up the bridge filter chain.
            if (data != null && data.hasRemaining() && (type == null || "message".equals(type))) {
                IoFilterChain filterChain = sseSession.getFilterChain();
                filterChain.fireMessageReceived(data);
            }
        }

        @Override
        protected void doSessionClosed(HttpSession session) throws Exception {
            final SseSession sseSession = SSE_SESSION_KEY.remove(session);
            assert (sseSession != null);

            // TODO: move redirect handling to HttpConnector (optionally)
            switch (session.getStatus()) {
            case REDIRECT_MOVED_PERMANENTLY:
                String location = session.getReadHeader("Location");
                if (location == null) {
                    sseSession.reset(new Exception("Redirect attempted without Location header").fillInStackTrace());
                } else {
                    // Follow the redirect, reusing the existing bridge session.
                    ResourceAddress newConnectAddress = resourceAddressFactory.newResourceAddress(location);
                    BridgeConnector connector = bridgeServiceFactory.newBridgeConnector(newConnectAddress);
                    connector.connect(newConnectAddress, httpHandler,
                            new IoSessionInitializer<ConnectFuture>() {
                        @Override
                        public void initializeSession(IoSession session, ConnectFuture future) {
                            SSE_SESSION_FACTORY_KEY.set(session, new Callable<SseSession>() {
                                @Override
                                public SseSession call() throws Exception {
                                    return sseSession;
                                }
                            });
                        }
                    }).addListener(new ReconnectListener(sseSession));
                }
                break;
            default:
                reconnectOrClose(sseSession);
                break;
            }
        }

        @Override
        protected void doExceptionCaught(HttpSession session, Throwable cause) throws Exception {
            if (logger.isDebugEnabled()) {
                String message = format("Error on SSE connection attempt: %s", cause);
                if (logger.isTraceEnabled()) {
                    // note: still debug level, but with extra detail about the exception
                    logger.debug(message, cause);
                } else {
                    logger.debug(message);
                }
            }
            session.close(true);

            // Fail the pending connect future, if this happened mid-connect.
            ConnectFuture sseConnectFuture = SSE_CONNECT_FUTURE_KEY.remove(session);
            if (sseConnectFuture != null) {
                sseConnectFuture.setException(cause);
            }
        }
    };

    // Scheduled task that re-attempts the HTTP connect after the retry delay.
    private class ReconnectCommand implements Runnable {

        private final SseSession sseSession;

        public ReconnectCommand(SseSession sseSession) {
            this.sseSession = sseSession;
        }

        @Override
        public void run() {
            ResourceAddress connectAddress = sseSession.getRemoteAddress();
            ReconnectListener connectListener = new ReconnectListener(sseSession);
            BridgeConnector connector = bridgeServiceFactory.newBridgeConnector(connectAddress);
            connector.connect(connectAddress, httpHandler, null).addListener(connectListener);
        }
    }

    // On reconnect success, re-binds the existing bridge session to the new
    // HTTP session; on failure, falls back to the retry/close logic.
    private final class ReconnectListener implements IoFutureListener<ConnectFuture> {

        private final SseSession sseSession;

        private ReconnectListener(SseSession sseSession) {
            this.sseSession = sseSession;
        }

        @Override
        public void operationComplete(ConnectFuture future) {
            if (future.isConnected()) {
                IoSession session = future.getSession();
                session.setAttribute(SSE_SESSION_KEY, sseSession);
                logger.debug("Reconnected: {}", sseSession.getRemoteAddress());
            } else {
                reconnectOrClose(sseSession);
            }
        }
    }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.job.entries.evalfilesmetrics; import org.pentaho.di.job.entry.validator.AbstractFileValidator; import org.pentaho.di.job.entry.validator.AndValidator; import org.pentaho.di.job.entry.validator.JobEntryValidatorUtils; import java.io.IOException; import java.math.BigDecimal; import java.util.Iterator; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.vfs2.AllFileSelector; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSelectInfo; import org.apache.commons.vfs2.FileType; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.Result; import org.pentaho.di.core.ResultFile; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.variables.VariableSpace; import 
org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.Job; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entries.simpleeval.JobEntrySimpleEval; import org.pentaho.di.job.entry.JobEntryBase; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.job.entry.validator.ValidatorContext; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; /** * This defines a 'evaluate files metrics' job entry. * * @author Samatar Hassan * @since 26-02-2010 */ public class JobEntryEvalFilesMetrics extends JobEntryBase implements Cloneable, JobEntryInterface { private static Class<?> PKG = JobEntryEvalFilesMetrics.class; // for i18n purposes, needed by Translator2!! public static final BigDecimal ONE = new BigDecimal( 1 ); public static final String[] IncludeSubFoldersDesc = new String[] { BaseMessages.getString( PKG, "System.Combo.No" ), BaseMessages.getString( PKG, "System.Combo.Yes" ) }; public static final String[] IncludeSubFoldersCodes = new String[] { "N", "Y" }; private static final String YES = "Y"; private static final String NO = "N"; public static final String[] scaleDesc = new String[] { BaseMessages.getString( PKG, "JobEvalFilesMetrics.Bytes.Label" ), BaseMessages.getString( PKG, "JobEvalFilesMetrics.KBytes.Label" ), BaseMessages.getString( PKG, "JobEvalFilesMetrics.MBytes.Label" ), BaseMessages.getString( PKG, "JobEvalFilesMetrics.GBytes.Label" ) }; public static final String[] scaleCodes = new String[] { "bytes", "kbytes", "mbytes", "gbytes" }; public static final int SCALE_BYTES = 0; public static final int SCALE_KBYTES = 1; public static final int SCALE_MBYTES = 2; public static final int SCALE_GBYTES = 3; public int scale; public static final String[] SourceFilesDesc = new String[] { BaseMessages.getString( PKG, 
"JobEvalFilesMetrics.SourceFiles.Files.Label" ), BaseMessages.getString( PKG, "JobEvalFilesMetrics.SourceFiles.FilenamesResult.Label" ), BaseMessages.getString( PKG, "JobEvalFilesMetrics.SourceFiles.PreviousResult.Label" ), }; public static final String[] SourceFilesCodes = new String[] { "files", "filenamesresult", "previousresult" }; public static final int SOURCE_FILES_FILES = 0; public static final int SOURCE_FILES_FILENAMES_RESULT = 1; public static final int SOURCE_FILES_PREVIOUS_RESULT = 2; public int sourceFiles; public static final String[] EvaluationTypeDesc = new String[] { BaseMessages.getString( PKG, "JobEvalFilesMetrics.EvaluationType.Size.Label" ), BaseMessages.getString( PKG, "JobEvalFilesMetrics.EvaluationType.Count.Label" ), }; public static final String[] EvaluationTypeCodes = new String[] { "size", "count", }; public static final int EVALUATE_TYPE_SIZE = 0; public static final int EVALUATE_TYPE_COUNT = 1; public int evaluationType; private String comparevalue; private String minvalue; private String maxvalue; private int successConditionType; private String resultFilenamesWildcard; public boolean arg_from_previous; private String[] sourceFileFolder; private String[] sourceWildcard; private String[] sourceIncludeSubfolders; private BigDecimal evaluationValue; private BigDecimal filesCount; private long nrErrors; private String ResultFieldFile; private String ResultFieldWildcard; private String ResultFieldIncludesubFolders; private BigDecimal compareValue; private BigDecimal minValue; private BigDecimal maxValue; public JobEntryEvalFilesMetrics( String n ) { super( n, "" ); sourceFileFolder = null; sourceWildcard = null; sourceIncludeSubfolders = null; scale = SCALE_BYTES; sourceFiles = SOURCE_FILES_FILES; evaluationType = EVALUATE_TYPE_SIZE; successConditionType = JobEntrySimpleEval.SUCCESS_NUMBER_CONDITION_GREATER; resultFilenamesWildcard = null; ResultFieldFile = null; ResultFieldWildcard = null; ResultFieldIncludesubFolders = null; } public 
JobEntryEvalFilesMetrics() { this( "" ); } public void allocate( int nrFields ) { sourceFileFolder = new String[nrFields]; sourceWildcard = new String[nrFields]; sourceIncludeSubfolders = new String[nrFields]; } public Object clone() { JobEntryEvalFilesMetrics je = (JobEntryEvalFilesMetrics) super.clone(); if ( sourceFileFolder != null ) { int nrFields = sourceFileFolder.length; je.allocate( nrFields ); System.arraycopy( sourceFileFolder, 0, je.sourceFileFolder, 0, nrFields ); System.arraycopy( sourceWildcard, 0, je.sourceWildcard, 0, nrFields ); System.arraycopy( sourceIncludeSubfolders, 0, je.sourceIncludeSubfolders, 0, nrFields ); } return je; } public String getXML() { StringBuilder retval = new StringBuilder( 300 ); retval.append( super.getXML() ); retval.append( " " ).append( XMLHandler.addTagValue( "result_filenames_wildcard", resultFilenamesWildcard ) ); retval.append( " " ).append( XMLHandler.addTagValue( "Result_field_file", ResultFieldFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "Result_field_wildcard", ResultFieldWildcard ) ); retval.append( " " ).append( XMLHandler.addTagValue( "Result_field_includesubfolders", ResultFieldIncludesubFolders ) ); retval.append( " <fields>" ).append( Const.CR ); if ( sourceFileFolder != null ) { for ( int i = 0; i < sourceFileFolder.length; i++ ) { retval.append( " <field>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "source_filefolder", sourceFileFolder[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "wildcard", sourceWildcard[i] ) ); retval .append( " " ).append( XMLHandler.addTagValue( "include_subFolders", sourceIncludeSubfolders[i] ) ); retval.append( " </field>" ).append( Const.CR ); } } retval.append( " </fields>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "comparevalue", comparevalue ) ); retval.append( " " ).append( XMLHandler.addTagValue( "minvalue", minvalue ) ); retval.append( " " ).append( XMLHandler.addTagValue( 
"maxvalue", maxvalue ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "successnumbercondition", JobEntrySimpleEval
    .getSuccessNumberConditionCode( successConditionType ) ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "source_files", getSourceFilesCode( sourceFiles ) ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "evaluation_type", getEvaluationTypeCode( evaluationType ) ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "scale", getScaleCode( scale ) ) );
  return retval.toString();
}

/**
 * Maps an include-subfolders description to its persisted code.
 * Null or unrecognized input falls back to the first code.
 */
public static String getIncludeSubFolders( String tt ) {
  if ( tt == null ) {
    return IncludeSubFoldersCodes[0];
  }
  if ( tt.equals( IncludeSubFoldersDesc[1] ) ) {
    return IncludeSubFoldersCodes[1];
  } else {
    return IncludeSubFoldersCodes[0];
  }
}

/**
 * Maps a persisted include-subfolders code back to its localized description.
 * Null or unrecognized input falls back to the first description.
 */
public static String getIncludeSubFoldersDesc( String tt ) {
  if ( tt == null ) {
    return IncludeSubFoldersDesc[0];
  }
  if ( tt.equals( IncludeSubFoldersCodes[1] ) ) {
    return IncludeSubFoldersDesc[1];
  } else {
    return IncludeSubFoldersDesc[0];
  }
}

/** Restores this entry's settings from the job XML. */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    Node fields = XMLHandler.getSubNode( entrynode, "fields" );

    // How many field arguments?
    int nrFields = XMLHandler.countNodes( fields, "field" );
    allocate( nrFields );

    // Read them all...
for ( int i = 0; i < nrFields; i++ ) {
  Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
  sourceFileFolder[i] = XMLHandler.getTagValue( fnode, "source_filefolder" );
  sourceWildcard[i] = XMLHandler.getTagValue( fnode, "wildcard" );
  sourceIncludeSubfolders[i] = XMLHandler.getTagValue( fnode, "include_subFolders" );
}
resultFilenamesWildcard = XMLHandler.getTagValue( entrynode, "result_filenames_wildcard" );
// NOTE(review): getXML() writes these three tags as "Result_field_*" (capital R) but they are
// read back here in lowercase — confirm XMLHandler.getTagValue() matches tags case-insensitively,
// otherwise these settings never survive a save/load round trip.
ResultFieldFile = XMLHandler.getTagValue( entrynode, "result_field_file" );
ResultFieldWildcard = XMLHandler.getTagValue( entrynode, "result_field_wildcard" );
ResultFieldIncludesubFolders = XMLHandler.getTagValue( entrynode, "result_field_includesubfolders" );
comparevalue = XMLHandler.getTagValue( entrynode, "comparevalue" );
minvalue = XMLHandler.getTagValue( entrynode, "minvalue" );
maxvalue = XMLHandler.getTagValue( entrynode, "maxvalue" );
// Missing tags fall back to "" and are mapped to the default constant by the *ByCode helpers.
successConditionType =
  JobEntrySimpleEval.getSuccessNumberConditionByCode( Const.NVL( XMLHandler.getTagValue(
    entrynode, "successnumbercondition" ), "" ) );
sourceFiles = getSourceFilesByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "source_files" ), "" ) );
evaluationType =
  getEvaluationTypeByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "evaluation_type" ), "" ) );
scale = getScaleByCode( Const.NVL( XMLHandler.getTagValue( entrynode, "scale" ), "" ) );
} catch ( KettleXMLException xe ) {
  throw new KettleXMLException(
    BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.Exception.UnableLoadXML" ), xe );
}
}

/** Restores this entry's settings from the repository. */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" );
    allocate( argnr );

    // Read them all...
for ( int a = 0; a < argnr; a++ ) {
  sourceFileFolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" );
  sourceWildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" );
  sourceIncludeSubfolders[a] = rep.getJobEntryAttributeString( id_jobentry, a, "include_subFolders" );
}
resultFilenamesWildcard = rep.getJobEntryAttributeString( id_jobentry, "result_filenames_wildcard" );
ResultFieldFile = rep.getJobEntryAttributeString( id_jobentry, "result_field_file" );
// "result_field_wild" (not "...wildcard") is the attribute name saveRep() writes; keep them in sync.
ResultFieldWildcard = rep.getJobEntryAttributeString( id_jobentry, "result_field_wild" );
ResultFieldIncludesubFolders = rep.getJobEntryAttributeString( id_jobentry, "result_field_includesubfolders" );
comparevalue = rep.getJobEntryAttributeString( id_jobentry, "comparevalue" );
minvalue = rep.getJobEntryAttributeString( id_jobentry, "minvalue" );
maxvalue = rep.getJobEntryAttributeString( id_jobentry, "maxvalue" );
// Missing attributes fall back to "" and are mapped to defaults by the *ByCode helpers.
successConditionType =
  JobEntrySimpleEval.getSuccessNumberConditionByCode( Const.NVL( rep.getJobEntryAttributeString(
    id_jobentry, "successnumbercondition" ), "" ) );
sourceFiles = getSourceFilesByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "source_files" ), "" ) );
evaluationType = getEvaluationTypeByCode( Const
  .NVL( rep.getJobEntryAttributeString( id_jobentry, "evaluation_type" ), "" ) );
scale = getScaleByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "scale" ), "" ) );
} catch ( KettleException dbe ) {
  throw new KettleException(
    BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.Exception.UnableLoadRep" ) + id_jobentry, dbe );
}
}

/** Persists this entry's settings to the repository. */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    // save the arguments...
if ( sourceFileFolder != null ) {
  for ( int i = 0; i < sourceFileFolder.length; i++ ) {
    rep.saveJobEntryAttribute( id_job, getObjectId(), i, "source_filefolder", sourceFileFolder[i] );
    rep.saveJobEntryAttribute( id_job, getObjectId(), i, "wildcard", sourceWildcard[i] );
    rep.saveJobEntryAttribute( id_job, getObjectId(), i, "include_subFolders", sourceIncludeSubfolders[i] );
  }
}
rep.saveJobEntryAttribute( id_job, getObjectId(), "result_filenames_wildcard", resultFilenamesWildcard );
rep.saveJobEntryAttribute( id_job, getObjectId(), "result_field_file", ResultFieldFile );
// Attribute name is "result_field_wild" — loadRep() reads the same abbreviated key.
rep.saveJobEntryAttribute( id_job, getObjectId(), "result_field_wild", ResultFieldWildcard );
rep.saveJobEntryAttribute( id_job, getObjectId(), "result_field_includesubfolders", ResultFieldIncludesubFolders );
rep.saveJobEntryAttribute( id_job, getObjectId(), "comparevalue", comparevalue );
rep.saveJobEntryAttribute( id_job, getObjectId(), "minvalue", minvalue );
rep.saveJobEntryAttribute( id_job, getObjectId(), "maxvalue", maxvalue );
rep.saveJobEntryAttribute( id_job, getObjectId(), "successnumbercondition", JobEntrySimpleEval
  .getSuccessNumberConditionCode( successConditionType ) );
rep.saveJobEntryAttribute( id_job, getObjectId(), "scale", getScaleCode( scale ) );
rep.saveJobEntryAttribute( id_job, getObjectId(), "source_files", getSourceFilesCode( sourceFiles ) );
rep
  .saveJobEntryAttribute( id_job, getObjectId(), "evaluation_type", getEvaluationTypeCode( evaluationType ) );
} catch ( KettleDatabaseException dbe ) {
  throw new KettleException(
    BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.Exception.UnableSaveRep" ) + id_job, dbe );
}
}

/**
 * Runs the evaluation: accumulates file size or count from the configured source,
 * then sets the job result according to the success condition.
 */
public Result execute( Result previousResult, int nr ) throws KettleException {
  Result result = previousResult;
  // Assume failure until the success condition has been evaluated.
  result.setNrErrors( 1 );
  result.setResult( false );
  List<RowMetaAndData> rows = result.getRows();
  RowMetaAndData resultRow = null;
  try {
    initMetrics();
  } catch ( Exception e ) {
    logError( BaseMessages.getString( PKG,
"JobEvalFilesMetrics.Error.Init", e.toString() ) );
  return result;
}

// Get source and destination files, also wildcard
String[] vsourcefilefolder = sourceFileFolder;
String[] vwildcard = sourceWildcard;
String[] vincludeSubFolders = sourceIncludeSubfolders;

switch ( getSourceFiles() ) {
  case SOURCE_FILES_PREVIOUS_RESULT:
    // Filenames are retrieved from previous result rows
    String realResultFieldFile = environmentSubstitute( getResultFieldFile() );
    String realResultFieldWildcard = environmentSubstitute( getResultFieldWildcard() );
    String realResultFieldIncluseSubfolders = environmentSubstitute( getResultFieldIncludeSubfolders() );
    int indexOfResultFieldFile = -1;
    // The file field is mandatory in this mode.
    if ( Const.isEmpty( realResultFieldFile ) ) {
      logError( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.ResultFieldsFileMissing" ) );
      return result;
    }
    int indexOfResultFieldWildcard = -1;
    int indexOfResultFieldIncludeSubfolders = -1;

    // as such we must get rows
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.ArgFromPrevious.Found", ( rows != null ?
        rows.size() : 0 ) + "" ) );
    }
    if ( rows != null && rows.size() > 0 ) {
      // Resolve the field indexes once, against the first row's metadata.
      RowMetaAndData firstRow = rows.get( 0 );
      indexOfResultFieldFile = firstRow.getRowMeta().indexOfValue( realResultFieldFile );
      if ( indexOfResultFieldFile == -1 ) {
        logError( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.CanNotFindField", realResultFieldFile ) );
        return result;
      }
      if ( !Const.isEmpty( realResultFieldWildcard ) ) {
        indexOfResultFieldWildcard = firstRow.getRowMeta().indexOfValue( realResultFieldWildcard );
        if ( indexOfResultFieldWildcard == -1 ) {
          logError( BaseMessages.getString(
            PKG, "JobEvalFilesMetrics.Error.CanNotFindField", realResultFieldWildcard ) );
          return result;
        }
      }
      if ( !Const.isEmpty( realResultFieldIncluseSubfolders ) ) {
        indexOfResultFieldIncludeSubfolders =
          firstRow.getRowMeta().indexOfValue( realResultFieldIncluseSubfolders );
        if ( indexOfResultFieldIncludeSubfolders == -1 ) {
          logError( BaseMessages.getString(
            PKG, "JobEvalFilesMetrics.Error.CanNotFindField", realResultFieldIncluseSubfolders ) );
          return result;
        }
      }
      // Process every previous-result row until done or the parent job is stopped.
      for ( int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++ ) {
        resultRow = rows.get( iteration );

        // Get source and destination file names, also wildcard
        String vsourcefilefolder_previous = resultRow.getString( indexOfResultFieldFile, null );
        String vwildcard_previous = null;
        if ( indexOfResultFieldWildcard > -1 ) {
          vwildcard_previous = resultRow.getString( indexOfResultFieldWildcard, null );
        }
        String vincludeSubFolders_previous = NO;
        if ( indexOfResultFieldIncludeSubfolders > -1 ) {
          vincludeSubFolders_previous = resultRow.getString( indexOfResultFieldIncludeSubfolders, NO );
        }

        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "JobEvalFilesMetrics.Log.ProcessingRow", vsourcefilefolder_previous, vwildcard_previous ) );
        }

        ProcessFileFolder(
          vsourcefilefolder_previous, vwildcard_previous, vincludeSubFolders_previous, parentJob, result );
      }
    }
    break;
  case SOURCE_FILES_FILENAMES_RESULT:
    // Evaluate the files attached to the previous result, optionally filtered by a regex.
    List<ResultFile> resultFiles = result.getResultFilesList();
    if ( log.isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.ResultFilenames.Found", ( resultFiles
        != null ? resultFiles.size() : 0 ) + "" ) );
    }

    if ( resultFiles != null && resultFiles.size() > 0 ) {
      // Let's check wildcard
      Pattern pattern = null;
      String realPattern = environmentSubstitute( getResultFilenamesWildcard() );
      if ( !Const.isEmpty( realPattern ) ) {
        pattern = Pattern.compile( realPattern );
      }

      for ( Iterator<ResultFile> it = resultFiles.iterator(); it.hasNext() && !parentJob.isStopped(); ) {
        ResultFile resultFile = it.next();
        FileObject file = resultFile.getFile();
        try {
          if ( file != null && file.exists() ) {
            boolean getIt = true;
            if ( pattern != null ) {
              Matcher matcher = pattern.matcher( file.getName().getBaseName() );
              getIt = matcher.matches();
            }
            if ( getIt ) {
              getFileSize( file, result, parentJob );
            }
          }
        } catch ( Exception e ) {
          incrementErrors();
          logError( BaseMessages.getString(
            PKG, "JobEvalFilesMetrics.Error.GettingFileFromResultFilenames", file.toString(), e.toString() ) );
        } finally {
          if ( file != null ) {
            try {
              file.close();
            } catch ( Exception e ) { /* Ignore */ }
          }
        }
      }
    }
    break;
  default:
    // static files/folders from grid entered by user
    if ( vsourcefilefolder != null && vsourcefilefolder.length > 0 ) {
      for ( int i = 0; i < vsourcefilefolder.length && !parentJob.isStopped(); i++ ) {
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString(
            PKG, "JobEvalFilesMetrics.Log.ProcessingRow", vsourcefilefolder[i], vwildcard[i] ) );
        }
        ProcessFileFolder( vsourcefilefolder[i], vwildcard[i], vincludeSubFolders[i], parentJob, result );
      }
    } else {
      logError( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.FilesGridEmpty" ) );
      return result;
    }
    break;
}

// Evaluate the accumulated metric against the configured success condition.
result.setResult( isSuccess() );
result.setNrErrors( getNrError() );
displayResults();
return result;
}

/** Logs a summary of files count, evaluated value and error count (detailed log level only). */
private void displayResults() {
  if ( isDetailed() ) {
    logDetailed(
"=======================================" );
    logDetailed( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.Info.FilesCount", String
      .valueOf( getFilesCount() ) ) );
    if ( evaluationType == EVALUATE_TYPE_SIZE ) {
      logDetailed( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.Info.FilesSize", String
        .valueOf( getEvaluationValue() ) ) );
    }
    logDetailed( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.Info.NrErrors", String
      .valueOf( getNrError() ) ) );
    logDetailed( "=======================================" );
  }
}

// Number of errors accumulated while processing files.
private long getNrError() {
  return this.nrErrors;
}

// Accumulated metric: total size in bytes, or file count, depending on the evaluation type.
private BigDecimal getEvaluationValue() {
  return this.evaluationValue;
}

// Number of files inspected so far.
private BigDecimal getFilesCount() {
  return this.filesCount;
}

public int getSuccessConditionType() {
  return successConditionType;
}

public void setSuccessConditionType( int successConditionType ) {
  this.successConditionType = successConditionType;
}

/**
 * Compares the accumulated metric (evaluationValue) against the configured
 * compare value — or the [min, max] interval for the BETWEEN condition.
 *
 * @return true when the success condition holds
 */
private boolean isSuccess() {
  boolean retval = false;

  switch ( successConditionType ) {
    case JobEntrySimpleEval.SUCCESS_NUMBER_CONDITION_EQUAL: // equal
      if ( isDebug() ) {
        logDebug( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.CompareWithValue", String
          .valueOf( evaluationValue ), String.valueOf( compareValue ) ) );
      }
      retval = ( getEvaluationValue().compareTo( compareValue ) == 0 );
      break;
    case JobEntrySimpleEval.SUCCESS_NUMBER_CONDITION_DIFFERENT: // different
      if ( isDebug() ) {
        logDebug( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.CompareWithValue", String
          .valueOf( evaluationValue ), String.valueOf( compareValue ) ) );
      }
      retval = ( getEvaluationValue().compareTo( compareValue ) != 0 );
      break;
    case JobEntrySimpleEval.SUCCESS_NUMBER_CONDITION_SMALLER: // smaller
      if ( isDebug() ) {
        logDebug( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.CompareWithValue", String
          .valueOf( evaluationValue ), String.valueOf( compareValue ) ) );
      }
      retval = ( getEvaluationValue().compareTo( compareValue ) < 0 );
      break;
    case JobEntrySimpleEval.SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL: // smaller or equal
      if ( isDebug() ) {
        logDebug( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.CompareWithValue", String
          .valueOf( evaluationValue ), String.valueOf( compareValue ) ) );
      }
      retval = ( getEvaluationValue().compareTo( compareValue ) <= 0 );
      break;
    case JobEntrySimpleEval.SUCCESS_NUMBER_CONDITION_GREATER: // greater
      if ( isDebug() ) {
        logDebug( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.CompareWithValue", String
          .valueOf( evaluationValue ), String.valueOf( compareValue ) ) );
      }
      retval = ( getEvaluationValue().compareTo( compareValue ) > 0 );
      break;
    case JobEntrySimpleEval.SUCCESS_NUMBER_CONDITION_GREATER_EQUAL: // greater or equal
      if ( isDebug() ) {
        logDebug( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.CompareWithValue", String
          .valueOf( evaluationValue ), String.valueOf( compareValue ) ) );
      }
      retval = ( getEvaluationValue().compareTo( compareValue ) >= 0 );
      break;
    case JobEntrySimpleEval.SUCCESS_NUMBER_CONDITION_BETWEEN: // between min and max
      if ( isDebug() ) {
        logDebug( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.CompareWithValues", String
          .valueOf( evaluationValue ), String.valueOf( minValue ), String.valueOf( maxValue ) ) );
      }
      retval = ( getEvaluationValue().compareTo( minValue ) >= 0 && getEvaluationValue().compareTo( maxValue ) <= 0 );
      break;
    default:
      break;
  }
  return retval;
}

/**
 * Resets the runtime accumulators and parses the configured compare/min/max strings.
 * For size evaluations the thresholds are scaled to bytes according to the selected scale.
 *
 * @throws Exception when a configured value cannot be parsed as a number
 */
private void initMetrics() throws Exception {
  evaluationValue = new BigDecimal( 0 );
  filesCount = new BigDecimal( 0 );
  nrErrors = 0;

  // BETWEEN uses the min/max pair; every other condition uses the single compare value.
  if ( successConditionType == JobEntrySimpleEval.SUCCESS_NUMBER_CONDITION_BETWEEN ) {
    minValue = new BigDecimal( environmentSubstitute( getMinValue() ) );
    maxValue = new BigDecimal( environmentSubstitute( getMaxValue() ) );
  } else {
    compareValue = new BigDecimal( environmentSubstitute( getCompareValue() ) );
  }

  if ( evaluationType == EVALUATE_TYPE_SIZE ) {
    // Convert the user-entered thresholds to bytes.
    int multyply = 1;
    switch ( getScale() ) {
      case SCALE_KBYTES:
        multyply = 1024;
        break;
      // Switch continues on the next source line (SCALE_MBYTES / SCALE_GBYTES).
      case
SCALE_MBYTES:
        multyply = 1048576;
        break;
      case SCALE_GBYTES:
        multyply = 1073741824;
        break;
      default:
        break;
    }
    if ( successConditionType == JobEntrySimpleEval.SUCCESS_NUMBER_CONDITION_BETWEEN ) {
      minValue = minValue.multiply( BigDecimal.valueOf( multyply ) );
      maxValue = maxValue.multiply( BigDecimal.valueOf( multyply ) );
    } else {
      compareValue = compareValue.multiply( BigDecimal.valueOf( multyply ) );
    }
  }
  arg_from_previous = ( getSourceFiles() == SOURCE_FILES_PREVIOUS_RESULT );
}

private void incrementErrors() {
  nrErrors++;
}

public int getSourceFiles() {
  return this.sourceFiles;
}

private void incrementFilesCount() {
  filesCount = filesCount.add( ONE );
}

public String[] getSourceFileFolder() {
  return sourceFileFolder;
}

public void setSourceFileFolder( String[] sourceFileFolder ) {
  this.sourceFileFolder = sourceFileFolder;
}

public String[] getSourceWildcard() {
  return sourceWildcard;
}

public void setSourceWildcard( String[] sourceWildcard ) {
  this.sourceWildcard = sourceWildcard;
}

public String[] getSourceIncludeSubfolders() {
  return sourceIncludeSubfolders;
}

public void setSourceIncludeSubfolders( String[] sourceIncludeSubfolders ) {
  this.sourceIncludeSubfolders = sourceIncludeSubfolders;
}

public void setSourceFiles( int sourceFiles ) {
  this.sourceFiles = sourceFiles;
}

public String getResultFieldFile() {
  return this.ResultFieldFile;
}

public void setResultFieldFile( String field ) {
  this.ResultFieldFile = field;
}

public String getResultFieldWildcard() {
  return this.ResultFieldWildcard;
}

public void setResultFieldWildcard( String field ) {
  this.ResultFieldWildcard = field;
}

public String getResultFieldIncludeSubfolders() {
  return this.ResultFieldIncludesubFolders;
}

public void setResultFieldIncludeSubfolders( String field ) {
  this.ResultFieldIncludesubFolders = field;
}

/**
 * Evaluates one file-or-folder entry: a plain file is measured directly, a folder
 * is scanned (optionally recursively) for files matching the wildcard.
 * Errors are counted via incrementErrors(); nothing is thrown to the caller.
 */
private void ProcessFileFolder( String sourcefilefoldername, String wildcard, String includeSubfolders,
  Job parentJob, Result result ) {
  FileObject sourcefilefolder = null;
  FileObject
CurrentFile = null;

  // Get real source file and wildcard
  String realSourceFilefoldername = environmentSubstitute( sourcefilefoldername );
  if ( Const.isEmpty( realSourceFilefoldername ) ) {
    // Filename is empty!
    logError( BaseMessages.getString( PKG, "JobEvalFilesMetrics.log.FileFolderEmpty" ) );
    incrementErrors();
    return;
  }
  String realWildcard = environmentSubstitute( wildcard );
  final boolean include_subfolders = YES.equalsIgnoreCase( includeSubfolders );

  try {
    sourcefilefolder = KettleVFS.getFileObject( realSourceFilefoldername );

    if ( sourcefilefolder.exists() ) {
      // File exists
      if ( isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.FileExists", sourcefilefolder
          .toString() ) );
      }
      if ( sourcefilefolder.getType() == FileType.FILE ) {
        // We deals here with a file: add its metric directly.
        getFileSize( sourcefilefolder, result, parentJob );
      } else if ( sourcefilefolder.getType() == FileType.FOLDER ) {
        // We have a folder: fetch candidate files (descend only when include_subfolders is set).
        FileObject[] fileObjects = sourcefilefolder.findFiles( new AllFileSelector() {
          public boolean traverseDescendents( FileSelectInfo info ) {
            // Depth 0 is the folder itself; deeper levels only when subfolders are included.
            return info.getDepth() == 0 || include_subfolders;
          }

          public boolean includeFile( FileSelectInfo info ) {
            FileObject fileObject = info.getFile();
            try {
              if ( fileObject == null ) {
                return false;
              }
              if ( fileObject.getType() != FileType.FILE ) {
                return false;
              }
            } catch ( Exception ex ) {
              // Upon error don't process the file.
              return false;
            } finally {
              if ( fileObject != null ) {
                try {
                  fileObject.close();
                } catch ( IOException ex ) { /* Ignore */ }
              }
            }
            return true;
          }
        } );

        if ( fileObjects != null ) {
          for ( int j = 0; j < fileObjects.length && !parentJob.isStopped(); j++ ) {
            // Fetch files in list one after one ...
            CurrentFile = fileObjects[j];

            if ( !CurrentFile.getParent().toString().equals( sourcefilefolder.toString() ) ) {
              // Not in the Base Folder..Only if include sub folders
              if ( include_subfolders ) {
                if ( GetFileWildcard( CurrentFile.getName().getBaseName(), realWildcard ) ) {
                  getFileSize( CurrentFile, result, parentJob );
                }
              }
            } else {
              // In the base folder
              if ( GetFileWildcard( CurrentFile.getName().getBaseName(), realWildcard ) ) {
                getFileSize( CurrentFile, result, parentJob );
              }
            }
          }
        }
      } else {
        incrementErrors();
        logError( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.UnknowFileFormat", sourcefilefolder
          .toString() ) );
      }
    } else {
      incrementErrors();
      logError( BaseMessages.getString(
        PKG, "JobEvalFilesMetrics.Error.SourceFileNotExists", realSourceFilefoldername ) );
    }
  } catch ( Exception e ) {
    incrementErrors();
    // NOTE(review): realSourceFilefoldername is already a String, so .toString() is redundant.
    logError( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.Exception.Processing",
      realSourceFilefoldername.toString(), e.getMessage() ) );
  } finally {
    if ( sourcefilefolder != null ) {
      try {
        sourcefilefolder.close();
      } catch ( IOException ex ) { /* Ignore */ }
    }
    if ( CurrentFile != null ) {
      try {
        CurrentFile.close();
      } catch ( IOException ex ) { /* Ignore */ }
    }
  }
}

/**
 * Adds one file to the metrics: its size in bytes for a size evaluation,
 * or 1 for a count evaluation. Errors are counted, not thrown.
 */
private void getFileSize( FileObject file, Result result, Job parentJob ) {
  try {
    incrementFilesCount();
    if ( isDetailed() ) {
      logDetailed( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.GetFile", file.toString(), String
        .valueOf( getFilesCount() ) ) );
    }
    switch ( evaluationType ) {
      case EVALUATE_TYPE_SIZE:
        BigDecimal fileSize = BigDecimal.valueOf( file.getContent().getSize() );
        evaluationValue = evaluationValue.add( fileSize );
        if ( isDebug() ) {
          logDebug( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Log.AddedFileSize", String
            .valueOf( fileSize ), file.toString() ) );
        }
        break;
      default:
        // Count evaluation: each file contributes 1.
        evaluationValue = evaluationValue.add( ONE );
        break;
    }
  } catch ( Exception e ) {
    incrementErrors();
    logError( BaseMessages.getString( PKG, "JobEvalFilesMetrics.Error.GettingFileSize",
file.toString(), e.toString() ) );
  }
}

/**
 * Tells whether the selected file name matches the wildcard (regular expression).
 * An empty or null wildcard matches everything.
 *
 * @param selectedfile the base file name to test
 * @param wildcard the regular expression, may be null/empty
 * @return true if the selectedfile matches the wildcard
 */
private boolean GetFileWildcard( String selectedfile, String wildcard ) {
  Pattern pattern = null;
  boolean getIt = true;

  if ( !Const.isEmpty( wildcard ) ) {
    pattern = Pattern.compile( wildcard );
    // First see if the file matches the regular expression!
    if ( pattern != null ) {
      Matcher matcher = pattern.matcher( selectedfile );
      getIt = matcher.matches();
    }
  }
  return getIt;
}

public void setMinValue( String minvalue ) {
  this.minvalue = minvalue;
}

public String getMinValue() {
  return minvalue;
}

public void setCompareValue( String comparevalue ) {
  this.comparevalue = comparevalue;
}

public String getCompareValue() {
  return comparevalue;
}

public void setResultFilenamesWildcard( String resultwildcard ) {
  this.resultFilenamesWildcard = resultwildcard;
}

public String getResultFilenamesWildcard() {
  return this.resultFilenamesWildcard;
}

public void setMaxValue( String maxvalue ) {
  this.maxvalue = maxvalue;
}

public String getMaxValue() {
  return maxvalue;
}

/** Maps a localized scale description to its index; falls back to code matching, then 0. */
public static int getScaleByDesc( String tt ) {
  if ( tt == null ) {
    return 0;
  }
  for ( int i = 0; i < scaleDesc.length; i++ ) {
    if ( scaleDesc[i].equalsIgnoreCase( tt ) ) {
      return i;
    }
  }
  // If this fails, try to match using the code.
  return getScaleByCode( tt );
}

/** Maps a localized source-files description to its index; falls back to code matching, then 0. */
public static int getSourceFilesByDesc( String tt ) {
  if ( tt == null ) {
    return 0;
  }
  for ( int i = 0; i < SourceFilesDesc.length; i++ ) {
    if ( SourceFilesDesc[i].equalsIgnoreCase( tt ) ) {
      return i;
    }
  }
  // If this fails, try to match using the code.
  return getSourceFilesByCode( tt );
}

/** Maps a localized evaluation-type description to its index; falls back to code matching, then 0. */
public static int getEvaluationTypeByDesc( String tt ) {
  if ( tt == null ) {
    return 0;
  }
  for ( int i = 0; i < EvaluationTypeDesc.length; i++ ) {
    if ( EvaluationTypeDesc[i].equalsIgnoreCase( tt ) ) {
      return i;
    }
  }
  // If this fails, try to match using the code.
  return getEvaluationTypeByCode( tt );
}

// Persisted code -> index; unknown codes fall back to 0 (the default).
private static int getScaleByCode( String tt ) {
  if ( tt == null ) {
    return 0;
  }
  for ( int i = 0; i < scaleCodes.length; i++ ) {
    if ( scaleCodes[i].equalsIgnoreCase( tt ) ) {
      return i;
    }
  }
  return 0;
}

// Persisted code -> index; unknown codes fall back to 0 (the default).
private static int getSourceFilesByCode( String tt ) {
  if ( tt == null ) {
    return 0;
  }
  for ( int i = 0; i < SourceFilesCodes.length; i++ ) {
    if ( SourceFilesCodes[i].equalsIgnoreCase( tt ) ) {
      return i;
    }
  }
  return 0;
}

// Persisted code -> index; unknown codes fall back to 0 (the default).
private static int getEvaluationTypeByCode( String tt ) {
  if ( tt == null ) {
    return 0;
  }
  for ( int i = 0; i < EvaluationTypeCodes.length; i++ ) {
    if ( EvaluationTypeCodes[i].equalsIgnoreCase( tt ) ) {
      return i;
    }
  }
  return 0;
}

// Index -> localized description; out-of-range indexes fall back to entry 0.
public static String getScaleDesc( int i ) {
  if ( i < 0 || i >= scaleDesc.length ) {
    return scaleDesc[0];
  }
  return scaleDesc[i];
}

public static String getEvaluationTypeDesc( int i ) {
  if ( i < 0 || i >= EvaluationTypeDesc.length ) {
    return EvaluationTypeDesc[0];
  }
  return EvaluationTypeDesc[i];
}

public static String getSourceFilesDesc( int i ) {
  if ( i < 0 || i >= SourceFilesDesc.length ) {
    return SourceFilesDesc[0];
  }
  return SourceFilesDesc[i];
}

// Index -> persisted code; out-of-range indexes fall back to entry 0.
public static String getScaleCode( int i ) {
  if ( i < 0 || i >= scaleCodes.length ) {
    return scaleCodes[0];
  }
  return scaleCodes[i];
}

public static String getSourceFilesCode( int i ) {
  if ( i < 0 || i >= SourceFilesCodes.length ) {
    return SourceFilesCodes[0];
  }
  return SourceFilesCodes[i];
}

public static String getEvaluationTypeCode( int i ) {
  if ( i < 0 || i >= EvaluationTypeCodes.length ) {
    return EvaluationTypeCodes[0];
  }
  return EvaluationTypeCodes[i];
}

public int getScale() {
  return this.scale;
}

/** Design-time check: verifies that the file/folder arguments are set and exist. */
public void check( List<CheckResultInterface> remarks,
JobMeta jobMeta, VariableSpace space, Repository repository, IMetaStore metaStore ) {
  boolean res =
    JobEntryValidatorUtils.andValidator().validate(
      this, "arguments", remarks, AndValidator.putValidators( JobEntryValidatorUtils.notNullValidator() ) );

  if ( res == false ) {
    return;
  }

  // Each grid row must be non-null and point at an existing file.
  ValidatorContext ctx = new ValidatorContext();
  AbstractFileValidator.putVariableSpace( ctx, getVariables() );
  AndValidator.putValidators(
    ctx, JobEntryValidatorUtils.notNullValidator(), JobEntryValidatorUtils.fileExistsValidator() );

  for ( int i = 0; i < sourceFileFolder.length; i++ ) {
    JobEntryValidatorUtils.andValidator().validate( this, "arguments[" + i + "]", remarks, ctx );
  }
}

/** This entry evaluates to a true/false result (can be used as a condition). */
public boolean evaluates() {
  return true;
}
}
/*
 * Copyright (c) 1997, 2010, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package com.sun.codemodel.internal.util;

import java.nio.CharBuffer;
import java.nio.charset.CoderResult;

/**
 * Utility class for dealing with UTF-16 surrogates: classification of code
 * units, pairing/unpairing of UCS-4 characters, and stateful parse/generate
 * helpers for charset coder implementations.
 *
 * @author Mark Reinhold
 * @version 1.11, 03/01/23
 */
class Surrogate {

    // Non-instantiable: static utilities plus the Parser/Generator helpers.
    private Surrogate() { }

    // UTF-16 surrogate-character ranges
    //
    public static final char MIN_HIGH = '\uD800';
    public static final char MAX_HIGH = '\uDBFF';
    public static final char MIN_LOW = '\uDC00';
    public static final char MAX_LOW = '\uDFFF';
    public static final char MIN = MIN_HIGH;
    public static final char MAX = MAX_LOW;

    // Range of UCS-4 values that need surrogates in UTF-16
    //
    public static final int UCS4_MIN = 0x10000;
    public static final int UCS4_MAX = (1 << 20) + UCS4_MIN - 1;

    /**
     * Tells whether or not the given UTF-16 value is a high surrogate.
     */
    public static boolean isHigh(int c) {
        return (MIN_HIGH <= c) && (c <= MAX_HIGH);
    }

    /**
     * Tells whether or not the given UTF-16 value is a low surrogate.
     */
    public static boolean isLow(int c) {
        return (MIN_LOW <= c) && (c <= MAX_LOW);
    }

    /**
     * Tells whether or not the given UTF-16 value is a surrogate character,
     * either high or low.
     */
    public static boolean is(int c) {
        return (MIN <= c) && (c <= MAX);
    }

    /**
     * Tells whether or not the given UCS-4 character must be represented as a
     * surrogate pair in UTF-16.
     */
    public static boolean neededFor(int uc) {
        return (uc >= UCS4_MIN) && (uc <= UCS4_MAX);
    }

    /**
     * Returns the high UTF-16 surrogate for the given UCS-4 character
     * (top 10 bits of the offset from UCS4_MIN).
     */
    public static char high(int uc) {
        return (char)(0xd800 | (((uc - UCS4_MIN) >> 10) & 0x3ff));
    }

    /**
     * Returns the low UTF-16 surrogate for the given UCS-4 character
     * (bottom 10 bits of the offset from UCS4_MIN).
     */
    public static char low(int uc) {
        return (char)(0xdc00 | ((uc - UCS4_MIN) & 0x3ff));
    }

    /**
     * Converts the given surrogate pair into a 32-bit UCS-4 character.
     */
    public static int toUCS4(char c, char d) {
        return (((c & 0x3ff) << 10) | (d & 0x3ff)) + 0x10000;
    }

    /**
     * Surrogate parsing support. Charset implementations may use instances of
     * this class to handle the details of parsing UTF-16 surrogate pairs.
     */
    public static class Parser {

        public Parser() { }

        private int character;          // UCS-4
        private CoderResult error = CoderResult.UNDERFLOW;
        private boolean isPair;

        /**
         * Returns the UCS-4 character previously parsed.
         */
        public int character() {
            return character;
        }

        /**
         * Tells whether or not the previously-parsed UCS-4 character was
         * originally represented by a surrogate pair.
         */
        public boolean isPair() {
            return isPair;
        }

        /**
         * Returns the number of UTF-16 characters consumed by the previous
         * parse.
         */
        public int increment() {
            return isPair ? 2 : 1;
        }

        /**
         * If the previous parse operation detected an error, return the object
         * describing that error.
         */
        public CoderResult error() {
            return error;
        }

        /**
         * Returns an unmappable-input result object, with the appropriate
         * input length, for the previously-parsed character.
         */
        public CoderResult unmappableResult() {
            return CoderResult.unmappableForLength(isPair ? 2 : 1);
        }

        /**
         * Parses a UCS-4 character from the given source buffer, handling
         * surrogates.
         *
         * @param c The first character
         * @param in The source buffer, from which one more character
         * will be consumed if c is a high surrogate
         *
         * @return Either a parsed UCS-4 character, in which case the isPair()
         * and increment() methods will return meaningful values, or
         * -1, in which case error() will return a descriptive result
         * object
         */
        public int parse(char c, CharBuffer in) {
            if (isHigh(c)) {
                // High surrogate: need the next unit to complete the pair.
                if (!in.hasRemaining()) {
                    error = CoderResult.UNDERFLOW;
                    return -1;
                }
                char d = in.get();
                if (isLow(d)) {
                    character = toUCS4(c, d);
                    isPair = true;
                    error = null;
                    return character;
                }
                // High surrogate not followed by a low surrogate: malformed.
                error = CoderResult.malformedForLength(1);
                return -1;
            }
            if (isLow(c)) {
                // Unpaired low surrogate: malformed.
                error = CoderResult.malformedForLength(1);
                return -1;
            }
            character = c;
            isPair = false;
            error = null;
            return character;
        }

        /**
         * Parses a UCS-4 character from the given source buffer, handling
         * surrogates.
         *
         * @param c The first character
         * @param ia The input array, from which one more character
         * will be consumed if c is a high surrogate
         * @param ip The input index
         * @param il The input limit
         *
         * @return Either a parsed UCS-4 character, in which case the isPair()
         * and increment() methods will return meaningful values, or
         * -1, in which case error() will return a descriptive result
         * object
         */
        public int parse(char c, char[] ia, int ip, int il) {
            if (isHigh(c)) {
                // High surrogate: need the next array element to complete the pair.
                if (il - ip < 2) {
                    error = CoderResult.UNDERFLOW;
                    return -1;
                }
                char d = ia[ip + 1];
                if (isLow(d)) {
                    character = toUCS4(c, d);
                    isPair = true;
                    error = null;
                    return character;
                }
                error = CoderResult.malformedForLength(1);
                return -1;
            }
            if (isLow(c)) {
                error = CoderResult.malformedForLength(1);
                return -1;
            }
            character = c;
            isPair = false;
            error = null;
            return character;
        }

    }

    /**
     * Surrogate generation support. Charset implementations may use instances
     * of this class to handle the details of generating UTF-16 surrogate
     * pairs.
     */
    public static class Generator {

        public Generator() { }

        private CoderResult error = CoderResult.OVERFLOW;

        /**
         * If the previous generation operation detected an error, return the
         * object describing that error.
         */
        public CoderResult error() {
            return error;
        }

        /**
         * Generates one or two UTF-16 characters to represent the given UCS-4
         * character.
         *
         * @param uc The UCS-4 character
         * @param len The number of input bytes from which the UCS-4 value
         * was constructed (used when creating result objects)
         * @param dst The destination buffer, to which one or two UTF-16
         * characters will be written
         *
         * @return Either a positive count of the number of UTF-16 characters
         * written to the destination buffer, or -1, in which case
         * error() will return a descriptive result object
         */
        public int generate(int uc, int len, CharBuffer dst) {
            if (uc <= 0xffff) {
                // BMP value — but a bare surrogate code point is malformed.
                if (is(uc)) {
                    error = CoderResult.malformedForLength(len);
                    return -1;
                }
                if (dst.remaining() < 1) {
                    error = CoderResult.OVERFLOW;
                    return -1;
                }
                dst.put((char)uc);
                error = null;
                return 1;
            }
            if (uc < UCS4_MIN) {
                error = CoderResult.malformedForLength(len);
                return -1;
            }
            if (uc <= UCS4_MAX) {
                // Supplementary character: emit the surrogate pair.
                if (dst.remaining() < 2) {
                    error = CoderResult.OVERFLOW;
                    return -1;
                }
                dst.put(high(uc));
                dst.put(low(uc));
                error = null;
                return 2;
            }
            // Beyond U+10FFFF: not representable in UTF-16.
            error = CoderResult.unmappableForLength(len);
            return -1;
        }

        /**
         * Generates one or two UTF-16 characters to represent the given UCS-4
         * character.
         *
         * @param uc The UCS-4 character
         * @param len The number of input bytes from which the UCS-4 value
         * was constructed (used when creating result objects)
         * @param da The destination array, to which one or two UTF-16
         * characters will be written
         * @param dp The destination position
         * @param dl The destination limit
         *
         * @return Either a positive count of the number of UTF-16 characters
         * written to the destination buffer, or -1, in which case
         * error() will return a descriptive result object
         */
        public int generate(int uc, int len, char[] da, int dp, int dl) {
            if (uc <= 0xffff) {
                if (is(uc)) {
                    error = CoderResult.malformedForLength(len);
                    return -1;
                }
                if (dl - dp < 1) {
                    error = CoderResult.OVERFLOW;
                    return -1;
                }
                da[dp] = (char)uc;
                error = null;
                return 1;
            }
            if (uc < UCS4_MIN) {
                error = CoderResult.malformedForLength(len);
                return -1;
            }
            if (uc <= UCS4_MAX) {
                if (dl - dp < 2) {
                    error = CoderResult.OVERFLOW;
                    return -1;
                }
                da[dp] = high(uc);
                da[dp + 1] = low(uc);
                error = null;
                return 2;
            }
            error = CoderResult.unmappableForLength(len);
            return -1;
        }

    }

}
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.storage.v2.stub; import static com.google.storage.v2.StorageClient.ListBucketsPagedResponse; import static com.google.storage.v2.StorageClient.ListHmacKeysPagedResponse; import static com.google.storage.v2.StorageClient.ListNotificationsPagedResponse; import static com.google.storage.v2.StorageClient.ListObjectsPagedResponse; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.ClientStreamingCallable; import com.google.api.gax.rpc.ServerStreamingCallable; import com.google.api.gax.rpc.UnaryCallable; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import com.google.storage.v2.Bucket; import com.google.storage.v2.ComposeObjectRequest; import com.google.storage.v2.CreateBucketRequest; import com.google.storage.v2.CreateHmacKeyRequest; import com.google.storage.v2.CreateHmacKeyResponse; import com.google.storage.v2.CreateNotificationRequest; import 
com.google.storage.v2.DeleteBucketRequest; import com.google.storage.v2.DeleteHmacKeyRequest; import com.google.storage.v2.DeleteNotificationRequest; import com.google.storage.v2.DeleteObjectRequest; import com.google.storage.v2.GetBucketRequest; import com.google.storage.v2.GetHmacKeyRequest; import com.google.storage.v2.GetNotificationRequest; import com.google.storage.v2.GetObjectRequest; import com.google.storage.v2.GetServiceAccountRequest; import com.google.storage.v2.HmacKeyMetadata; import com.google.storage.v2.ListBucketsRequest; import com.google.storage.v2.ListBucketsResponse; import com.google.storage.v2.ListHmacKeysRequest; import com.google.storage.v2.ListHmacKeysResponse; import com.google.storage.v2.ListNotificationsRequest; import com.google.storage.v2.ListNotificationsResponse; import com.google.storage.v2.ListObjectsRequest; import com.google.storage.v2.ListObjectsResponse; import com.google.storage.v2.LockBucketRetentionPolicyRequest; import com.google.storage.v2.Notification; import com.google.storage.v2.Object; import com.google.storage.v2.QueryWriteStatusRequest; import com.google.storage.v2.QueryWriteStatusResponse; import com.google.storage.v2.ReadObjectRequest; import com.google.storage.v2.ReadObjectResponse; import com.google.storage.v2.RewriteObjectRequest; import com.google.storage.v2.RewriteResponse; import com.google.storage.v2.ServiceAccount; import com.google.storage.v2.StartResumableWriteRequest; import com.google.storage.v2.StartResumableWriteResponse; import com.google.storage.v2.UpdateBucketRequest; import com.google.storage.v2.UpdateHmacKeyRequest; import com.google.storage.v2.UpdateObjectRequest; import com.google.storage.v2.WriteObjectRequest; import com.google.storage.v2.WriteObjectResponse; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. 
/**
 * gRPC stub implementation for the Storage service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@Generated("by gapic-generator-java")
public class GrpcStorageStub extends StorageStub {
  // One static, immutable gRPC MethodDescriptor per Storage RPC. Each pairs the
  // fully-qualified method name with protobuf marshallers for its request and
  // response types. All methods are UNARY except where noted below.
  private static final MethodDescriptor<DeleteBucketRequest, Empty> deleteBucketMethodDescriptor =
      MethodDescriptor.<DeleteBucketRequest, Empty>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.storage.v2.Storage/DeleteBucket")
          .setRequestMarshaller(ProtoUtils.marshaller(DeleteBucketRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<GetBucketRequest, Bucket> getBucketMethodDescriptor =
      MethodDescriptor.<GetBucketRequest, Bucket>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.storage.v2.Storage/GetBucket")
          .setRequestMarshaller(ProtoUtils.marshaller(GetBucketRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Bucket.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<CreateBucketRequest, Bucket> createBucketMethodDescriptor =
      MethodDescriptor.<CreateBucketRequest, Bucket>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.storage.v2.Storage/CreateBucket")
          .setRequestMarshaller(ProtoUtils.marshaller(CreateBucketRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Bucket.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<ListBucketsRequest, ListBucketsResponse>
      listBucketsMethodDescriptor =
          MethodDescriptor.<ListBucketsRequest, ListBucketsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/ListBuckets")
              .setRequestMarshaller(ProtoUtils.marshaller(ListBucketsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListBucketsResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<LockBucketRetentionPolicyRequest, Bucket>
      lockBucketRetentionPolicyMethodDescriptor =
          MethodDescriptor.<LockBucketRetentionPolicyRequest, Bucket>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/LockBucketRetentionPolicy")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(LockBucketRetentionPolicyRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Bucket.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<GetIamPolicyRequest, Policy> getIamPolicyMethodDescriptor =
      MethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.storage.v2.Storage/GetIamPolicy")
          .setRequestMarshaller(ProtoUtils.marshaller(GetIamPolicyRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<SetIamPolicyRequest, Policy> setIamPolicyMethodDescriptor =
      MethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.storage.v2.Storage/SetIamPolicy")
          .setRequestMarshaller(ProtoUtils.marshaller(SetIamPolicyRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsMethodDescriptor =
          MethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/TestIamPermissions")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<UpdateBucketRequest, Bucket> updateBucketMethodDescriptor =
      MethodDescriptor.<UpdateBucketRequest, Bucket>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.storage.v2.Storage/UpdateBucket")
          .setRequestMarshaller(ProtoUtils.marshaller(UpdateBucketRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Bucket.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<DeleteNotificationRequest, Empty>
      deleteNotificationMethodDescriptor =
          MethodDescriptor.<DeleteNotificationRequest, Empty>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/DeleteNotification")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteNotificationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<GetNotificationRequest, Notification>
      getNotificationMethodDescriptor =
          MethodDescriptor.<GetNotificationRequest, Notification>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/GetNotification")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetNotificationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Notification.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<CreateNotificationRequest, Notification>
      createNotificationMethodDescriptor =
          MethodDescriptor.<CreateNotificationRequest, Notification>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/CreateNotification")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CreateNotificationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Notification.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<ListNotificationsRequest, ListNotificationsResponse>
      listNotificationsMethodDescriptor =
          MethodDescriptor.<ListNotificationsRequest, ListNotificationsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/ListNotifications")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListNotificationsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListNotificationsResponse.getDefaultInstance()))
              .build();

  // Note: Object here is com.google.storage.v2.Object (see imports), not
  // java.lang.Object.
  private static final MethodDescriptor<ComposeObjectRequest, Object>
      composeObjectMethodDescriptor =
          MethodDescriptor.<ComposeObjectRequest, Object>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/ComposeObject")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ComposeObjectRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Object.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<DeleteObjectRequest, Empty> deleteObjectMethodDescriptor =
      MethodDescriptor.<DeleteObjectRequest, Empty>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.storage.v2.Storage/DeleteObject")
          .setRequestMarshaller(ProtoUtils.marshaller(DeleteObjectRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
          .build();

  private static final MethodDescriptor<GetObjectRequest, Object> getObjectMethodDescriptor =
      MethodDescriptor.<GetObjectRequest, Object>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.storage.v2.Storage/GetObject")
          .setRequestMarshaller(ProtoUtils.marshaller(GetObjectRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Object.getDefaultInstance()))
          .build();

  // ReadObject streams object data back to the client (SERVER_STREAMING).
  private static final MethodDescriptor<ReadObjectRequest, ReadObjectResponse>
      readObjectMethodDescriptor =
          MethodDescriptor.<ReadObjectRequest, ReadObjectResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.SERVER_STREAMING)
              .setFullMethodName("google.storage.v2.Storage/ReadObject")
              .setRequestMarshaller(ProtoUtils.marshaller(ReadObjectRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ReadObjectResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<UpdateObjectRequest, Object> updateObjectMethodDescriptor =
      MethodDescriptor.<UpdateObjectRequest, Object>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.storage.v2.Storage/UpdateObject")
          .setRequestMarshaller(ProtoUtils.marshaller(UpdateObjectRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Object.getDefaultInstance()))
          .build();

  // WriteObject streams request chunks from the client (CLIENT_STREAMING).
  private static final MethodDescriptor<WriteObjectRequest, WriteObjectResponse>
      writeObjectMethodDescriptor =
          MethodDescriptor.<WriteObjectRequest, WriteObjectResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.CLIENT_STREAMING)
              .setFullMethodName("google.storage.v2.Storage/WriteObject")
              .setRequestMarshaller(ProtoUtils.marshaller(WriteObjectRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(WriteObjectResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<ListObjectsRequest, ListObjectsResponse>
      listObjectsMethodDescriptor =
          MethodDescriptor.<ListObjectsRequest, ListObjectsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/ListObjects")
              .setRequestMarshaller(ProtoUtils.marshaller(ListObjectsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListObjectsResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<RewriteObjectRequest, RewriteResponse>
      rewriteObjectMethodDescriptor =
          MethodDescriptor.<RewriteObjectRequest, RewriteResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/RewriteObject")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(RewriteObjectRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(RewriteResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<StartResumableWriteRequest, StartResumableWriteResponse>
      startResumableWriteMethodDescriptor =
          MethodDescriptor.<StartResumableWriteRequest, StartResumableWriteResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/StartResumableWrite")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(StartResumableWriteRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(StartResumableWriteResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<QueryWriteStatusRequest, QueryWriteStatusResponse>
      queryWriteStatusMethodDescriptor =
          MethodDescriptor.<QueryWriteStatusRequest, QueryWriteStatusResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/QueryWriteStatus")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(QueryWriteStatusRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(QueryWriteStatusResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<GetServiceAccountRequest, ServiceAccount>
      getServiceAccountMethodDescriptor =
          MethodDescriptor.<GetServiceAccountRequest, ServiceAccount>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/GetServiceAccount")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetServiceAccountRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ServiceAccount.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<CreateHmacKeyRequest, CreateHmacKeyResponse>
      createHmacKeyMethodDescriptor =
          MethodDescriptor.<CreateHmacKeyRequest, CreateHmacKeyResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/CreateHmacKey")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CreateHmacKeyRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(CreateHmacKeyResponse.getDefaultInstance()))
              .build();
  private static final MethodDescriptor<DeleteHmacKeyRequest, Empty>
      deleteHmacKeyMethodDescriptor =
          MethodDescriptor.<DeleteHmacKeyRequest, Empty>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/DeleteHmacKey")
              .setRequestMarshaller(ProtoUtils.marshaller(DeleteHmacKeyRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<GetHmacKeyRequest, HmacKeyMetadata>
      getHmacKeyMethodDescriptor =
          MethodDescriptor.<GetHmacKeyRequest, HmacKeyMetadata>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/GetHmacKey")
              .setRequestMarshaller(ProtoUtils.marshaller(GetHmacKeyRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(HmacKeyMetadata.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<ListHmacKeysRequest, ListHmacKeysResponse>
      listHmacKeysMethodDescriptor =
          MethodDescriptor.<ListHmacKeysRequest, ListHmacKeysResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/ListHmacKeys")
              .setRequestMarshaller(ProtoUtils.marshaller(ListHmacKeysRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListHmacKeysResponse.getDefaultInstance()))
              .build();

  private static final MethodDescriptor<UpdateHmacKeyRequest, HmacKeyMetadata>
      updateHmacKeyMethodDescriptor =
          MethodDescriptor.<UpdateHmacKeyRequest, HmacKeyMetadata>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.storage.v2.Storage/UpdateHmacKey")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateHmacKeyRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(HmacKeyMetadata.getDefaultInstance()))
              .build();

  // Per-RPC callables, wired up once in the constructor. The *Paged variants
  // wrap the same transport settings with page-iteration support.
  private final UnaryCallable<DeleteBucketRequest, Empty> deleteBucketCallable;
  private final UnaryCallable<GetBucketRequest, Bucket> getBucketCallable;
  private final UnaryCallable<CreateBucketRequest, Bucket> createBucketCallable;
  private final UnaryCallable<ListBucketsRequest, ListBucketsResponse> listBucketsCallable;
  private final UnaryCallable<ListBucketsRequest, ListBucketsPagedResponse>
      listBucketsPagedCallable;
  private final UnaryCallable<LockBucketRetentionPolicyRequest, Bucket>
      lockBucketRetentionPolicyCallable;
  private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
  private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
  private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable;
  private final UnaryCallable<UpdateBucketRequest, Bucket> updateBucketCallable;
  private final UnaryCallable<DeleteNotificationRequest, Empty> deleteNotificationCallable;
  private final UnaryCallable<GetNotificationRequest, Notification> getNotificationCallable;
  private final UnaryCallable<CreateNotificationRequest, Notification> createNotificationCallable;
  private final UnaryCallable<ListNotificationsRequest, ListNotificationsResponse>
      listNotificationsCallable;
  private final UnaryCallable<ListNotificationsRequest, ListNotificationsPagedResponse>
      listNotificationsPagedCallable;
  private final UnaryCallable<ComposeObjectRequest, Object> composeObjectCallable;
  private final UnaryCallable<DeleteObjectRequest, Empty> deleteObjectCallable;
  private final UnaryCallable<GetObjectRequest, Object> getObjectCallable;
  // Streaming callables use dedicated gax types rather than UnaryCallable.
  private final ServerStreamingCallable<ReadObjectRequest, ReadObjectResponse> readObjectCallable;
  private final UnaryCallable<UpdateObjectRequest, Object> updateObjectCallable;
  private final ClientStreamingCallable<WriteObjectRequest, WriteObjectResponse>
      writeObjectCallable;
  private final UnaryCallable<ListObjectsRequest, ListObjectsResponse> listObjectsCallable;
  private final UnaryCallable<ListObjectsRequest, ListObjectsPagedResponse>
      listObjectsPagedCallable;
  private final UnaryCallable<RewriteObjectRequest, RewriteResponse> rewriteObjectCallable;
  private final UnaryCallable<StartResumableWriteRequest, StartResumableWriteResponse>
      startResumableWriteCallable;
  private final UnaryCallable<QueryWriteStatusRequest, QueryWriteStatusResponse>
      queryWriteStatusCallable;
  private final UnaryCallable<GetServiceAccountRequest, ServiceAccount> getServiceAccountCallable;
  private final UnaryCallable<CreateHmacKeyRequest, CreateHmacKeyResponse> createHmacKeyCallable;
  private final UnaryCallable<DeleteHmacKeyRequest, Empty> deleteHmacKeyCallable;
  private final UnaryCallable<GetHmacKeyRequest, HmacKeyMetadata> getHmacKeyCallable;
  private final UnaryCallable<ListHmacKeysRequest, ListHmacKeysResponse> listHmacKeysCallable;
  private final UnaryCallable<ListHmacKeysRequest, ListHmacKeysPagedResponse>
      listHmacKeysPagedCallable;
  private final UnaryCallable<UpdateHmacKeyRequest, HmacKeyMetadata> updateHmacKeyCallable;

  // Aggregates all background resources of the client context so the stub can
  // be shut down as a single unit.
  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  // Static factories: preferred entry points over the protected constructors.
  public static final GrpcStorageStub create(StorageStubSettings settings) throws IOException {
    return new GrpcStorageStub(settings, ClientContext.create(settings));
  }

  public static final GrpcStorageStub create(ClientContext clientContext) throws IOException {
    // Uses default settings when only a client context is supplied.
    return new GrpcStorageStub(StorageStubSettings.newBuilder().build(), clientContext);
  }

  public static final GrpcStorageStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcStorageStub(
        StorageStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcStorageStub, using the given settings. This is protected so that
   * it is easy to make a subclass, but otherwise, the static factory methods should be preferred.
   */
  protected GrpcStorageStub(StorageStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcStorageCallableFactory());
  }

  /**
   * Constructs an instance of GrpcStorageStub, using the given settings. This is protected so that
   * it is easy to make a subclass, but otherwise, the static factory methods should be preferred.
   */
  protected GrpcStorageStub(
      StorageStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // Step 1: bind each static method descriptor into per-RPC transport settings.
    GrpcCallSettings<DeleteBucketRequest, Empty> deleteBucketTransportSettings =
        GrpcCallSettings.<DeleteBucketRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteBucketMethodDescriptor)
            .build();
    GrpcCallSettings<GetBucketRequest, Bucket> getBucketTransportSettings =
        GrpcCallSettings.<GetBucketRequest, Bucket>newBuilder()
            .setMethodDescriptor(getBucketMethodDescriptor)
            .build();
    GrpcCallSettings<CreateBucketRequest, Bucket> createBucketTransportSettings =
        GrpcCallSettings.<CreateBucketRequest, Bucket>newBuilder()
            .setMethodDescriptor(createBucketMethodDescriptor)
            .build();
    GrpcCallSettings<ListBucketsRequest, ListBucketsResponse> listBucketsTransportSettings =
        GrpcCallSettings.<ListBucketsRequest, ListBucketsResponse>newBuilder()
            .setMethodDescriptor(listBucketsMethodDescriptor)
            .build();
    GrpcCallSettings<LockBucketRetentionPolicyRequest, Bucket>
        lockBucketRetentionPolicyTransportSettings =
            GrpcCallSettings.<LockBucketRetentionPolicyRequest, Bucket>newBuilder()
                .setMethodDescriptor(lockBucketRetentionPolicyMethodDescriptor)
                .build();
    GrpcCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
        GrpcCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(getIamPolicyMethodDescriptor)
            .build();
    GrpcCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
        GrpcCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(setIamPolicyMethodDescriptor)
            .build();
    GrpcCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsTransportSettings =
            GrpcCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
                .setMethodDescriptor(testIamPermissionsMethodDescriptor)
                .build();
    GrpcCallSettings<UpdateBucketRequest, Bucket> updateBucketTransportSettings =
        GrpcCallSettings.<UpdateBucketRequest, Bucket>newBuilder()
            .setMethodDescriptor(updateBucketMethodDescriptor)
            .build();
    GrpcCallSettings<DeleteNotificationRequest, Empty> deleteNotificationTransportSettings =
        GrpcCallSettings.<DeleteNotificationRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteNotificationMethodDescriptor)
            .build();
    GrpcCallSettings<GetNotificationRequest, Notification> getNotificationTransportSettings =
        GrpcCallSettings.<GetNotificationRequest, Notification>newBuilder()
            .setMethodDescriptor(getNotificationMethodDescriptor)
            .build();
    GrpcCallSettings<CreateNotificationRequest, Notification> createNotificationTransportSettings =
        GrpcCallSettings.<CreateNotificationRequest, Notification>newBuilder()
            .setMethodDescriptor(createNotificationMethodDescriptor)
            .build();
    GrpcCallSettings<ListNotificationsRequest, ListNotificationsResponse>
        listNotificationsTransportSettings =
            GrpcCallSettings.<ListNotificationsRequest, ListNotificationsResponse>newBuilder()
                .setMethodDescriptor(listNotificationsMethodDescriptor)
                .build();
    GrpcCallSettings<ComposeObjectRequest, Object> composeObjectTransportSettings =
        GrpcCallSettings.<ComposeObjectRequest, Object>newBuilder()
            .setMethodDescriptor(composeObjectMethodDescriptor)
            .build();
    GrpcCallSettings<DeleteObjectRequest, Empty> deleteObjectTransportSettings =
        GrpcCallSettings.<DeleteObjectRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteObjectMethodDescriptor)
            .build();
    GrpcCallSettings<GetObjectRequest, Object> getObjectTransportSettings =
        GrpcCallSettings.<GetObjectRequest, Object>newBuilder()
            .setMethodDescriptor(getObjectMethodDescriptor)
            .build();
    GrpcCallSettings<ReadObjectRequest, ReadObjectResponse> readObjectTransportSettings =
        GrpcCallSettings.<ReadObjectRequest, ReadObjectResponse>newBuilder()
            .setMethodDescriptor(readObjectMethodDescriptor)
            .build();
    GrpcCallSettings<UpdateObjectRequest, Object> updateObjectTransportSettings =
        GrpcCallSettings.<UpdateObjectRequest, Object>newBuilder()
            .setMethodDescriptor(updateObjectMethodDescriptor)
            .build();
    GrpcCallSettings<WriteObjectRequest, WriteObjectResponse> writeObjectTransportSettings =
        GrpcCallSettings.<WriteObjectRequest, WriteObjectResponse>newBuilder()
            .setMethodDescriptor(writeObjectMethodDescriptor)
            .build();
    GrpcCallSettings<ListObjectsRequest, ListObjectsResponse> listObjectsTransportSettings =
        GrpcCallSettings.<ListObjectsRequest, ListObjectsResponse>newBuilder()
            .setMethodDescriptor(listObjectsMethodDescriptor)
            .build();
    GrpcCallSettings<RewriteObjectRequest, RewriteResponse> rewriteObjectTransportSettings =
        GrpcCallSettings.<RewriteObjectRequest, RewriteResponse>newBuilder()
            .setMethodDescriptor(rewriteObjectMethodDescriptor)
            .build();
    GrpcCallSettings<StartResumableWriteRequest, StartResumableWriteResponse>
        startResumableWriteTransportSettings =
            GrpcCallSettings.<StartResumableWriteRequest, StartResumableWriteResponse>newBuilder()
                .setMethodDescriptor(startResumableWriteMethodDescriptor)
                .build();
    GrpcCallSettings<QueryWriteStatusRequest, QueryWriteStatusResponse>
        queryWriteStatusTransportSettings =
            GrpcCallSettings.<QueryWriteStatusRequest, QueryWriteStatusResponse>newBuilder()
                .setMethodDescriptor(queryWriteStatusMethodDescriptor)
                .build();
    GrpcCallSettings<GetServiceAccountRequest, ServiceAccount> getServiceAccountTransportSettings =
        GrpcCallSettings.<GetServiceAccountRequest, ServiceAccount>newBuilder()
            .setMethodDescriptor(getServiceAccountMethodDescriptor)
            .build();
    GrpcCallSettings<CreateHmacKeyRequest, CreateHmacKeyResponse> createHmacKeyTransportSettings =
        GrpcCallSettings.<CreateHmacKeyRequest, CreateHmacKeyResponse>newBuilder()
            .setMethodDescriptor(createHmacKeyMethodDescriptor)
            .build();
    GrpcCallSettings<DeleteHmacKeyRequest, Empty> deleteHmacKeyTransportSettings =
        GrpcCallSettings.<DeleteHmacKeyRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteHmacKeyMethodDescriptor)
            .build();
    GrpcCallSettings<GetHmacKeyRequest, HmacKeyMetadata> getHmacKeyTransportSettings =
        GrpcCallSettings.<GetHmacKeyRequest, HmacKeyMetadata>newBuilder()
            .setMethodDescriptor(getHmacKeyMethodDescriptor)
            .build();
    GrpcCallSettings<ListHmacKeysRequest, ListHmacKeysResponse> listHmacKeysTransportSettings =
        GrpcCallSettings.<ListHmacKeysRequest, ListHmacKeysResponse>newBuilder()
            .setMethodDescriptor(listHmacKeysMethodDescriptor)
            .build();
    GrpcCallSettings<UpdateHmacKeyRequest, HmacKeyMetadata> updateHmacKeyTransportSettings =
        GrpcCallSettings.<UpdateHmacKeyRequest, HmacKeyMetadata>newBuilder()
            .setMethodDescriptor(updateHmacKeyMethodDescriptor)
            .build();

    // Step 2: combine transport settings with the retry/timeout settings from
    // StorageStubSettings to build the final callables. Paged list methods get
    // both a plain and a paged callable over the same transport settings.
    this.deleteBucketCallable =
        callableFactory.createUnaryCallable(
            deleteBucketTransportSettings, settings.deleteBucketSettings(), clientContext);
    this.getBucketCallable =
        callableFactory.createUnaryCallable(
            getBucketTransportSettings, settings.getBucketSettings(), clientContext);
    this.createBucketCallable =
        callableFactory.createUnaryCallable(
            createBucketTransportSettings, settings.createBucketSettings(), clientContext);
    this.listBucketsCallable =
        callableFactory.createUnaryCallable(
            listBucketsTransportSettings, settings.listBucketsSettings(), clientContext);
    this.listBucketsPagedCallable =
        callableFactory.createPagedCallable(
            listBucketsTransportSettings, settings.listBucketsSettings(), clientContext);
    this.lockBucketRetentionPolicyCallable =
        callableFactory.createUnaryCallable(
            lockBucketRetentionPolicyTransportSettings,
            settings.lockBucketRetentionPolicySettings(),
            clientContext);
    this.getIamPolicyCallable =
        callableFactory.createUnaryCallable(
            getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
    this.setIamPolicyCallable =
        callableFactory.createUnaryCallable(
            setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
    this.testIamPermissionsCallable =
        callableFactory.createUnaryCallable(
            testIamPermissionsTransportSettings,
            settings.testIamPermissionsSettings(),
            clientContext);
    this.updateBucketCallable =
        callableFactory.createUnaryCallable(
            updateBucketTransportSettings, settings.updateBucketSettings(), clientContext);
    this.deleteNotificationCallable =
        callableFactory.createUnaryCallable(
            deleteNotificationTransportSettings,
            settings.deleteNotificationSettings(),
            clientContext);
    this.getNotificationCallable =
        callableFactory.createUnaryCallable(
            getNotificationTransportSettings, settings.getNotificationSettings(), clientContext);
    this.createNotificationCallable =
        callableFactory.createUnaryCallable(
            createNotificationTransportSettings,
            settings.createNotificationSettings(),
            clientContext);
    this.listNotificationsCallable =
        callableFactory.createUnaryCallable(
            listNotificationsTransportSettings,
            settings.listNotificationsSettings(),
            clientContext);
    this.listNotificationsPagedCallable =
        callableFactory.createPagedCallable(
            listNotificationsTransportSettings,
            settings.listNotificationsSettings(),
            clientContext);
    this.composeObjectCallable =
        callableFactory.createUnaryCallable(
            composeObjectTransportSettings, settings.composeObjectSettings(), clientContext);
    this.deleteObjectCallable =
        callableFactory.createUnaryCallable(
            deleteObjectTransportSettings, settings.deleteObjectSettings(), clientContext);
    this.getObjectCallable =
        callableFactory.createUnaryCallable(
            getObjectTransportSettings, settings.getObjectSettings(), clientContext);
    this.readObjectCallable =
        callableFactory.createServerStreamingCallable(
            readObjectTransportSettings, settings.readObjectSettings(), clientContext);
    this.updateObjectCallable =
        callableFactory.createUnaryCallable(
            updateObjectTransportSettings, settings.updateObjectSettings(), clientContext);
    this.writeObjectCallable =
        callableFactory.createClientStreamingCallable(
            writeObjectTransportSettings, settings.writeObjectSettings(), clientContext);
    this.listObjectsCallable =
        callableFactory.createUnaryCallable(
            listObjectsTransportSettings, settings.listObjectsSettings(), clientContext);
    this.listObjectsPagedCallable =
        callableFactory.createPagedCallable(
            listObjectsTransportSettings, settings.listObjectsSettings(), clientContext);
    this.rewriteObjectCallable =
        callableFactory.createUnaryCallable(
            rewriteObjectTransportSettings, settings.rewriteObjectSettings(), clientContext);
    this.startResumableWriteCallable =
        callableFactory.createUnaryCallable(
            startResumableWriteTransportSettings,
            settings.startResumableWriteSettings(),
            clientContext);
    this.queryWriteStatusCallable =
        callableFactory.createUnaryCallable(
            queryWriteStatusTransportSettings, settings.queryWriteStatusSettings(), clientContext);
    this.getServiceAccountCallable =
        callableFactory.createUnaryCallable(
            getServiceAccountTransportSettings,
            settings.getServiceAccountSettings(),
            clientContext);
    this.createHmacKeyCallable =
        callableFactory.createUnaryCallable(
            createHmacKeyTransportSettings, settings.createHmacKeySettings(), clientContext);
    this.deleteHmacKeyCallable =
        callableFactory.createUnaryCallable(
            deleteHmacKeyTransportSettings, settings.deleteHmacKeySettings(), clientContext);
    this.getHmacKeyCallable =
        callableFactory.createUnaryCallable(
            getHmacKeyTransportSettings, settings.getHmacKeySettings(), clientContext);
    this.listHmacKeysCallable =
        callableFactory.createUnaryCallable(
            listHmacKeysTransportSettings, settings.listHmacKeysSettings(), clientContext);
    this.listHmacKeysPagedCallable =
        callableFactory.createPagedCallable(
            listHmacKeysTransportSettings, settings.listHmacKeysSettings(), clientContext);
    this.updateHmacKeyCallable =
        callableFactory.createUnaryCallable(
            updateHmacKeyTransportSettings, settings.updateHmacKeySettings(), clientContext);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  // Accessors returning the callables wired in the constructor.
  @Override
  public UnaryCallable<DeleteBucketRequest, Empty> deleteBucketCallable() {
    return deleteBucketCallable;
  }

  @Override
  public UnaryCallable<GetBucketRequest, Bucket> getBucketCallable() {
    return getBucketCallable;
  }

  @Override
  public UnaryCallable<CreateBucketRequest, Bucket> createBucketCallable() {
    return createBucketCallable;
  }

  @Override
  public UnaryCallable<ListBucketsRequest, ListBucketsResponse> listBucketsCallable() {
    return listBucketsCallable;
  }

  @Override
  public UnaryCallable<ListBucketsRequest, ListBucketsPagedResponse> listBucketsPagedCallable() {
    return listBucketsPagedCallable;
  }

  @Override
  public UnaryCallable<LockBucketRetentionPolicyRequest, Bucket>
      lockBucketRetentionPolicyCallable() {
    return lockBucketRetentionPolicyCallable;
  }

  @Override
  public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
    return getIamPolicyCallable;
  }

  @Override
  public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
    return setIamPolicyCallable;
  }

  @Override
  public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable() {
    return testIamPermissionsCallable;
  }

  @Override
  public UnaryCallable<UpdateBucketRequest, Bucket> updateBucketCallable() {
    return updateBucketCallable;
  }

  @Override
  public UnaryCallable<DeleteNotificationRequest, Empty> deleteNotificationCallable() {
    return deleteNotificationCallable;
  }

  @Override
  public UnaryCallable<GetNotificationRequest, Notification> getNotificationCallable() {
    return getNotificationCallable;
  }

  @Override
  public UnaryCallable<CreateNotificationRequest, Notification> createNotificationCallable() {
    return createNotificationCallable;
  }

  @Override
  public UnaryCallable<ListNotificationsRequest, ListNotificationsResponse>
listNotificationsCallable() { return listNotificationsCallable; } @Override public UnaryCallable<ListNotificationsRequest, ListNotificationsPagedResponse> listNotificationsPagedCallable() { return listNotificationsPagedCallable; } @Override public UnaryCallable<ComposeObjectRequest, Object> composeObjectCallable() { return composeObjectCallable; } @Override public UnaryCallable<DeleteObjectRequest, Empty> deleteObjectCallable() { return deleteObjectCallable; } @Override public UnaryCallable<GetObjectRequest, Object> getObjectCallable() { return getObjectCallable; } @Override public ServerStreamingCallable<ReadObjectRequest, ReadObjectResponse> readObjectCallable() { return readObjectCallable; } @Override public UnaryCallable<UpdateObjectRequest, Object> updateObjectCallable() { return updateObjectCallable; } @Override public ClientStreamingCallable<WriteObjectRequest, WriteObjectResponse> writeObjectCallable() { return writeObjectCallable; } @Override public UnaryCallable<ListObjectsRequest, ListObjectsResponse> listObjectsCallable() { return listObjectsCallable; } @Override public UnaryCallable<ListObjectsRequest, ListObjectsPagedResponse> listObjectsPagedCallable() { return listObjectsPagedCallable; } @Override public UnaryCallable<RewriteObjectRequest, RewriteResponse> rewriteObjectCallable() { return rewriteObjectCallable; } @Override public UnaryCallable<StartResumableWriteRequest, StartResumableWriteResponse> startResumableWriteCallable() { return startResumableWriteCallable; } @Override public UnaryCallable<QueryWriteStatusRequest, QueryWriteStatusResponse> queryWriteStatusCallable() { return queryWriteStatusCallable; } @Override public UnaryCallable<GetServiceAccountRequest, ServiceAccount> getServiceAccountCallable() { return getServiceAccountCallable; } @Override public UnaryCallable<CreateHmacKeyRequest, CreateHmacKeyResponse> createHmacKeyCallable() { return createHmacKeyCallable; } @Override public UnaryCallable<DeleteHmacKeyRequest, Empty> 
deleteHmacKeyCallable() { return deleteHmacKeyCallable; } @Override public UnaryCallable<GetHmacKeyRequest, HmacKeyMetadata> getHmacKeyCallable() { return getHmacKeyCallable; } @Override public UnaryCallable<ListHmacKeysRequest, ListHmacKeysResponse> listHmacKeysCallable() { return listHmacKeysCallable; } @Override public UnaryCallable<ListHmacKeysRequest, ListHmacKeysPagedResponse> listHmacKeysPagedCallable() { return listHmacKeysPagedCallable; } @Override public UnaryCallable<UpdateHmacKeyRequest, HmacKeyMetadata> updateHmacKeyCallable() { return updateHmacKeyCallable; } @Override public final void close() { try { backgroundResources.close(); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new IllegalStateException("Failed to close resource", e); } } @Override public void shutdown() { backgroundResources.shutdown(); } @Override public boolean isShutdown() { return backgroundResources.isShutdown(); } @Override public boolean isTerminated() { return backgroundResources.isTerminated(); } @Override public void shutdownNow() { backgroundResources.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return backgroundResources.awaitTermination(duration, unit); } }
/* * Copyright 2014 The gRPC Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.grpc.internal; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.base.Splitter; import com.google.common.base.Stopwatch; import com.google.common.base.Supplier; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.grpc.CallOptions; import io.grpc.ClientStreamTracer; import io.grpc.ClientStreamTracer.StreamInfo; import io.grpc.InternalChannelz.SocketStats; import io.grpc.InternalLogId; import io.grpc.InternalMetadata; import io.grpc.InternalMetadata.TrustedAsciiMarshaller; import io.grpc.LoadBalancer.PickResult; import io.grpc.LoadBalancer.Subchannel; import io.grpc.Metadata; import io.grpc.MethodDescriptor; import io.grpc.ProxiedSocketAddress; import io.grpc.ProxyDetector; import io.grpc.Status; import io.grpc.internal.ClientStreamListener.RpcProgress; import io.grpc.internal.SharedResourceHolder.Resource; import io.grpc.internal.StreamListener.MessageProducer; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.HttpURLConnection; import 
java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;

/**
 * Common utilities for GRPC.
 */
public final class GrpcUtil {

  private static final Logger log = Logger.getLogger(GrpcUtil.class.getName());

  // Looked up by name rather than via StandardCharsets (a Java 7 API).
  public static final Charset US_ASCII = Charset.forName("US-ASCII");

  /**
   * {@link io.grpc.Metadata.Key} for the timeout header.
   */
  public static final Metadata.Key<Long> TIMEOUT_KEY =
      Metadata.Key.of(GrpcUtil.TIMEOUT, new TimeoutMarshaller());

  /**
   * {@link io.grpc.Metadata.Key} for the message encoding header.
   */
  public static final Metadata.Key<String> MESSAGE_ENCODING_KEY =
      Metadata.Key.of(GrpcUtil.MESSAGE_ENCODING, Metadata.ASCII_STRING_MARSHALLER);

  /**
   * {@link io.grpc.Metadata.Key} for the accepted message encodings header.
   */
  public static final Metadata.Key<byte[]> MESSAGE_ACCEPT_ENCODING_KEY =
      InternalMetadata.keyOf(GrpcUtil.MESSAGE_ACCEPT_ENCODING, new AcceptEncodingMarshaller());

  /**
   * {@link io.grpc.Metadata.Key} for the stream's content encoding header.
   */
  public static final Metadata.Key<String> CONTENT_ENCODING_KEY =
      Metadata.Key.of(GrpcUtil.CONTENT_ENCODING, Metadata.ASCII_STRING_MARSHALLER);

  /**
   * {@link io.grpc.Metadata.Key} for the stream's accepted content encoding header.
   */
  public static final Metadata.Key<byte[]> CONTENT_ACCEPT_ENCODING_KEY =
      InternalMetadata.keyOf(GrpcUtil.CONTENT_ACCEPT_ENCODING, new AcceptEncodingMarshaller());

  static final Metadata.Key<String> CONTENT_LENGTH_KEY =
      Metadata.Key.of("content-length", Metadata.ASCII_STRING_MARSHALLER);

  // Identity marshaller: accept-encoding values are already trusted ASCII bytes.
  private static final class AcceptEncodingMarshaller implements TrustedAsciiMarshaller<byte[]> {
    @Override
    public byte[] toAsciiString(byte[] value) {
      return value;
    }

    @Override
    public byte[] parseAsciiString(byte[] serialized) {
      return serialized;
    }
  }

  /**
   * {@link io.grpc.Metadata.Key} for the Content-Type request/response header.
   */
  public static final Metadata.Key<String> CONTENT_TYPE_KEY =
      Metadata.Key.of("content-type", Metadata.ASCII_STRING_MARSHALLER);

  /**
   * {@link io.grpc.Metadata.Key} for the Transfer encoding.
   */
  public static final Metadata.Key<String> TE_HEADER =
      Metadata.Key.of("te", Metadata.ASCII_STRING_MARSHALLER);

  /**
   * {@link io.grpc.Metadata.Key} for the User-Agent request header.
   */
  public static final Metadata.Key<String> USER_AGENT_KEY =
      Metadata.Key.of("user-agent", Metadata.ASCII_STRING_MARSHALLER);

  /**
   * The default port for plain-text connections.
   */
  public static final int DEFAULT_PORT_PLAINTEXT = 80;

  /**
   * The default port for SSL connections.
   */
  public static final int DEFAULT_PORT_SSL = 443;

  /**
   * Content-Type used for GRPC-over-HTTP/2.
   */
  public static final String CONTENT_TYPE_GRPC = "application/grpc";

  /**
   * The HTTP method used for GRPC requests.
   */
  public static final String HTTP_METHOD = "POST";

  /**
   * The TE (transport encoding) header for requests over HTTP/2.
   */
  public static final String TE_TRAILERS = "trailers";

  /**
   * The Timeout header name.
   */
  public static final String TIMEOUT = "grpc-timeout";

  /**
   * The message encoding (i.e. compression) that can be used in the stream.
   */
  public static final String MESSAGE_ENCODING = "grpc-encoding";

  /**
   * The accepted message encodings (i.e. compression) that can be used in the stream.
   */
  public static final String MESSAGE_ACCEPT_ENCODING = "grpc-accept-encoding";

  /**
   * The content-encoding used to compress the full gRPC stream.
   */
  public static final String CONTENT_ENCODING = "content-encoding";

  /**
   * The accepted content-encodings that can be used to compress the full gRPC stream.
   */
  public static final String CONTENT_ACCEPT_ENCODING = "accept-encoding";

  /**
   * The default maximum uncompressed size (in bytes) for inbound messages. Defaults to 4 MiB.
   */
  public static final int DEFAULT_MAX_MESSAGE_SIZE = 4 * 1024 * 1024;

  /**
   * The default maximum size (in bytes) for inbound header/trailer.
   */
  // Update documentation in public-facing Builders when changing this value.
  public static final int DEFAULT_MAX_HEADER_LIST_SIZE = 8192;

  // Comma-separated accept-encoding lists are split and trimmed with this splitter.
  public static final Splitter ACCEPT_ENCODING_SPLITTER = Splitter.on(',').trimResults();

  private static final String IMPLEMENTATION_VERSION = "1.44.0-SNAPSHOT"; // CURRENT_GRPC_VERSION

  /**
   * The default timeout in nanos for a keepalive ping request.
   */
  public static final long DEFAULT_KEEPALIVE_TIMEOUT_NANOS = TimeUnit.SECONDS.toNanos(20L);

  /**
   * The magic keepalive time value that disables client keepalive.
   */
  public static final long KEEPALIVE_TIME_NANOS_DISABLED = Long.MAX_VALUE;

  /**
   * The default delay in nanos for server keepalive.
   */
  public static final long DEFAULT_SERVER_KEEPALIVE_TIME_NANOS = TimeUnit.HOURS.toNanos(2L);

  /**
   * The default timeout in nanos for a server keepalive ping request.
   */
  public static final long DEFAULT_SERVER_KEEPALIVE_TIMEOUT_NANOS = TimeUnit.SECONDS.toNanos(20L);

  /**
   * The magic keepalive time value that disables keepalive.
   */
  public static final long SERVER_KEEPALIVE_TIME_NANOS_DISABLED = Long.MAX_VALUE;

  /**
   * The default proxy detector.
   */
  public static final ProxyDetector DEFAULT_PROXY_DETECTOR = new ProxyDetectorImpl();

  /**
   * A proxy detector that always claims no proxy is needed.
   */
  public static final ProxyDetector NOOP_PROXY_DETECTOR =
      new ProxyDetector() {
        @Nullable
        @Override
        public ProxiedSocketAddress proxyFor(SocketAddress targetServerAddress) {
          return null;
        }
      };

  /**
   * The very default load-balancing policy.
   */
  public static final String DEFAULT_LB_POLICY = "pick_first";

  /**
   * RPCs created on the Channel returned by {@link io.grpc.LoadBalancer.Subchannel#asChannel}
   * will have this option with value {@code true}. They will be treated differently from
   * the ones created by application.
   */
  public static final CallOptions.Key<Boolean> CALL_OPTIONS_RPC_OWNED_BY_BALANCER =
      CallOptions.Key.create("io.grpc.internal.CALL_OPTIONS_RPC_OWNED_BY_BALANCER");

  // Placeholder tracer slot that the load balancer later replaces; see
  // getClientStreamTracers() and getTransportFromPickResult().
  private static final ClientStreamTracer NOOP_TRACER = new ClientStreamTracer() {};

  /**
   * Returns true if an RPC with the given properties should be counted when calculating the
   * in-use state of a transport.
   */
  public static boolean shouldBeCountedForInUse(CallOptions callOptions) {
    return !Boolean.TRUE.equals(callOptions.getOption(CALL_OPTIONS_RPC_OWNED_BY_BALANCER));
  }

  /**
   * Maps HTTP error response status codes to transport codes, as defined in <a
   * href="https://github.com/grpc/grpc/blob/master/doc/http-grpc-status-mapping.md">
   * http-grpc-status-mapping.md</a>. Never returns a status for which {@code status.isOk()} is
   * {@code true}.
   */
  public static Status httpStatusToGrpcStatus(int httpStatusCode) {
    return httpStatusToGrpcCode(httpStatusCode).toStatus()
        .withDescription("HTTP status code " + httpStatusCode);
  }

  private static Status.Code httpStatusToGrpcCode(int httpStatusCode) {
    if (httpStatusCode >= 100 && httpStatusCode < 200) {
      // 1xx. These headers should have been ignored.
      return Status.Code.INTERNAL;
    }
    switch (httpStatusCode) {
      case HttpURLConnection.HTTP_BAD_REQUEST:  // 400
      case 431: // Request Header Fields Too Large
        // TODO(carl-mastrangelo): this should be added to the http-grpc-status-mapping.md doc.
        return Status.Code.INTERNAL;
      case HttpURLConnection.HTTP_UNAUTHORIZED:  // 401
        return Status.Code.UNAUTHENTICATED;
      case HttpURLConnection.HTTP_FORBIDDEN:  // 403
        return Status.Code.PERMISSION_DENIED;
      case HttpURLConnection.HTTP_NOT_FOUND:  // 404
        return Status.Code.UNIMPLEMENTED;
      case 429: // Too Many Requests
      case HttpURLConnection.HTTP_BAD_GATEWAY:  // 502
      case HttpURLConnection.HTTP_UNAVAILABLE:  // 503
      case HttpURLConnection.HTTP_GATEWAY_TIMEOUT:  // 504
        return Status.Code.UNAVAILABLE;
      default:
        return Status.Code.UNKNOWN;
    }
  }

  /**
   * All error codes identified by the HTTP/2 spec. Used in GOAWAY and RST_STREAM frames.
   */
  public enum Http2Error {
    /**
     * Servers implementing a graceful shutdown of the connection will send {@code GOAWAY} with
     * {@code NO_ERROR}. In this case it is important to indicate to the application that the
     * request should be retried (i.e. {@link Status#UNAVAILABLE}).
     */
    NO_ERROR(0x0, Status.UNAVAILABLE),
    PROTOCOL_ERROR(0x1, Status.INTERNAL),
    INTERNAL_ERROR(0x2, Status.INTERNAL),
    FLOW_CONTROL_ERROR(0x3, Status.INTERNAL),
    SETTINGS_TIMEOUT(0x4, Status.INTERNAL),
    STREAM_CLOSED(0x5, Status.INTERNAL),
    FRAME_SIZE_ERROR(0x6, Status.INTERNAL),
    REFUSED_STREAM(0x7, Status.UNAVAILABLE),
    CANCEL(0x8, Status.CANCELLED),
    COMPRESSION_ERROR(0x9, Status.INTERNAL),
    CONNECT_ERROR(0xA, Status.INTERNAL),
    ENHANCE_YOUR_CALM(0xB, Status.RESOURCE_EXHAUSTED.withDescription("Bandwidth exhausted")),
    INADEQUATE_SECURITY(0xC, Status.PERMISSION_DENIED.withDescription("Permission denied as "
        + "protocol is not secure enough to call")),
    HTTP_1_1_REQUIRED(0xD, Status.UNKNOWN);

    // Populate a mapping of code to enum value for quick look-up.
    private static final Http2Error[] codeMap = buildHttp2CodeMap();

    // Builds a dense array indexed by wire code; relies on values() being declared in
    // ascending code order so the last element has the largest code.
    private static Http2Error[] buildHttp2CodeMap() {
      Http2Error[] errors = Http2Error.values();
      int size = (int) errors[errors.length - 1].code() + 1;
      Http2Error[] http2CodeMap = new Http2Error[size];
      for (Http2Error error : errors) {
        int index = (int) error.code();
        http2CodeMap[index] = error;
      }
      return http2CodeMap;
    }

    private final int code;
    // Status is not guaranteed to be deeply immutable. Don't care though, since that's only true
    // when there are exceptions in the Status, which is not true here.
    @SuppressWarnings("ImmutableEnumChecker")
    private final Status status;

    Http2Error(int code, Status status) {
      this.code = code;
      String description = "HTTP/2 error code: " + this.name();
      if (status.getDescription() != null) {
        description += " (" + status.getDescription() + ")";
      }
      this.status = status.withDescription(description);
    }

    /**
     * Gets the code for this error used on the wire.
     */
    public long code() {
      return code;
    }

    /**
     * Gets the {@link Status} associated with this HTTP/2 code.
     */
    public Status status() {
      return status;
    }

    /**
     * Looks up the HTTP/2 error code enum value for the specified code.
     *
     * @param code an HTTP/2 error code value.
     * @return the HTTP/2 error code enum or {@code null} if not found.
     */
    public static Http2Error forCode(long code) {
      if (code >= codeMap.length || code < 0) {
        return null;
      }
      return codeMap[(int) code];
    }

    /**
     * Looks up the {@link Status} from the given HTTP/2 error code. This is preferred over {@code
     * forCode(code).status()}, to more easily conform to HTTP/2:
     *
     * <blockquote>Unknown or unsupported error codes MUST NOT trigger any special behavior.
     * These MAY be treated by an implementation as being equivalent to INTERNAL_ERROR.</blockquote>
     *
     * @param code the HTTP/2 error code.
     * @return a {@link Status} representing the given error.
     */
    public static Status statusForCode(long code) {
      Http2Error error = forCode(code);
      if (error == null) {
        // This "forgets" the message of INTERNAL_ERROR while keeping the same status code.
        Status.Code statusCode = INTERNAL_ERROR.status().getCode();
        return Status.fromCodeValue(statusCode.value())
            .withDescription("Unrecognized HTTP/2 error code: " + code);
      }

      return error.status();
    }
  }

  /**
   * Indicates whether or not the given value is a valid gRPC content-type.
   */
  public static boolean isGrpcContentType(String contentType) {
    if (contentType == null) {
      return false;
    }

    if (CONTENT_TYPE_GRPC.length() > contentType.length()) {
      return false;
    }

    contentType = contentType.toLowerCase();
    if (!contentType.startsWith(CONTENT_TYPE_GRPC)) {
      // Not a gRPC content-type.
      return false;
    }

    if (contentType.length() == CONTENT_TYPE_GRPC.length()) {
      // The strings match exactly.
      return true;
    }

    // The contentType matches, but is longer than the expected string.
    // We need to support variations on the content-type (e.g. +proto, +json) as defined by the
    // gRPC wire spec.
    char nextChar = contentType.charAt(CONTENT_TYPE_GRPC.length());
    return nextChar == '+' || nextChar == ';';
  }

  /**
   * Gets the User-Agent string for the gRPC transport.
   */
  public static String getGrpcUserAgent(
      String transportName, @Nullable String applicationUserAgent) {
    StringBuilder builder = new StringBuilder();
    if (applicationUserAgent != null) {
      builder.append(applicationUserAgent);
      builder.append(' ');
    }
    // Transport-specific suffix, e.g. "grpc-java-<transport>/<version>".
    builder.append("grpc-java-");
    builder.append(transportName);
    builder.append('/');
    builder.append(IMPLEMENTATION_VERSION);
    return builder.toString();
  }

  @Immutable
  public static final class GrpcBuildVersion {
    private final String userAgent;
    private final String implementationVersion;

    private GrpcBuildVersion(String userAgent, String implementationVersion) {
      this.userAgent = Preconditions.checkNotNull(userAgent, "userAgentName");
      this.implementationVersion =
          Preconditions.checkNotNull(implementationVersion, "implementationVersion");
    }

    public String getUserAgent() {
      return userAgent;
    }

    public String getImplementationVersion() {
      return implementationVersion;
    }

    @Override
    public String toString() {
      return userAgent + " " + implementationVersion;
    }
  }

  /**
   * Returns the build version of gRPC.
   */
  public static GrpcBuildVersion getGrpcBuildVersion() {
    return new GrpcBuildVersion("gRPC Java", IMPLEMENTATION_VERSION);
  }

  /**
   * Parse an authority into a URI for retrieving the host and port.
   */
  public static URI authorityToUri(String authority) {
    Preconditions.checkNotNull(authority, "authority");
    URI uri;
    try {
      // Authority-only URI; all other components are intentionally absent.
      uri = new URI(null, authority, null, null, null);
    } catch (URISyntaxException ex) {
      throw new IllegalArgumentException("Invalid authority: " + authority, ex);
    }
    return uri;
  }

  /**
   * Verify {@code authority} is valid for use with gRPC. The syntax must be valid and it must not
   * include userinfo.
   *
   * @return the {@code authority} provided
   */
  public static String checkAuthority(String authority) {
    URI uri = authorityToUri(authority);
    checkArgument(uri.getHost() != null, "No host in authority '%s'", authority);
    checkArgument(uri.getUserInfo() == null,
        "Userinfo must not be present on authority: '%s'", authority);
    return authority;
  }

  /**
   * Combine a host and port into an authority string.
   */
  // There is a copy of this method in io.grpc.Grpc
  public static String authorityFromHostAndPort(String host, int port) {
    try {
      return new URI(null, null, host, port, null, null, null).getAuthority();
    } catch (URISyntaxException ex) {
      throw new IllegalArgumentException("Invalid host or port: " + host + " " + port, ex);
    }
  }

  /**
   * Shared executor for channels.
   */
  public static final Resource<Executor> SHARED_CHANNEL_EXECUTOR =
      new Resource<Executor>() {
        private static final String NAME = "grpc-default-executor";

        @Override
        public Executor create() {
          // Daemon threads so the shared executor never keeps the JVM alive.
          return Executors.newCachedThreadPool(getThreadFactory(NAME + "-%d", true));
        }

        @Override
        public void close(Executor instance) {
          ((ExecutorService) instance).shutdown();
        }

        @Override
        public String toString() {
          return NAME;
        }
      };

  /**
   * Shared single-threaded executor for managing channel timers.
   */
  public static final Resource<ScheduledExecutorService> TIMER_SERVICE =
      new Resource<ScheduledExecutorService>() {
        @Override
        public ScheduledExecutorService create() {
          // We don't use newSingleThreadScheduledExecutor because it doesn't return a
          // ScheduledThreadPoolExecutor.
          ScheduledExecutorService service = Executors.newScheduledThreadPool(
              1, getThreadFactory("grpc-timer-%d", true));

          // If there are long timeouts that are cancelled, they will not actually be removed from
          // the executors queue. This forces immediate removal upon cancellation to avoid a
          // memory leak. Reflection is used because we cannot use methods added in Java 1.7. If
          // the method does not exist, we give up. Note that the method is not present in 1.6, but
          // _is_ present in the android standard library.
          try {
            Method method = service.getClass().getMethod("setRemoveOnCancelPolicy", boolean.class);
            method.invoke(service, true);
          } catch (NoSuchMethodException e) {
            // no op
          } catch (RuntimeException e) {
            throw e;
          } catch (Exception e) {
            throw new RuntimeException(e);
          }

          return Executors.unconfigurableScheduledExecutorService(service);
        }

        @Override
        public void close(ScheduledExecutorService instance) {
          instance.shutdown();
        }
      };

  /**
   * Get a {@link ThreadFactory} suitable for use in the current environment.
   * @param nameFormat to apply to threads created by the factory.
   * @param daemon {@code true} if the threads the factory creates are daemon threads, {@code false}
   *     otherwise.
   * @return a {@link ThreadFactory}.
   */
  public static ThreadFactory getThreadFactory(String nameFormat, boolean daemon) {
    return new ThreadFactoryBuilder()
        .setDaemon(daemon)
        .setNameFormat(nameFormat)
        .build();
  }

  /**
   * The factory of default Stopwatches.
   */
  public static final Supplier<Stopwatch> STOPWATCH_SUPPLIER = new Supplier<Stopwatch>() {
      @Override
      public Stopwatch get() {
        return Stopwatch.createUnstarted();
      }
    };

  /**
   * Returns the host via {@link InetSocketAddress#getHostString} if it is possible,
   * i.e. in jdk >= 7.
   * Otherwise, return it via {@link InetSocketAddress#getHostName} which may incur a DNS lookup.
   */
  public static String getHost(InetSocketAddress addr) {
    try {
      // Reflection keeps this compilable/runnable on pre-Java-7 runtimes.
      Method getHostStringMethod = InetSocketAddress.class.getMethod("getHostString");
      return (String) getHostStringMethod.invoke(addr);
    } catch (NoSuchMethodException e) {
      // noop
    } catch (IllegalAccessException e) {
      // noop
    } catch (InvocationTargetException e) {
      // noop
    }
    // Fallback; may trigger a DNS lookup.
    return addr.getHostName();
  }

  /**
   * Marshals a nanoseconds representation of the timeout to and from a string representation,
   * consisting of an ASCII decimal representation of a number with at most 8 digits, followed by a
   * unit:
   * n = nanoseconds
   * u = microseconds
   * m = milliseconds
   * S = seconds
   * M = minutes
   * H = hours
   *
   * <p>The representation is greedy with respect to precision. That is, 2 seconds will be
   * represented as `2000000u`.</p>
   *
   * <p>See <a href="https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests">the
   * request header definition</a></p>
   */
  @VisibleForTesting
  static class TimeoutMarshaller implements Metadata.AsciiMarshaller<Long> {

    @Override
    public String toAsciiString(Long timeoutNanos) {
      // Largest value representable in 8 ASCII digits; beyond it, fall to a coarser unit.
      long cutoff = 100000000;
      TimeUnit unit = TimeUnit.NANOSECONDS;
      if (timeoutNanos < 0) {
        throw new IllegalArgumentException("Timeout too small");
      } else if (timeoutNanos < cutoff) {
        return timeoutNanos + "n";
      } else if (timeoutNanos < cutoff * 1000L) {
        return unit.toMicros(timeoutNanos) + "u";
      } else if (timeoutNanos < cutoff * 1000L * 1000L) {
        return unit.toMillis(timeoutNanos) + "m";
      } else if (timeoutNanos < cutoff * 1000L * 1000L * 1000L) {
        return unit.toSeconds(timeoutNanos) + "S";
      } else if (timeoutNanos < cutoff * 1000L * 1000L * 1000L * 60L) {
        return unit.toMinutes(timeoutNanos) + "M";
      } else {
        return unit.toHours(timeoutNanos) + "H";
      }
    }

    @Override
    public Long parseAsciiString(String serialized) {
      checkArgument(serialized.length() > 0, "empty timeout");
      // 8 digits max plus the one-character unit suffix.
      checkArgument(serialized.length() <= 9, "bad timeout format");
      long value = Long.parseLong(serialized.substring(0, serialized.length() - 1));
      char unit = serialized.charAt(serialized.length() - 1);
      switch (unit) {
        case 'n':
          return value;
        case 'u':
          return TimeUnit.MICROSECONDS.toNanos(value);
        case 'm':
          return TimeUnit.MILLISECONDS.toNanos(value);
        case 'S':
          return TimeUnit.SECONDS.toNanos(value);
        case 'M':
          return TimeUnit.MINUTES.toNanos(value);
        case 'H':
          return TimeUnit.HOURS.toNanos(value);
        default:
          throw new IllegalArgumentException(String.format("Invalid timeout unit: %s", unit));
      }
    }
  }

  /**
   * Returns a transport out of a PickResult, or {@code null} if the result is "buffer".
   */
  @Nullable
  static ClientTransport getTransportFromPickResult(PickResult result, boolean isWaitForReady) {
    final ClientTransport transport;
    Subchannel subchannel = result.getSubchannel();
    if (subchannel != null) {
      transport = ((TransportProvider) subchannel.getInternalSubchannel()).obtainActiveTransport();
    } else {
      transport = null;
    }
    if (transport != null) {
      final ClientStreamTracer.Factory streamTracerFactory = result.getStreamTracerFactory();
      if (streamTracerFactory == null) {
        return transport;
      }
      // Wrap the transport so the balancer's tracer is installed into the reserved
      // last slot of the tracers array before the stream is created.
      return new ClientTransport() {
        @Override
        public ClientStream newStream(
            MethodDescriptor<?, ?> method, Metadata headers, CallOptions callOptions,
            ClientStreamTracer[] tracers) {
          StreamInfo info = StreamInfo.newBuilder().setCallOptions(callOptions).build();
          ClientStreamTracer streamTracer =
              streamTracerFactory.newClientStreamTracer(info, headers);
          checkState(tracers[tracers.length - 1] == NOOP_TRACER, "lb tracer already assigned");
          tracers[tracers.length - 1] = streamTracer;
          return transport.newStream(method, headers, callOptions, tracers);
        }

        @Override
        public void ping(PingCallback callback, Executor executor) {
          transport.ping(callback, executor);
        }

        @Override
        public InternalLogId getLogId() {
          return transport.getLogId();
        }

        @Override
        public ListenableFuture<SocketStats> getStats() {
          return transport.getStats();
        }
      };
    }
    if (!result.getStatus().isOk()) {
      if (result.isDrop()) {
        return new FailingClientTransport(result.getStatus(), RpcProgress.DROPPED);
      }
      if (!isWaitForReady) {
        return new FailingClientTransport(result.getStatus(), RpcProgress.PROCESSED);
      }
    }
    return null;
  }

  /** Gets stream tracers based on CallOptions. */
  public static ClientStreamTracer[] getClientStreamTracers(
      CallOptions callOptions, Metadata headers, int previousAttempts, boolean isTransparentRetry) {
    List<ClientStreamTracer.Factory> factories = callOptions.getStreamTracerFactories();
    ClientStreamTracer[] tracers = new ClientStreamTracer[factories.size() + 1];
    StreamInfo streamInfo =
        StreamInfo.newBuilder()
            .setCallOptions(callOptions)
            .setPreviousAttempts(previousAttempts)
            .setIsTransparentRetry(isTransparentRetry)
            .build();
    for (int i = 0; i < factories.size(); i++) {
      tracers[i] = factories.get(i).newClientStreamTracer(streamInfo, headers);
    }
    // Reserved to be set later by the lb as per the API contract of ClientTransport.newStream().
    // See also GrpcUtil.getTransportFromPickResult()
    tracers[tracers.length - 1] = NOOP_TRACER;
    return tracers;
  }

  /** Quietly closes all messages in MessageProducer. */
  static void closeQuietly(MessageProducer producer) {
    InputStream message;
    while ((message = producer.next()) != null) {
      closeQuietly(message);
    }
  }

  /**
   * Closes a Closeable, ignoring IOExceptions.
   * This method exists because Guava's {@code Closeables.closeQuietly()} is beta.
   */
  public static void closeQuietly(@Nullable Closeable message) {
    if (message == null) {
      return;
    }
    try {
      message.close();
    } catch (IOException ioException) {
      // do nothing except log
      log.log(Level.WARNING, "exception caught in closeQuietly", ioException);
    }
  }

  /**
   * Checks whether the given item exists in the iterable. This is copied from Guava Collect's
   * {@code Iterables.contains()} because Guava Collect is not Android-friendly thus core can't
   * depend on it.
*/ static <T> boolean iterableContains(Iterable<T> iterable, T item) { if (iterable instanceof Collection) { Collection<?> collection = (Collection<?>) iterable; try { return collection.contains(item); } catch (NullPointerException e) { return false; } catch (ClassCastException e) { return false; } } for (T i : iterable) { if (Objects.equal(i, item)) { return true; } } return false; } private GrpcUtil() {} }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.script; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptResponse; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreakingException; import 
org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.cache.RemovalListener; import org.elasticsearch.common.cache.RemovalNotification; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.env.Environment; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.watcher.FileChangesListener; import org.elasticsearch.watcher.FileWatcher; import org.elasticsearch.watcher.ResourceWatcherService; import java.io.Closeable; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.concurrent.ConcurrentMap; import static java.util.Collections.unmodifiableMap; public class ScriptService extends AbstractComponent implements Closeable, ClusterStateListener { static final String DISABLE_DYNAMIC_SCRIPTING_SETTING = "script.disable_dynamic"; public static final Setting<Integer> SCRIPT_CACHE_SIZE_SETTING = Setting.intSetting("script.cache.max_size", 100, 0, Property.NodeScope); public static final Setting<TimeValue> SCRIPT_CACHE_EXPIRE_SETTING = 
Setting.positiveTimeSetting("script.cache.expire", TimeValue.timeValueMillis(0), Property.NodeScope); public static final Setting<Boolean> SCRIPT_AUTO_RELOAD_ENABLED_SETTING = Setting.boolSetting("script.auto_reload_enabled", true, Property.NodeScope); public static final Setting<Integer> SCRIPT_MAX_SIZE_IN_BYTES = Setting.intSetting("script.max_size_in_bytes", 65535, Property.NodeScope); public static final Setting<Integer> SCRIPT_MAX_COMPILATIONS_PER_MINUTE = Setting.intSetting("script.max_compilations_per_minute", 15, 0, Property.Dynamic, Property.NodeScope); private final Collection<ScriptEngineService> scriptEngines; private final Map<String, ScriptEngineService> scriptEnginesByLang; private final Map<String, ScriptEngineService> scriptEnginesByExt; private final ConcurrentMap<CacheKey, CompiledScript> staticCache = ConcurrentCollections.newConcurrentMap(); private final Cache<CacheKey, CompiledScript> cache; private final Path scriptsDirectory; private final ScriptModes scriptModes; private final ScriptContextRegistry scriptContextRegistry; private final ScriptMetrics scriptMetrics = new ScriptMetrics(); private ClusterState clusterState; private int totalCompilesPerMinute; private long lastInlineCompileTime; private double scriptsPerMinCounter; private double compilesAllowedPerNano; public ScriptService(Settings settings, Environment env, ResourceWatcherService resourceWatcherService, ScriptEngineRegistry scriptEngineRegistry, ScriptContextRegistry scriptContextRegistry, ScriptSettings scriptSettings) throws IOException { super(settings); Objects.requireNonNull(scriptEngineRegistry); Objects.requireNonNull(scriptContextRegistry); Objects.requireNonNull(scriptSettings); if (Strings.hasLength(settings.get(DISABLE_DYNAMIC_SCRIPTING_SETTING))) { throw new IllegalArgumentException(DISABLE_DYNAMIC_SCRIPTING_SETTING + " is not a supported setting, replace with fine-grained script settings. 
\n" + "Dynamic scripts can be enabled for all languages and all operations by replacing `script.disable_dynamic: false` with `script.inline: true` and `script.stored: true` in elasticsearch.yml"); } this.scriptEngines = scriptEngineRegistry.getRegisteredLanguages().values(); this.scriptContextRegistry = scriptContextRegistry; int cacheMaxSize = SCRIPT_CACHE_SIZE_SETTING.get(settings); CacheBuilder<CacheKey, CompiledScript> cacheBuilder = CacheBuilder.builder(); if (cacheMaxSize >= 0) { cacheBuilder.setMaximumWeight(cacheMaxSize); } TimeValue cacheExpire = SCRIPT_CACHE_EXPIRE_SETTING.get(settings); if (cacheExpire.getNanos() != 0) { cacheBuilder.setExpireAfterAccess(cacheExpire); } logger.debug("using script cache with max_size [{}], expire [{}]", cacheMaxSize, cacheExpire); this.cache = cacheBuilder.removalListener(new ScriptCacheRemovalListener()).build(); Map<String, ScriptEngineService> enginesByLangBuilder = new HashMap<>(); Map<String, ScriptEngineService> enginesByExtBuilder = new HashMap<>(); for (ScriptEngineService scriptEngine : scriptEngines) { String language = scriptEngineRegistry.getLanguage(scriptEngine.getClass()); enginesByLangBuilder.put(language, scriptEngine); enginesByExtBuilder.put(scriptEngine.getExtension(), scriptEngine); } this.scriptEnginesByLang = unmodifiableMap(enginesByLangBuilder); this.scriptEnginesByExt = unmodifiableMap(enginesByExtBuilder); this.scriptModes = new ScriptModes(scriptSettings, settings); // add file watcher for static scripts scriptsDirectory = env.scriptsFile(); if (logger.isTraceEnabled()) { logger.trace("Using scripts directory [{}] ", scriptsDirectory); } FileWatcher fileWatcher = new FileWatcher(scriptsDirectory); fileWatcher.addListener(new ScriptChangesListener()); if (SCRIPT_AUTO_RELOAD_ENABLED_SETTING.get(settings)) { // automatic reload is enabled - register scripts resourceWatcherService.add(fileWatcher); } else { // automatic reload is disable just load scripts once fileWatcher.init(); } 
this.lastInlineCompileTime = System.nanoTime(); this.setMaxCompilationsPerMinute(SCRIPT_MAX_COMPILATIONS_PER_MINUTE.get(settings)); } void registerClusterSettingsListeners(ClusterSettings clusterSettings) { clusterSettings.addSettingsUpdateConsumer(SCRIPT_MAX_COMPILATIONS_PER_MINUTE, this::setMaxCompilationsPerMinute); } @Override public void close() throws IOException { IOUtils.close(scriptEngines); } private ScriptEngineService getScriptEngineServiceForLang(String lang) { ScriptEngineService scriptEngineService = scriptEnginesByLang.get(lang); if (scriptEngineService == null) { throw new IllegalArgumentException("script_lang not supported [" + lang + "]"); } return scriptEngineService; } private ScriptEngineService getScriptEngineServiceForFileExt(String fileExtension) { ScriptEngineService scriptEngineService = scriptEnginesByExt.get(fileExtension); if (scriptEngineService == null) { throw new IllegalArgumentException("script file extension not supported [" + fileExtension + "]"); } return scriptEngineService; } void setMaxCompilationsPerMinute(Integer newMaxPerMinute) { this.totalCompilesPerMinute = newMaxPerMinute; // Reset the counter to allow new compilations this.scriptsPerMinCounter = totalCompilesPerMinute; this.compilesAllowedPerNano = ((double) totalCompilesPerMinute) / TimeValue.timeValueMinutes(1).nanos(); } /** * Checks if a script can be executed and compiles it if needed, or returns the previously compiled and cached script. 
*/ public CompiledScript compile(Script script, ScriptContext scriptContext, Map<String, String> params) { if (script == null) { throw new IllegalArgumentException("The parameter script (Script) must not be null."); } if (scriptContext == null) { throw new IllegalArgumentException("The parameter scriptContext (ScriptContext) must not be null."); } String lang = script.getLang(); ScriptEngineService scriptEngineService = getScriptEngineServiceForLang(lang); if (canExecuteScript(lang, script.getType(), scriptContext) == false) { throw new IllegalStateException("scripts of type [" + script.getType() + "], operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are disabled"); } // TODO: fix this through some API or something, that's wrong // special exception to prevent expressions from compiling as update or mapping scripts boolean expression = "expression".equals(script.getLang()); boolean notSupported = scriptContext.getKey().equals(ScriptContext.Standard.UPDATE.getKey()); if (expression && notSupported) { throw new UnsupportedOperationException("scripts of type [" + script.getType() + "]," + " operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are not supported"); } return compileInternal(script, params); } /** * Check whether there have been too many compilations within the last minute, throwing a circuit breaking exception if so. * This is a variant of the token bucket algorithm: https://en.wikipedia.org/wiki/Token_bucket * * It can be thought of as a bucket with water, every time the bucket is checked, water is added proportional to the amount of time that * elapsed since the last time it was checked. If there is enough water, some is removed and the request is allowed. If there is not * enough water the request is denied. Just like a normal bucket, if water is added that overflows the bucket, the extra water/capacity * is discarded - there can never be more water in the bucket than the size of the bucket. 
*/ void checkCompilationLimit() { long now = System.nanoTime(); long timePassed = now - lastInlineCompileTime; lastInlineCompileTime = now; scriptsPerMinCounter += (timePassed) * compilesAllowedPerNano; // It's been over the time limit anyway, readjust the bucket to be level if (scriptsPerMinCounter > totalCompilesPerMinute) { scriptsPerMinCounter = totalCompilesPerMinute; } // If there is enough tokens in the bucket, allow the request and decrease the tokens by 1 if (scriptsPerMinCounter >= 1) { scriptsPerMinCounter -= 1.0; } else { // Otherwise reject the request throw new CircuitBreakingException("[script] Too many dynamic script compilations within one minute, max: [" + totalCompilesPerMinute + "/min]; please use on-disk, indexed, or scripts with parameters instead; " + "this limit can be changed by the [" + SCRIPT_MAX_COMPILATIONS_PER_MINUTE.getKey() + "] setting"); } } /** * Compiles a script straight-away, or returns the previously compiled and cached script, * without checking if it can be executed based on settings. 
*/ CompiledScript compileInternal(Script script, Map<String, String> params) { if (script == null) { throw new IllegalArgumentException("The parameter script (Script) must not be null."); } String lang = script.getLang(); ScriptType type = script.getType(); //script.getScript() could return either a name or code for a script, //but we check for a file script name first and an indexed script name second String name = script.getScript(); if (logger.isTraceEnabled()) { logger.trace("Compiling lang: [{}] type: [{}] script: {}", lang, type, name); } ScriptEngineService scriptEngineService = getScriptEngineServiceForLang(lang); if (type == ScriptType.FILE) { CacheKey cacheKey = new CacheKey(scriptEngineService, name, null, params); //On disk scripts will be loaded into the staticCache by the listener CompiledScript compiledScript = staticCache.get(cacheKey); if (compiledScript == null) { throw new IllegalArgumentException("Unable to find on disk file script [" + name + "] using lang [" + lang + "]"); } return compiledScript; } //script.getScript() will be code if the script type is inline String code = script.getScript(); if (type == ScriptType.STORED) { //The look up for an indexed script must be done every time in case //the script has been updated in the index since the last look up. final IndexedScript indexedScript = new IndexedScript(lang, name); name = indexedScript.id; code = getScriptFromClusterState(indexedScript.lang, indexedScript.id); } CacheKey cacheKey = new CacheKey(scriptEngineService, type == ScriptType.INLINE ? 
null : name, code, params); CompiledScript compiledScript = cache.get(cacheKey); if (compiledScript != null) { return compiledScript; } // Synchronize so we don't compile scripts many times during multiple shards all compiling a script synchronized (this) { // Retrieve it again in case it has been put by a different thread compiledScript = cache.get(cacheKey); if (compiledScript == null) { try { // Either an un-cached inline script or indexed script // If the script type is inline the name will be the same as the code for identification in exceptions // but give the script engine the chance to be better, give it separate name + source code // for the inline case, then its anonymous: null. String actualName = (type == ScriptType.INLINE) ? null : name; if (logger.isTraceEnabled()) { logger.trace("compiling script, type: [{}], lang: [{}], params: [{}]", type, lang, params); } // Check whether too many compilations have happened checkCompilationLimit(); compiledScript = new CompiledScript(type, name, lang, scriptEngineService.compile(actualName, code, params)); } catch (ScriptException good) { // TODO: remove this try-catch completely, when all script engines have good exceptions! throw good; // its already good } catch (Exception exception) { throw new GeneralScriptException("Failed to compile " + type + " script [" + name + "] using lang [" + lang + "]", exception); } // Since the cache key is the script content itself we don't need to // invalidate/check the cache if an indexed script changes. 
scriptMetrics.onCompilation(); cache.put(cacheKey, compiledScript); } return compiledScript; } } private String validateScriptLanguage(String scriptLang) { Objects.requireNonNull(scriptLang); if (scriptEnginesByLang.containsKey(scriptLang) == false) { throw new IllegalArgumentException("script_lang not supported [" + scriptLang + "]"); } return scriptLang; } String getScriptFromClusterState(String scriptLang, String id) { scriptLang = validateScriptLanguage(scriptLang); ScriptMetaData scriptMetadata = clusterState.metaData().custom(ScriptMetaData.TYPE); if (scriptMetadata == null) { throw new ResourceNotFoundException("Unable to find script [" + scriptLang + "/" + id + "] in cluster state"); } String script = scriptMetadata.getScript(scriptLang, id); if (script == null) { throw new ResourceNotFoundException("Unable to find script [" + scriptLang + "/" + id + "] in cluster state"); } return script; } void validateStoredScript(String id, String scriptLang, BytesReference scriptBytes) { validateScriptSize(id, scriptBytes.length()); String script = ScriptMetaData.parseStoredScript(scriptBytes); if (Strings.hasLength(scriptBytes)) { //Just try and compile it try { ScriptEngineService scriptEngineService = getScriptEngineServiceForLang(scriptLang); //we don't know yet what the script will be used for, but if all of the operations for this lang with //indexed scripts are disabled, it makes no sense to even compile it. if (isAnyScriptContextEnabled(scriptLang, ScriptType.STORED)) { Object compiled = scriptEngineService.compile(id, script, Collections.emptyMap()); if (compiled == null) { throw new IllegalArgumentException("Unable to parse [" + script + "] lang [" + scriptLang + "] (ScriptService.compile returned null)"); } } else { logger.warn( "skipping compile of script [{}], lang [{}] as all scripted operations are disabled for indexed scripts", script, scriptLang); } } catch (ScriptException good) { // TODO: remove this when all script engines have good exceptions! 
throw good; // its already good! } catch (Exception e) { throw new IllegalArgumentException("Unable to parse [" + script + "] lang [" + scriptLang + "]", e); } } else { throw new IllegalArgumentException("Unable to find script in : " + scriptBytes.utf8ToString()); } } public void storeScript(ClusterService clusterService, PutStoredScriptRequest request, ActionListener<PutStoredScriptResponse> listener) { String scriptLang = validateScriptLanguage(request.scriptLang()); //verify that the script compiles validateStoredScript(request.id(), scriptLang, request.script()); clusterService.submitStateUpdateTask("put-script-" + request.id(), new AckedClusterStateUpdateTask<PutStoredScriptResponse>(request, listener) { @Override protected PutStoredScriptResponse newResponse(boolean acknowledged) { return new PutStoredScriptResponse(acknowledged); } @Override public ClusterState execute(ClusterState currentState) throws Exception { return innerStoreScript(currentState, scriptLang, request); } }); } static ClusterState innerStoreScript(ClusterState currentState, String validatedScriptLang, PutStoredScriptRequest request) { ScriptMetaData scriptMetadata = currentState.metaData().custom(ScriptMetaData.TYPE); ScriptMetaData.Builder scriptMetadataBuilder = new ScriptMetaData.Builder(scriptMetadata); scriptMetadataBuilder.storeScript(validatedScriptLang, request.id(), request.script()); MetaData.Builder metaDataBuilder = MetaData.builder(currentState.getMetaData()) .putCustom(ScriptMetaData.TYPE, scriptMetadataBuilder.build()); return ClusterState.builder(currentState).metaData(metaDataBuilder).build(); } public void deleteStoredScript(ClusterService clusterService, DeleteStoredScriptRequest request, ActionListener<DeleteStoredScriptResponse> listener) { String scriptLang = validateScriptLanguage(request.scriptLang()); clusterService.submitStateUpdateTask("delete-script-" + request.id(), new AckedClusterStateUpdateTask<DeleteStoredScriptResponse>(request, listener) { @Override 
protected DeleteStoredScriptResponse newResponse(boolean acknowledged) { return new DeleteStoredScriptResponse(acknowledged); } @Override public ClusterState execute(ClusterState currentState) throws Exception { return innerDeleteScript(currentState, scriptLang, request); } }); } static ClusterState innerDeleteScript(ClusterState currentState, String validatedLang, DeleteStoredScriptRequest request) { ScriptMetaData scriptMetadata = currentState.metaData().custom(ScriptMetaData.TYPE); ScriptMetaData.Builder scriptMetadataBuilder = new ScriptMetaData.Builder(scriptMetadata); scriptMetadataBuilder.deleteScript(validatedLang, request.id()); MetaData.Builder metaDataBuilder = MetaData.builder(currentState.getMetaData()) .putCustom(ScriptMetaData.TYPE, scriptMetadataBuilder.build()); return ClusterState.builder(currentState).metaData(metaDataBuilder).build(); } public String getStoredScript(ClusterState state, GetStoredScriptRequest request) { ScriptMetaData scriptMetadata = state.metaData().custom(ScriptMetaData.TYPE); if (scriptMetadata != null) { return scriptMetadata.getScript(request.lang(), request.id()); } else { return null; } } /** * Compiles (or retrieves from cache) and executes the provided script */ public ExecutableScript executable(Script script, ScriptContext scriptContext, Map<String, String> params) { return executable(compile(script, scriptContext, params), script.getParams()); } /** * Executes a previously compiled script provided as an argument */ public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> vars) { return getScriptEngineServiceForLang(compiledScript.lang()).executable(compiledScript, vars); } /** * Compiles (or retrieves from cache) and executes the provided search script */ public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext, Map<String, String> params) { CompiledScript compiledScript = compile(script, scriptContext, params); return search(lookup, compiledScript, 
                script.getParams());
    }

    /**
     * Binds provided parameters to a compiled script returning a
     * {@link SearchScript} ready for execution
     */
    public SearchScript search(SearchLookup lookup, CompiledScript compiledScript, Map<String, Object> params) {
        return getScriptEngineServiceForLang(compiledScript.lang()).search(compiledScript, lookup, params);
    }

    // Returns true when at least one registered context permits this lang/type pair.
    private boolean isAnyScriptContextEnabled(String lang, ScriptType scriptType) {
        for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
            if (canExecuteScript(lang, scriptType, scriptContext)) {
                return true;
            }
        }
        return false;
    }

    // Consults the script modes to decide whether this lang/type/context combination may run;
    // rejects contexts that were never registered.
    private boolean canExecuteScript(String lang, ScriptType scriptType, ScriptContext scriptContext) {
        assert lang != null;
        if (scriptContextRegistry.isSupportedContext(scriptContext) == false) {
            throw new IllegalArgumentException("script context [" + scriptContext.getKey() + "] not supported");
        }
        return scriptModes.getScriptEnabled(lang, scriptType, scriptContext);
    }

    // Snapshot of compilation/eviction counters for the stats API.
    public ScriptStats stats() {
        return scriptMetrics.stats();
    }

    // Rejects scripts larger than the configured script.max_size_in_bytes limit.
    private void validateScriptSize(String identifier, int scriptSizeInBytes) {
        int allowedScriptSizeInBytes = SCRIPT_MAX_SIZE_IN_BYTES.get(settings);
        if (scriptSizeInBytes > allowedScriptSizeInBytes) {
            String message = LoggerMessageFormat.format(
                    "Limit of script size in bytes [{}] has been exceeded for script [{}] with size [{}]",
                    allowedScriptSizeInBytes,
                    identifier,
                    scriptSizeInBytes);
            throw new IllegalArgumentException(message);
        }
    }

    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        // Keep a reference to the latest cluster state so stored scripts can be resolved.
        clusterState = event.state();
    }

    /**
     * A small listener for the script cache that calls each
     * {@code ScriptEngineService}'s {@code scriptRemoved} method when the
     * script has been removed from the cache
     */
    private class ScriptCacheRemovalListener implements RemovalListener<CacheKey, CompiledScript> {
        @Override
        public void onRemoval(RemovalNotification<CacheKey, CompiledScript> notification) {
            if (logger.isDebugEnabled()) {
                logger.debug("removed {} from cache, reason: {}", notification.getValue(), notification.getRemovalReason());
            }
            scriptMetrics.onCacheEviction();
        }
    }

    // Watches the scripts directory and keeps staticCache in sync with the files on disk.
    private class ScriptChangesListener implements FileChangesListener {

        // Splits a script path into (name, extension); returns null when there is no usable
        // extension. Path separators become underscores in the script name.
        private Tuple<String, String> getScriptNameExt(Path file) {
            Path scriptPath = scriptsDirectory.relativize(file);
            int extIndex = scriptPath.toString().lastIndexOf('.');
            if (extIndex <= 0) {
                return null;
            }

            String ext = scriptPath.toString().substring(extIndex + 1);
            if (ext.isEmpty()) {
                return null;
            }

            String scriptName = scriptPath.toString().substring(0, extIndex).replace(scriptPath.getFileSystem().getSeparator(), "_");
            return new Tuple<>(scriptName, ext);
        }

        @Override
        public void onFileInit(Path file) {
            Tuple<String, String> scriptNameExt = getScriptNameExt(file);
            if (scriptNameExt == null) {
                logger.debug("Skipped script with invalid extension : [{}]", file);
                return;
            }
            if (logger.isTraceEnabled()) {
                logger.trace("Loading script file : [{}]", file);
            }

            ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2());
            // NOTE(review): getScriptEngineServiceForFileExt throws rather than returning null,
            // so this null branch looks unreachable - confirm before removing it.
            if (engineService == null) {
                logger.warn("No script engine found for [{}]", scriptNameExt.v2());
            } else {
                try {
                    //we don't know yet what the script will be used for, but if all of the operations for this lang
                    // with file scripts are disabled, it makes no sense to even compile it and cache it.
if (isAnyScriptContextEnabled(engineService.getType(), ScriptType.FILE)) { logger.info("compiling script file [{}]", file.toAbsolutePath()); try (InputStreamReader reader = new InputStreamReader(Files.newInputStream(file), StandardCharsets.UTF_8)) { String script = Streams.copyToString(reader); String name = scriptNameExt.v1(); CacheKey cacheKey = new CacheKey(engineService, name, null, Collections.emptyMap()); // pass the actual file name to the compiler (for script engines that care about this) Object executable = engineService.compile(file.getFileName().toString(), script, Collections.emptyMap()); CompiledScript compiledScript = new CompiledScript(ScriptType.FILE, name, engineService.getType(), executable); staticCache.put(cacheKey, compiledScript); scriptMetrics.onCompilation(); } } else { logger.warn("skipping compile of script file [{}] as all scripted operations are disabled for file scripts", file.toAbsolutePath()); } } catch (Exception e) { logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to load/compile script [{}]", scriptNameExt.v1()), e); } } } @Override public void onFileCreated(Path file) { onFileInit(file); } @Override public void onFileDeleted(Path file) { Tuple<String, String> scriptNameExt = getScriptNameExt(file); if (scriptNameExt != null) { ScriptEngineService engineService = getScriptEngineServiceForFileExt(scriptNameExt.v2()); assert engineService != null; logger.info("removing script file [{}]", file.toAbsolutePath()); staticCache.remove(new CacheKey(engineService, scriptNameExt.v1(), null, Collections.emptyMap())); } } @Override public void onFileChanged(Path file) { onFileInit(file); } } /** * The type of a script, more specifically where it gets loaded from: * - provided dynamically at request time * - loaded from an index * - loaded from file */ public enum ScriptType { INLINE(0, "inline", "inline", false), STORED(1, "id", "stored", false), FILE(2, "file", "file", true); private final int val; private final ParseField 
parseField; private final String scriptType; private final boolean defaultScriptEnabled; public static ScriptType readFrom(StreamInput in) throws IOException { int scriptTypeVal = in.readVInt(); for (ScriptType type : values()) { if (type.val == scriptTypeVal) { return type; } } throw new IllegalArgumentException("Unexpected value read for ScriptType got [" + scriptTypeVal + "] expected one of [" + INLINE.val + "," + FILE.val + "," + STORED.val + "]"); } public static void writeTo(ScriptType scriptType, StreamOutput out) throws IOException{ if (scriptType != null) { out.writeVInt(scriptType.val); } else { out.writeVInt(INLINE.val); //Default to inline } } ScriptType(int val, String name, String scriptType, boolean defaultScriptEnabled) { this.val = val; this.parseField = new ParseField(name); this.scriptType = scriptType; this.defaultScriptEnabled = defaultScriptEnabled; } public ParseField getParseField() { return parseField; } public boolean getDefaultScriptEnabled() { return defaultScriptEnabled; } public String getScriptType() { return scriptType; } @Override public String toString() { return name().toLowerCase(Locale.ROOT); } } private static final class CacheKey { final String lang; final String name; final String code; final Map<String, String> params; private CacheKey(final ScriptEngineService service, final String name, final String code, final Map<String, String> params) { this.lang = service.getType(); this.name = name; this.code = code; this.params = params; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; CacheKey cacheKey = (CacheKey)o; if (!lang.equals(cacheKey.lang)) return false; if (name != null ? !name.equals(cacheKey.name) : cacheKey.name != null) return false; if (code != null ? 
!code.equals(cacheKey.code) : cacheKey.code != null) return false; return params.equals(cacheKey.params); } @Override public int hashCode() { int result = lang.hashCode(); result = 31 * result + (name != null ? name.hashCode() : 0); result = 31 * result + (code != null ? code.hashCode() : 0); result = 31 * result + params.hashCode(); return result; } } private static class IndexedScript { private final String lang; private final String id; IndexedScript(String lang, String script) { this.lang = lang; final String[] parts = script.split("/"); if (parts.length == 1) { this.id = script; } else { if (parts.length != 3) { throw new IllegalArgumentException("Illegal index script format [" + script + "]" + " should be /lang/id"); } else { if (!parts[1].equals(this.lang)) { throw new IllegalStateException("Conflicting script language, found [" + parts[1] + "] expected + ["+ this.lang + "]"); } this.id = parts[2]; } } } } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.management; import java.util.HashMap; import java.util.Map; import java.util.Properties; import javax.management.MBeanServer; import javax.management.ObjectName; import org.apache.camel.CamelContext; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.util.StringHelper; /** * @version */ public class ManagedCamelContextTest extends ManagementTestSupport { @Override protected CamelContext createCamelContext() throws Exception { CamelContext context = super.createCamelContext(); // to force a different management name than the camel id context.getManagementNameStrategy().setNamePattern("19-#name#"); return context; } public void testManagedCamelContext() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); assertTrue("Should be registered", mbeanServer.isRegistered(on)); String name = (String) mbeanServer.getAttribute(on, "CamelId"); assertEquals("camel-1", name); String managementName = (String) 
                mbeanServer.getAttribute(on, "ManagementName");
        assertEquals("19-camel-1", managementName);

        String uptime = (String) mbeanServer.getAttribute(on, "Uptime");
        assertNotNull(uptime);

        String status = (String) mbeanServer.getAttribute(on, "State");
        assertEquals("Started", status);

        Boolean messageHistory = (Boolean) mbeanServer.getAttribute(on, "MessageHistory");
        assertEquals(Boolean.TRUE, messageHistory);

        Integer total = (Integer) mbeanServer.getAttribute(on, "TotalRoutes");
        assertEquals(2, total.intValue());

        Integer started = (Integer) mbeanServer.getAttribute(on, "StartedRoutes");
        assertEquals(2, started.intValue());

        // invoke operations
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Hello World");
        mbeanServer.invoke(on, "sendBody", new Object[]{"direct:start", "Hello World"}, new String[]{"java.lang.String", "java.lang.Object"});
        assertMockEndpointsSatisfied();

        resetMocks();
        mock.expectedBodiesReceived("Hello World");
        mbeanServer.invoke(on, "sendStringBody", new Object[]{"direct:start", "Hello World"}, new String[]{"java.lang.String", "java.lang.String"});
        assertMockEndpointsSatisfied();

        // request operations return the out body
        Object reply = mbeanServer.invoke(on, "requestBody", new Object[]{"direct:foo", "Hello World"}, new String[]{"java.lang.String", "java.lang.Object"});
        assertEquals("Bye World", reply);

        reply = mbeanServer.invoke(on, "requestStringBody", new Object[]{"direct:foo", "Hello World"}, new String[]{"java.lang.String", "java.lang.String"});
        assertEquals("Bye World", reply);

        resetMocks();
        mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Hello World");
        mock.expectedHeaderReceived("foo", 123);
        Map<String, Object> headers = new HashMap<String, Object>();
        headers.put("foo", 123);
        mbeanServer.invoke(on, "sendBodyAndHeaders", new Object[]{"direct:start", "Hello World", headers}, new String[]{"java.lang.String", "java.lang.Object", "java.util.Map"});
        assertMockEndpointsSatisfied();

        resetMocks();
        mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Hello World");
        mock.expectedHeaderReceived("foo", 123);
        reply = mbeanServer.invoke(on, "requestBodyAndHeaders", new Object[]{"direct:start", "Hello World", headers}, new String[]{"java.lang.String", "java.lang.Object", "java.util.Map"});
        assertEquals("Hello World", reply);
        assertMockEndpointsSatisfied();

        // stop Camel
        mbeanServer.invoke(on, "stop", null, null);
    }

    public void testManagedCamelContextCreateEndpoint() throws Exception {
        // JMX tests don't work well on AIX CI servers (hangs them)
        if (isPlatform("aix")) {
            return;
        }

        MBeanServer mbeanServer = getMBeanServer();

        ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");

        assertNull(context.hasEndpoint("seda:bar"));

        // create a new endpoint
        Object reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"});
        assertEquals(Boolean.TRUE, reply);

        assertNotNull(context.hasEndpoint("seda:bar"));

        // creating the endpoint should also register its MBean
        ObjectName seda = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=endpoints,name=\"seda://bar\"");
        boolean registered = mbeanServer.isRegistered(seda);
        assertTrue("Should be registered " + seda, registered);

        // create it again - idempotent, reports FALSE as nothing new was created
        reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"});
        assertEquals(Boolean.FALSE, reply);

        registered = mbeanServer.isRegistered(seda);
        assertTrue("Should be registered " + seda, registered);
    }

    public void testManagedCamelContextRemoveEndpoint() throws Exception {
        // JMX tests don't work well on AIX CI servers (hangs them)
        if (isPlatform("aix")) {
            return;
        }

        MBeanServer mbeanServer = getMBeanServer();

        ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\"");

        assertNull(context.hasEndpoint("seda:bar"));

        // create a new endpoint
        Object reply = mbeanServer.invoke(on, "createEndpoint", new Object[]{"seda:bar"}, new String[]{"java.lang.String"});
assertEquals(Boolean.TRUE, reply); assertNotNull(context.hasEndpoint("seda:bar")); ObjectName seda = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=endpoints,name=\"seda://bar\""); boolean registered = mbeanServer.isRegistered(seda); assertTrue("Should be registered " + seda, registered); // remove it Object num = mbeanServer.invoke(on, "removeEndpoints", new Object[]{"seda:*"}, new String[]{"java.lang.String"}); assertEquals(1, num); assertNull(context.hasEndpoint("seda:bar")); registered = mbeanServer.isRegistered(seda); assertFalse("Should not be registered " + seda, registered); // remove it again num = mbeanServer.invoke(on, "removeEndpoints", new Object[]{"seda:*"}, new String[]{"java.lang.String"}); assertEquals(0, num); assertNull(context.hasEndpoint("seda:bar")); registered = mbeanServer.isRegistered(seda); assertFalse("Should not be registered " + seda, registered); } public void testFindComponentsInClasspath() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); assertTrue("Should be registered", mbeanServer.isRegistered(on)); @SuppressWarnings("unchecked") Map<String, Properties> info = (Map<String, Properties>) mbeanServer.invoke(on, "findComponents", null, null); assertNotNull(info); assertEquals(22, info.size()); Properties prop = info.get("seda"); assertNotNull(prop); assertEquals("seda", prop.get("name")); assertEquals("org.apache.camel", prop.get("groupId")); assertEquals("camel-core", prop.get("artifactId")); } public void testManagedCamelContextCreateRouteStaticEndpointJson() throws Exception { // JMX tests dont work well on AIX CI servers (hangs them) if (isPlatform("aix")) { return; } MBeanServer mbeanServer = getMBeanServer(); ObjectName on = 
ObjectName.getInstance("org.apache.camel:context=19-camel-1,type=context,name=\"camel-1\""); // get the json String json = (String) mbeanServer.invoke(on, "createRouteStaticEndpointJson", null, null); assertNotNull(json); assertEquals(7, StringHelper.countChar(json, '{')); assertEquals(7, StringHelper.countChar(json, '}')); assertTrue(json.contains("{ \"uri\": \"direct://start\" }")); assertTrue(json.contains("{ \"uri\": \"direct://foo\" }")); } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { from("direct:start").to("mock:result"); from("direct:foo").transform(constant("Bye World")); } }; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.resourcemanager.scheduler.constraint;

import com.google.common.collect.ImmutableSet;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.MockNodes;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.MockRMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.TestUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;

import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/**
 * Test functionality of AllocationTagsManager.
 */
public class TestAllocationTagsManager {
  // RM context backed by a MockRM with 2 racks x 4 nodes (see setup())
  private RMContext rmContext;

  @Before
  public void setup() {
    // Build a small mock cluster so NodeId -> rack resolution works:
    // 2 racks with 4 nodes each, 4GB / 4 vcores per node.
    MockRM rm = new MockRM();
    rm.start();
    MockNodes.resetHostIds();
    List<RMNode> rmNodes =
        MockNodes.newNodes(2, 4, Resource.newInstance(4096, 4));
    for (RMNode rmNode : rmNodes) {
      rm.getRMContext().getRMNodes().putIfAbsent(rmNode.getNodeID(), rmNode);
    }
    rmContext = rm.getRMContext();
  }

  /**
   * Adding and removing containers on one node must keep per-tag reference
   * counts correct, and removing a container twice must not NPE.
   */
  @Test
  public void testMultipleAddRemoveContainer() {
    AllocationTagsManager atm = new AllocationTagsManager(rmContext);

    NodeId nodeId = NodeId.fromString("host1:123");
    ContainerId cid1 = TestUtils.getMockContainerId(1, 1);
    ContainerId cid2 = TestUtils.getMockContainerId(1, 2);
    ContainerId cid3 = TestUtils.getMockContainerId(1, 3);
    Set<String> tags1 = ImmutableSet.of("mapper", "reducer");
    Set<String> tags2 = ImmutableSet.of("mapper");
    Set<String> tags3 = ImmutableSet.of("zk");

    // node - mapper : 2
    //      - reduce : 1
    atm.addContainer(nodeId, cid1, tags1);
    atm.addContainer(nodeId, cid2, tags2);
    atm.addContainer(nodeId, cid3, tags3);
    Assert.assertEquals(2L,
        (long) atm.getAllocationTagsWithCount(nodeId).get("mapper"));
    Assert.assertEquals(1L,
        (long) atm.getAllocationTagsWithCount(nodeId).get("reducer"));

    // remove container1
    atm.removeContainer(nodeId, cid1, tags1);
    Assert.assertEquals(1L,
        (long) atm.getAllocationTagsWithCount(nodeId).get("mapper"));
    Assert.assertNull(atm.getAllocationTagsWithCount(nodeId).get("reducer"));

    // remove the same container again, the reducer no longer exists,
    // make sure there is no NPE here
    atm.removeContainer(nodeId, cid1, tags1);
    Assert.assertNull(atm.getAllocationTagsWithCount(nodeId).get("mapper"));
    Assert.assertNull(atm.getAllocationTagsWithCount(nodeId).get("reducer"));
  }

  /**
   * Node/rack cardinality queries (single tag, tag sets with min/max/sum
   * aggregation, empty and null tag sets, unknown tags) against a small
   * two-node layout, before and after all containers are removed.
   */
  @Test
  public void testAllocationTagsManagerSimpleCases()
      throws InvalidAllocationTagsQueryException {

    AllocationTagsManager atm = new AllocationTagsManager(rmContext);

    /**
     * Construct test case:
     * Node1 (rack0):
     *    container_1_1 (mapper/reducer/app_1)
     *    container_1_3 (service/app_1)
     *
     * Node2 (rack0):
     *    container_1_2 (mapper/reducer/app_1)
     *    container_1_4 (reducer/app_1)
     *    container_2_1 (service/app_2)
     */

    // 3 Containers from app1
    atm.addContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(1, 1),
        ImmutableSet.of("mapper", "reducer"));

    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(1, 2),
        ImmutableSet.of("mapper", "reducer"));

    atm.addContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(1, 3), ImmutableSet.of("service"));

    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(1, 4), ImmutableSet.of("reducer"));

    // 1 Container from app2
    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(2, 3), ImmutableSet.of("service"));

    // Get Node Cardinality of app1 on node1, with tag "mapper"
    Assert.assertEquals(1,
        atm.getNodeCardinalityByOp(NodeId.fromString("host1:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1),
                ImmutableSet.of("mapper")),
            Long::max));

    // Get Rack Cardinality of app1 on rack0, with tag "mapper"
    Assert.assertEquals(2, atm.getRackCardinality("rack0",
        TestUtils.getMockApplicationId(1), "mapper"));

    // Get Node Cardinality of app1 on node2, with tag "mapper/reducer", op=min
    Assert.assertEquals(1,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1),
                ImmutableSet.of("mapper", "reducer")),
            Long::min));

    // Get Node Cardinality of app1 on node2, with tag "mapper/reducer", op=max
    Assert.assertEquals(2,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1),
                ImmutableSet.of("mapper", "reducer")),
            Long::max));

    // Get Node Cardinality of app1 on node2, with tag "mapper/reducer", op=sum
    Assert.assertEquals(3,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1),
                ImmutableSet.of("mapper", "reducer")),
            Long::sum));

    // Get Node Cardinality by passing single tag.
    Assert.assertEquals(1,
        atm.getNodeCardinality(NodeId.fromString("host2:123"),
            TestUtils.getMockApplicationId(1), "mapper"));

    Assert.assertEquals(2,
        atm.getNodeCardinality(NodeId.fromString("host2:123"),
            TestUtils.getMockApplicationId(1), "reducer"));

    // Get Node Cardinality of app1 on node2, with tag "no_existed/reducer",
    // op=min
    Assert.assertEquals(0,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1),
                ImmutableSet.of("no_existed", "reducer")),
            Long::min));

    // Get Node Cardinality of app1 on node2, with tag "<applicationId>", op=max
    // (Expect this returns #containers from app1 on node2)
    Assert.assertEquals(2,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1), null),
            Long::max));

    // Get Node Cardinality of app1 on node2, with empty tag set, op=max
    Assert.assertEquals(2,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1), null),
            Long::max));

    // Get Cardinality of app1 on node2, with empty tag set, op=max
    Assert.assertEquals(2,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1), ImmutableSet.of()),
            Long::max));

    // Get Node Cardinality of all apps on node2, with empty tag set, op=sum
    Assert.assertEquals(4, atm.getNodeCardinalityByOp(
        NodeId.fromString("host2:123"),
        AllocationTags.createGlobalAllocationTags(ImmutableSet.of()),
        Long::sum));

    // Get Node Cardinality of app_1 on node2, with empty tag set, op=sum
    Assert.assertEquals(3,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1), ImmutableSet.of()),
            Long::sum));

    // Get Node Cardinality of app_1 on node2, with empty tag set, op=sum
    Assert.assertEquals(1,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(2), ImmutableSet.of()),
            Long::sum));

    // Finish all containers:
    atm.removeContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(1, 1),
        ImmutableSet.of("mapper", "reducer"));

    atm.removeContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(1, 2),
        ImmutableSet.of("mapper", "reducer"));

    atm.removeContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(1, 3), ImmutableSet.of("service"));

    atm.removeContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(1, 4), ImmutableSet.of("reducer"));

    atm.removeContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(2, 3), ImmutableSet.of("service"));

    // Expect all cardinality to be 0
    // Get Cardinality of app1 on node1, with tag "mapper"
    Assert.assertEquals(0,
        atm.getNodeCardinalityByOp(NodeId.fromString("host1:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1),
                ImmutableSet.of("mapper")),
            Long::max));

    // Get Node Cardinality of app1 on node2, with tag "mapper/reducer", op=min
    Assert.assertEquals(0,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1),
                ImmutableSet.of("mapper", "reducer")),
            Long::min));

    // Get Node Cardinality of app1 on node2, with tag "mapper/reducer", op=max
    Assert.assertEquals(0,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1),
                ImmutableSet.of("mapper", "reducer")),
            Long::max));

    // Get Node Cardinality of app1 on node2, with tag "mapper/reducer", op=sum
    Assert.assertEquals(0,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1),
                ImmutableSet.of("mapper", "reducer")),
            Long::sum));

    // Get Node Cardinality of app1 on node2, with tag "<applicationId>", op=max
    // (Expect this returns #containers from app1 on node2)
    Assert.assertEquals(0,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1),
                ImmutableSet.of(TestUtils.getMockApplicationId(1).toString())),
            Long::max));

    Assert.assertEquals(0,
        atm.getNodeCardinality(NodeId.fromString("host2:123"),
            TestUtils.getMockApplicationId(1),
            TestUtils.getMockApplicationId(1).toString()));

    // Get Node Cardinality of app1 on node2, with empty tag set, op=max
    Assert.assertEquals(0,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1), ImmutableSet.of()),
            Long::max));

    // Get Node Cardinality of all apps on node2, with empty tag set, op=sum
    Assert.assertEquals(0, atm.getNodeCardinalityByOp(
        NodeId.fromString("host2:123"),
        AllocationTags.createGlobalAllocationTags(ImmutableSet.of()),
        Long::sum));

    // Get Node Cardinality of app_1 on node2, with empty tag set, op=sum
    Assert.assertEquals(0,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1), ImmutableSet.of()),
            Long::sum));

    // Get Node Cardinality of app_2 on node2, with empty tag set, op=sum
    // NOTE(review): comment says app_2 but the query below uses
    // getMockApplicationId(1) - looks like a copy-paste slip; verify intent.
    Assert.assertEquals(0,
        atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
            AllocationTags.createSingleAppAllocationTags(
                TestUtils.getMockApplicationId(1), ImmutableSet.of()),
            Long::sum));
  }

  /**
   * Rack-level cardinality queries (single app, all apps, min/max
   * aggregation over a rack's nodes) for containers spread over two nodes
   * of rack0.
   */
  @Test
  public void testAllocationTagsManagerRackMapping()
      throws InvalidAllocationTagsQueryException {

    AllocationTagsManager atm = new AllocationTagsManager(rmContext);

    /**
     * Construct Rack test case:
     * Node1 (rack0):
     *    container_1_1 (mapper/reducer/app_1)
     *    container_1_4 (reducer/app_2)
     *
     * Node2 (rack0):
     *    container_1_2 (mapper/reducer/app_2)
     *    container_1_3 (service/app_1)
     *
     * Node5 (rack1):
     *    container_2_1 (service/app_2)
     */

    // 3 Containers from app1
    atm.addContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(1, 1),
        ImmutableSet.of("mapper", "reducer"));

    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(2, 2),
        ImmutableSet.of("mapper", "reducer"));

    atm.addContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(2, 4), ImmutableSet.of("reducer"));

    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(1, 3), ImmutableSet.of("service"));

    // 1 Container from app2
    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(2, 3), ImmutableSet.of("service"));

    // Get Rack Cardinality of app1 on rack0, with tag "mapper"
    Assert.assertEquals(1, atm.getRackCardinality("rack0",
        TestUtils.getMockApplicationId(1), "mapper"));

    // Get Rack Cardinality of app2 on rack0, with tag "reducer"
    Assert.assertEquals(2, atm.getRackCardinality("rack0",
        TestUtils.getMockApplicationId(2), "reducer"));

    // Get Rack Cardinality of all apps on rack0, with tag "reducer"
    Assert.assertEquals(3, atm.getRackCardinality("rack0", null, "reducer"));

    // Get Rack Cardinality of app_1 on rack0, with empty tag set, op=max
    Assert.assertEquals(1, atm.getRackCardinalityByOp("rack0",
        AllocationTags.createSingleAppAllocationTags(
            TestUtils.getMockApplicationId(1), ImmutableSet.of()),
        Long::max));

    // Get Rack Cardinality of app_1 on rack0, with empty tag set, op=min
    Assert.assertEquals(1, atm.getRackCardinalityByOp("rack0",
        AllocationTags.createSingleAppAllocationTags(
            TestUtils.getMockApplicationId(1), ImmutableSet.of()),
        Long::min));

    // Get Rack Cardinality of all apps on rack0, with empty tag set, op=min
    Assert.assertEquals(3, atm.getRackCardinalityByOp("rack0",
        AllocationTags.createGlobalAllocationTags(ImmutableSet.of()),
        Long::max));
  }

  /**
   * After every added container is removed, all internal node/rack tag
   * mappings (global and per-app) must be empty - no leaked entries.
   */
  @Test
  public void testAllocationTagsManagerMemoryAfterCleanup() {
    /**
     * Make sure YARN cleans up all memory once container/app finishes.
     */

    AllocationTagsManager atm = new AllocationTagsManager(rmContext);

    // Add a bunch of containers
    atm.addContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(1, 1),
        ImmutableSet.of("mapper", "reducer"));

    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(1, 2),
        ImmutableSet.of("mapper", "reducer"));

    atm.addContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(1, 3), ImmutableSet.of("service"));

    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(1, 4), ImmutableSet.of("reducer"));

    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(2, 3), ImmutableSet.of("service"));

    // Remove all these containers
    atm.removeContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(1, 1),
        ImmutableSet.of("mapper", "reducer"));

    atm.removeContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(1, 2),
        ImmutableSet.of("mapper", "reducer"));

    atm.removeContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(1, 3), ImmutableSet.of("service"));

    atm.removeContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(1, 4), ImmutableSet.of("reducer"));

    atm.removeContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(2, 3), ImmutableSet.of("service"));

    // Check internal data structure
    Assert.assertEquals(0,
        atm.getGlobalNodeMapping().getTypeToTagsWithCount().size());
    Assert.assertEquals(0, atm.getPerAppNodeMappings().size());
    Assert.assertEquals(0,
        atm.getGlobalRackMapping().getTypeToTagsWithCount().size());
    Assert.assertEquals(0, atm.getPerAppRackMappings().size());
  }

  /**
   * Cardinality queries with illegal parameters (null nodeId, null
   * aggregation op) must throw InvalidAllocationTagsQueryException.
   */
  @Test
  public void testQueryCardinalityWithIllegalParameters() {
    AllocationTagsManager atm = new AllocationTagsManager(rmContext);

    // Add a bunch of containers
    atm.addContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(1, 1),
        ImmutableSet.of("mapper", "reducer"));

    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(1, 2),
        ImmutableSet.of("mapper", "reducer"));

    atm.addContainer(NodeId.fromString("host1:123"),
        TestUtils.getMockContainerId(1, 3), ImmutableSet.of("service"));

    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(1, 4), ImmutableSet.of("reducer"));

    atm.addContainer(NodeId.fromString("host2:123"),
        TestUtils.getMockContainerId(2, 3), ImmutableSet.of("service"));

    // No node-id
    boolean caughtException = false;
    try {
      atm.getNodeCardinalityByOp(null,
          AllocationTags.createSingleAppAllocationTags(
              TestUtils.getMockApplicationId(2), ImmutableSet.of("mapper")),
          Long::min);
    } catch (InvalidAllocationTagsQueryException e1) {
      caughtException = true;
    }
    Assert.assertTrue("should fail because of nodeId specified",
        caughtException);

    // No op
    caughtException = false;
    try {
      atm.getNodeCardinalityByOp(NodeId.fromString("host2:123"),
          AllocationTags.createSingleAppAllocationTags(
              TestUtils.getMockApplicationId(2), ImmutableSet.of("mapper")),
          null);
    } catch (InvalidAllocationTagsQueryException e1) {
      caughtException = true;
    }
    // NOTE(review): this message was copy-pasted from the nodeId case above;
    // this branch actually exercises a null aggregation op.
    Assert.assertTrue("should fail because of nodeId specified",
        caughtException);
  }

  /**
   * Aggregation across application scopes: SELF (single app), NOT_SELF
   * (all apps except the current one, via a spied RMContext that supplies
   * the running apps), a specific other app-id, and ALL apps.
   */
  @Test
  public void testNodeAllocationTagsAggregation()
      throws InvalidAllocationTagsQueryException {

    // Spy the context so getRMApps() can be stubbed with our three mock apps
    RMContext mockContext = Mockito.spy(rmContext);

    ApplicationId app1 = TestUtils.getMockApplicationId(1);
    ApplicationId app2 = TestUtils.getMockApplicationId(2);
    ApplicationId app3 = TestUtils.getMockApplicationId(3);
    NodeId host1 = NodeId.fromString("host1:123");
    NodeId host2 = NodeId.fromString("host2:123");
    NodeId host3 = NodeId.fromString("host3:123");

    ConcurrentMap<ApplicationId, RMApp> allApps = new ConcurrentHashMap<>();
    allApps.put(app1, new MockRMApp(123, 1000,
        RMAppState.NEW, "userA", ImmutableSet.of("")));
    allApps.put(app2, new MockRMApp(124, 1001,
        RMAppState.NEW, "userA", ImmutableSet.of("")));
    allApps.put(app3, new MockRMApp(125, 1002,
        RMAppState.NEW, "userA", ImmutableSet.of("")));
    Mockito.when(mockContext.getRMApps()).thenReturn(allApps);

    AllocationTagsManager atm = new AllocationTagsManager(mockContext);

    /**
     * Node1 (rack0)
     *   app1/A(2)
     *   app1/B(1)
     *   app2/A(3)
     *   app3/A(1)
     *
     * Node2 (rack0)
     *   app2/A(1)
     *   app2/B(2)
     *   app1/C(1)
     *   app3/B(1)
     *
     * Node3 (rack1):
     *   app2/D(1)
     *   app3/D(1)
     */
    atm.addContainer(host1, TestUtils.getMockContainerId(1, 1),
        ImmutableSet.of("A", "B"));
    atm.addContainer(host1, TestUtils.getMockContainerId(1, 2),
        ImmutableSet.of("A"));
    atm.addContainer(host1, TestUtils.getMockContainerId(2, 1),
        ImmutableSet.of("A"));
    atm.addContainer(host1, TestUtils.getMockContainerId(2, 2),
        ImmutableSet.of("A"));
    atm.addContainer(host1, TestUtils.getMockContainerId(2, 3),
        ImmutableSet.of("A"));
    atm.addContainer(host1, TestUtils.getMockContainerId(3, 1),
        ImmutableSet.of("A"));
    atm.addContainer(host2, TestUtils.getMockContainerId(1, 3),
        ImmutableSet.of("C"));
    atm.addContainer(host2, TestUtils.getMockContainerId(2, 4),
        ImmutableSet.of("A"));
    atm.addContainer(host2, TestUtils.getMockContainerId(2, 5),
        ImmutableSet.of("B"));
    atm.addContainer(host2, TestUtils.getMockContainerId(2, 6),
        ImmutableSet.of("B"));
    atm.addContainer(host2, TestUtils.getMockContainerId(3, 2),
        ImmutableSet.of("B"));
    atm.addContainer(host3, TestUtils.getMockContainerId(2, 7),
        ImmutableSet.of("D"));
    atm.addContainer(host3, TestUtils.getMockContainerId(3, 3),
        ImmutableSet.of("D"));

    // Target applications, current app: app1
    // all apps: app1, app2, app3
    // NOTE(review): ta is never used below - possibly leftover scaffolding.
    TargetApplications ta = new TargetApplications(app1,
        ImmutableSet.of(app1, app2, app3));

    //********************************
    // 1) self (app1)
    //********************************
    AllocationTags tags = AllocationTags
        .createSingleAppAllocationTags(app1, ImmutableSet.of("A", "C"));
    Assert.assertEquals(2, atm.getNodeCardinalityByOp(host1, tags, Long::max));
    Assert.assertEquals(0, atm.getNodeCardinalityByOp(host1, tags, Long::min));
    Assert.assertEquals(1, atm.getNodeCardinalityByOp(host2, tags, Long::max));
    Assert.assertEquals(0, atm.getNodeCardinalityByOp(host2, tags, Long::min));
    Assert.assertEquals(0, atm.getNodeCardinalityByOp(host3, tags, Long::max));
    Assert.assertEquals(0, atm.getNodeCardinalityByOp(host3, tags, Long::min));

    //********************************
    // 2) not-self (app2, app3)
    //********************************
    /**
     * Verify max/min cardinality of tag "A" on host1 from all applications
     * other than app1. This returns the max/min cardinality of tag "A" of
     * app2 or app3 on this node.
     *
     * Node1 (rack0)
     *   app1/A(1)
     *   app1/B(1)
     *   app2/A(3)
     *   app3/A(1)
     *
     *   app2_app3/A(4)
     *   app2_app3/B(0)
     *
     * expecting to return max=3, min=1
     */
    tags = AllocationTags.createOtherAppAllocationTags(app1,
        ImmutableSet.of("A", "B"));

    Assert.assertEquals(4, atm.getNodeCardinalityByOp(host1, tags, Long::max));
    Assert.assertEquals(0, atm.getNodeCardinalityByOp(host1, tags, Long::min));
    Assert.assertEquals(4, atm.getNodeCardinalityByOp(host1, tags, Long::sum));

    //********************************
    // 3) app-id/app2 (app2)
    //********************************
    tags = AllocationTags
        .createSingleAppAllocationTags(app2, ImmutableSet.of("A", "B"));
    Assert.assertEquals(3, atm.getNodeCardinalityByOp(host1, tags, Long::max));
    Assert.assertEquals(0, atm.getNodeCardinalityByOp(host1, tags, Long::min));
    Assert.assertEquals(2, atm.getNodeCardinalityByOp(host2, tags, Long::max));
    Assert.assertEquals(1, atm.getNodeCardinalityByOp(host2, tags, Long::min));
    Assert.assertEquals(3, atm.getNodeCardinalityByOp(host2, tags, Long::sum));

    //********************************
    // 4) all (app1, app2, app3)
    //********************************
    tags = AllocationTags
        .createGlobalAllocationTags(ImmutableSet.of("A"));
    Assert.assertEquals(6, atm.getNodeCardinalityByOp(host1, tags, Long::sum));
    Assert.assertEquals(1, atm.getNodeCardinalityByOp(host2, tags, Long::sum));
    Assert.assertEquals(0, atm.getNodeCardinalityByOp(host3, tags, Long::sum));

    tags = AllocationTags
        .createGlobalAllocationTags(ImmutableSet.of("A", "B"));
    Assert.assertEquals(7, atm.getNodeCardinalityByOp(host1, tags, Long::sum));
    Assert.assertEquals(4, atm.getNodeCardinalityByOp(host2, tags, Long::sum));
    Assert.assertEquals(0, atm.getNodeCardinalityByOp(host3, tags, Long::sum));
    Assert.assertEquals(6, atm.getNodeCardinalityByOp(host1, tags, Long::max));
    Assert.assertEquals(3, atm.getNodeCardinalityByOp(host2, tags, Long::max));
    Assert.assertEquals(0, atm.getNodeCardinalityByOp(host3, tags, Long::max));
    Assert.assertEquals(1, atm.getNodeCardinalityByOp(host1, tags, Long::min));
    Assert.assertEquals(1, atm.getNodeCardinalityByOp(host2, tags, Long::min));
    Assert.assertEquals(0, atm.getNodeCardinalityByOp(host3, tags, Long::min));
  }
}
/*
 * Copyright 2008-2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package griffon.swing.editors;

import griffon.core.editors.AbstractPropertyEditor;
import griffon.metadata.PropertyEditorFor;

import java.awt.Color;
import java.awt.LinearGradientPaint;
import java.awt.MultipleGradientPaint;
import java.lang.reflect.Field;
import java.util.List;
import java.util.Map;

import static griffon.util.GriffonNameUtils.isBlank;

/**
 * Converts between {@link LinearGradientPaint} instances and their textual /
 * collection representations.
 * <p>
 * Accepted inputs: a CSV string of the form
 * {@code "x1, y1, x2, y2, [f1:f2:...], [color1:color2:...][, CYCLE_METHOD]"},
 * a {@code List} with the same 6 or 7 positional elements, a {@code Map} with
 * keys {@code x1, y1, x2, y2, fractions, colors} and optional {@code cycle},
 * or an existing {@code LinearGradientPaint}. The number of fractions must
 * match the number of colors. Colors are delegated to
 * {@link ColorPropertyEditor}; the cycle method name is resolved against
 * {@link MultipleGradientPaint.CycleMethod} (case-insensitive).
 *
 * @author Andres Almiray
 * @since 2.0.0
 */
@PropertyEditorFor(LinearGradientPaint.class)
public class LinearGradientPaintPropertyEditor extends AbstractPropertyEditor {
    /**
     * Formats the current value as
     * {@code "x1, y1, x2, y2, [fractions], [colors], CYCLE_METHOD"},
     * or returns {@code null} when no value is set.
     */
    public String getAsText() {
        if (null == getValue()) return null;
        LinearGradientPaint p = (LinearGradientPaint) getValue();
        return new StringBuilder()
            .append(p.getStartPoint().getX())
            .append(", ")
            .append(p.getStartPoint().getY())
            .append(", ")
            .append(p.getEndPoint().getX())
            .append(", ")
            .append(p.getEndPoint().getY())
            .append(", ")
            .append(formatFractions(p.getFractions()))
            .append(", ")
            .append(formatColors(p.getColors()))
            .append(", ")
            .append(p.getCycleMethod().name())
            .toString();
    }

    /** Renders fractions as a colon-separated list in brackets, e.g. {@code [0.0:0.5:1.0]}. */
    protected String formatFractions(float[] fractions) {
        StringBuilder b = new StringBuilder("[");
        boolean first = true;

        for (float f : fractions) {
            if (first) {
                first = false;
            } else {
                b.append(":");
            }
            b.append(f);
        }
        return b.append("]").toString();
    }

    /** Renders colors as a colon-separated list in brackets using {@link ColorPropertyEditor#format}. */
    protected String formatColors(Color[] colors) {
        StringBuilder b = new StringBuilder("[");
        boolean first = true;

        for (Color c : colors) {
            if (first) {
                first = false;
            } else {
                b.append(":");
            }
            b.append(ColorPropertyEditor.format(c));
        }
        return b.append("]").toString();
    }

    /**
     * Dispatches on the runtime type of {@code value}; {@code null} clears the
     * value and unsupported types raise an illegal-value exception.
     */
    protected void setValueInternal(Object value) {
        if (null == value) {
            super.setValueInternal(null);
        } else if (value instanceof CharSequence) {
            handleAsString(String.valueOf(value));
        } else if (value instanceof List) {
            handleAsList((List) value);
        } else if (value instanceof Map) {
            handleAsMap((Map) value);
        } else if (value instanceof LinearGradientPaint) {
            super.setValueInternal(value);
        } else {
            throw illegalValue(value, LinearGradientPaint.class);
        }
    }

    /**
     * Parses the CSV form. 6 parts: coordinates, fractions, colors;
     * an optional 7th part names the cycle method. Blank input clears the value.
     */
    protected void handleAsString(String str) {
        if (isBlank(str)) {
            super.setValueInternal(null);
            return;
        }

        float x1 = 0;
        float y1 = 0;
        float x2 = 0;
        float y2 = 0;
        float[] fractions = null;
        Color[] colors = null;
        MultipleGradientPaint.CycleMethod cyclicMethod = MultipleGradientPaint.CycleMethod.NO_CYCLE;

        String[] parts = str.split(",");
        switch (parts.length) {
            case 7:
                cyclicMethod = parseCyclicMethod(str, parts[6]);
                // fall through: remaining 6 parts are parsed below
            case 6:
                x1 = parseValue(parts[0]);
                y1 = parseValue(parts[1]);
                x2 = parseValue(parts[2]);
                y2 = parseValue(parts[3]);
                fractions = parseFractions(str, parts[4].trim());
                colors = parseColors(str, parts[5].trim());
                if (fractions.length != colors.length) {
                    throw illegalValue(str, LinearGradientPaint.class);
                }
                super.setValueInternal(new LinearGradientPaint(x1, y1, x2, y2, fractions, colors, cyclicMethod));
                break;
            default:
                throw illegalValue(str, LinearGradientPaint.class);
        }
    }

    /**
     * Parses the positional {@code List} form; same layout as the CSV form.
     * An empty list clears the value.
     */
    protected void handleAsList(List<?> list) {
        if (list.isEmpty()) {
            super.setValueInternal(null);
            return;
        }

        float x1 = 0;
        float y1 = 0;
        float x2 = 0;
        float y2 = 0;
        float[] fractions = null;
        Color[] colors = null;
        MultipleGradientPaint.CycleMethod cyclicMethod = MultipleGradientPaint.CycleMethod.NO_CYCLE;

        switch (list.size()) {
            case 7:
                cyclicMethod = parseCyclicMethod(list, String.valueOf(list.get(6)).trim());
                // fall through: remaining 6 elements are parsed below
            case 6:
                x1 = parseValue(list.get(0));
                y1 = parseValue(list.get(1));
                x2 = parseValue(list.get(2));
                y2 = parseValue(list.get(3));
                fractions = parseFractions(list, list.get(4));
                colors = parseColors(list, list.get(5));
                if (fractions.length != colors.length) {
                    throw illegalValue(list, LinearGradientPaint.class);
                }
                super.setValueInternal(new LinearGradientPaint(x1, y1, x2, y2, fractions, colors, cyclicMethod));
                break;
            default:
                throw illegalValue(list, LinearGradientPaint.class);
        }
    }

    /**
     * Parses the {@code Map} form. Missing coordinates default to 0f;
     * {@code cycle} is optional. An empty map clears the value.
     */
    protected void handleAsMap(Map<?, ?> map) {
        if (map.isEmpty()) {
            super.setValueInternal(null);
            return;
        }

        float x1 = (Float) getMapValue(map, "x1", 0f);
        float y1 = (Float) getMapValue(map, "y1", 0f);
        float x2 = (Float) getMapValue(map, "x2", 0f);
        float y2 = (Float) getMapValue(map, "y2", 0f);
        MultipleGradientPaint.CycleMethod cyclicMethod = MultipleGradientPaint.CycleMethod.NO_CYCLE;

        float[] fractions = parseFractions(map, map.get("fractions"));
        Color[] colors = parseColors(map, map.get("colors"));

        if (fractions.length != colors.length) {
            throw illegalValue(map, LinearGradientPaint.class);
        }

        Object cyclicValue = map.get("cycle");
        if (null != cyclicValue) {
            cyclicMethod = parseCyclicMethod(map, String.valueOf(cyclicValue));
        }

        super.setValueInternal(new LinearGradientPaint(x1, y1, x2, y2, fractions, colors, cyclicMethod));
    }

    /** Routes a fractions spec (String or List) to the matching parser. */
    protected float[] parseFractions(Object source, Object obj) {
        if (obj instanceof CharSequence) {
            return parseFractions(source, String.valueOf(obj).trim());
        } else if (obj instanceof List) {
            return parseFractions(source, (List) obj);
        }
        throw illegalValue(source, LinearGradientPaint.class);
    }

    /** Parses a bracketed colon-separated fraction list, e.g. {@code [0.0:0.5:1.0]}. */
    protected float[] parseFractions(Object source, String str) {
        if (!str.startsWith("[") || !str.endsWith("]")) {
            throw illegalValue(source, LinearGradientPaint.class);
        }

        String[] strs = str.substring(1, str.length() - 1).split(":");
        float[] fractions = new float[strs.length];
        for (int i = 0; i < strs.length; i++) {
            fractions[i] = parseValue(strs[i]);
        }

        return fractions;
    }

    /** Parses fractions from a list of numbers or numeric strings. */
    protected float[] parseFractions(Object source, List<?> list) {
        float[] fractions = new float[list.size()];
        for (int i = 0; i < list.size(); i++) {
            fractions[i] = parseValue(list.get(i));
        }

        return fractions;
    }

    /** Routes a colors spec (String or List) to the matching parser. */
    protected Color[] parseColors(Object source, Object obj) {
        if (obj instanceof CharSequence) {
            return parseColors(source, String.valueOf(obj).trim());
        } else if (obj instanceof List) {
            return parseColors(source, (List) obj);
        }
        throw illegalValue(source, LinearGradientPaint.class);
    }

    /** Parses a bracketed colon-separated color list, delegating each entry to {@link ColorPropertyEditor}. */
    protected Color[] parseColors(Object source, String str) {
        if (!str.startsWith("[") || !str.endsWith("]")) {
            throw illegalValue(source, LinearGradientPaint.class);
        }

        String[] strs = str.substring(1, str.length() - 1).split(":");
        Color[] colors = new Color[strs.length];
        ColorPropertyEditor colorEditor = new ColorPropertyEditor();
        for (int i = 0; i < strs.length; i++) {
            try {
                colorEditor.setValueInternal(strs[i]);
                colors[i] = (Color) colorEditor.getValue();
            } catch (Exception e) {
                // FIX: preserve the cause (was dropped, unlike the List overload below)
                throw illegalValue(strs[i], LinearGradientPaint.class, e);
            }
        }

        return colors;
    }

    /** Parses colors from a list, delegating each entry to {@link ColorPropertyEditor}. */
    protected Color[] parseColors(Object source, List<?> list) {
        Color[] colors = new Color[list.size()];
        ColorPropertyEditor colorEditor = new ColorPropertyEditor();
        for (int i = 0; i < list.size(); i++) {
            try {
                colorEditor.setValueInternal(list.get(i));
                colors[i] = (Color) colorEditor.getValue();
            } catch (Exception e) {
                throw illegalValue(list.get(i), LinearGradientPaint.class, e);
            }
        }

        return colors;
    }

    /**
     * Resolves a cycle-method name (case-insensitive, trimmed) against
     * {@link MultipleGradientPaint.CycleMethod} via reflection.
     */
    protected MultipleGradientPaint.CycleMethod parseCyclicMethod(Object source, String str) {
        try {
            Field cyclicMethodField = MultipleGradientPaint.CycleMethod.class.getDeclaredField(str.toUpperCase().trim());
            return (MultipleGradientPaint.CycleMethod) cyclicMethodField.get(null);
        } catch (NoSuchFieldException | IllegalAccessException e) {
            throw illegalValue(source, LinearGradientPaint.class, e);
        }
    }

    /** Parses a trimmed float literal; wraps NumberFormatException as an illegal value. */
    protected float parse(String val) {
        try {
            return Float.parseFloat(val.trim());
        } catch (NumberFormatException e) {
            throw illegalValue(val, LinearGradientPaint.class, e);
        }
    }

    /** Coerces a CharSequence or Number to float; anything else is an illegal value. */
    protected float parseValue(Object value) {
        if (value instanceof CharSequence) {
            return parse(String.valueOf(value));
        } else if (value instanceof Number) {
            return parse((Number) value);
        }
        throw illegalValue(value, LinearGradientPaint.class);
    }

    /** Narrows a Number to float. */
    protected float parse(Number val) {
        return val.floatValue();
    }

    /**
     * Looks up {@code key} in {@code map}: missing yields {@code defaultValue},
     * CharSequence/Number are coerced to a (boxed) float, anything else fails.
     */
    protected Object getMapValue(Map<?, ?> map, String key, Object defaultValue) {
        Object val = map.get(key);
        if (null == val) {
            return defaultValue;
        } else if (val instanceof CharSequence) {
            return parse(String.valueOf(val));
        } else if (val instanceof Number) {
            return parse((Number) val);
        }
        throw illegalValue(map, LinearGradientPaint.class);
    }
}
package com.resmed.refresh.model.json;

import com.google.gson.annotations.SerializedName;

import java.util.List;

/**
 * Data-transfer object for a single recorded sleep session as delivered by the
 * JSON API. Field names are bound to the wire format via {@link SerializedName};
 * renaming a Java field is safe, renaming an annotation value is not.
 * <p>
 * Fix over the decompiled original: {@code firmwareVersion} and
 * {@code rm20Version} had setters but no getters (write-only fields);
 * matching getters are added for a symmetric accessor API.
 */
public class Record {
    @SerializedName("Advices")
    private List<Advice> advices;
    // Defaults to "" so callers never see a null date string.
    @SerializedName("EndDate")
    private String endDate = "";
    @SerializedName("FirmwareVersion")
    private String firmwareVersion;
    @SerializedName("Hypnogram")
    private List<Integer> hypnogram;
    @SerializedName("HypnogramSamplePeriod")
    private int hypnogramSamplePeriod;
    @SerializedName("Light")
    private List<Float> light;
    @SerializedName("LightSamplePeriod")
    private int lightSamplePeriod;
    @SerializedName("Location")
    private Location location;
    @SerializedName("Noise")
    private List<Float> noise;
    @SerializedName("NoiseSamplePeriod")
    private int noiseSamplePeriod;
    @SerializedName("PreSleepQuestions")
    private List<PreSleepQuestion> preSleepQuestions;
    @SerializedName("RecordId")
    private Long recordId;
    @SerializedName("RM20Version")
    private String rm20Version;
    @SerializedName("SleepEvents")
    private List<SleepEvent> sleepEvents;
    // Defaults to "" so callers never see a null date string.
    @SerializedName("StartDate")
    private String startDate = "";
    // Eagerly initialized so getSynopsisData() never returns null.
    @SerializedName("SynopsisData")
    private SynopsisData synopsisData = new SynopsisData();
    @SerializedName("Temperature")
    private List<Float> temperature;
    @SerializedName("TemperatureSamplePeriod")
    private int temperatureSamplePeriod;
    @SerializedName("UserProfile")
    private UserProfile userProfile;

    public List<Advice> getAdvices() {
        return this.advices;
    }

    public String getEndDate() {
        return this.endDate;
    }

    /** Added: getter for the previously write-only firmware version. */
    public String getFirmwareVersion() {
        return this.firmwareVersion;
    }

    public List<Integer> getHypnogram() {
        return this.hypnogram;
    }

    public int getHypnogramSamplePeriod() {
        return this.hypnogramSamplePeriod;
    }

    public List<Float> getLight() {
        return this.light;
    }

    public int getLightSamplePeriod() {
        return this.lightSamplePeriod;
    }

    public Location getLocation() {
        return this.location;
    }

    public List<Float> getNoise() {
        return this.noise;
    }

    public int getNoiseSamplePeriod() {
        return this.noiseSamplePeriod;
    }

    public List<PreSleepQuestion> getPreSleepQuestions() {
        return this.preSleepQuestions;
    }

    public Long getRecordId() {
        return this.recordId;
    }

    /** Added: getter for the previously write-only RM20 version. */
    public String getRm20Version() {
        return this.rm20Version;
    }

    public List<SleepEvent> getSleepEvents() {
        return this.sleepEvents;
    }

    public String getStartDate() {
        return this.startDate;
    }

    public SynopsisData getSynopsisData() {
        return this.synopsisData;
    }

    public List<Float> getTemperature() {
        return this.temperature;
    }

    public int getTemperatureSamplePeriod() {
        return this.temperatureSamplePeriod;
    }

    public UserProfile getUserProfile() {
        return this.userProfile;
    }

    public void setAdvices(List<Advice> advices) {
        this.advices = advices;
    }

    public void setEndDate(String endDate) {
        this.endDate = endDate;
    }

    public void setFirmwareVersion(String firmwareVersion) {
        this.firmwareVersion = firmwareVersion;
    }

    public void setHypnogram(List<Integer> hypnogram) {
        this.hypnogram = hypnogram;
    }

    public void setHypnogramSamplePeriod(int hypnogramSamplePeriod) {
        this.hypnogramSamplePeriod = hypnogramSamplePeriod;
    }

    public void setLight(List<Float> light) {
        this.light = light;
    }

    public void setLightSamplePeriod(int lightSamplePeriod) {
        this.lightSamplePeriod = lightSamplePeriod;
    }

    public void setLocation(Location location) {
        this.location = location;
    }

    public void setNoise(List<Float> noise) {
        this.noise = noise;
    }

    public void setNoiseSamplePeriod(int noiseSamplePeriod) {
        this.noiseSamplePeriod = noiseSamplePeriod;
    }

    public void setPreSleepQuestions(List<PreSleepQuestion> preSleepQuestions) {
        this.preSleepQuestions = preSleepQuestions;
    }

    public void setRecordId(Long recordId) {
        this.recordId = recordId;
    }

    public void setRm20Version(String rm20Version) {
        this.rm20Version = rm20Version;
    }

    public void setSleepEvents(List<SleepEvent> sleepEvents) {
        this.sleepEvents = sleepEvents;
    }

    public void setStartDate(String startDate) {
        this.startDate = startDate;
    }

    public void setSynopsisData(SynopsisData synopsisData) {
        this.synopsisData = synopsisData;
    }

    public void setTemperature(List<Float> temperature) {
        this.temperature = temperature;
    }

    public void setTemperatureSamplePeriod(int temperatureSamplePeriod) {
        this.temperatureSamplePeriod = temperatureSamplePeriod;
    }

    public void setUserProfile(UserProfile userProfile) {
        this.userProfile = userProfile;
    }
}
/* Location: [...]
 * Java compiler version: 6 (50.0)
 * JD-Core Version: 0.7.1
 */
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.ui.laf.intellij;

import com.intellij.ui.Gray;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.components.BorderLayoutPanel;

import javax.swing.*;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.basic.*;

import java.awt.*;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;

/**
 * Swing UI delegate that renders a combo box in the macOS IntelliJ look-and-feel
 * by stitching together three cached images ("comboLeft"/"comboMiddle"/"comboRight").
 *
 * @author Konstantin Bulenkov
 */
public class MacIntelliJComboBoxUI extends BasicComboBoxUI {
  // Placeholder matching the arrow-button image size; used for all size math
  // so preferred/minimum sizes stay consistent with the painted artwork.
  private static final Icon DEFAULT_ICON = EmptyIcon.create(MacIntelliJIconCache.getIcon("comboRight"));

  private final JComboBox myComboBox;

  public MacIntelliJComboBoxUI(JComboBox comboBox) {
    myComboBox = comboBox;
    // Custom renderer pane: force the cell's background to the combo's
    // enabled/disabled shade before the renderer paints.
    currentValuePane = new CellRendererPane() {
      @Override
      public void paintComponent(Graphics g, Component c, Container p, int x, int y, int w, int h, boolean shouldValidate) {
        c.setBackground(myComboBox.isEnabled() ? Gray.xFF : Gray.xF8);
        super.paintComponent(g, c, p, x, y, w, h, shouldValidate);
      }
    };
  }

  @SuppressWarnings({"MethodOverridesStaticMethodOfSuperclass", "UnusedDeclaration"})
  public static ComponentUI createUI(JComponent c) {
    return new MacIntelliJComboBoxUI((JComboBox)c);
  }

  /** Arrow button painted entirely from the cached "comboRight" image. */
  @Override
  protected JButton createArrowButton() {
    final Color bg = myComboBox.getBackground();
    final Color fg = myComboBox.getForeground();
    JButton button = new BasicArrowButton(SwingConstants.SOUTH, bg, fg, fg, fg) {
      @Override
      public void paint(Graphics g2) {
        // Icon variant reflects focus/enabled state of the combo, not the button.
        Icon icon = MacIntelliJIconCache.getIcon("comboRight", false, myComboBox.hasFocus(), myComboBox.isEnabled());
        icon.paintIcon(this, g2, 0, 0);
      }
      @Override
      public Dimension getPreferredSize() {
        return JBUI.size(DEFAULT_ICON.getIconWidth(), DEFAULT_ICON.getIconHeight());
      }
    };
    button.setBorder(BorderFactory.createEmptyBorder());
    button.setOpaque(false);
    return button;
  }

  @Override
  public Dimension getMinimumSize(JComponent c) {
    return getSizeWithIcon(super.getMinimumSize(c));
  }

  // Widens/heightens a computed size so the painted artwork always fits.
  private static Dimension getSizeWithIcon(Dimension d) {
    return new Dimension(Math.max(d.width + 7, DEFAULT_ICON.getIconWidth()), Math.max(d.height, DEFAULT_ICON.getIconHeight()));
  }

  @Override
  public Dimension getPreferredSize(JComponent c) {
    return getSizeWithIcon(super.getPreferredSize(c));
  }

  /**
   * Editor whose text field is transparent (the background is painted by the
   * combo artwork) and sized to fit inside the fixed-height artwork.
   */
  @Override
  protected ComboBoxEditor createEditor() {
    final ComboBoxEditor comboBoxEditor = new BasicComboBoxEditor.UIResource() {
      @Override
      protected JTextField createEditorComponent() {
        return new JTextField() {
          {
            setOpaque(false);
            setBorder(JBUI.Borders.empty(1, 0));
          }
          @Override
          public Color getBackground() {
            if (!isEnabled()) {
              return Gray.xF8;
            }
            return super.getBackground();
          }
          // Skips redundant setText calls to avoid caret/selection churn.
          public void setText(String s) {
            if (getText().equals(s)) {
              return;
            }
            super.setText(s);
          }
          @Override
          public Dimension getPreferredSize() {
            Dimension size = super.getPreferredSize();
            return new Dimension(size.width, DEFAULT_ICON.getIconHeight() - 6);
          }
        };
      }
    };
    if (comboBoxEditor.getEditorComponent() != null) {
      // Forward plain Up/Down keys from the editor to the combo so popup
      // navigation works while typing.
      comboBoxEditor.getEditorComponent().addKeyListener(new KeyAdapter() {
        @Override
        public void keyPressed(KeyEvent e) {
          process(e);
        }
        @Override
        public void keyReleased(KeyEvent e) {
          process(e);
        }
        private void process(KeyEvent e) {
          final int code = e.getKeyCode();
          if ((code == KeyEvent.VK_UP || code == KeyEvent.VK_DOWN) && e.getModifiers() == 0) {
            comboBox.dispatchEvent(e);
          }
        }
      });
      // Repaint the whole combo on focus changes so the focused artwork variant shows.
      comboBoxEditor.getEditorComponent().addFocusListener(new FocusAdapter() {
        @Override
        public void focusGained(FocusEvent e) {
          update();
        }
        @Override
        public void focusLost(FocusEvent e) {
          update();
        }
        void update() {
          if (comboBox != null) {
            comboBox.revalidate();
            comboBox.repaint();
          }
        }
      });
    }
    return comboBoxEditor;
  }

  // Insets tuned to the artwork: shrink height, shift down 4px and right 8px.
  @Override
  protected Rectangle rectangleForCurrentValue() {
    Rectangle rect = super.rectangleForCurrentValue();
    rect.height=Math.min(rect.height, DEFAULT_ICON.getIconHeight()-8);
    rect.y+=4;
    rect.x+=8;
    rect.width-=8;
    return rect;
  }

  @Override
  protected Dimension getDefaultSize() {
    return super.getDefaultSize();
  }

  /** Lays out the arrow button at the right edge and the editor over the value area. */
  @Override
  protected LayoutManager createLayoutManager() {
    return new LayoutManager() {
      @Override
      public void addLayoutComponent(String name, Component comp) { }
      @Override
      public void removeLayoutComponent(Component comp) { }
      @Override
      public Dimension preferredLayoutSize(Container parent) {
        return parent.getPreferredSize();
      }
      @Override
      public Dimension minimumLayoutSize(Container parent) {
        return parent.getMinimumSize();
      }
      @Override
      public void layoutContainer(Container parent) {
        JComboBox cb = (JComboBox)parent;
        int width = cb.getWidth();
        int height = cb.getHeight();
        Insets insets = getInsets();
        int buttonHeight = height - (insets.top + insets.bottom);
        int buttonWidth = DEFAULT_ICON.getIconWidth();
        if (arrowButton != null) {
          Insets arrowInsets = arrowButton.getInsets();
          buttonWidth = arrowButton.getPreferredSize().width + arrowInsets.left + arrowInsets.right;
        }
        Rectangle cvb;
        if (arrowButton != null) {
          arrowButton.setBounds(width - (insets.right + buttonWidth), insets.top, buttonWidth, buttonHeight);
        }
        if ( editor != null ) {
          cvb = rectangleForCurrentValue();
          editor.setBounds(cvb);
        }
      }
    };
  }

  /**
   * Popup with a white background whose renderer is wrapped to add 8px
   * horizontal padding; re-wraps whenever the client swaps renderers.
   */
  @Override
  protected ComboPopup createPopup() {
    return new BasicComboPopup(myComboBox) {
      @Override
      protected void configurePopup() {
        super.configurePopup();
        setBorderPainted(false);
        setBorder(JBUI.Borders.empty());
        setBackground(Gray.xFF);
      }
      @Override
      protected void configureList() {
        super.configureList();
        wrapRenderer();
      }
      @Override
      protected PropertyChangeListener createPropertyChangeListener() {
        final PropertyChangeListener listener = super.createPropertyChangeListener();
        return new PropertyChangeListener() {
          @Override
          public void propertyChange(PropertyChangeEvent evt) {
            listener.propertyChange(evt);
            if ("renderer".equals(evt.getPropertyName())) {
              wrapRenderer();
            }
          }
        };
      }
      // NOTE(review): class name is misspelled ("Wraper"); left as-is since it
      // is referenced below and this pass is documentation-only.
      class ComboBoxRendererWraper implements ListCellRenderer {
        private final ListCellRenderer myRenderer;
        public ComboBoxRendererWraper(ListCellRenderer renderer) {
          myRenderer = renderer;
        }
        BorderLayoutPanel myPanel = JBUI.Panels.simplePanel().withBorder(JBUI.Borders.empty(0, 8));
        @Override
        public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
          Component c = myRenderer.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
          myPanel.removeAll();
          myPanel.add(c);
          myPanel.setBackground(c.getBackground());
          return myPanel;
        }
      }
      private void wrapRenderer() {
        ListCellRenderer renderer = list.getCellRenderer();
        if (!(renderer instanceof ComboBoxRendererWraper)) {
          list.setCellRenderer(new ComboBoxRendererWraper(renderer));
        }
      }
    };
  }

  @Override
  public void paintCurrentValueBackground(Graphics g, Rectangle bounds, boolean hasFocus) {
    g.setColor(myComboBox.isEnabled() ? Gray.xFF : Gray.xF8);
    g.fillRect(bounds.x, bounds.y, bounds.width, bounds.height);
  }

  // NOTE(review): missing @Override (it does override BasicComboBoxUI);
  // the hasFocus argument is deliberately replaced by popup visibility.
  public void paintCurrentValue(Graphics g,Rectangle bounds,boolean hasFocus) {
    super.paintCurrentValue(g, bounds, comboBox.isPopupVisible());
  }

  /**
   * Paints the combo body: "comboLeft" once, "comboMiddle" tiled up to the
   * arrow button, then "comboRight" under the button; finally the current
   * value for non-editable combos.
   */
  @Override
  public void paint(Graphics g, JComponent c) {
    Rectangle r = arrowButton.getBounds();
    int stop = r.x;
    Insets clip = getInsets();
    // Clipped sub-graphics so the tiled middle image never overpaints the button.
    Graphics gg = g.create(clip.left, r.y, stop - clip.left, DEFAULT_ICON.getIconHeight());
    boolean enabled = c.isEnabled();
    boolean hasFocus = c.hasFocus();
    Icon icon = MacIntelliJIconCache.getIcon("comboLeft", false, hasFocus, enabled);
    icon.paintIcon(c,gg,0,0);
    int x = icon.getIconWidth();
    icon = MacIntelliJIconCache.getIcon("comboMiddle", false, hasFocus, enabled);
    while (x < stop) {
      icon.paintIcon(c, gg, x, 0);
      x+=icon.getIconWidth();
    }
    gg.dispose();
    icon = MacIntelliJIconCache.getIcon("comboRight", false, hasFocus, enabled);
    icon.paintIcon(c, g, r.x, r.y);
    if ( !comboBox.isEditable() ) {
      paintCurrentValue(g, rectangleForCurrentValue(), false);
    }
  }
}
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *******************************************************************************/ package org.apache.wink.example.locking.resources; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Collection; import java.util.Date; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.EntityTag; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Request; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.Response.Status; import org.apache.wink.common.annotations.Workspace; import org.apache.wink.example.locking.legacy.DefectBean; import org.apache.wink.example.locking.legacy.DefectsBean; import org.apache.wink.example.locking.store.DataStore; /** * <p> * This 
example demonstrates usage of Preconditions to create an <a * href="http://en.wikipedia.org/wiki/Optimistic_concurrency_control">Optimistic * Concurrency Control</a> functionality. */ @Path("defects") @Workspace(workspaceTitle = "QA Defects", collectionTitle = "Defects") public class DefectResource { public static final String DEFECT = "defect"; public static final String DEFECT_URL = "/{" + DEFECT + "}"; /** * memory store */ private DataStore store = DataStore.getInstance(); /** * Returns the collection of defects. * <p> * If the store wasn't modified from the last call, the method returns 304 * (NOT_MODIFIED). */ @GET @Produces(MediaType.APPLICATION_XML) public Response getDefectsCollection(@Context Request request) { // verify that the store was modified since the last call Date lastModifiedIgnoreMillis = store.getLastModifiedIgnoreMillis(); ResponseBuilder precondition = request.evaluatePreconditions(lastModifiedIgnoreMillis); if (precondition != null) { // the collection wasn't modified, 304 will be returned return precondition.build(); } // the collection was modified, retrieve it from the store Collection<DefectBean> defects = store.getDefects(); // return the collection and add its last modified date on the response return Response.ok(new DefectsBean(defects)).lastModified(lastModifiedIgnoreMillis).build(); } /** * Returns a single defect. * <p> * If the defect with the given id doesn't exist in the store, 404 NOT_FOUND * is returned * <p> * If IF_NONE_MATCH header present, the defect will be returned only if it * was modified since the previous call. 
*/ @GET @Path(DEFECT_URL) @Produces(MediaType.APPLICATION_XML) public Response getDefect(@Context Request request, @PathParam(DEFECT) String defectId) { // get defect from the store DefectBean bean = store.getDefect(defectId); if (bean == null) { // defect was not found return Response.status(Status.NOT_FOUND).build(); } // create defect's etag EntityTag defectBeanEtag = new EntityTag(String.valueOf(bean.hashCode())); // evaluate the precondition ResponseBuilder precondition = request.evaluatePreconditions(defectBeanEtag); if (precondition != null) { // defect was not modified, return 304 return precondition.build(); } // create response the defect and its entity tag return Response.ok(bean).tag(defectBeanEtag).build(); } /** * Adds a new defect to the collection. The created defect is returned along * with its etag to the client. */ @POST @Consumes(MediaType.APPLICATION_XML) @Produces(MediaType.APPLICATION_XML) public Response addDefect(@Context UriInfo uriInfo, DefectBean bean) throws IOException, URISyntaxException { // verify that bean was sent if (bean == null) { throw new WebApplicationException(Status.BAD_REQUEST); } // set unique Id in the new defect bean: // - Id in the input data is ignored, actually there should be no Id // there, bean.setId(store.getDefectUniqueId()); // add defect bean to the memory store store.putDefect(bean.getId(), bean); // header Location (absolute URI) must exist on the response in case of // status code 201 URI location = new URI(uriInfo.getAbsolutePath() + "/" + bean.getId()); // create entity tag, so the Client can use it for OCC EntityTag entityTag = new EntityTag(String.valueOf(bean.hashCode())); return Response.status(Status.CREATED).entity(bean).location(location).tag(entityTag) .build(); } /** * Updates defect. * <p> * The defect is updated only if the If-Match header is present and * evaluation of precondition succeeds. This is done to ensure the OCC. 
*/ @PUT @Path(DEFECT_URL) @Consumes(MediaType.APPLICATION_XML) @Produces(MediaType.APPLICATION_XML) public Response updateDefect(@Context Request request, @PathParam(DEFECT) String defectId, @HeaderParam(HttpHeaders.IF_MATCH) String ifMatchHeader, DefectBean updatedBean) throws IOException { if (ifMatchHeader == null) { // IF-MATCH header wasn't sent, cannot validate the precondition throw new WebApplicationException(Status.BAD_REQUEST); } // obtain data object from the memory store DefectBean bean = store.getDefect(defectId); if (bean == null) { // not found, return 404 throw new WebApplicationException(Status.NOT_FOUND); } // create defect's etag EntityTag defectBeanEtag = new EntityTag(String.valueOf(bean.hashCode())); ResponseBuilder preconditions = request.evaluatePreconditions(defectBeanEtag); if (preconditions != null) { return preconditions.build(); } updatedBean.setId(defectId); // update defect legacy bean to the memory store store.putDefect(defectId, updatedBean); Response response = Response.ok(updatedBean).tag(new EntityTag(String.valueOf(updatedBean.hashCode()))) .build(); return response; } /** * Deletes defect. * <p> * The defect is deleted only if If-Match is present and valid to ensure * OCC. 
*/ @DELETE @Path(DEFECT_URL) @Produces(MediaType.APPLICATION_XML) public Object deleteDocument(@Context Request request, @PathParam(DEFECT) String defectId, @HeaderParam(HttpHeaders.IF_MATCH) String ifMatchHeader) { if (ifMatchHeader == null) { // IF-MATCH header wasn't sent, cannot validate the precondition throw new WebApplicationException(Status.BAD_REQUEST); } // obtain data object from memory store DefectBean bean = store.getDefect(defectId); if (bean == null) { // defect not found, return 404 throw new WebApplicationException(Status.NOT_FOUND); } // create defect's etag EntityTag defectBeanEtag = new EntityTag(String.valueOf(bean.hashCode()), false); ResponseBuilder preconditions = request.evaluatePreconditions(defectBeanEtag); if (preconditions != null) { return preconditions.build(); } return store.removeDefect(defectId); } }
// ======================================================================== // Copyright 199-2004 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ======================================================================== package org.mortbay.jetty.servlet; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.MalformedURLException; import java.util.Enumeration; import java.util.List; import java.util.Map; import java.util.Map.Entry; import javax.servlet.RequestDispatcher; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.UnavailableException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.mortbay.io.Buffer; import org.mortbay.io.ByteArrayBuffer; import org.mortbay.io.WriterOutputStream; import org.mortbay.io.nio.DirectNIOBuffer; import org.mortbay.io.nio.IndirectNIOBuffer; import org.mortbay.io.nio.NIOBuffer; import org.mortbay.jetty.Connector; import org.mortbay.jetty.HttpConnection; import org.mortbay.jetty.HttpContent; import org.mortbay.jetty.HttpFields; import org.mortbay.jetty.HttpHeaderValues; import org.mortbay.jetty.HttpHeaders; import org.mortbay.jetty.HttpMethods; import org.mortbay.jetty.InclusiveByteRange; import org.mortbay.jetty.MimeTypes; import 
org.mortbay.jetty.ResourceCache; import org.mortbay.jetty.Response; import org.mortbay.jetty.handler.ContextHandler; import org.mortbay.jetty.nio.NIOConnector; import org.mortbay.log.Log; import org.mortbay.resource.FileResource; import org.mortbay.resource.Resource; import org.mortbay.resource.ResourceFactory; import org.mortbay.util.IO; import org.mortbay.util.MultiPartOutputStream; import org.mortbay.util.TypeUtil; import org.mortbay.util.URIUtil; /* ------------------------------------------------------------ */ /** The default servlet. * This servlet, normally mapped to /, provides the handling for static * content, OPTION and TRACE methods for the context. * The following initParameters are supported, these can be set either * on the servlet itself or as ServletContext initParameters with a prefix * of org.mortbay.jetty.servlet.Default. : * <PRE> * acceptRanges If true, range requests and responses are * supported * * dirAllowed If true, directory listings are returned if no * welcome file is found. Else 403 Forbidden. * * welcomeServlets If true, attempt to dispatch to welcome files * that are servlets, but only after no matching static * resources could be found. * * This must be false if you want directory listings, * but have index.jsp in your welcome file list. * * redirectWelcome If true, welcome files are redirected rather than * forwarded to. * * gzip If set to true, then static content will be served as * gzip content encoded if a matching resource is * found ending with ".gz" * * resourceBase Set to replace the context resource base * * relativeResourceBase * Set with a pathname relative to the base of the * servlet context root. Useful for only serving static content out * of only specific subdirectories. * * aliases If True, aliases of resources are allowed (eg. symbolic * links and caps variations). May bypass security constraints. * * maxCacheSize The maximum total size of the cache or 0 for no cache. 
* maxCachedFileSize The maximum size of a file to cache
 * maxCachedFiles The maximum number of files to cache
 * cacheType Set to "bio", "nio" or "both" to determine the type of resource cache.
 * A bio cached buffer may be used by nio but is not as efficient as an
 * nio buffer. An nio cached buffer may not be used by bio.
 *
 * useFileMappedBuffer
 * If set to true, it will use mapped file buffer to serve static content
 * when using NIO connector. Setting this value to false means that
 * a direct buffer will be used instead of a mapped file buffer.
 * By default, this is set to true.
 *
 * cacheControl If set, all static content will have this value set as the cache-control
 * header.
 *
 * </PRE>
 *
 * @author Greg Wilkins (gregw)
 * @author Nigel Canonizado
 */
public class DefaultServlet extends HttpServlet implements ResourceFactory
{
    // Jetty context wrapper; gives access to the owning ContextHandler.
    private ContextHandler.SContext _context;

    // Flags below carry their defaults; each is overridable via the
    // init parameters documented in the class javadoc above.
    private boolean _acceptRanges=true;
    private boolean _dirAllowed=true;
    private boolean _welcomeServlets=false;
    private boolean _redirectWelcome=false;
    private boolean _gzip=true;

    private Resource _resourceBase;
    private NIOResourceCache _nioCache;
    private ResourceCache _bioCache;

    private MimeTypes _mimeTypes;
    private String[] _welcomes;
    private boolean _aliases=false;
    private boolean _useFileMappedBuffer=false;
    // Pre-encoded Cache-Control header value (null when not configured).
    ByteArrayBuffer _cacheControl;
    private ServletHandler _servletHandler;
    private ServletHolder _defaultHolder;

    /* ------------------------------------------------------------ */
    /**
     * Reads all init parameters, resolves the resource base and builds the
     * nio/bio resource caches as configured.
     *
     * @throws UnavailableException on conflicting or unresolvable configuration
     */
    public void init()
        throws UnavailableException
    {
        ServletContext config=getServletContext();
        _context = (ContextHandler.SContext)config;
        _mimeTypes = _context.getContextHandler().getMimeTypes();
        _welcomes = _context.getContextHandler().getWelcomeFiles();
        if (_welcomes==null)
            _welcomes=new String[] {"index.jsp","index.html"};

        _acceptRanges=getInitBoolean("acceptRanges",_acceptRanges);
        _dirAllowed=getInitBoolean("dirAllowed",_dirAllowed);
        _welcomeServlets=getInitBoolean("welcomeServlets", _welcomeServlets);
        _redirectWelcome=getInitBoolean("redirectWelcome",_redirectWelcome);
        _gzip=getInitBoolean("gzip",_gzip);

        _aliases=getInitBoolean("aliases",_aliases);
        // Alias checking must be active somewhere: either here or in FileResource.
        if (!_aliases && !FileResource.getCheckAliases())
            throw new IllegalStateException("Alias checking disabled");
        if (_aliases)
            config.log("Aliases are enabled");

        _useFileMappedBuffer=getInitBoolean("useFileMappedBuffer",_useFileMappedBuffer);

        String rrb = getInitParameter("relativeResourceBase");
        if (rrb!=null)
        {
            try
            {
                _resourceBase = _context.getContextHandler().getResource(URIUtil.SLASH).addPath(rrb);
            }
            catch (Exception e)
            {
                Log.warn(Log.EXCEPTION,e);
                throw new UnavailableException(e.toString());
            }
        }

        String rb=getInitParameter("resourceBase");
        // The two base settings are mutually exclusive.
        if (rrb != null && rb != null)
            throw new UnavailableException("resourceBase & relativeResourceBase");
        if (rb!=null)
        {
            try{_resourceBase=Resource.newResource(rb);}
            catch (Exception e)
            {
                Log.warn(Log.EXCEPTION,e);
                throw new UnavailableException(e.toString());
            }
        }

        String t=getInitParameter("cacheControl");
        if (t!=null)
            _cacheControl=new ByteArrayBuffer(t);

        try
        {
            if (_resourceBase==null)
                _resourceBase = _context.getContextHandler().getResource(URIUtil.SLASH);

            // -2 is the "unset" sentinel: unset (-2) or positive enables a cache;
            // 0 or negative explicit values disable it.
            String cache_type =getInitParameter("cacheType");
            int max_cache_size=getInitInt("maxCacheSize", -2);
            int max_cached_file_size=getInitInt("maxCachedFileSize", -2);
            int max_cached_files=getInitInt("maxCachedFiles", -2);

            if (cache_type==null || "nio".equals(cache_type)|| "both".equals(cache_type))
            {
                if (max_cache_size==-2 || max_cache_size>0)
                {
                    _nioCache=new NIOResourceCache(_mimeTypes);
                    if (max_cache_size>0)
                        _nioCache.setMaxCacheSize(max_cache_size);
                    if (max_cached_file_size>=-1)
                        _nioCache.setMaxCachedFileSize(max_cached_file_size);
                    if (max_cached_files>=-1)
                        _nioCache.setMaxCachedFiles(max_cached_files);
                    _nioCache.start();
                }
            }

            if ("bio".equals(cache_type)|| "both".equals(cache_type))
            {
                if (max_cache_size==-2 || max_cache_size>0)
                {
                    _bioCache=new ResourceCache(_mimeTypes);
                    if (max_cache_size>0)
                        _bioCache.setMaxCacheSize(max_cache_size);
                    if (max_cached_file_size>=-1)
                        _bioCache.setMaxCachedFileSize(max_cached_file_size);
                    if (max_cached_files>=-1)
                        _bioCache.setMaxCachedFiles(max_cached_files);
                    _bioCache.start();
                }
            }

            // NOTE(review): this drops the bio cache whenever no nio cache was
            // built — with cacheType="bio" that disables caching entirely.
            // Verify against upstream Jetty whether this is intentional.
            if (_nioCache==null)
                _bioCache=null;
        }
        catch (Exception e)
        {
            Log.warn(Log.EXCEPTION,e);
            throw new UnavailableException(e.toString());
        }

        // Remember our own holder so welcome-file servlet dispatch can skip it.
        _servletHandler= (ServletHandler) _context.getContextHandler().getChildHandlerByClass(ServletHandler.class);
        ServletHolder[] holders = _servletHandler.getServlets();
        for (int i=holders.length;i-->0;)
            if (holders[i].getServletInstance()==this)
                _defaultHolder=holders[i];

        if (Log.isDebugEnabled()) Log.debug("resource base = "+_resourceBase);
    }

    /* ------------------------------------------------------------ */
    /**
     * Looks up init parameters first in the servlet context under the
     * "org.mortbay.jetty.servlet.Default." prefix, then on the servlet itself.
     */
    public String getInitParameter(String name)
    {
        String value=getServletContext().getInitParameter("org.mortbay.jetty.servlet.Default."+name);
        if (value==null)
            value=super.getInitParameter(name);
        return value;
    }

    /* ------------------------------------------------------------ */
    // Truthy values: anything starting with t/T/y/Y/1; empty/missing -> default.
    private boolean getInitBoolean(String name, boolean dft)
    {
        String value=getInitParameter(name);
        if (value==null || value.length()==0)
            return dft;
        return (value.startsWith("t")||
                value.startsWith("T")||
                value.startsWith("y")||
                value.startsWith("Y")||
                value.startsWith("1"));
    }

    /* ------------------------------------------------------------ */
    private int getInitInt(String name, int dft)
    {
        String value=getInitParameter(name);
        // NOTE(review): this retry calls the same getInitParameter(name) again
        // and can never change the result; likely meant super.getInitParameter(name).
        // Redundant but harmless, so left unchanged in this documentation pass.
        if (value==null)
            value=getInitParameter(name);
        if (value!=null && value.length()>0)
            return Integer.parseInt(value);
        return dft;
    }

    /* ------------------------------------------------------------ */
    /** get Resource to serve.
     * Map a path to a resource. The default implementation calls
     * HttpContext.getResource but derived servlets may provide
     * their own mapping.
     * @param pathInContext The path to find a resource for.
     * @return The resource to serve.
*/
    public Resource getResource(String pathInContext)
    {
        if (_resourceBase==null)
            return null;
        Resource r=null;
        try
        {
            r = _resourceBase.addPath(pathInContext);
            // Aliased resources (symlinks, case variations) are refused unless
            // explicitly enabled, as they may bypass security constraints.
            if (!_aliases && r.getAlias()!=null)
            {
                if (r.exists())
                    Log.warn("Aliased resource: "+r+"=="+r.getAlias());
                return null;
            }
            if (Log.isDebugEnabled()) Log.debug("RESOURCE="+r);
        }
        catch (IOException e)
        {
            Log.ignore(e);
        }
        return r;
    }

    /* ------------------------------------------------------------ */
    /**
     * Serves static content: resolves the request path to a resource
     * (optionally a pre-gzipped ".gz" sibling), consults the nio/bio cache,
     * honours conditional and Range headers, and handles directories via
     * redirect, welcome file or generated listing.
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException
    {
        String servletPath=null;
        String pathInfo=null;
        Enumeration reqRanges = null;
        // When included via RequestDispatcher, paths come from include attributes.
        Boolean included =(Boolean)request.getAttribute(Dispatcher.__INCLUDE_JETTY);
        if (included!=null && included.booleanValue())
        {
            servletPath=(String)request.getAttribute(Dispatcher.__INCLUDE_SERVLET_PATH);
            pathInfo=(String)request.getAttribute(Dispatcher.__INCLUDE_PATH_INFO);
            if (servletPath==null)
            {
                servletPath=request.getServletPath();
                pathInfo=request.getPathInfo();
            }
        }
        else
        {
            included=Boolean.FALSE;
            servletPath=request.getServletPath();
            pathInfo=request.getPathInfo();

            // Is this a range request?
            reqRanges = request.getHeaders(HttpHeaders.RANGE);
            if (reqRanges!=null && !reqRanges.hasMoreElements())
                reqRanges=null;
        }

        String pathInContext=URIUtil.addPaths(servletPath,pathInfo);
        boolean endsWithSlash=pathInContext.endsWith(URIUtil.SLASH);

        // Can we gzip this request?
        // Only for direct, non-range requests for a file path.
        String pathInContextGz=null;
        boolean gzip=false;
        if (!included.booleanValue() && _gzip && reqRanges==null && !endsWithSlash )
        {
            String accept=request.getHeader(HttpHeaders.ACCEPT_ENCODING);
            if (accept!=null && accept.indexOf("gzip")>=0)
                gzip=true;
        }

        // Find the resource and content
        Resource resource=null;
        HttpContent content=null;
        // nio-capable connectors use the nio cache, others the bio cache.
        Connector connector = HttpConnection.getCurrentConnection().getConnector();
        ResourceCache cache=(connector instanceof NIOConnector) ?_nioCache:_bioCache;
        try
        {
            // Try gzipped content first
            if (gzip)
            {
                pathInContextGz=pathInContext+".gz";
                resource=getResource(pathInContextGz);
                if (resource==null || !resource.exists()|| resource.isDirectory())
                {
                    gzip=false;
                    pathInContextGz=null;
                }
                else if (cache!=null)
                {
                    content=cache.lookup(pathInContextGz,resource);
                    if (content!=null)
                        resource=content.getResource();
                }
                if (resource==null || !resource.exists()|| resource.isDirectory())
                {
                    gzip=false;
                    pathInContextGz=null;
                }
            }

            // find resource
            if (!gzip)
            {
                if (cache==null)
                    resource=getResource(pathInContext);
                else
                {
                    content=cache.lookup(pathInContext,this);
                    if (content!=null)
                        resource=content.getResource();
                    else
                        resource=getResource(pathInContext);
                }
            }

            if (Log.isDebugEnabled()) Log.debug("resource="+resource+(content!=null?" content":""));

            // Handle resource
            if (resource==null || !resource.exists())
                response.sendError(HttpServletResponse.SC_NOT_FOUND);
            else if (!resource.isDirectory())
            {
                // A trailing slash on a file path: redirect to the canonical
                // slash-less URL (only when aliases are allowed).
                if (endsWithSlash && _aliases && pathInContext.length()>1)
                {
                    String q=request.getQueryString();
                    pathInContext=pathInContext.substring(0,pathInContext.length()-1);
                    if (q!=null&&q.length()!=0)
                        pathInContext+="?"+q;
                    response.sendRedirect(response.encodeRedirectURL(URIUtil.addPaths( _context.getContextPath(),pathInContext)));
                }
                else
                {
                    // ensure we have content
                    if (content==null)
                        content=new UnCachedContent(resource);

                    // Includes bypass conditional headers; direct requests may
                    // be answered 304/412 by passConditionalHeaders.
                    if (included.booleanValue() || passConditionalHeaders(request,response, resource,content))
                    {
                        if (gzip)
                        {
                            response.setHeader(HttpHeaders.CONTENT_ENCODING,"gzip");
                            // Content type of the original file, not the .gz wrapper.
                            String mt=_context.getMimeType(pathInContext);
                            if (mt!=null)
                                response.setContentType(mt);
                        }
                        sendData(request,response,included.booleanValue(),resource,content,reqRanges);
                    }
                }
            }
            else
            {
                String welcome=null;

                // Directory without trailing slash: redirect to slash form,
                // inserting the slash before any ";jsessionid" URL parameter.
                if (!endsWithSlash || (pathInContext.length()==1 && request.getAttribute("org.mortbay.jetty.nullPathInfo")!=null))
                {
                    StringBuffer buf=request.getRequestURL();
                    int param=buf.lastIndexOf(";");
                    if (param<0)
                        buf.append('/');
                    else
                        buf.insert(param,'/');
                    String q=request.getQueryString();
                    if (q!=null&&q.length()!=0)
                    {
                        buf.append('?');
                        buf.append(q);
                    }
                    response.setContentLength(0);
                    response.sendRedirect(response.encodeRedirectURL(buf.toString()));
                }
                // else look for a welcome file
                else if (null!=(welcome=getWelcomeFile(pathInContext)))
                {
                    if (_redirectWelcome)
                    {
                        // Redirect to the index
                        response.setContentLength(0);
                        String q=request.getQueryString();
                        if (q!=null&&q.length()!=0)
                            response.sendRedirect(response.encodeRedirectURL(URIUtil.addPaths( _context.getContextPath(),welcome)+"?"+q));
                        else
                            response.sendRedirect(response.encodeRedirectURL(URIUtil.addPaths( _context.getContextPath(),welcome)));
                    }
                    else
                    {
                        // Forward to the index
                        RequestDispatcher dispatcher=request.getRequestDispatcher(welcome);
                        if (dispatcher!=null)
                        {
                            if (included.booleanValue())
                                dispatcher.include(request,response);
                            else
                            {
                                request.setAttribute("org.mortbay.jetty.welcome",welcome);
                                dispatcher.forward(request,response);
                            }
                        }
                    }
                }
                else
                {
                    // No welcome file: generate a directory listing (subject to
                    // the same conditional-header handling as files).
                    content=new UnCachedContent(resource);
                    if (included.booleanValue() || passConditionalHeaders(request,response, resource,content))
                        sendDirectory(request,response,resource,pathInContext.length()>1);
                }
            }
        }
        catch(IllegalArgumentException e)
        {
            Log.warn(Log.EXCEPTION,e);
            if(!response.isCommitted())
                response.sendError(500, e.getMessage());
        }
        finally
        {
            // Release cached content (or the raw resource) exactly once.
            if (content!=null)
                content.release();
            else if (resource!=null)
                resource.release();
        }
    }

    /* ------------------------------------------------------------ */
    /** POST is handled identically to GET for static content. */
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException
    {
        doGet(request,response);
    }

    /* ------------------------------------------------------------ */
    /* (non-Javadoc)
     * @see javax.servlet.http.HttpServlet#doTrace(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse)
     */
    protected void doTrace(HttpServletRequest req, HttpServletResponse resp)
        throws ServletException, IOException
    {
        resp.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED);
    }

    /* ------------------------------------------------------------ */
    /**
     * Finds a matching welcome file for the supplied {@link Resource}. This will be the first entry in the list of
     * configured {@link #_welcomes welcome files} that exists within the directory referenced by the <code>Resource</code>.
     * If the resource is not a directory, or no matching file is found, then it may look for a valid servlet mapping.
     * If there is none, then <code>null</code> is returned.
     * The list of welcome files is read from the {@link ContextHandler} for this servlet, or
     * <code>"index.jsp" , "index.html"</code> if that is <code>null</code>.
* @param pathInContext The path in Context in which we are looking for a welcome * @return The path of the matching welcome file in context * @throws IOException * @throws MalformedURLException */ private String getWelcomeFile(String pathInContext) throws MalformedURLException, IOException { if (_welcomes==null) return null; String welcome_servlet=null; for (int i=0;i<_welcomes.length;i++) { String welcome_in_context=URIUtil.addPaths(pathInContext,_welcomes[i]); Resource welcome=getResource(welcome_in_context); if (welcome!=null && welcome.exists()) return _welcomes[i]; if (_welcomeServlets && welcome_servlet==null) { Map.Entry entry=_servletHandler.getHolderEntry(welcome_in_context); if (entry!=null && entry.getValue()!=_defaultHolder) welcome_servlet=welcome_in_context; } } return welcome_servlet; } /* ------------------------------------------------------------ */ /* Check modification date headers. */ protected boolean passConditionalHeaders(HttpServletRequest request,HttpServletResponse response, Resource resource, HttpContent content) throws IOException { try { if (!request.getMethod().equals(HttpMethods.HEAD) ) { String ifms=request.getHeader(HttpHeaders.IF_MODIFIED_SINCE); if (ifms!=null) { if (content!=null) { Buffer mdlm=content.getLastModified(); if (mdlm!=null) { if (ifms.equals(mdlm.toString())) { response.reset(); response.setStatus(HttpServletResponse.SC_NOT_MODIFIED); response.flushBuffer(); return false; } } } long ifmsl=request.getDateHeader(HttpHeaders.IF_MODIFIED_SINCE); if (ifmsl!=-1) { if (resource.lastModified()/1000 <= ifmsl/1000) { response.reset(); response.setStatus(HttpServletResponse.SC_NOT_MODIFIED); response.flushBuffer(); return false; } } } // Parse the if[un]modified dates and compare to resource long date=request.getDateHeader(HttpHeaders.IF_UNMODIFIED_SINCE); if (date!=-1) { if (resource.lastModified()/1000 > date/1000) { response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED); return false; } } } } 
catch(IllegalArgumentException iae) { if(!response.isCommitted()) response.sendError(400, iae.getMessage()); throw iae; } return true; } /* ------------------------------------------------------------------- */ protected void sendDirectory(HttpServletRequest request, HttpServletResponse response, Resource resource, boolean parent) throws IOException { if (!_dirAllowed) { response.sendError(HttpServletResponse.SC_FORBIDDEN); return; } byte[] data=null; String base = URIUtil.addPaths(request.getRequestURI(),URIUtil.SLASH); String dir = resource.getListHTML(base,parent); if (dir==null) { response.sendError(HttpServletResponse.SC_FORBIDDEN, "No directory"); return; } data=dir.getBytes("UTF-8"); response.setContentType("text/html; charset=UTF-8"); response.setContentLength(data.length); response.getOutputStream().write(data); } /* ------------------------------------------------------------ */ protected void sendData(HttpServletRequest request, HttpServletResponse response, boolean include, Resource resource, HttpContent content, Enumeration reqRanges) throws IOException { long content_length=resource.length(); // Get the output stream (or writer) OutputStream out =null; try{out = response.getOutputStream();} catch(IllegalStateException e) {out = new WriterOutputStream(response.getWriter());} if ( reqRanges == null || !reqRanges.hasMoreElements()) { // if there were no ranges, send entire entity if (include) { resource.writeTo(out,0,content_length); } else { // See if a direct methods can be used? 
if (out instanceof HttpConnection.Output) { if (response instanceof Response) { writeOptionHeaders(((Response)response).getHttpFields()); ((HttpConnection.Output)out).sendContent(content); } else if (content.getBuffer()!=null) { writeHeaders(response,content,content_length); ((HttpConnection.Output)out).sendContent(content.getBuffer()); } else { writeHeaders(response,content,content_length); resource.writeTo(out,0,content_length); } } else { // Write content normally writeHeaders(response,content,content_length); resource.writeTo(out,0,content_length); } } } else { // Parse the satisfiable ranges List ranges =InclusiveByteRange.satisfiableRanges(reqRanges,content_length); // if there are no satisfiable ranges, send 416 response if (ranges==null || ranges.size()==0) { writeHeaders(response, content, content_length); response.setStatus(HttpServletResponse.SC_REQUESTED_RANGE_NOT_SATISFIABLE); response.setHeader(HttpHeaders.CONTENT_RANGE, InclusiveByteRange.to416HeaderRangeString(content_length)); resource.writeTo(out,0,content_length); return; } // if there is only a single valid range (must be satisfiable // since were here now), send that range with a 216 response if ( ranges.size()== 1) { InclusiveByteRange singleSatisfiableRange = (InclusiveByteRange)ranges.get(0); long singleLength = singleSatisfiableRange.getSize(content_length); writeHeaders(response,content,singleLength ); response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); response.setHeader(HttpHeaders.CONTENT_RANGE, singleSatisfiableRange.toHeaderRangeString(content_length)); resource.writeTo(out,singleSatisfiableRange.getFirst(content_length),singleLength); return; } // multiple non-overlapping valid ranges cause a multipart // 216 response which does not require an overall // content-length header // writeHeaders(response,content,-1); String mimetype=content.getContentType().toString(); MultiPartOutputStream multi = new MultiPartOutputStream(out); 
response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT); // If the request has a "Request-Range" header then we need to // send an old style multipart/x-byteranges Content-Type. This // keeps Netscape and acrobat happy. This is what Apache does. String ctp; if (request.getHeader(HttpHeaders.REQUEST_RANGE)!=null) ctp = "multipart/x-byteranges; boundary="; else ctp = "multipart/byteranges; boundary="; response.setContentType(ctp+multi.getBoundary()); InputStream in=resource.getInputStream(); long pos=0; // calculate the content-length int length=0; String[] header = new String[ranges.size()]; for (int i=0;i<ranges.size();i++) { InclusiveByteRange ibr = (InclusiveByteRange) ranges.get(i); header[i]=ibr.toHeaderRangeString(content_length); length+= ((i>0)?2:0)+ 2+multi.getBoundary().length()+2+ HttpHeaders.CONTENT_TYPE.length()+2+mimetype.length()+2+ HttpHeaders.CONTENT_RANGE.length()+2+header[i].length()+2+ 2+ (ibr.getLast(content_length)-ibr.getFirst(content_length))+1; } length+=2+2+multi.getBoundary().length()+2+2; response.setContentLength(length); for (int i=0;i<ranges.size();i++) { InclusiveByteRange ibr = (InclusiveByteRange) ranges.get(i); multi.startPart(mimetype,new String[]{HttpHeaders.CONTENT_RANGE+": "+header[i]}); long start=ibr.getFirst(content_length); long size=ibr.getSize(content_length); if (in!=null) { // Handle non cached resource if (start<pos) { in.close(); in=resource.getInputStream(); pos=0; } if (pos<start) { in.skip(start-pos); pos=start; } IO.copy(in,multi,size); pos+=size; } else // Handle cached resource (resource).writeTo(multi,start,size); } if (in!=null) in.close(); multi.close(); } return; } /* ------------------------------------------------------------ */ protected void writeHeaders(HttpServletResponse response,HttpContent content,long count) throws IOException { if (content.getContentType()!=null && response.getContentType()==null) response.setContentType(content.getContentType().toString()); if (response instanceof Response) { 
Response r=(Response)response; HttpFields fields = r.getHttpFields(); if (content.getLastModified()!=null) fields.put(HttpHeaders.LAST_MODIFIED_BUFFER,content.getLastModified(),content.getResource().lastModified()); else if (content.getResource()!=null) { long lml=content.getResource().lastModified(); if (lml!=-1) fields.putDateField(HttpHeaders.LAST_MODIFIED_BUFFER,lml); } if (count != -1) r.setLongContentLength(count); writeOptionHeaders(fields); } else { long lml=content.getResource().lastModified(); if (lml>=0) response.setDateHeader(HttpHeaders.LAST_MODIFIED,lml); if (count != -1) { if (count<Integer.MAX_VALUE) response.setContentLength((int)count); else response.setHeader(HttpHeaders.CONTENT_LENGTH,TypeUtil.toString(count)); } writeOptionHeaders(response); } } /* ------------------------------------------------------------ */ protected void writeOptionHeaders(HttpFields fields) throws IOException { if (_acceptRanges) fields.put(HttpHeaders.ACCEPT_RANGES_BUFFER,HttpHeaderValues.BYTES_BUFFER); if (_cacheControl!=null) fields.put(HttpHeaders.CACHE_CONTROL_BUFFER,_cacheControl); } /* ------------------------------------------------------------ */ protected void writeOptionHeaders(HttpServletResponse response) throws IOException { if (_acceptRanges) response.setHeader(HttpHeaders.ACCEPT_RANGES,"bytes"); if (_cacheControl!=null) response.setHeader(HttpHeaders.CACHE_CONTROL,_cacheControl.toString()); } /* ------------------------------------------------------------ */ /* * @see javax.servlet.Servlet#destroy() */ public void destroy() { try { if (_nioCache!=null) _nioCache.stop(); if (_bioCache!=null) _bioCache.stop(); } catch(Exception e) { Log.warn(Log.EXCEPTION,e); } finally { super.destroy(); } } /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ private class UnCachedContent implements HttpContent { Resource 
_resource; UnCachedContent(Resource resource) { _resource=resource; } /* ------------------------------------------------------------ */ public Buffer getContentType() { return _mimeTypes.getMimeByExtension(_resource.toString()); } /* ------------------------------------------------------------ */ public Buffer getLastModified() { return null; } /* ------------------------------------------------------------ */ public Buffer getBuffer() { return null; } /* ------------------------------------------------------------ */ public long getContentLength() { return _resource.length(); } /* ------------------------------------------------------------ */ public InputStream getInputStream() throws IOException { return _resource.getInputStream(); } /* ------------------------------------------------------------ */ public Resource getResource() { return _resource; } /* ------------------------------------------------------------ */ public void release() { _resource.release(); _resource=null; } } /* ------------------------------------------------------------ */ /* ------------------------------------------------------------ */ class NIOResourceCache extends ResourceCache { /* ------------------------------------------------------------ */ public NIOResourceCache(MimeTypes mimeTypes) { super(mimeTypes); } /* ------------------------------------------------------------ */ protected void fill(Content content) throws IOException { Buffer buffer=null; Resource resource=content.getResource(); long length=resource.length(); if (_useFileMappedBuffer && resource.getFile()!=null) { buffer = new DirectNIOBuffer(resource.getFile()); } else { InputStream is = resource.getInputStream(); try { Connector connector = HttpConnection.getCurrentConnection().getConnector(); buffer = ((NIOConnector)connector).getUseDirectBuffers()? 
(NIOBuffer)new DirectNIOBuffer((int)length): (NIOBuffer)new IndirectNIOBuffer((int)length); } catch(OutOfMemoryError e) { Log.warn(e.toString()); Log.debug(e); buffer = new IndirectNIOBuffer((int) length); } buffer.readFrom(is,(int)length); is.close(); } content.setBuffer(buffer); } } }
/* * Copyright (c) 2005 - 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed * under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.wso2.carbon.event.receiver.core.internal; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.log4j.Logger; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.databridge.commons.StreamDefinition; import org.wso2.carbon.event.input.adapter.core.InputEventAdapterSubscription; import org.wso2.carbon.event.input.adapter.core.exception.InputEventAdapterException; import org.wso2.carbon.event.input.adapter.core.exception.InputEventAdapterRuntimeException; import org.wso2.carbon.event.processor.manager.core.EventManagementUtil; import org.wso2.carbon.event.processor.manager.core.EventSync; import org.wso2.carbon.event.processor.manager.core.Manager; import org.wso2.carbon.event.processor.manager.core.config.DistributedConfiguration; import org.wso2.carbon.event.processor.manager.core.config.HAConfiguration; import org.wso2.carbon.event.processor.manager.core.config.Mode; import org.wso2.carbon.event.receiver.core.InputMapper; import org.wso2.carbon.event.receiver.core.config.EventReceiverConfiguration; import org.wso2.carbon.event.receiver.core.config.EventReceiverConstants; import org.wso2.carbon.event.receiver.core.exception.EventReceiverConfigurationException; import org.wso2.carbon.event.receiver.core.exception.EventReceiverProcessingException; import 
org.wso2.carbon.event.receiver.core.internal.ds.EventReceiverServiceValueHolder;
import org.wso2.carbon.event.receiver.core.internal.management.AbstractInputEventDispatcher;
import org.wso2.carbon.event.receiver.core.internal.management.InputEventDispatcher;
import org.wso2.carbon.event.receiver.core.internal.management.QueueInputEventDispatcher;
import org.wso2.carbon.event.receiver.core.internal.util.EventReceiverUtil;
import org.wso2.carbon.event.receiver.core.internal.util.helper.EventReceiverConfigurationHelper;
import org.wso2.carbon.event.statistics.EventStatisticsMonitor;
import org.wso2.carbon.event.stream.core.EventProducer;
import org.wso2.carbon.event.stream.core.EventProducerCallback;
import org.wso2.siddhi.core.event.Event;

import java.util.List;
import java.util.concurrent.locks.Lock;

/**
 * A deployed event receiver: subscribes to an input event adapter, converts
 * incoming (typed or custom-mapped) payloads to events of the exported stream
 * definition and dispatches them towards the event stream service.
 */
public class EventReceiver implements EventProducer {

    private static final Log log = LogFactory.getLog(EventReceiver.class);

    // True when the underlying adapter delivers the same event on every node
    // of the cluster (so only one node should forward it).
    private boolean isEventDuplicatedInCluster;
    private boolean traceEnabled = false;
    private boolean statisticsEnabled = false;
    private boolean customMappingEnabled = false;
    private boolean isWorkerNode = false;
    // Pre-computed in the constructor: true when sendEvent() may dispatch
    // without consulting the receiver-coordinator check.
    private boolean sufficientToSend = false;
    private Logger trace = Logger.getLogger(EventReceiverConstants.EVENT_TRACE_LOGGER);
    private EventReceiverConfiguration eventReceiverConfiguration = null;
    private StreamDefinition exportedStreamDefinition;
    private InputMapper inputMapper = null;
    private EventStatisticsMonitor statisticsMonitor;
    private String beforeTracerPrefix;
    private String afterTracerPrefix;
    private AbstractInputEventDispatcher inputEventDispatcher;
    private Mode mode;

    /**
     * Builds the receiver: constructs the input mapper, subscribes to the
     * input event adapter and selects the dispatcher for the runtime mode
     * (single-node, HA or distributed).
     *
     * @param eventReceiverConfiguration the receiver configuration (mapping, adapter, flags)
     * @param exportedStreamDefinition   the stream this receiver emits to
     * @param mode                       runtime mode (SingleNode / HA / Distributed)
     * @throws EventReceiverConfigurationException if the mapper cannot be built
     *                                             or the adapter subscription is mis-configured
     */
    public EventReceiver(EventReceiverConfiguration eventReceiverConfiguration,
                         StreamDefinition exportedStreamDefinition, Mode mode)
            throws EventReceiverConfigurationException {
        this.eventReceiverConfiguration = eventReceiverConfiguration;
        int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
        if (this.eventReceiverConfiguration != null) {
            this.traceEnabled = eventReceiverConfiguration.isTraceEnabled();
            this.statisticsEnabled = eventReceiverConfiguration.isStatisticsEnabled();
            this.customMappingEnabled = eventReceiverConfiguration.getInputMapping().isCustomMappingEnabled();
            String mappingType = this.eventReceiverConfiguration.getInputMapping().getMappingType();
            this.inputMapper = EventReceiverServiceValueHolder.getMappingFactoryMap().get(mappingType)
                    .constructInputMapper(this.eventReceiverConfiguration, exportedStreamDefinition);
            // The input mapper should not be null. For configurations where custom mapping is disabled,
            // an input mapper would be created without the mapping details
            if (this.inputMapper != null) {
                if (customMappingEnabled) {
                    EventReceiverConfigurationHelper.validateExportedStream(eventReceiverConfiguration,
                            exportedStreamDefinition, this.inputMapper);
                }
                this.exportedStreamDefinition = exportedStreamDefinition;
            } else {
                throw new EventReceiverConfigurationException("Could not create input mapper for mapping type "
                        + mappingType + " for event receiver :" + eventReceiverConfiguration.getEventReceiverName());
            }

            // Initialize tracer and statistics.
            if (statisticsEnabled) {
                this.statisticsMonitor = EventReceiverServiceValueHolder.getEventStatisticsService().getEventStatisticMonitor(
                        tenantId, EventReceiverConstants.EVENT_RECEIVER, eventReceiverConfiguration.getEventReceiverName(), null);
            }
            if (traceEnabled) {
                this.beforeTracerPrefix = "TenantId : " + tenantId + ", " + EventReceiverConstants.EVENT_RECEIVER + " : "
                        + eventReceiverConfiguration.getEventReceiverName() + ", before processing "
                        + System.getProperty("line.separator");
                this.afterTracerPrefix = "TenantId : " + tenantId + ", " + EventReceiverConstants.EVENT_RECEIVER + " : "
                        + eventReceiverConfiguration.getEventReceiverName() + ", " + EventReceiverConstants.EVENT_STREAM + " : "
                        + EventReceiverUtil.getExportedStreamIdFrom(eventReceiverConfiguration) + ", after processing "
                        + System.getProperty("line.separator");
            }

            String inputEventAdapterName = eventReceiverConfiguration.getFromAdapterConfiguration().getName();
            try {
                // Custom mapping uses the mapped subscription; otherwise events
                // are assumed to already match the stream's typed format.
                InputEventAdapterSubscription inputEventAdapterSubscription;
                if (this.customMappingEnabled) {
                    inputEventAdapterSubscription = new MappedEventSubscription();
                } else {
                    inputEventAdapterSubscription = new TypedEventSubscription();
                }
                EventReceiverServiceValueHolder.getInputEventAdapterService().create(
                        eventReceiverConfiguration.getFromAdapterConfiguration(), inputEventAdapterSubscription);
                isEventDuplicatedInCluster = EventReceiverServiceValueHolder.getInputEventAdapterService()
                        .isEventDuplicatedInCluster(eventReceiverConfiguration.getFromAdapterConfiguration().getName());
                DistributedConfiguration distributedConfiguration = EventReceiverServiceValueHolder
                        .getEventManagementService().getManagementModeInfo().getDistributedConfiguration();
                if (distributedConfiguration != null) {
                    this.isWorkerNode = distributedConfiguration.isWorkerNode();
                }
                // Outside distributed mode, or on a worker node whose adapter
                // does not duplicate events, this receiver may always dispatch.
                sufficientToSend = mode != Mode.Distributed || (isWorkerNode && !isEventDuplicatedInCluster);
            } catch (InputEventAdapterException e) {
                throw new EventReceiverConfigurationException("Cannot subscribe to input event adapter :"
                        + inputEventAdapterName + ", error in configuration. " + e.getMessage(), e);
            } catch (InputEventAdapterRuntimeException e) {
                throw new EventReceiverProcessingException("Cannot subscribe to input event adapter :"
                        + inputEventAdapterName + ", error while connecting by adapter. " + e.getMessage(), e);
            }

            this.mode = mode;
            if (mode == Mode.HA) {
                // HA mode: queue events and register for event sync so the
                // passive member stays consistent with the active one.
                HAConfiguration haConfiguration = EventReceiverServiceValueHolder.getEventManagementService()
                        .getManagementModeInfo().getHaConfiguration();
                Lock readLock = EventReceiverServiceValueHolder.getCarbonEventReceiverManagementService().getReadLock();
                inputEventDispatcher = new QueueInputEventDispatcher(tenantId,
                        EventManagementUtil.constructEventSyncId(tenantId,
                                eventReceiverConfiguration.getEventReceiverName(), Manager.ManagerType.Receiver),
                        readLock, exportedStreamDefinition,
                        haConfiguration.getEventSyncReceiverMaxQueueSizeInMb(),
                        haConfiguration.getEventSyncReceiverQueueSize());
                inputEventDispatcher.setSendToOther(!isEventDuplicatedInCluster);
                EventReceiverServiceValueHolder.getEventManagementService()
                        .registerEventSync((EventSync) inputEventDispatcher, Manager.ManagerType.Receiver);
            } else {
                inputEventDispatcher = new InputEventDispatcher();
            }

            if (mode == Mode.HA && isEventDuplicatedInCluster) {
                EventReceiverServiceValueHolder.getInputEventAdapterService().start(inputEventAdapterName);
            }
        }
    }

    public int getTenantId() {
        return PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
    }

    /**
     * Returns the stream definition that is exported by this event receiver.
     * This stream definition will be available to any object that consumes the
     * event receiver service (e.g. EventProcessors).
     *
     * @return the {@link StreamDefinition} of the stream that will be
     *         sending out events from this event receiver
     */
    public StreamDefinition getExportedStreamDefinition() {
        return exportedStreamDefinition;
    }

    /**
     * Returns the event receiver configuration associated with this event receiver
     *
     * @return the {@link EventReceiverConfiguration} instance
     */
    public EventReceiverConfiguration getEventReceiverConfiguration() {
        return this.eventReceiverConfiguration;
    }

    /**
     * Converts a custom-mapped payload (or a list of them) with the configured
     * input mapper and forwards the resulting event(s). Faulty events are
     * logged and dropped; processing errors never propagate to the adapter.
     */
    protected void processMappedEvent(Object object) {
        if (traceEnabled) {
            trace.info(beforeTracerPrefix + object.toString());
        }
        if (object instanceof List) {
            // Recursively unpack batched payloads, isolating per-item errors.
            for (Object obj : (List) object) {
                try {
                    processMappedEvent(obj);
                } catch (EventReceiverProcessingException e) {
                    log.error("Dropping event. Error processing event : ", e);
                }
            }
        } else {
            try {
                Object convertedEvent = this.inputMapper.convertToMappedInputEvent(object);
                if (convertedEvent != null) {
                    if (convertedEvent instanceof Event[]) {
                        Event[] arrayOfEvents = (Event[]) convertedEvent;
                        for (Event event : arrayOfEvents) {
                            if (event != null) {
                                sendEvent(event);
                            }
                        }
                    } else {
                        sendEvent((Event) convertedEvent);
                    }
                } else {
                    log.warn("Dropping the empty/null event, Event does not match with mapping");
                }
            } catch (EventReceiverProcessingException e) {
                log.error("Dropping event. Error processing event : ", e);
            } catch (RuntimeException e) {
                log.error("Dropping event. Unexpected error while processing event : " + e.getMessage(), e);
            }
        }
    }

    /**
     * Converts a typed payload (or a list of them) with the configured input
     * mapper and forwards the resulting event(s). Faulty events are logged
     * and dropped.
     */
    protected void processTypedEvent(Object obj) {
        if (traceEnabled) {
            trace.info(beforeTracerPrefix + obj.toString());
        }
        if (obj instanceof List) {
            // Recursively unpack batched payloads, isolating per-item errors.
            for (Object object : (List) obj) {
                try {
                    processTypedEvent(object);
                } catch (EventReceiverProcessingException e) {
                    log.error("Dropping event. Error processing event: " + e.getMessage(), e);
                }
            }
        } else {
            try {
                Object convertedEvent = this.inputMapper.convertToTypedInputEvent(obj);
                if (convertedEvent != null) {
                    if (convertedEvent instanceof Event[]) {
                        Event[] arrayOfEvents = (Event[]) convertedEvent;
                        for (Event event : arrayOfEvents) {
                            if (event != null) {
                                sendEvent(event);
                            }
                        }
                    } else {
                        sendEvent((Event) convertedEvent);
                    }
                }
            } catch (EventReceiverProcessingException e) {
                log.error("Dropping event. Error processing event: " + e.getMessage(), e);
            }
        }
    }

    /**
     * Dispatches a single converted event after tracing/statistics.
     */
    protected void sendEvent(Event event) {
        if (traceEnabled) {
            trace.info(afterTracerPrefix + event);
        }
        if (statisticsEnabled) {
            statisticsMonitor.incrementRequest();
        }
        //in distributed mode if events are duplicated in cluster, send event only if the node is receiver coordinator. Also do not send if this is a manager node.
        if (sufficientToSend || EventReceiverServiceValueHolder.getCarbonEventReceiverManagementService().isReceiverCoordinator()) {
            this.inputEventDispatcher.onEvent(event);
        }
    }

    public AbstractInputEventDispatcher getInputEventDispatcher() {
        return inputEventDispatcher;
    }

    /**
     * Rebuilds the input mapper when the stream definition backing this
     * receiver is (re)added.
     *
     * @throws EventReceiverConfigurationException if the mapper cannot be constructed
     */
    protected void defineEventStream(Object definition) throws EventReceiverConfigurationException {
        if (log.isDebugEnabled()) {
            log.debug("EventReceiver: " + eventReceiverConfiguration.getEventReceiverName()
                    + ", notifying event definition addition :" + definition.toString());
        }
        if (definition instanceof StreamDefinition) {
            // FIX: removed an unused local ((StreamDefinition) definition) that
            // was assigned but never read.
            String mappingType = eventReceiverConfiguration.getInputMapping().getMappingType();
            this.inputMapper = EventReceiverServiceValueHolder.getMappingFactoryMap().get(mappingType)
                    .constructInputMapper(eventReceiverConfiguration, exportedStreamDefinition);
        }
    }

    /**
     * Drops the input mapper when the stream definition is removed.
     */
    protected void removeEventStream(Object definition) {
        if (log.isDebugEnabled()) {
            // FIX: this debug message previously said "addition" — a copy-paste
            // from defineEventStream; it is a removal notification.
            log.debug("EventReceiver: " + eventReceiverConfiguration.getEventReceiverName()
                    + ", notifying event definition removal :" + definition.toString());
        }
        this.inputMapper = null;
    }

    @Override
    public String getStreamId() {
        return exportedStreamDefinition.getStreamId();
    }

    @Override
    public void setCallBack(EventProducerCallback callBack) {
        this.inputEventDispatcher.setCallBack(callBack);
    }

    /**
     * Destroys the adapter subscription and, in HA mode, unregisters the
     * event-sync dispatcher.
     */
    public void destroy() {
        EventReceiverServiceValueHolder.getInputEventAdapterService()
                .destroy(eventReceiverConfiguration.getFromAdapterConfiguration().getName());
        if (mode == Mode.HA && inputEventDispatcher instanceof EventSync) {
            EventReceiverServiceValueHolder.getEventManagementService().unregisterEventSync(
                    ((EventSync) inputEventDispatcher).getStreamDefinition().getId(), Manager.ManagerType.Receiver);
        }
    }

    public boolean isEventDuplicatedInCluster() {
        return isEventDuplicatedInCluster;
    }

    /** Adapter callback used when custom mapping is enabled. */
    private class MappedEventSubscription implements InputEventAdapterSubscription {
        @Override
        public void onEvent(Object o) {
            processMappedEvent(o);
        }
    }

    /** Adapter callback used when events arrive already in typed format. */
    private class TypedEventSubscription implements InputEventAdapterSubscription {
        @Override
        public void onEvent(Object o) {
            processTypedEvent(o);
        }
    }
}
/**
 * Copyright (c) 2007-2013 Alysson Bessani, Eduardo Alchieri, Paulo Sousa, and the authors indicated in the @author tags
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package bftsmart.statemanagement.durability;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.Queue;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.locks.ReentrantLock;

import bftsmart.consensus.messages.ConsensusMessage;
import bftsmart.reconfiguration.views.View;
import bftsmart.statemanagement.ApplicationState;
import bftsmart.statemanagement.SMMessage;
import bftsmart.statemanagement.StateManager;
import bftsmart.tom.server.defaultservices.CommandsInfo;
import bftsmart.tom.server.durability.DurabilityCoordinator;
import bftsmart.tom.util.TOMUtil;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// State manager for the durable (CST / collaborative state transfer)
// protocol; requests and serves state over dedicated sockets.
public class DurableStateManager extends StateManager {

    private Logger logger = LoggerFactory.getLogger(this.getClass());

    private ReentrantLock lockTimer = new ReentrantLock();
    private Timer stateTimer = null;
    private final static long INIT_TIMEOUT = 40000;   // ms; doubled on every retry
    private long timeout = INIT_TIMEOUT;

    private CSTRequestF1 cstRequest;

    private CSTState stateCkp;
    private CSTState stateLower;
    private CSTState stateUpper;

    // Thread serving the state to other replicas; started at most once.
    private Thread stateThread = null;

    /**
     * Broadcasts a CST state request for waitingCID to the other replicas
     * and arms a retry timer whose timeout doubles on every attempt.
     */
    @Override
    protected void requestState() {
        if (tomLayer.requestsTimer != null) {
            tomLayer.requestsTimer.clearAll();
        }

        int myProcessId = SVController.getStaticConf().getProcessId();
        int[] otherProcesses = SVController.getCurrentViewOtherAcceptors();
        int globalCkpPeriod = SVController.getStaticConf()
                .getGlobalCheckpointPeriod();

        CSTRequestF1 cst = new CSTRequestF1(waitingCID);
        cst.defineReplicas(otherProcesses, globalCkpPeriod, myProcessId);
        this.cstRequest = cst;
        CSTSMMessage cstMsg = new CSTSMMessage(myProcessId, waitingCID,
                TOMUtil.SM_REQUEST, cst, null, null, -1, -1);
        tomLayer.getCommunication().send(
                SVController.getCurrentViewOtherAcceptors(), cstMsg);

        logger.info("I just sent a request to the other replicas for the state up to CID " + waitingCID);

        TimerTask stateTask = new TimerTask() {
            public void run() {
                // On timeout, re-trigger the state transfer locally.
                logger.info("Timeout to retrieve state");
                CSTSMMessage msg = new CSTSMMessage(-1, waitingCID,
                        TOMUtil.TRIGGER_SM_LOCALLY, null, null, null, -1, -1);
                triggerTimeout(msg);
            }
        };

        stateTimer = new Timer("state timer");
        timeout = timeout * 2;   // exponential back-off between retries
        stateTimer.schedule(stateTask, timeout);
    }

    /**
     * Fired when the reply timer expires: resets the protocol bookkeeping
     * and requests the state again.
     */
    @Override
    public void stateTimeout() {
        lockTimer.lock();
        logger.debug("(StateManager.stateTimeout) Timeout for the replica that was supposed to send the complete state. Changing desired replica.");
        if (stateTimer != null) {
            stateTimer.cancel();
        }
        reset();
        requestState();
        lockTimer.unlock();
    }

    /**
     * Handles an incoming state request: replies with this replica's socket
     * address for the out-of-band transfer and, on first use, spawns a
     * StateSenderServer thread that streams the state.
     */
    @Override
    public void SMRequestDeliver(SMMessage msg, boolean isBFT) {
        logger.debug("Invoked method");
        if (SVController.getStaticConf().isStateTransferEnabled() && dt.getRecoverer() != null) {
            logger.debug("The state transfer protocol is enabled");
            logger.debug("I received a state request for CID " + msg.getCID() + " from replica " + msg.getSender());
            CSTSMMessage cstMsg = (CSTSMMessage) msg;
            CSTRequestF1 cstConfig = cstMsg.getCstConfig();
            boolean sendState = cstConfig.getCheckpointReplica() == SVController
                    .getStaticConf().getProcessId();
            if (sendState) {
                logger.debug("I should be the one sending the state");
            }

            logger.info("State asked by replica " + msg.getSender());

            int[] targets = {msg.getSender()};
            InetSocketAddress address = SVController.getCurrentView().getAddress(
                    SVController.getStaticConf().getProcessId());
            String myIp = address.getHostName();
            int myId = SVController.getStaticConf().getProcessId();
            // NOTE(review): state-transfer port is hard-coded as 4444 + id —
            // verify it cannot collide with other configured ports.
            int port = 4444 + myId;
            address = new InetSocketAddress(myIp, port);
            cstConfig.setAddress(address);
            CSTSMMessage reply = new CSTSMMessage(myId, msg.getCID(),
                    TOMUtil.SM_REPLY, cstConfig, null,
                    SVController.getCurrentView(), tomLayer.getSynchronizer().getLCManager().getLastReg(),
                    tomLayer.execManager.getCurrentLeader());

            tomLayer.getCommunication().send(targets, reply);

            // Start (once) the thread that serves the state to requesters.
            if (stateThread == null) {
                StateSenderServer stateServer = new StateSenderServer(port);
                stateServer.setRecoverable(dt.getRecoverer());
                stateServer.setRequest(cstConfig);
                stateThread = new Thread(stateServer);
                stateThread.start();
            }
        }
    }

    /**
     * Handles a state reply: tallies regency/leader/view votes and fetches
     * the application state over the advertised socket.
     * (The remainder of this method continues beyond this chunk.)
     */
    @Override
    public void SMReplyDeliver(SMMessage msg, boolean isBFT) {
        lockTimer.lock();
        CSTSMMessage reply = (CSTSMMessage) msg;
        if (SVController.getStaticConf().isStateTransferEnabled()) {
            logger.debug("The state transfer protocol is enabled");
            logger.info("I received a state reply for CID " + reply.getCID()
                    + " from replica " + reply.getSender());

            logger.info("Received CID: " + reply.getCID() + ". Waiting " + waitingCID);
            if (waitingCID != -1 && reply.getCID() == waitingCID) {

                int currentRegency = -1;
                int currentLeader = -1;
                View currentView = null;
                // CertifiedDecision currentProof = null;

                if (!appStateOnly) {
                    // Record this sender's regency/leader/view and adopt each
                    // value once enough replicas agree on it.
                    senderRegencies.put(reply.getSender(), reply.getRegency());
                    senderLeaders.put(reply.getSender(), reply.getLeader());
                    senderViews.put(reply.getSender(), reply.getView());
                    // senderProofs.put(msg.getSender(), msg.getState().getCertifiedDecision(SVController));
                    if (enoughRegencies(reply.getRegency())) {
                        currentRegency = reply.getRegency();
                    }
                    if (enoughLeaders(reply.getLeader())) {
                        currentLeader = reply.getLeader();
                    }
                    if (enoughViews(reply.getView())) {
                        currentView = reply.getView();
                        if (!currentView.isMember(SVController.getStaticConf()
                                .getProcessId())) {
                            logger.warn("Not a member!");
                        }
                    }
                    // if (enoughProofs(waitingCID, this.tomLayer.getSynchronizer().getLCManager())) currentProof = msg.getState().getCertifiedDecision(SVController);

                } else {
                    // appStateOnly: trust the local synchronizer's values.
                    currentLeader = tomLayer.execManager.getCurrentLeader();
                    currentRegency = tomLayer.getSynchronizer().getLCManager().getLastReg();
                    currentView = SVController.getCurrentView();
                }

                logger.debug("The reply is for the CID that I want!");

                // Fetch the application state over a dedicated socket.
                // NOTE(review): Java-native deserialization of data read from
                // the network — assumes a trusted replica channel; confirm.
                InetSocketAddress address = reply.getCstConfig().getAddress();
                Socket clientSocket;
                ApplicationState stateReceived = null;
                try {
                    clientSocket = new Socket(address.getHostName(), address.getPort());
                    ObjectInputStream in = new ObjectInputStream(
                            clientSocket.getInputStream());
                    stateReceived = (ApplicationState) in.readObject();
                } catch (UnknownHostException e) {
                    // TODO Auto-generated catch block
                    logger.error("Failed to connect to address", e);
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    logger.error("Failed to connect to address", e);
                } catch (ClassNotFoundException e) {
                    // TODO Auto-generated catch block
                    logger.error("Failed to deserialize application state object", e);
                }

                if (stateReceived instanceof
CSTState) { senderStates.put(reply.getSender(), stateReceived); if (reply.getSender() == cstRequest.getCheckpointReplica()) { this.stateCkp = (CSTState) stateReceived; } if (reply.getSender() == cstRequest.getLogLower()) { this.stateLower = (CSTState) stateReceived; } if (reply.getSender() == cstRequest.getLogUpper()) { this.stateUpper = (CSTState) stateReceived; } } if (senderStates.size() == 3) { CommandsInfo[] lowerLog = stateLower.getLogLower(); CommandsInfo[] upperLog = stateUpper.getLogUpper(); logger.info("lowerLog "); if (lowerLog != null) { logger.info("\t" + lowerLog.length); } logger.info("upperLog "); if (upperLog != null) { logger.info("\t" + upperLog.length); } boolean haveState = false; byte[] lowerbytes = TOMUtil.getBytes(lowerLog); logger.debug("Log lower bytes size: " + lowerbytes.length); byte[] upperbytes = TOMUtil.getBytes(upperLog); logger.debug("Log upper bytes size: " + upperbytes.length); byte[] lowerLogHash = TOMUtil.computeHash(lowerbytes); byte[] upperLogHash = TOMUtil.computeHash(upperbytes); // validate lower log if (Arrays.equals(stateCkp.getHashLogLower(), lowerLogHash)) { haveState = true; } else { logger.error("Lower log does not match"); } // validate upper log if (!haveState || !Arrays.equals(stateCkp.getHashLogUpper(), upperLogHash)) { haveState = false; logger.error("Upper log does not match"); } CSTState statePlusLower = new CSTState(stateCkp.getSerializedState(), TOMUtil.getBytes(stateCkp.getSerializedState()), stateLower.getLogLower(), stateCkp.getHashLogLower(), null, null, stateCkp.getCheckpointCID(), stateUpper.getCheckpointCID(), SVController.getStaticConf().getProcessId()); if (haveState) { // validate checkpoint logger.info("validating checkpoint!!!"); dt.getRecoverer().setState(statePlusLower); byte[] currentStateHash = ((DurabilityCoordinator) dt.getRecoverer()).getCurrentStateHash(); if (!Arrays.equals(currentStateHash, stateUpper.getHashCheckpoint())) { logger.error("ckp hash don't match"); haveState = false; } } 
logger.info("Current regency: " + currentRegency); logger.info("Current leader: " + currentLeader); logger.info("Current view: " + currentView); if (currentRegency > -1 && currentLeader > -1 && currentView != null && haveState && (!isBFT || /*currentProof != null ||*/ appStateOnly)) { logger.info("---- RECEIVED VALID STATE ----"); logger.debug("The state of those replies is good!"); logger.debug("CID State requested: " + reply.getCID()); logger.debug("CID State received: " + stateUpper.getLastCID()); tomLayer.getSynchronizer().getLCManager().setLastReg(currentRegency); tomLayer.getSynchronizer().getLCManager().setNextReg(currentRegency); tomLayer.getSynchronizer().getLCManager().setNewLeader(currentLeader); tomLayer.execManager.setNewLeader(currentLeader); // if (currentProof != null && !appStateOnly) { // // System.out.println("Installing proof for consensus " + waitingCID); // // Consensus cons = execManager.getConsensus(waitingCID); // Epoch e = null; // // for (ConsensusMessage cm : currentProof.getConsMessages()) { // // e = cons.getEpoch(cm.getEpoch(), true, SVController); // if (e.getTimestamp() != cm.getEpoch()) { // // System.out.println("Strange... 
proof contains messages from more than just one epoch"); // e = cons.getEpoch(cm.getEpoch(), true, SVController); // } // e.addToProof(cm); // // if (cm.getType() == MessageFactory.ACCEPT) { // e.setAccept(cm.getSender(), cm.getValue()); // } // // else if (cm.getType() == MessageFactory.WRITE) { // e.setWrite(cm.getSender(), cm.getValue()); // } // // } // // // if (e != null) { // // byte[] hash = tomLayer.computeHash(currentProof.getDecision()); // e.propValueHash = hash; // e.propValue = currentProof.getDecision(); // e.deserializedPropValue = tomLayer.checkProposedValue(currentProof.getDecision(), false); // cons.decided(e, false); // // System.out.println("Successfully installed proof for consensus " + waitingCID); // // } else { // System.out.println("Failed to install proof for consensus " + waitingCID); // // } // // } // I might have timed out before invoking the state transfer, so // stop my re-transmission of STOP messages for all regencies up to the current one if (currentRegency > 0) { tomLayer.getSynchronizer().removeSTOPretransmissions(currentRegency - 1); } logger.debug("trying to acquire deliverlock"); dt.pauseDecisionDelivery(); logger.debug("acquired"); // this makes the isRetrievingState() evaluates to false waitingCID = -1; dt.update(stateUpper); // Deal with stopped messages that may come from // synchronization phase if (!appStateOnly && execManager.stopped()) { Queue<ConsensusMessage> stoppedMsgs = execManager.getStoppedMsgs(); for (ConsensusMessage stopped : stoppedMsgs) { if (stopped.getNumber() > state.getLastCID()) { execManager.addOutOfContextMessage(stopped); } } execManager.clearStopped(); execManager.restart(); } logger.info("Processing out of context messages"); tomLayer.processOutOfContext(); if (SVController.getCurrentViewId() != currentView.getId()) { logger.info("Installing current view!"); SVController.reconfigureTo(currentView); } isInitializing = false; dt.canDeliver(); dt.resumeDecisionDelivery(); reset(); logger.info("I 
updated the state!"); tomLayer.requestsTimer.Enabled(true); tomLayer.requestsTimer.startTimer(); if (stateTimer != null) { stateTimer.cancel(); } if (appStateOnly) { appStateOnly = false; tomLayer.getSynchronizer().resumeLC(); } } else if (state == null && (SVController.getCurrentViewN() / 2) < getReplies()) { logger.warn("---- DIDNT RECEIVE STATE ----"); logger.debug("I have more than " + (SVController.getCurrentViewN() / 2) + " messages that are no good!"); waitingCID = -1; reset(); if (stateTimer != null) { stateTimer.cancel(); } if (appStateOnly) { requestState(); } } else if (!haveState) { logger.warn("---- RECEIVED INVALID STATE ----"); logger.debug("The replica from which I expected the state, sent one which doesn't match the hash of the others, or it never sent it at all"); reset(); requestState(); if (stateTimer != null) { stateTimer.cancel(); } } } } } lockTimer.unlock(); } }
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.impl.search;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.progress.ProgressIndicatorProvider;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.java.stubs.index.JavaAnonymousClassBaseRefOccurenceIndex;
import com.intellij.psi.impl.java.stubs.index.JavaSuperClassNameOccurenceIndex;
import com.intellij.psi.search.EverythingGlobalScope;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.search.searches.AllClassesSearch;
import com.intellij.psi.search.searches.DirectClassInheritorsSearch;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.Processor;
import com.intellij.util.QueryExecutor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.HashMap;
import org.jetbrains.annotations.NotNull;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * Query executor that finds the direct inheritors of a class by consulting the
 * stub indices ({@link JavaSuperClassNameOccurenceIndex} for named classes,
 * {@link JavaAnonymousClassBaseRefOccurenceIndex} for anonymous classes) instead
 * of scanning sources. All PSI access is wrapped in read actions because this
 * executor may run on a background thread.
 *
 * @author max
 */
public class JavaDirectInheritorsSearcher implements QueryExecutor<PsiClass, DirectClassInheritorsSearch.SearchParameters> {
  /**
   * Feeds every direct inheritor of {@code p.getClassToProcess()} to {@code consumer}.
   * Returns false as soon as the consumer rejects an element (early abort).
   */
  @Override
  public boolean execute(@NotNull final DirectClassInheritorsSearch.SearchParameters p, @NotNull final Processor<PsiClass> consumer) {
    final PsiClass aClass = p.getClassToProcess();

    // Snapshot scope and qualified name inside read actions; PSI must not be
    // touched from a background thread without one.
    final SearchScope useScope = ApplicationManager.getApplication().runReadAction(new Computable<SearchScope>() {
      @Override
      public SearchScope compute() {
        return aClass.getUseScope();
      }
    });

    final String qualifiedName = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
      @Override
      public String compute() {
        return aClass.getQualifiedName();
      }
    });

    final Project project = PsiUtilCore.getProjectInReadAction(aClass);
    if (CommonClassNames.JAVA_LANG_OBJECT.equals(qualifiedName)) {
      // Special case: every class with no explicit superclass (and every
      // interface) is a direct inheritor of java.lang.Object, so the index
      // is useless here — enumerate all classes instead.
      //[pasynkov]: WTF?
      //final SearchScope scope = useScope.intersectWith(GlobalSearchScope.notScope(GlobalSearchScope.getScopeRestrictedByFileTypes(
      //  GlobalSearchScope.allScope(psiManager.getProject()), StdFileTypes.JSP, StdFileTypes.JSPX)));

      return AllClassesSearch.search(useScope, project).forEach(new Processor<PsiClass>() {
        @Override
        public boolean process(final PsiClass psiClass) {
          if (psiClass.isInterface()) {
            return consumer.process(psiClass);
          }
          final PsiClass superClass = psiClass.getSuperClass();
          if (superClass != null && CommonClassNames.JAVA_LANG_OBJECT.equals(ApplicationManager.getApplication().runReadAction(new Computable<String>() {
            public String compute() {
              return superClass.getQualifiedName();
            }
          }))) {
            return consumer.process(psiClass);
          }
          return true;
        }
      });
    }

    // The index needs a GlobalSearchScope; fall back to "everything" when the
    // use scope is local.
    final GlobalSearchScope scope = useScope instanceof GlobalSearchScope ? (GlobalSearchScope)useScope : new EverythingGlobalScope(project);
    final String searchKey = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
      @Override
      public String compute() {
        return aClass.getName();
      }
    });
    if (StringUtil.isEmpty(searchKey)) {
      return true;
    }

    // Candidate extends/implements lists that mention the short class name.
    Collection<PsiReferenceList> candidates = MethodUsagesSearcher.resolveInReadAction(project, new Computable<Collection<PsiReferenceList>>() {
      @Override
      public Collection<PsiReferenceList> compute() {
        return JavaSuperClassNameOccurenceIndex.getInstance().get(searchKey, project, scope);
      }
    });

    // Group verified inheritors by qualified name so same-named classes from
    // different jars can be disambiguated below.
    Map<String, List<PsiClass>> classes = new HashMap<String, List<PsiClass>>();

    for (final PsiReferenceList referenceList : candidates) {
      ProgressIndicatorProvider.checkCanceled();
      final PsiClass candidate = (PsiClass)ApplicationManager.getApplication().runReadAction(new Computable<PsiElement>() {
        @Override
        public PsiElement compute() {
          // The reference list's parent is the declaring class.
          return referenceList.getParent();
        }
      });
      if (!checkInheritance(p, aClass, candidate, project)) continue;

      String fqn = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
        @Override
        public String compute() {
          return candidate.getQualifiedName();
        }
      });
      List<PsiClass> list = classes.get(fqn);
      if (list == null) {
        list = new ArrayList<PsiClass>();
        classes.put(fqn, list);
      }
      list.add(candidate);
    }

    if (!classes.isEmpty()) {
      final VirtualFile jarFile = getJarFile(aClass);
      for (List<PsiClass> sameNamedClasses : classes.values()) {
        if (!processSameNamedClasses(consumer, sameNamedClasses, jarFile)) return false;
      }
    }

    if (p.includeAnonymous()) {
      // Anonymous inheritors come from a separate index keyed the same way.
      Collection<PsiAnonymousClass> anonymousCandidates = MethodUsagesSearcher.resolveInReadAction(project, new Computable<Collection<PsiAnonymousClass>>() {
        @Override
        public Collection<PsiAnonymousClass> compute() {
          return JavaAnonymousClassBaseRefOccurenceIndex.getInstance().get(searchKey, project, scope);
        }
      });

      for (PsiAnonymousClass candidate : anonymousCandidates) {
        ProgressIndicatorProvider.checkCanceled();
        if (!checkInheritance(p, aClass, candidate, project)) continue;

        if (!consumer.process(candidate)) return false;
      }

      boolean isEnum = ApplicationManager.getApplication().runReadAction(new Computable<Boolean>() {
        @Override
        public Boolean compute() {
          return aClass.isEnum();
        }
      });
      if (isEnum) {
        // abstract enum can be subclassed in the body: each enum constant with
        // a body is an anonymous inheritor that the index does not record.
        PsiField[] fields = ApplicationManager.getApplication().runReadAction(new Computable<PsiField[]>() {
          @Override
          public PsiField[] compute() {
            return aClass.getFields();
          }
        });
        for (final PsiField field : fields) {
          if (field instanceof PsiEnumConstant) {
            PsiEnumConstantInitializer initializingClass =
              ApplicationManager.getApplication().runReadAction(new Computable<PsiEnumConstantInitializer>() {
                @Override
                public PsiEnumConstantInitializer compute() {
                  return ((PsiEnumConstant)field).getInitializingClass();
                }
              });
            if (initializingClass != null) {
              if (!consumer.process(initializingClass)) return false;
            }
          }
        }
      }
    }

    return true;
  }

  /**
   * True if {@code candidate} really is a direct inheritor of {@code aClass}
   * (or if the caller disabled inheritance checking). Index hits are matched
   * by short name only, so this resolve step filters out false positives.
   */
  private static boolean checkInheritance(final DirectClassInheritorsSearch.SearchParameters p, final PsiClass aClass, final PsiClass candidate, Project project) {
    return MethodUsagesSearcher.resolveInReadAction(project, new Computable<Boolean>() {
      @Override
      public Boolean compute() {
        return !p.isCheckInheritance() || candidate.isInheritor(aClass, false);
      }
    });
  }

  /**
   * Processes a group of equally-named inheritor classes. When the base class
   * lives in a jar and one of the group comes from the same jar, only the
   * same-jar classes are reported; otherwise the whole group is processed.
   */
  private static boolean processSameNamedClasses(Processor<PsiClass> consumer, List<PsiClass> sameNamedClasses, final VirtualFile jarFile) {
    // if there is a class from the same jar, prefer it
    boolean sameJarClassFound = false;

    if (jarFile != null && sameNamedClasses.size() > 1) {
      for (PsiClass sameNamedClass : sameNamedClasses) {
        boolean fromSameJar = Comparing.equal(getJarFile(sameNamedClass), jarFile);
        if (fromSameJar) {
          sameJarClassFound = true;
          if (!consumer.process(sameNamedClass)) return false;
        }
      }
    }

    return sameJarClassFound || ContainerUtil.process(sameNamedClasses, consumer);
  }

  // Jar file containing the class, or null for source classes (read action required).
  private static VirtualFile getJarFile(final PsiClass aClass) {
    return ApplicationManager.getApplication().runReadAction(new Computable<VirtualFile>() {
      @Override
      public VirtualFile compute() {
        return PsiUtil.getJarFile(aClass);
      }
    });
  }
}
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.vcs.log.ui.table;

import com.intellij.ide.IdeTooltip;
import com.intellij.ide.IdeTooltipManager;
import com.intellij.openapi.ui.popup.Balloon;
import com.intellij.openapi.util.text.HtmlChunk;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.util.text.DateFormatUtil;
import com.intellij.vcs.log.CommitId;
import com.intellij.vcs.log.VcsLogBundle;
import com.intellij.vcs.log.VcsShortCommitDetails;
import com.intellij.vcs.log.data.LoadingDetails;
import com.intellij.vcs.log.data.VcsLogData;
import com.intellij.vcs.log.graph.EdgePrintElement;
import com.intellij.vcs.log.graph.NodePrintElement;
import com.intellij.vcs.log.graph.PrintElement;
import com.intellij.vcs.log.graph.actions.GraphAction;
import com.intellij.vcs.log.graph.actions.GraphAnswer;
import com.intellij.vcs.log.paint.GraphCellPainter;
import com.intellij.vcs.log.statistics.VcsLogUsageTriggerCollector;
import com.intellij.vcs.log.ui.frame.CommitPresentationUtil;
import com.intellij.vcs.log.ui.table.column.Commit;
import com.intellij.vcs.log.util.VcsLogUiUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.Collection;
import java.util.Collections;

/**
 * Mouse-interaction controller for the "Commit" cell of the VCS log table:
 * routes clicks and hovers to the underlying commit graph (nodes/edge arrows),
 * shows tooltips for commits, and applies the graph's answers (cursor change,
 * jump to another row) back to the table.
 */
public abstract class GraphCommitCellController implements VcsLogCellController {
  @NotNull private final VcsLogData myLogData;
  @NotNull private final VcsLogGraphTable myTable;
  @NotNull private final GraphCellPainter myGraphCellPainter;

  public GraphCommitCellController(@NotNull VcsLogData logData,
                                   @NotNull VcsLogGraphTable table,
                                   @NotNull GraphCellPainter painter) {
    myLogData = logData;
    myTable = table;
    myGraphCellPainter = painter;
  }

  // X position (within the cell) where the tooltip anchor for this row should be.
  protected abstract int getTooltipXCoordinate(int row);

  // Tooltip component for the given cell value, or null if none should be shown.
  @Nullable
  protected abstract JComponent getTooltip(@NotNull Object value, @NotNull Point point, int row);

  /**
   * Forwards a click to the graph element under the cursor, if any.
   * Returns the cursor the table should switch to, or null.
   */
  @Nullable
  @Override
  public Cursor performMouseClick(int row, @NotNull MouseEvent e) {
    PrintElement printElement = findPrintElement(row, myTable.getPointInCell(e.getPoint(), Commit.INSTANCE));
    if (printElement != null) {
      return performGraphAction(printElement, e, GraphAction.Type.MOUSE_CLICK);
    }
    return null;
  }

  /**
   * Handles hover: lets the graph react (highlighting/cursor), and when the
   * pointer is not over a graph element, shows or hides the commit tooltip.
   */
  @Nullable
  @Override
  public Cursor performMouseMove(int row, @NotNull MouseEvent e) {
    Point pointInCell = myTable.getPointInCell(e.getPoint(), Commit.INSTANCE);
    PrintElement printElement = findPrintElement(row, pointInCell);
    Cursor cursor = performGraphAction(printElement, e, GraphAction.Type.MOUSE_OVER);
    // if printElement is null, still need to unselect whatever was selected in a graph
    if (printElement == null) {
      if (!showTooltip(row, pointInCell, e.getPoint(), false)) {
        if (IdeTooltipManager.getInstance().hasCurrent()) {
          IdeTooltipManager.getInstance().hideCurrent(e);
        }
      }
    }
    return cursor;
  }

  // A click selects the row only when it did not land on a graph element.
  @Override
  public boolean shouldSelectCell(int row, @NotNull MouseEvent e) {
    return findPrintElement(row, myTable.getPointInCell(e.getPoint(), Commit.INSTANCE)) == null;
  }

  // Graph element (node or edge) under the given in-cell point, or null.
  @Nullable
  private PrintElement findPrintElement(int row, @NotNull Point pointInCell) {
    Collection<? extends PrintElement> printElements = myTable.getVisibleGraph().getRowInfo(row).getPrintElements();
    return myGraphCellPainter.getElementUnderCursor(printElements, pointInCell.x, pointInCell.y);
  }

  /**
   * Sends a graph action (click/hover) to the graph's action controller and
   * applies its answer. The pre-action selection is captured so it can be
   * restored if the action rebuilds the table model.
   */
  @Nullable
  private Cursor performGraphAction(@Nullable PrintElement printElement,
                                    @NotNull MouseEvent e,
                                    @NotNull GraphAction.Type actionType) {
    boolean isClickOnGraphElement = actionType == GraphAction.Type.MOUSE_CLICK && printElement != null;
    if (isClickOnGraphElement) {
      triggerElementClick(printElement);
    }

    Selection previousSelection = myTable.getSelection();
    GraphAnswer<Integer> answer =
      myTable.getVisibleGraph().getActionController().performAction(new GraphAction.GraphActionImpl(printElement, actionType));
    return handleGraphAnswer(answer, isClickOnGraphElement, previousSelection, e);
  }

  /**
   * Applies a graph answer: refreshes the model if data could have changed
   * (restoring the previous selection), repaints if requested, and jumps to
   * the target commit's row — or shows an "arrow" tooltip when the target is
   * not visible. Returns the cursor to set, if any.
   */
  @Nullable
  Cursor handleGraphAnswer(@Nullable GraphAnswer<Integer> answer,
                           boolean dataCouldChange,
                           @Nullable Selection previousSelection,
                           @Nullable MouseEvent e) {
    if (dataCouldChange) {
      myTable.getModel().fireTableDataChanged();

      // since fireTableDataChanged clears selection we restore it here
      if (previousSelection != null) {
        previousSelection.restore(myTable.getVisibleGraph(), answer == null || (answer.getCommitToJump() != null && answer.doJump()), false);
      }
    }

    if (answer == null) {
      return null;
    }

    if (answer.isRepaintRequired()) myTable.repaint();
    if (answer.getCommitToJump() != null) {
      Integer row = myTable.getModel().getVisiblePack().getVisibleGraph().getVisibleRowIndex(answer.getCommitToJump());
      if (row != null && row >= 0 && answer.doJump()) {
        myTable.jumpToRow(row, true);
      }
      else if (e != null) {
        VcsLogUiUtil.showTooltip(myTable, new Point(e.getX() + 5, e.getY()), Balloon.Position.atRight,
                                 getArrowTooltipText(answer.getCommitToJump(), row));
      }
    }
    return answer.getCursorToSet();
  }

  /**
   * Human-readable tooltip for an edge arrow pointing at {@code commit}.
   * Falls back to the short hash (optionally with the repository root name)
   * while the commit details are still loading.
   */
  @NotNull
  @Nls
  private String getArrowTooltipText(int commit, @Nullable Integer row) {
    VcsShortCommitDetails details;
    if (row != null && row >= 0) {
      details = myTable.getModel().getCommitMetadata(row); // preload rows around the commit
    }
    else {
      details = myLogData.getMiniDetailsGetter().getCommitData(commit, Collections.singleton(commit)); // preload just the commit
    }

    if (details instanceof LoadingDetails) {
      // Details not loaded yet: identify the commit by its hash.
      CommitId commitId = myLogData.getCommitId(commit);
      if (commitId != null) {
        if (myLogData.getRoots().size() > 1) {
          return VcsLogBundle.message("vcs.log.graph.arrow.tooltip.jump.to.hash.in.root", commitId.getHash().toShortString(),
                                      commitId.getRoot().getName());
        }
        return VcsLogBundle.message("vcs.log.graph.arrow.tooltip.jump.to.hash", commitId.getHash().toShortString());
      }
      return "";
    }
    else {
      long time = details.getAuthorTime();
      String shortenedSubject = StringUtil.shortenTextWithEllipsis(details.getSubject(), 50, 0, "...");
      String commitMessage = HtmlChunk.text("\"" + shortenedSubject + "\"").bold().toString();
      return VcsLogBundle.message("vcs.log.graph.arrow.tooltip.jump.to.subject.author.date.time", commitMessage,
                                  CommitPresentationUtil.getAuthorPresentation(details), DateFormatUtil.formatDate(time),
                                  DateFormatUtil.formatTime(time));
    }
  }

  /**
   * Shows the commit tooltip at the given point; returns false when no
   * tooltip component is available for this cell. Disables the
   * expandable-items handler so the two popups don't fight.
   */
  private boolean showTooltip(int row, @NotNull Point pointInCell, @NotNull Point point, boolean now) {
    JComponent tipComponent = getTooltip(myTable.getValueAt(row, myTable.getColumnViewIndex(Commit.INSTANCE)), pointInCell, row);

    if (tipComponent != null) {
      myTable.getExpandableItemsHandler().setEnabled(false);
      IdeTooltip tooltip =
        new IdeTooltip(myTable, point, new Wrapper(tipComponent)).setPreferredPosition(Balloon.Position.below);
      IdeTooltipManager.getInstance().show(tooltip, now);
      return true;
    }
    return false;
  }

  // Programmatically shows the tooltip for a row, anchored mid-row-height.
  void showTooltip(int row) {
    Point topLeftCorner = new Point(myTable.getColumnLeftXCoordinate(myTable.getColumnViewIndex(Commit.INSTANCE)),
                                    row * myTable.getRowHeight());
    Point pointInCell = new Point(getTooltipXCoordinate(row), myTable.getRowHeight() / 2);
    showTooltip(row, pointInCell, new Point(topLeftCorner.x + pointInCell.x, topLeftCorner.y + pointInCell.y), true);
  }

  // Usage statistics: record what kind of graph element was clicked.
  private static void triggerElementClick(@NotNull PrintElement printElement) {
    if (printElement instanceof NodePrintElement) {
      VcsLogUsageTriggerCollector.triggerClick("node");
    }
    else if (printElement instanceof EdgePrintElement) {
      if (((EdgePrintElement)printElement).hasArrow()) {
        VcsLogUsageTriggerCollector.triggerClick("arrow");
      }
    }
  }
}
// Copyright 2015 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.bazel.repository; import com.google.devtools.build.lib.bazel.repository.RepositoryFunction.RepositoryFunctionException; import com.google.devtools.build.lib.events.Reporter; import com.google.devtools.build.lib.packages.AggregatingAttributeMapper; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.syntax.Type; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.skyframe.SkyFunction; import com.google.devtools.build.skyframe.SkyFunctionException.Transience; import com.google.devtools.build.skyframe.SkyFunctionName; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.Status; import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.api.errors.InvalidRefNameException; import org.eclipse.jgit.api.errors.InvalidRemoteException; import org.eclipse.jgit.api.errors.RefNotFoundException; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.storage.file.FileRepositoryBuilder; import java.io.IOException; import java.util.Objects; import java.util.Set; 
import javax.annotation.Nullable; /** * Clones a Git repository, checks out the provided branch, tag, or commit, and * clones submodules if specified. */ public class GitCloneFunction implements SkyFunction { public static final String NAME = "GIT_CLONE"; private Reporter reporter; public void setReporter(Reporter reporter) { this.reporter = reporter; } private boolean isUpToDate(GitRepositoryDescriptor descriptor) { // Initializing/checking status of/etc submodules cleanly is hard, so don't try for now. if (descriptor.initSubmodules) { return false; } Repository repository = null; try { repository = new FileRepositoryBuilder() .setGitDir(descriptor.directory.getChild(Constants.DOT_GIT).getPathFile()) .setMustExist(true) .build(); ObjectId head = repository.resolve(Constants.HEAD); ObjectId checkout = repository.resolve(descriptor.checkout); if (head != null && checkout != null && head.equals(checkout)) { Status status = Git.wrap(repository).status().call(); if (!status.hasUncommittedChanges()) { // new_git_repository puts (only) BUILD and WORKSPACE, and // git_repository doesn't add any files. Set<String> untracked = status.getUntracked(); if (untracked.isEmpty() || (untracked.size() == 2 && untracked.contains("BUILD") && untracked.contains("WORKSPACE"))) { return true; } } } } catch (GitAPIException | IOException e) { // Any exceptions here, we'll just blow it away and try cloning fresh. // The fresh clone avoids any weirdness due to what's there and has nicer // error reporting. 
} finally { if (repository != null) { repository.close(); } } return false; } @Nullable @Override public SkyValue compute(SkyKey skyKey, Environment env) throws RepositoryFunctionException { GitRepositoryDescriptor descriptor = (GitRepositoryDescriptor) skyKey.argument(); Git git = null; try { if (descriptor.directory.exists()) { if (isUpToDate(descriptor)) { return new HttpDownloadValue(descriptor.directory); } try { FileSystemUtils.deleteTree(descriptor.directory); } catch (IOException e) { throw new RepositoryFunctionException(e, Transience.TRANSIENT); } } git = Git.cloneRepository() .setURI(descriptor.remote) .setDirectory(descriptor.directory.getPathFile()) .setCloneSubmodules(false) .setNoCheckout(true) .setProgressMonitor(new GitProgressMonitor("Cloning " + descriptor.remote, reporter)) .call(); git.checkout() .setCreateBranch(true) .setName("bazel-checkout") .setStartPoint(descriptor.checkout) .call(); // Using CloneCommand.setCloneSubmodules() results in SubmoduleInitCommand and // SubmoduleUpdateCommand to be called recursively for all submodules. This is not // desirable for repositories, such as github.com/rust-lang/rust-installer, which // recursively includes itself as a submodule, which would result in an infinite // loop if submodules are cloned recursively. For now, limit submodules to only // the first level. 
if (descriptor.initSubmodules && !git.submoduleInit().call().isEmpty()) { git .submoduleUpdate() .setProgressMonitor( new GitProgressMonitor("Cloning submodules for " + descriptor.remote, reporter)) .call(); } } catch (InvalidRemoteException e) { throw new RepositoryFunctionException( new IOException("Invalid Git repository URI: " + e.getMessage()), Transience.PERSISTENT); } catch (RefNotFoundException|InvalidRefNameException e) { throw new RepositoryFunctionException( new IOException("Invalid branch, tag, or commit: " + e.getMessage()), Transience.PERSISTENT); } catch (GitAPIException e) { throw new RepositoryFunctionException( new IOException(e.getMessage()), Transience.TRANSIENT); } finally { if (git != null) { git.close(); } } return new HttpDownloadValue(descriptor.directory); } @Nullable @Override public String extractTag(SkyKey skyKey) { return null; } public static SkyKey key(Rule rule, Path outputDirectory) throws RepositoryFunctionException { AggregatingAttributeMapper mapper = AggregatingAttributeMapper.of(rule); if ((mapper.has("commit", Type.STRING) == mapper.has("tag", Type.STRING)) && (mapper.get("commit", Type.STRING).isEmpty() == mapper.get("tag", Type.STRING).isEmpty())) { throw new RepositoryFunctionException( new EvalException(rule.getLocation(), "One of either commit or tag must be defined"), Transience.PERSISTENT); } String startingPoint; if (mapper.has("commit", Type.STRING) && !mapper.get("commit", Type.STRING).isEmpty()) { startingPoint = mapper.get("commit", Type.STRING); } else { startingPoint = "tags/" + mapper.get("tag", Type.STRING); } return new SkyKey( SkyFunctionName.create(NAME), new GitCloneFunction.GitRepositoryDescriptor( mapper.get("remote", Type.STRING), startingPoint, mapper.get("init_submodules", Type.BOOLEAN), outputDirectory)); } static final class GitRepositoryDescriptor { private String remote; private String checkout; private boolean initSubmodules; private Path directory; public GitRepositoryDescriptor(String remote, 
String checkout, boolean initSubmodules, Path directory) { this.remote = remote; this.checkout = checkout; this.initSubmodules = initSubmodules; this.directory = directory; } @Override public String toString() { return remote + " -> " + directory + " (" + checkout + ") submodules: " + initSubmodules; } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof GitRepositoryDescriptor)) { return false; } GitRepositoryDescriptor other = (GitRepositoryDescriptor) obj; return Objects.equals(remote, other.remote) && Objects.equals(checkout, other.checkout) && Objects.equals(initSubmodules, other.initSubmodules) && Objects.equals(directory, other.directory); } @Override public int hashCode() { return Objects.hash(remote, checkout, initSubmodules, directory); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.server.coordinator;

import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import org.apache.commons.math3.util.FastMath;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.timeline.DataSegment;
import org.joda.time.Interval;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.NavigableSet;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Collectors;

/**
 * Balancer strategy that places/moves segments so as to minimize a pairwise "cost"
 * between segments, where cost decays exponentially with the time-distance between
 * segment intervals (half-life {@code HALF_LIFE} hours) and doubles for segments of
 * the same datasource. Cost evaluation across servers is parallelized on {@code exec}.
 */
public class CostBalancerStrategy implements BalancerStrategy
{
  private static final EmittingLogger log = new EmittingLogger(CostBalancerStrategy.class);

  private static final double HALF_LIFE = 24.0; // cost function half-life in hours
  static final double LAMBDA = Math.log(2) / HALF_LIFE;
  static final double INV_LAMBDA_SQUARE = 1 / (LAMBDA * LAMBDA);

  private static final double MILLIS_IN_HOUR = 3_600_000.0;
  // Dividing interval millis by this converts them to "lambda units" so that
  // intervalCost() can assume lambda = 1.
  private static final double MILLIS_FACTOR = MILLIS_IN_HOUR / LAMBDA;

  /**
   * This defines the unnormalized cost function between two segments.
   *
   * See https://github.com/apache/druid/pull/2972 for more details about the cost function.
   *
   * intervalCost: segments close together are more likely to be queried together
   *
   * multiplier: if two segments belong to the same data source, they are more likely to be involved
   * in the same queries
   *
   * @param segmentA The first DataSegment.
   * @param segmentB The second DataSegment.
   *
   * @return the joint cost of placing the two DataSegments together on one node.
   */
  public static double computeJointSegmentsCost(final DataSegment segmentA, final DataSegment segmentB)
  {
    final Interval intervalA = segmentA.getInterval();
    final Interval intervalB = segmentB.getInterval();

    // All times are normalized relative to the start of interval A, in lambda units.
    final double t0 = intervalA.getStartMillis();
    final double t1 = (intervalA.getEndMillis() - t0) / MILLIS_FACTOR;
    final double start = (intervalB.getStartMillis() - t0) / MILLIS_FACTOR;
    final double end = (intervalB.getEndMillis() - t0) / MILLIS_FACTOR;

    // constant cost-multiplier for segments of the same datasource
    final double multiplier = segmentA.getDataSource().equals(segmentB.getDataSource()) ? 2.0 : 1.0;

    return INV_LAMBDA_SQUARE * intervalCost(t1, start, end) * multiplier;
  }

  /**
   * Computes the joint cost of two intervals X = [x_0 = 0, x_1) and Y = [y_0, y_1)
   *
   * cost(X, Y) = \int_{x_0}^{x_1} \int_{y_0}^{y_1} e^{-\lambda |x-y|}dxdy $$
   *
   * lambda = 1 in this particular implementation
   *
   * Other values of lambda can be calculated by multiplying inputs by lambda
   * and multiplying the result by 1 / lambda ^ 2
   *
   * Interval start and end are all relative to x_0.
   * Therefore this function assumes x_0 = 0, x1 >= 0, and y1 > y0
   *
   * @param x1 end of interval X
   * @param y0 start of interval Y
   * @param y1 end of interval Y
   *
   * @return joint cost of X and Y
   */
  public static double intervalCost(double x1, double y0, double y1)
  {
    // Degenerate intervals contribute no cost.
    if (x1 == 0 || y1 == y0) {
      return 0;
    }

    // cost(X, Y) = cost(Y, X), so we swap X and Y to
    // have x_0 <= y_0 and simplify the calculations below
    if (y0 < 0) {
      // swap X and Y
      double tmp = x1;
      x1 = y1 - y0;
      y1 = tmp - y0;
      y0 = -y0;
    }

    // since x_0 <= y_0, Y must overlap X if y_0 < x_1
    if (y0 < x1) {
      /*
       * We have two possible cases of overlap:
       *
       * X  = [ A )[ B )[ C )   or  [ A )[ B )
       * Y  =      [   )                [   )[ C )
       *
       * A is empty if y0 = 0
       * C is empty if y1 = x1
       *
       * cost(X, Y) = cost(A, Y) + cost(B, C) + cost(B, B)
       *
       * cost(A, Y) and cost(B, C) can be calculated using the non-overlapping case,
       * which reduces the overlapping case to computing
       *
       * cost(B, B) = \int_0^{\beta} \int_{0}^{\beta} e^{-|x-y|}dxdy
       *            = 2 \cdot (\beta + e^{-\beta} - 1)
       *
       * where \beta is the length of interval B
       *
       */
      final double beta;  // b1 - y0, length of interval B
      final double gamma; // c1 - y0, length of interval C
      if (y1 <= x1) {
        beta = y1 - y0;
        gamma = x1 - y0;
      } else {
        beta = x1 - y0;
        gamma = y1 - y0;
      }
      //noinspection SuspiciousNameCombination
      return intervalCost(y0, y0, y1) + // cost(A, Y)
             intervalCost(beta, beta, gamma) + // cost(B, C)
             2 * (beta + FastMath.exp(-beta) - 1); // cost(B, B)
    } else {
      /*
       * In the case where there is no overlap:
       *
       * Given that x_0 <= y_0,
       * then x <= y must be true for all x in [x_0, x_1] and y in [y_0, y_1).
       *
       * therefore,
       *
       * cost(X, Y) = \int_0^{x_1} \int_{y_0}^{y_1} e^{-|x-y|} dxdy
       *            = \int_0^{x_1} \int_{y_0}^{y_1} e^{x-y} dxdy
       *            = (e^{-y_1} - e^{-y_0}) - (e^{x_1-y_1} - e^{x_1-y_0})
       *
       * Note, this expression could be further reduced by factoring out (e^{x_1} - 1),
       * but we prefer to keep the smaller values x_1 - y_0 and x_1 - y_1 in the exponent
       * to avoid numerical overflow caused by calculating e^{x_1}
       */
      final double exy0 = FastMath.exp(x1 - y0);
      final double exy1 = FastMath.exp(x1 - y1);
      final double ey0 = FastMath.exp(0f - y0);
      final double ey1 = FastMath.exp(0f - y1);

      return (ey1 - ey0) - (exy1 - exy0);
    }
  }

  // Executor used to fan out per-server cost computations in parallel.
  private final ListeningExecutorService exec;

  public CostBalancerStrategy(ListeningExecutorService exec)
  {
    this.exec = exec;
  }

  /**
   * Picks the cheapest server that is not already serving the segment, for replication;
   * returns null if the best candidate already serves it (or no candidate was found).
   */
  @Override
  public ServerHolder findNewSegmentHomeReplicator(DataSegment proposalSegment, List<ServerHolder> serverHolders)
  {
    ServerHolder holder = chooseBestServer(proposalSegment, serverHolders, false).rhs;
    if (holder != null && !holder.isServingSegment(proposalSegment)) {
      return holder;
    }
    return null;
  }

  /** Picks the cheapest server for a balancing move; the current server may be chosen. */
  @Override
  public ServerHolder findNewSegmentHomeBalancer(DataSegment proposalSegment, List<ServerHolder> serverHolders)
  {
    return chooseBestServer(proposalSegment, serverHolders, true).rhs;
  }

  /** Sums the pairwise cost of {@code segment} against every segment in {@code segmentSet}. */
  static double computeJointSegmentsCost(final DataSegment segment, final Iterable<DataSegment> segmentSet)
  {
    double totalCost = 0;
    for (DataSegment s : segmentSet) {
      totalCost += computeJointSegmentsCost(segment, s);
    }
    return totalCost;
  }

  @Override
  public BalancerSegmentHolder pickSegmentToMove(final List<ServerHolder> serverHolders)
  {
    return ReservoirSegmentSampler.getRandomBalancerSegmentHolder(serverHolders);
  }

  /**
   * Orders the given servers by the cost of hosting {@code toDrop}, highest cost first, so the
   * caller drops replicas from the most expensive servers. Returns an empty iterator if the
   * parallel cost computation fails (the failure is alerted, not rethrown).
   */
  @Override
  public Iterator<ServerHolder> pickServersToDrop(DataSegment toDrop, NavigableSet<ServerHolder> serverHolders)
  {
    List<ListenableFuture<Pair<Double, ServerHolder>>> futures = new ArrayList<>();

    for (final ServerHolder server : serverHolders) {
      futures.add(
          exec.submit(
              () -> Pair.of(computeCost(toDrop, server, true), server)
          )
      );
    }

    final ListenableFuture<List<Pair<Double, ServerHolder>>> resultsFuture = Futures.allAsList(futures);

    try {
      // results is an un-ordered list of a pair consisting of the 'cost' of a segment being on a server and the server
      List<Pair<Double, ServerHolder>> results = resultsFuture.get();
      return results.stream()
                    // Comparator.comparingDouble will order by lowest cost...
                    // reverse it because we want to drop from the highest cost servers first
                    .sorted(Comparator.comparingDouble((Pair<Double, ServerHolder> o) -> o.lhs).reversed())
                    .map(x -> x.rhs).collect(Collectors.toList())
                    .iterator();
    }
    catch (Exception e) {
      log.makeAlert(e, "Cost Balancer Multithread strategy wasn't able to complete cost computation.").emit();
    }
    return Collections.emptyIterator();
  }

  /**
   * Calculates the initial cost of the Druid segment configuration.
   *
   * @param serverHolders A list of ServerHolders for a particular tier.
   *
   * @return The initial cost of the Druid tier.
   */
  public double calculateInitialTotalCost(final List<ServerHolder> serverHolders)
  {
    double cost = 0;
    for (ServerHolder server : serverHolders) {
      // segments are dumped into an array because it's probably better than iterating the iterateAllSegments() result
      // quadratically in a loop, which can generate garbage in the form of Stream, Spliterator, Iterator, etc. objects
      // whose total memory volume exceeds the size of the DataSegment array.
      DataSegment[] segments = server.getServer().iterateAllSegments().toArray(new DataSegment[0]);
      for (DataSegment s1 : segments) {
        for (DataSegment s2 : segments) {
          cost += computeJointSegmentsCost(s1, s2);
        }
      }
    }
    return cost;
  }

  /**
   * Calculates the cost normalization.  This is such that the normalized cost is lower bounded
   * by 1 (e.g. when each segment gets its own historical node).
   *
   * @param serverHolders A list of ServerHolders for a particular tier.
   *
   * @return The normalization value (the sum of the diagonal entries in the
   *         pairwise cost matrix).  This is the cost of a cluster if each
   *         segment were to get its own historical node.
   */
  public double calculateNormalization(final List<ServerHolder> serverHolders)
  {
    double cost = 0;
    for (ServerHolder server : serverHolders) {
      for (DataSegment segment : server.getServer().iterateAllSegments()) {
        cost += computeJointSegmentsCost(segment, segment);
      }
    }
    return cost;
  }

  /** Emits per-tier cost stats (raw, normalization, and normalized-cost-x1000) and logs them. */
  @Override
  public void emitStats(String tier, CoordinatorStats stats, List<ServerHolder> serverHolderList)
  {
    final double initialTotalCost = calculateInitialTotalCost(serverHolderList);
    final double normalization = calculateNormalization(serverHolderList);
    final double normalizedInitialCost = initialTotalCost / normalization;

    stats.addToTieredStat("initialCost", tier, (long) initialTotalCost);
    stats.addToTieredStat("normalization", tier, (long) normalization);
    stats.addToTieredStat("normalizedInitialCostTimesOneThousand", tier, (long) (normalizedInitialCost * 1000));

    log.info(
        "[%s]: Initial Total Cost: [%f], Normalization: [%f], Initial Normalized Cost: [%f]",
        tier, initialTotalCost, normalization, normalizedInitialCost
    );
  }

  /**
   * Cost of placing {@code proposalSegment} on {@code server}; POSITIVE_INFINITY if the server
   * is ineligible (already serving it when not allowed, not enough space, or already loading it).
   */
  protected double computeCost(
      final DataSegment proposalSegment,
      final ServerHolder server,
      final boolean includeCurrentServer
  )
  {
    final long proposalSegmentSize = proposalSegment.getSize();

    // (optional) Don't include server if it is already serving segment
    if (!includeCurrentServer && server.isServingSegment(proposalSegment)) {
      return Double.POSITIVE_INFINITY;
    }

    // Don't calculate cost if the server doesn't have enough space or is loading the segment
    if (proposalSegmentSize > server.getAvailableSize() || server.isLoadingSegment(proposalSegment)) {
      return Double.POSITIVE_INFINITY;
    }

    // The contribution to the total cost of a given server by proposing to move the segment to that server is...
    double cost = 0d;

    // the sum of the costs of other (exclusive of the proposalSegment) segments on the server
    cost += computeJointSegmentsCost(
        proposalSegment,
        Iterables.filter(server.getServer().iterateAllSegments(), segment -> !proposalSegment.equals(segment))
    );

    //  plus the costs of segments that will be loaded
    cost += computeJointSegmentsCost(proposalSegment, server.getPeon().getSegmentsToLoad());

    // minus the costs of segments that are marked to be dropped
    cost -= computeJointSegmentsCost(proposalSegment, server.getPeon().getSegmentsMarkedToDrop());

    return cost;
  }

  /**
   * For assignment, we want to move to the lowest cost server that isn't already serving the segment.
   *
   * @param proposalSegment A DataSegment that we are proposing to move.
   * @param serverHolders An iterable of ServerHolders for a particular tier.
   *
   * @return A ServerHolder with the new home for a segment.
   */
  protected Pair<Double, ServerHolder> chooseBestServer(
      final DataSegment proposalSegment,
      final Iterable<ServerHolder> serverHolders,
      final boolean includeCurrentServer
  )
  {
    Pair<Double, ServerHolder> bestServer = Pair.of(Double.POSITIVE_INFINITY, null);

    List<ListenableFuture<Pair<Double, ServerHolder>>> futures = new ArrayList<>();

    for (final ServerHolder server : serverHolders) {
      futures.add(
          exec.submit(
              () -> Pair.of(computeCost(proposalSegment, server, includeCurrentServer), server)
          )
      );
    }

    final ListenableFuture<List<Pair<Double, ServerHolder>>> resultsFuture = Futures.allAsList(futures);
    final List<Pair<Double, ServerHolder>> bestServers = new ArrayList<>();
    bestServers.add(bestServer);
    try {
      // Collect all servers tied for the minimum cost, then break the tie randomly.
      for (Pair<Double, ServerHolder> server : resultsFuture.get()) {
        if (server.lhs <= bestServers.get(0).lhs) {
          if (server.lhs < bestServers.get(0).lhs) {
            bestServers.clear();
          }
          bestServers.add(server);
        }
      }

      // Randomly choose a server from the best servers
      bestServer = bestServers.get(ThreadLocalRandom.current().nextInt(bestServers.size()));
    }
    catch (Exception e) {
      log.makeAlert(e, "Cost Balancer Multithread strategy wasn't able to complete cost computation.").emit();
    }
    return bestServer;
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.internal;

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreSearchContext;
import org.elasticsearch.search.suggest.SuggestionSearchContext;

import java.util.List;
import java.util.Map;

/**
 * A {@link SearchContext} decorator: every method forwards to the wrapped delegate {@code in}.
 * Subclasses override individual methods to customize behavior while inheriting pure
 * pass-through behavior for everything else (decorator pattern).
 */
public abstract class FilteredSearchContext extends SearchContext {

    // The wrapped context every call is forwarded to.
    private final SearchContext in;

    public FilteredSearchContext(SearchContext in) {
        //inner_hits in percolator ends up with null inner search context
        super(in == null ? ParseFieldMatcher.EMPTY : in.parseFieldMatcher(), in);
        this.in = in;
    }

    // ---- lifecycle ----

    @Override
    protected void doClose() { in.doClose(); }

    @Override
    public void preProcess() { in.preProcess(); }

    // ---- request / shard identity ----

    @Override
    public Query searchFilter(String[] types) { return in.searchFilter(types); }

    @Override
    public long id() { return in.id(); }

    @Override
    public String source() { return in.source(); }

    @Override
    public ShardSearchRequest request() { return in.request(); }

    @Override
    public SearchType searchType() { return in.searchType(); }

    @Override
    public SearchContext searchType(SearchType searchType) { return in.searchType(searchType); }

    @Override
    public SearchShardTarget shardTarget() { return in.shardTarget(); }

    @Override
    public int numberOfShards() { return in.numberOfShards(); }

    @Override
    public boolean hasTypes() { return in.hasTypes(); }

    @Override
    public String[] types() { return in.types(); }

    @Override
    public float queryBoost() { return in.queryBoost(); }

    @Override
    public SearchContext queryBoost(float queryBoost) { return in.queryBoost(queryBoost); }

    @Override
    public long getOriginNanoTime() { return in.getOriginNanoTime(); }

    @Override
    protected long nowInMillisImpl() { return in.nowInMillisImpl(); }

    // ---- scroll / aggregations / highlight / suggest / rescore ----

    @Override
    public ScrollContext scrollContext() { return in.scrollContext(); }

    @Override
    public SearchContext scrollContext(ScrollContext scroll) { return in.scrollContext(scroll); }

    @Override
    public SearchContextAggregations aggregations() { return in.aggregations(); }

    @Override
    public SearchContext aggregations(SearchContextAggregations aggregations) { return in.aggregations(aggregations); }

    @Override
    public SearchContextHighlight highlight() { return in.highlight(); }

    @Override
    public void highlight(SearchContextHighlight highlight) { in.highlight(highlight); }

    @Override
    public void innerHits(InnerHitsContext innerHitsContext) { in.innerHits(innerHitsContext); }

    @Override
    public InnerHitsContext innerHits() { return in.innerHits(); }

    @Override
    public SuggestionSearchContext suggest() { return in.suggest(); }

    @Override
    public void suggest(SuggestionSearchContext suggest) { in.suggest(suggest); }

    @Override
    public List<RescoreSearchContext> rescore() { return in.rescore(); }

    @Override
    public void addRescore(RescoreSearchContext rescore) { in.addRescore(rescore); }

    // ---- fetch phase: fields / source ----

    @Override
    public boolean hasScriptFields() { return in.hasScriptFields(); }

    @Override
    public ScriptFieldsContext scriptFields() { return in.scriptFields(); }

    @Override
    public boolean sourceRequested() { return in.sourceRequested(); }

    @Override
    public boolean hasFetchSourceContext() { return in.hasFetchSourceContext(); }

    @Override
    public FetchSourceContext fetchSourceContext() { return in.fetchSourceContext(); }

    @Override
    public SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext) { return in.fetchSourceContext(fetchSourceContext); }

    // ---- index services ----

    @Override
    public ContextIndexSearcher searcher() { return in.searcher(); }

    @Override
    public IndexShard indexShard() { return in.indexShard(); }

    @Override
    public MapperService mapperService() { return in.mapperService(); }

    @Override
    public AnalysisService analysisService() { return in.analysisService(); }

    @Override
    public SimilarityService similarityService() { return in.similarityService(); }

    @Override
    public ScriptService scriptService() { return in.scriptService(); }

    @Override
    public PageCacheRecycler pageCacheRecycler() { return in.pageCacheRecycler(); }

    @Override
    public BigArrays bigArrays() { return in.bigArrays(); }

    @Override
    public BitsetFilterCache bitsetFilterCache() { return in.bitsetFilterCache(); }

    @Override
    public IndexFieldDataService fieldData() { return in.fieldData(); }

    // ---- query-phase settings ----

    @Override
    public long timeoutInMillis() { return in.timeoutInMillis(); }

    @Override
    public void timeoutInMillis(long timeoutInMillis) { in.timeoutInMillis(timeoutInMillis); }

    @Override
    public int terminateAfter() { return in.terminateAfter(); }

    @Override
    public void terminateAfter(int terminateAfter) { in.terminateAfter(terminateAfter); }

    @Override
    public SearchContext minimumScore(float minimumScore) { return in.minimumScore(minimumScore); }

    @Override
    public Float minimumScore() { return in.minimumScore(); }

    @Override
    public SearchContext sort(Sort sort) { return in.sort(sort); }

    @Override
    public Sort sort() { return in.sort(); }

    @Override
    public SearchContext trackScores(boolean trackScores) { return in.trackScores(trackScores); }

    @Override
    public boolean trackScores() { return in.trackScores(); }

    @Override
    public SearchContext parsedPostFilter(ParsedQuery postFilter) { return in.parsedPostFilter(postFilter); }

    @Override
    public ParsedQuery parsedPostFilter() { return in.parsedPostFilter(); }

    @Override
    public Query aliasFilter() { return in.aliasFilter(); }

    @Override
    public SearchContext parsedQuery(ParsedQuery query) { return in.parsedQuery(query); }

    @Override
    public ParsedQuery parsedQuery() { return in.parsedQuery(); }

    @Override
    public Query query() { return in.query(); }

    @Override
    public int from() { return in.from(); }

    @Override
    public SearchContext from(int from) { return in.from(from); }

    @Override
    public int size() { return in.size(); }

    @Override
    public SearchContext size(int size) { return in.size(size); }

    @Override
    public boolean hasFieldNames() { return in.hasFieldNames(); }

    @Override
    public List<String> fieldNames() { return in.fieldNames(); }

    @Override
    public void emptyFieldNames() { in.emptyFieldNames(); }

    @Override
    public boolean explain() { return in.explain(); }

    @Override
    public void explain(boolean explain) { in.explain(explain); }

    @Override
    public List<String> groupStats() { return in.groupStats(); }

    @Override
    public void groupStats(List<String> groupStats) { in.groupStats(groupStats); }

    @Override
    public boolean version() { return in.version(); }

    @Override
    public void version(boolean version) { in.version(version); }

    // ---- doc ids to load (fetch phase) ----

    @Override
    public int[] docIdsToLoad() { return in.docIdsToLoad(); }

    @Override
    public int docIdsToLoadFrom() { return in.docIdsToLoadFrom(); }

    @Override
    public int docIdsToLoadSize() { return in.docIdsToLoadSize(); }

    @Override
    public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize) { return in.docIdsToLoad(docIdsToLoad, docsIdsToLoadFrom, docsIdsToLoadSize); }

    // ---- keep-alive bookkeeping ----

    @Override
    public void accessed(long accessTime) { in.accessed(accessTime); }

    @Override
    public long lastAccessTime() { return in.lastAccessTime(); }

    @Override
    public long keepAlive() { return in.keepAlive(); }

    @Override
    public void keepAlive(long keepAlive) { in.keepAlive(keepAlive); }

    // ---- results / lookups / misc ----

    @Override
    public SearchLookup lookup() { return in.lookup(); }

    @Override
    public DfsSearchResult dfsResult() { return in.dfsResult(); }

    @Override
    public QuerySearchResult queryResult() { return in.queryResult(); }

    @Override
    public FetchSearchResult fetchResult() { return in.fetchResult(); }

    @Override
    public MappedFieldType smartNameFieldType(String name) { return in.smartNameFieldType(name); }

    @Override
    public ObjectMapper getObjectMapper(String name) { return in.getObjectMapper(name); }

    @Override
    public Counter timeEstimateCounter() { return in.timeEstimateCounter(); }

    @Override
    public <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory) { return in.getFetchSubPhaseContext(contextFactory); }

    @Override
    public Profilers getProfilers() { return in.getProfilers(); }

    @Override
    public Map<Class<?>, Collector> queryCollectors() { return in.queryCollectors();}
}
/*
 * Copyright 2009 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.javascript.jscomp.MakeDeclaredNamesUnique.ContextualRenamer;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;

import java.util.Set;

/**
 * Methods necessary for partially or fully decomposing an expression.
 * Initially this is intended to expand the locations where inlining can
 * occur, but it has other uses as well.
 *
 * For example:
 *    var x = y() + z();
 *
 * Becomes:
 *    var a = y();
 *    var b = z();
 *    x = a + b;
 *
 * @author johnlenz@google.com (John Lenz)
 */
class ExpressionDecomposer {

  /**
   * @see #canExposeExpression
   */
  enum DecompositionType {
    UNDECOMPOSABLE,
    MOVABLE,
    DECOMPOSABLE
  }

  private final AbstractCompiler compiler;
  private final Supplier<String> safeNameIdSupplier;
  // Names known to be constant; extracted temporaries are added here too
  // (see getTempConstantValueName).
  private final Set<String> knownConstants;

  public ExpressionDecomposer(
      AbstractCompiler compiler,
      Supplier<String> safeNameIdSupplier,
      Set<String> constNames) {
    Preconditions.checkNotNull(compiler);
    Preconditions.checkNotNull(safeNameIdSupplier);
    Preconditions.checkNotNull(constNames);
    this.compiler = compiler;
    this.safeNameIdSupplier = safeNameIdSupplier;
    this.knownConstants = constNames;
  }

  // An arbitrary limit to prevent infinite recursion.
  private static final int MAX_ITERATIONS = 100;

  /**
   * If required, rewrite the statement containing the expression.
   * @param expression The expression to be exposed.
   * @see #canExposeExpression
   */
  void maybeDecomposeExpression(Node expression) {
    // Decompose repeatedly until the expression is movable (each pass may
    // expose new decomposable structure), bounded to guard against a bug
    // causing non-termination.
    int i = 0;
    while (DecompositionType.DECOMPOSABLE == canExposeExpression(expression)) {
      exposeExpression(expression);
      i++;
      if (i > MAX_ITERATIONS) {
        throw new IllegalStateException(
            "DecomposeExpression depth exceeded on :\n"
            + expression.toStringTree());
      }
    }
  }

  /**
   * Perform any rewriting necessary so that the specified expression
   * is movable. This is a partial expression decomposition.
   * @see #canExposeExpression
   */
  void exposeExpression(Node expression) {
    Node expressionRoot = findExpressionRoot(expression);
    Preconditions.checkState(expressionRoot != null);
    exposeExpression(expressionRoot, expression);
    compiler.reportCodeChange();
  }

  // TODO(johnlenz): This is not currently used by the function inliner,
  // as moving the call out of the expression before the actual function
  // results in additional variables being introduced. As the variable
  // inliner is improved, this might be a viable option.
  /**
   * Extract the specified expression from its parent expression.
   * @see #canExposeExpression
   */
  void moveExpression(Node expression) {
    String resultName = getTempValueName();  // Should this be constant?

    Node injectionPoint = findInjectionPoint(expression);
    Preconditions.checkNotNull(injectionPoint);
    Node injectionPointParent = injectionPoint.getParent();
    Preconditions.checkNotNull(injectionPointParent);
    Preconditions.checkState(NodeUtil.isStatementBlock(injectionPointParent));

    // Replace the expression with a reference to the new name.
    Node expressionParent = expression.getParent();
    expressionParent.replaceChild(
        expression, Node.newString(Token.NAME, resultName));

    // Re-add the expression at the appropriate place.
    Node newExpressionRoot = NodeUtil.newVarNode(resultName, expression);
    injectionPointParent.addChildBefore(newExpressionRoot, injectionPoint);
    compiler.reportCodeChange();
  }

  /**
   * Rewrite the expression such that the sub-expression is in a movable
   * expression statement while maintaining evaluation order.
   *
   * Two types of subexpressions are extracted from the source expression:
   * 1) subexpressions with side-effects.
   * 2) conditional expressions, that contain the call, which are transformed
   * into IF statements.
   *
   * The following terms are used:
   *    expressionRoot: The top-level node before which any extracted
   *                    expressions should be placed.
   *    nonconditionalExpr: The nearest ancestor of the sub-expression that is
   *                    not inside a conditional (HOOK/AND/OR) operand.
   */
  private void exposeExpression(Node expressionRoot, Node subExpression) {
    Node nonconditionalExpr = findNonconditionalParent(
        subExpression, expressionRoot);

    // Before extraction, record whether there are side-effects.
    boolean hasFollowingSideEffects = NodeUtil.mayHaveSideEffects(
        nonconditionalExpr);

    Node exprInjectionPoint = findInjectionPoint(nonconditionalExpr);
    DecompositionState state = new DecompositionState();
    state.sideEffects = hasFollowingSideEffects;
    state.extractBeforeStatement = exprInjectionPoint;

    // Extract expressions in the reverse order of their evaluation.
    for (Node child = nonconditionalExpr, parent = child.getParent();
         parent != expressionRoot;
         child = parent, parent = child.getParent()) {
      int parentType = parent.getType();
      Preconditions.checkState(
          !isConditionalOp(parent) || child == parent.getFirstChild());
      if (parentType == Token.ASSIGN) {
        if (isSafeAssign(parent, state.sideEffects)) {
          // It is always safe to inline "foo()" for expressions such as
          //   "a = b = c = foo();"
          // As the assignment is unaffected by side effect of "foo()"
          // and the names assigned-to can not influence the state before
          // the call to foo.
          //
          // This is not true of more complex LHS values, such as
          //    a.x = foo();
          //    next().x = foo();
          // in these cases the checks below are necessary.
        } else {
          // Alias "next()" in "next().foo"
          Node left = parent.getFirstChild();
          int type = left.getType();
          if (left != child) {
            Preconditions.checkState(NodeUtil.isGet(left));
            if (type == Token.GETELEM) {
              decomposeSubExpressions(left.getLastChild(), null, state);
            }
            decomposeSubExpressions(left.getFirstChild(), null, state);
          }
        }
      } else if (parentType == Token.CALL
          && NodeUtil.isGet(parent.getFirstChild())) {
        // TODO(johnlenz): In Internet Explorer, non-javascript objects such
        // as DOM objects can not be decomposed.
        if (!maybeExternMethod(parent.getFirstChild())) {
          throw new IllegalStateException(
              "External object method calls can not be decomposed.");
        } else {
          Node functionExpression = parent.getFirstChild();
          decomposeSubExpressions(
              functionExpression.getNext(), child, state);
          // Now handle the call expression
          if (isExpressionTreeUnsafe(
              functionExpression, state.sideEffects)) {
            // Either there were preexisting side-effects, or this node has
            // side-effects.
            state.sideEffects = true;

            // Rewrite the call so "this" is preserved.
            Node replacement = rewriteCallExpression(parent, state);
            // Continue from here.
            parent = replacement;
          }
        }
      } else if (parentType == Token.OBJECTLIT) {
        decomposeObjectLiteralKeys(
            parent.getFirstChild(), child, state);
      } else {
        decomposeSubExpressions(
            parent.getFirstChild(), child, state);
      }
    }

    // Now extract the expression that the decomposition is being performed
    // to allow to be moved. All expressions that need to be evaluated before
    // this have been extracted, so add the expression statement after the
    // other extracted expressions and the original statement (or replace
    // the original statement).
    if (nonconditionalExpr == subExpression) {
      // Don't extract the call, as that introduces an extra constant VAR
      // that will simply need to be inlined back. It will be handled as
      // an EXPRESSION call site type.
    } else {
      Node parent = nonconditionalExpr.getParent();
      boolean needResult = parent.getType() != Token.EXPR_RESULT;
      // NOTE(review): the returned IF node is intentionally unused here;
      // extractConditional mutates the tree in place.
      extractConditional(nonconditionalExpr, exprInjectionPoint, needResult);
    }
  }

  /**
   * @return Whether the node may represent an external method.
   */
  private boolean maybeExternMethod(Node node) {
    // TODO(johnlenz): Provide some mechanism for determining this.
    return true;
  }

  /**
   * @return "expression" or the node closest to "expression", that does not
   * have a conditional ancestor.
   */
  private static Node findNonconditionalParent(
      Node subExpression, Node expressionRoot) {
    Node result = subExpression;

    for (Node child = subExpression, parent = child.getParent();
         parent != expressionRoot;
         child = parent, parent = child.getParent()) {
      if (isConditionalOp(parent)) {
        // Only the first child is always executed, if the function may never
        // be called, don't inline it.
        if (child != parent.getFirstChild()) {
          result = parent;
        }
      }
    }

    return result;
  }

  /**
   * A simple class to track two things:
   *   - whether side effects have been seen.
   *   - the last statement inserted
   */
  private static class DecompositionState {
    boolean sideEffects;
    Node extractBeforeStatement;
  }

  /**
   * Decompose an object literal.
   * @param key The object literal key.
   * @param stopNode A node after which to stop iterating.
   */
  private void decomposeObjectLiteralKeys(
      Node key, Node stopNode, DecompositionState state) {
    if (key == null || key == stopNode) {
      return;
    }
    // Recurse to the last key first so extraction happens in reverse
    // evaluation order, matching decomposeSubExpressions.
    decomposeObjectLiteralKeys(key.getNext(), stopNode, state);
    decomposeSubExpressions(key.getFirstChild(), stopNode, state);
  }

  /**
   * @param n The node with which to start iterating.
   * @param stopNode A node after which to stop iterating.
   */
  private void decomposeSubExpressions(
      Node n, Node stopNode, DecompositionState state) {
    if (n == null || n == stopNode) {
      return;
    }

    // Never try to decompose an object literal key.
    Preconditions.checkState(!NodeUtil.isObjectLitKey(n, n.getParent()));

    // Decompose the children in reverse evaluation order. This simplifies
    // determining if any of the children following have side-effects.
    // If they do we need to be more aggressive about removing values
    // from the expression.
    decomposeSubExpressions(
        n.getNext(), stopNode, state);

    // Now this node.
    // TODO(johnlenz): Move "safety" code to a shared class.
    if (isExpressionTreeUnsafe(n, state.sideEffects)) {
      // Either there were preexisting side-effects, or this node has
      // side-effects.
      state.sideEffects = true;
      state.extractBeforeStatement = extractExpression(
          n, state.extractBeforeStatement);
    }
  }

  /**
   * @param expr The conditional expression to extract.
   * @param injectionPoint The node before which the extracted expression
   *     would be injected.
   * @param needResult Whether the result of the expression is required.
   * @return The node that contains the logic of the expression after
   *     extraction.
   */
  private Node extractConditional(
      Node expr, Node injectionPoint, boolean needResult) {
    Node parent = expr.getParent();
    String tempName = getTempValueName();

    // Break down the conditional.
    Node first = expr.getFirstChild();
    Node second = first.getNext();
    Node last = expr.getLastChild();

    // Isolate the children nodes.
    expr.detachChildren();

    // Transform the conditional to an IF statement.
    Node cond = null;
    Node trueExpr = new Node(Token.BLOCK).copyInformationFrom(expr);
    Node falseExpr = new Node(Token.BLOCK).copyInformationFrom(expr);
    switch (expr.getType()) {
      case Token.HOOK:
        // a = x?y:z --> if (x) {a=y} else {a=z}
        cond = first;
        trueExpr.addChildToFront(NodeUtil.newExpr(
            buildResultExpression(second, needResult, tempName)));
        falseExpr.addChildToFront(NodeUtil.newExpr(
            buildResultExpression(last, needResult, tempName)));
        break;
      case Token.AND:
        // a = x&&y --> if (a=x) {a=y} else {}
        cond = buildResultExpression(first, needResult, tempName);
        trueExpr.addChildToFront(NodeUtil.newExpr(
            buildResultExpression(last, needResult, tempName)));
        break;
      case Token.OR:
        // a = x||y --> if (a=x) {} else {a=y}
        cond = buildResultExpression(first, needResult, tempName);
        falseExpr.addChildToFront(NodeUtil.newExpr(
            buildResultExpression(last, needResult, tempName)));
        break;
      default:
        // With a valid tree we should never get here.
        throw new IllegalStateException("Unexpected.");
    }

    Node ifNode;
    if (falseExpr.hasChildren()) {
      ifNode = new Node(Token.IF, cond, trueExpr, falseExpr);
    } else {
      ifNode = new Node(Token.IF, cond, trueExpr);
    }
    ifNode.copyInformationFrom(expr);

    if (needResult) {
      Node tempVarNode = NodeUtil.newVarNode(tempName, null)
          .copyInformationFromForTree(expr);
      Node injectionPointParent = injectionPoint.getParent();
      injectionPointParent.addChildBefore(tempVarNode, injectionPoint);
      injectionPointParent.addChildAfter(ifNode, tempVarNode);

      // Replace the expression with the temporary name.
      Node replacementValueNode = Node.newString(Token.NAME, tempName);
      parent.replaceChild(expr, replacementValueNode);
    } else {
      // Only conditionals that are the direct child of an expression statement
      // don't need results, for those simply replace the expression statement.
      Preconditions.checkArgument(parent.getType() == Token.EXPR_RESULT);
      Node gramps = parent.getParent();
      gramps.replaceChild(parent, ifNode);
    }

    return ifNode;
  }

  /**
   * Create an expression tree for an expression.
   * If the result of the expression is needed, then:
   *    ASSIGN
   *       tempName
   *       expr
   * otherwise, simply:
   *       expr
   */
  private static Node buildResultExpression(
      Node expr, boolean needResult, String tempName) {
    if (needResult) {
      return new Node(Token.ASSIGN,
          Node.newString(Token.NAME, tempName),
          expr).copyInformationFromForTree(expr);
    } else {
      return expr;
    }
  }

  private boolean isConstantName(Node n, Set<String> knownConstants) {
    // Non-constant names values may have been changed.
    return NodeUtil.isName(n) && (NodeUtil.isConstantName(n)
        || knownConstants.contains(n.getString()));
  }

  /**
   * @param expr The expression to extract.
   * @param injectionPoint The node before which to add the extracted
   *     expression.
   * @return The extracted statement node.
   */
  private Node extractExpression(Node expr, Node injectionPoint) {
    Node parent = expr.getParent();

    boolean isLhsOfAssignOp = NodeUtil.isAssignmentOp(parent)
        && !NodeUtil.isAssign(parent)
        && parent.getFirstChild() == expr;

    Node firstExtractedNode = null;

    // Expressions on the LHS of an assignment-op must have any possible
    // side-effects extracted as the value must be duplicated:
    //    next().foo += 2;
    // becomes:
    //    var t1 = next();
    //    t1.foo = t1.foo + 2;
    if (isLhsOfAssignOp && NodeUtil.isGet(expr)) {
      for (Node n : expr.children()) {
        if (n.getType() != Token.STRING
            && !isConstantName(n, knownConstants)) {
          Node extractedNode = extractExpression(n, injectionPoint);
          if (firstExtractedNode == null) {
            firstExtractedNode = extractedNode;
          }
        }
      }
    }

    // The temp is known to be constant.
    String tempName = getTempConstantValueName();
    Node replacementValueNode = Node.newString(Token.NAME, tempName)
        .copyInformationFrom(expr);

    Node tempNameValue;

    // If it is ASSIGN_XXX, keep the assignment in place and extract the
    // original value of the LHS operand.
    if (isLhsOfAssignOp) {
      Preconditions.checkState(NodeUtil.isName(expr) || NodeUtil.isGet(expr));
      // Transform "x += 2" into "x = temp + 2"
      Node opNode = new Node(NodeUtil.getOpFromAssignmentOp(parent))
          .copyInformationFrom(parent);

      Node rightOperand = parent.getLastChild();

      parent.setType(Token.ASSIGN);
      parent.replaceChild(rightOperand, opNode);
      opNode.addChildToFront(replacementValueNode);
      opNode.addChildToBack(rightOperand);

      // The original expression is still being used, so make a clone.
      tempNameValue = expr.cloneTree();
    } else {
      // Replace the expression with the temporary name.
      parent.replaceChild(expr, replacementValueNode);

      // Keep the original node so that CALL expressions can still be found
      // and inlined properly.
      tempNameValue = expr;
    }

    // Re-add the expression in the declaration of the temporary name.
    Node tempVarNode = NodeUtil.newVarNode(tempName, tempNameValue);

    Node injectionPointParent = injectionPoint.getParent();
    injectionPointParent.addChildBefore(tempVarNode, injectionPoint);

    if (firstExtractedNode == null) {
      firstExtractedNode = tempVarNode;
    }
    return firstExtractedNode;
  }

  /**
   * Rewrite the call so "this" is preserved.
   *   a.b(c);
   * becomes:
   *   var temp1 = a;
   *   var temp0 = temp1.b;
   *   temp0.call(temp1,c);
   *
   * @return The replacement node.
   */
  private Node rewriteCallExpression(Node call, DecompositionState state) {
    Preconditions.checkArgument(call.getType() == Token.CALL);
    Node first = call.getFirstChild();
    Preconditions.checkArgument(NodeUtil.isGet(first));

    // Extracts the expression representing the function to call. For example:
    //   "a['b'].c" from "a['b'].c()"
    Node getVarNode = extractExpression(
        first, state.extractBeforeStatement);
    state.extractBeforeStatement = getVarNode;

    // Extracts the object reference to be used as "this". For example:
    //   "a['b']" from "a['b'].c"
    Node getExprNode = getVarNode.getFirstChild().getFirstChild();
    Preconditions.checkArgument(NodeUtil.isGet(getExprNode));
    Node thisVarNode = extractExpression(
        getExprNode.getFirstChild(), state.extractBeforeStatement);
    state.extractBeforeStatement = thisVarNode;

    // Rewrite the CALL expression.
    Node thisNameNode = thisVarNode.getFirstChild();
    Node functionNameNode = getVarNode.getFirstChild();

    // CALL
    //   GETPROP
    //     functionName
    //     "call"
    //   thisName
    //   original-parameter1
    //   original-parameter2
    //   ...
    Node newCall = new Node(Token.CALL,
        new Node(Token.GETPROP,
            functionNameNode.cloneNode(),
            Node.newString("call")),
        thisNameNode.cloneNode(), call.getLineno(), call.getCharno());

    // Throw away the call name
    call.removeFirstChild();
    if (call.hasChildren()) {
      // Add the call parameters to the new call.
      newCall.addChildrenToBack(call.removeChildren());
    }

    // Replace the call.
    Node callParent = call.getParent();
    callParent.replaceChild(call, newCall);

    return newCall;
  }

  private String tempNamePrefix = "JSCompiler_temp";

  /**
   * Allow the temp name to be overridden to make tests more readable.
   */
  @VisibleForTesting
  public void setTempNamePrefix(String tempNamePrefix) {
    this.tempNamePrefix = tempNamePrefix;
  }

  /**
   * Create a unique temp name.
   */
  private String getTempValueName() {
    return tempNamePrefix + ContextualRenamer.UNIQUE_ID_SEPARATOR
        + safeNameIdSupplier.get();
  }

  /**
   * Create a constant unique temp name.
   */
  private String getTempConstantValueName() {
    String name = tempNamePrefix + "_const"
        + ContextualRenamer.UNIQUE_ID_SEPARATOR
        + safeNameIdSupplier.get();
    // Record the name so later safety checks treat it as constant.
    this.knownConstants.add(name);
    return name;
  }

  /**
   * @return For the subExpression, find the nearest statement Node before
   *     which it can be inlined. Null if no such location can be found.
   */
  static Node findInjectionPoint(Node subExpression) {
    Node expressionRoot = findExpressionRoot(subExpression);
    Preconditions.checkNotNull(expressionRoot);

    Node injectionPoint = expressionRoot;

    // Walk up past any LABEL wrappers so the injection point sits directly
    // inside a statement block.
    Node parent = injectionPoint.getParent();
    while (parent.getType() == Token.LABEL) {
      injectionPoint = parent;
      parent = injectionPoint.getParent();
    }

    Preconditions.checkState(
        NodeUtil.isStatementBlock(injectionPoint.getParent()));
    return injectionPoint;
  }

  /**
   * @return Whether the node is a conditional op.
   */
  private static boolean isConditionalOp(Node n) {
    switch (n.getType()) {
      case Token.HOOK:
      case Token.AND:
      case Token.OR:
        return true;
      default:
        return false;
    }
  }

  /**
   * @return The statement containing the expression. Null if subExpression
   *     is not contained by a Node where inlining is known to be possible.
   *     For example, a WHILE node condition expression.
   */
  static Node findExpressionRoot(Node subExpression) {
    Node child = subExpression;
    for (Node parent : child.getAncestors()) {
      int parentType = parent.getType();
      switch (parentType) {
        // Supported expression roots:
        // SWITCH and IF can have multiple children, but the CASE, DEFAULT,
        // or BLOCK will be encountered first for any of the children other
        // than the condition.
        case Token.EXPR_RESULT:
        case Token.IF:
        case Token.SWITCH:
        case Token.RETURN:
        case Token.VAR:
          Preconditions.checkState(child == parent.getFirstChild());
          return parent;
        // Any of these indicate an unsupported expression:
        case Token.SCRIPT:
        case Token.BLOCK:
        case Token.LABEL:
        case Token.CASE:
        case Token.DEFAULT:
          return null;
      }
      child = parent;
    }

    throw new IllegalStateException("Unexpected AST structure.");
  }

  /**
   * Determine whether an expression is movable, or can be made movable by
   * decomposing the containing expression.
   *
   * A subExpression is MOVABLE if it can be replaced with a temporary holding
   * its results and moved to immediately before the root of the expression.
   * There are three conditions that must be met for this to occur:
   * 1) There must be a location to inject a statement for the expression. For
   *    example, this condition can not be met if the expression is a loop
   *    condition or CASE condition.
   * 2) If the expression can be affected by side-effects, there can not be a
   *    side-effect between the original location and the expression root.
   * 3) If the expression has side-effects, there can not be any other
   *    expression that can be affected between the original location and the
   *    expression root.
   *
   * An expression is DECOMPOSABLE if it can be rewritten so that a
   * subExpression is MOVABLE.
   *
   * An expression is decomposed by moving any other sub-expressions that
   * prevent a subExpression from being MOVABLE.
   *
   * @return Whether this is a call that can be moved to a new point in the
   *     AST to allow it to be inlined.
   */
  DecompositionType canExposeExpression(Node subExpression) {
    Node expressionRoot = findExpressionRoot(subExpression);
    if (expressionRoot != null) {
      return isSubexpressionMovable(expressionRoot, subExpression);
    }
    return DecompositionType.UNDECOMPOSABLE;
  }

  /**
   * Walk the AST from the call site to the expression root and verify that
   * the portions of the expression that are evaluated before the call are:
   * 1) Unaffected by the side-effects, if any, of the call.
   * 2) That there are no side-effects, that may influence the call.
   *
   * For example, if x has side-effects:
   *   a = 1 + x();
   * the call to x can be moved because "a"'s final value can not be
   * influenced by x(), but in:
   *   a = b + x();
   * the call to x can not be moved because the value of b may be modified
   * by the call to x.
   *
   * If x is without side-effects in:
   *   a = b + x();
   * the call to x can be moved, but in:
   *   a = (b.foo = c) + x();
   * the call to x can not be moved because the value of b.foo may be
   * referenced by x(). Note: this is true even if b is a local variable; the
   * object that b refers to may have a global alias.
   *
   * @return UNDECOMPOSABLE if the expression can not be moved, DECOMPOSABLE
   *     if decomposition is required before the expression can be moved,
   *     otherwise MOVABLE.
   */
  private DecompositionType isSubexpressionMovable(
      Node expressionRoot, Node subExpression) {
    boolean requiresDecomposition = false;
    boolean seenSideEffects = NodeUtil.mayHaveSideEffects(subExpression);

    Node child = subExpression;
    for (Node parent : child.getAncestors()) {
      if (parent == expressionRoot) {
        // Done. The walk back to the root of the expression is complete, and
        // nothing was encountered that blocks the call from being moved.
        return requiresDecomposition
            ? DecompositionType.DECOMPOSABLE
            : DecompositionType.MOVABLE;
      }

      int parentType = parent.getType();

      if (isConditionalOp(parent)) {
        // Only the first child is always executed, otherwise it must be
        // decomposed.
        if (child != parent.getFirstChild()) {
          requiresDecomposition = true;
        }
      } else {
        // Only inline the call if none of the preceding siblings in the
        // expression have side-effects, and are unaffected by the
        // side-effects, if any, of the call in question.
        // NOTE: This depends on the siblings being in the same order as they
        // are evaluated.

        // SPECIAL CASE: Assignment to a simple name
        if (isSafeAssign(parent, seenSideEffects)) {
          // It is always safe to inline "foo()" for expressions such as
          //   "a = b = c = foo();"
          // As the assignment is unaffected by side effect of "foo()"
          // and the names assigned-to can not influence the state before
          // the call to foo.
          //
          // This is not true of more complex LHS values, such as
          //    a.x = foo();
          //    next().x = foo();
          // in these cases the checks below are necessary.
        } else {
          // Everything else.
          for (Node n : parent.children()) {
            if (n == child) {
              // None of the preceding siblings have side-effects.
              // This is OK.
              break;
            }

            if (isExpressionTreeUnsafe(n, seenSideEffects)) {
              seenSideEffects = true;
              requiresDecomposition = true;
            }
          }

          // In Internet Explorer, DOM objects and other external objects
          // methods can not be called indirectly, as is required when the
          // object or its property can be side-effected. For example,
          // when exposing expression f() (with side-effects) in: x.m(f())
          // either the value of x or its property m might have changed, so
          // both the 'this' value ('x') and the function to be called
          // ('x.m') need to be preserved. Like so:
          //   var t1 = x, t2 = x.m, t3 = f();
          //   t2.call(t1, t3);
          // As IE doesn't support the call to these non-javascript objects
          // methods in this way, we can't do this.
          // We don't currently distinguish between these types of objects
          // in the extern definitions and if we did we would need accurate
          // type information.
          Node first = parent.getFirstChild();
          if (requiresDecomposition
              && parentType == Token.CALL
              && NodeUtil.isGet(first)) {
            if (maybeExternMethod(first)) {
              return DecompositionType.UNDECOMPOSABLE;
            } else {
              return DecompositionType.DECOMPOSABLE;
            }
          }
        }
      }
      // Continue looking up the expression tree.
      child = parent;
    }

    // With a valid tree we should never get here.
    throw new IllegalStateException("Unexpected.");
  }

  /**
   * It is always safe to inline "foo()" for expressions such as
   *    "a = b = c = foo();"
   * As the assignment is unaffected by side effect of "foo()"
   * and the names assigned-to can not influence the state before
   * the call to foo.
   *
   * It is also safe in cases like where the object is constant:
   *    CONST_NAME.a = foo()
   *    CONST_NAME[CONST_VALUE] = foo();
   *
   * This is not true of more complex LHS values, such as
   *     a.x = foo();
   *     next().x = foo();
   * in these cases the checks below are necessary.
   *
   * @param seenSideEffects If true, check to see if node-tree maybe affected
   *     by side-effects, otherwise if the tree has side-effects.
   *     @see #isExpressionTreeUnsafe
   * @return Whether the assignment is safe from side-effects.
   */
  private boolean isSafeAssign(Node n, boolean seenSideEffects) {
    if (n.getType() == Token.ASSIGN) {
      Node lhs = n.getFirstChild();
      switch (lhs.getType()) {
        case Token.NAME:
          return true;
        case Token.GETPROP:
          return !isExpressionTreeUnsafe(lhs.getFirstChild(), seenSideEffects);
        case Token.GETELEM:
          return !isExpressionTreeUnsafe(lhs.getFirstChild(), seenSideEffects)
              && !isExpressionTreeUnsafe(lhs.getLastChild(), seenSideEffects);
      }
    }
    return false;
  }

  /**
   * @return Whether anything in the expression tree prevents a call from
   *     being moved.
   */
  private boolean isExpressionTreeUnsafe(
      Node n, boolean followingSideEffectsExist) {
    if (followingSideEffectsExist) {
      // If the call to be inlined has side-effects, check to see if this
      // expression tree can be affected by any side-effects.

      // This is a superset of "NodeUtil.mayHaveSideEffects".
      return NodeUtil.canBeSideEffected(n, this.knownConstants);
    } else {
      // The function called doesn't have side-effects but check to see if
      // there are side-effects that may affect it.
      return NodeUtil.mayHaveSideEffects(n);
    }
  }
}
/* * Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * * (C) Copyright IBM Corp. 1999 All Rights Reserved. * Copyright 1997 The Open Group Research Institute. All rights reserved. */ package sun.security.jgss.spi; import org.ietf.jgss.*; import java.io.InputStream; import java.io.OutputStream; import java.security.Provider; import com.sun.security.jgss.*; /** * This interface is implemented by a mechanism specific instance of a GSS * security context. * A GSSContextSpi object can be thought of having 3 states: * -before initialization * -during initialization with its peer * -after it is established * <p> * The context options can only be requested in state 1. In state 3, * the per message operations are available to the callers. 
The get * methods for the context options will return the requested options * while in state 1 and 2, and the established values in state 3. * Some mechanisms may allow the access to the per-message operations * and the context flags before the context is fully established. The * isProtReady method is used to indicate that these services are * available. *<p> * <strong> * Context establishment tokens are defined in a mechanism independent * format in section 3.1 of RFC 2743. The GSS-Framework will add * and remove the mechanism independent header portion of this token format * depending on whether a token is received or is being sent. The mechanism * should only generate or expect to read the inner-context token portion. * <p> * On the other hand, tokens used for per-message calls are generated * entirely by the mechanism. It is possible that the mechanism chooses to * encase inner-level per-message tokens in a header similar to that used * for initial tokens, however, this is up to the mechanism to do. The token * to/from the per-message calls are opaque to the GSS-Framework. * </strong> * <p> * An attempt has been made to allow for reading the peer's tokens from an * InputStream and writing tokens for the peer to an OutputStream. This * allows applications to pass in streams that are obtained from their network * connections and thus minimize the buffer copies that will happen. This * is especially important for tokens generated by wrap() which are * proportional in size to the length of the application data being * wrapped, and are probably also the most frequently used type of tokens. * <p> * It is anticipated that most applications will want to use wrap() in a * fashion where they obtain the application bytes to wrap from a byte[] * but want to output the wrap token straight to an * OutputStream. Similarly, they will want to use unwrap() where they read * the token directly from an InputStream but output it to some byte[] for * the application to process.
 * Unfortunately the high level GSS bindings
 * do not contain overloaded forms of wrap() and unwrap() that do just
 * this, however we have accommodated those cases here with the expectation
 * that this will be rolled into the high level bindings sooner or later.
 *
 * @author Mayank Upadhyay
 */

public interface GSSContextSpi {

    public Provider getProvider();

    // The specification for the following methods mirrors the
    // specification of the same methods in the GSSContext interface, as
    // defined in RFC 2853.

    public void requestLifetime(int lifetime) throws GSSException;

    public void requestMutualAuth(boolean state) throws GSSException;

    public void requestReplayDet(boolean state) throws GSSException;

    public void requestSequenceDet(boolean state) throws GSSException;

    public void requestCredDeleg(boolean state) throws GSSException;

    public void requestAnonymity(boolean state) throws GSSException;

    public void requestConf(boolean state) throws GSSException;

    public void requestInteg(boolean state) throws GSSException;

    public void requestDelegPolicy(boolean state) throws GSSException;

    public void setChannelBinding(ChannelBinding cb) throws GSSException;

    public boolean getCredDelegState();

    public boolean getMutualAuthState();

    public boolean getReplayDetState();

    public boolean getSequenceDetState();

    public boolean getAnonymityState();

    public boolean getDelegPolicyState();

    public boolean isTransferable() throws GSSException;

    public boolean isProtReady();

    public boolean isInitiator();

    public boolean getConfState();

    public boolean getIntegState();

    public int getLifetime();

    public boolean isEstablished();

    public GSSNameSpi getSrcName() throws GSSException;

    public GSSNameSpi getTargName() throws GSSException;

    public Oid getMech() throws GSSException;

    public GSSCredentialSpi getDelegCred() throws GSSException;

    /**
     * Initiator context establishment call. This method may be
     * required to be called several times. A CONTINUE_NEEDED return
     * call indicates that more calls are needed after the next token
     * is received from the peer.
     * <p>
     * This method is called by the GSS-Framework when the application
     * calls the initSecContext method on the GSSContext implementation
     * that it has a reference to.
     * <p>
     * All overloaded forms of GSSContext.initSecContext() can be handled
     * with this mechanism level initSecContext. Since the output token
     * from this method is a fixed size, not exceedingly large, and a one
     * time deal, an overloaded form that takes an OutputStream has not
     * been defined. The GSS-Framework can write the returned byte[] to any
     * application provided OutputStream. Similarly, any application input
     * in the form of byte arrays will be wrapped in an input stream by the
     * GSS-Framework and then passed here.
     * <p>
     * <strong>
     * The GSS-Framework will strip off the leading mechanism independent
     * GSS-API header. In other words, only the mechanism specific
     * inner-context token of RFC 2743 section 3.1 will be available on the
     * InputStream.
     * </strong>
     *
     * @param is contains the inner context token portion of the GSS token
     * received from the peer. On the first call to initSecContext, there
     * will be no token hence it will be ignored.
     * @param mechTokenSize the size of the inner context token as read by
     * the GSS-Framework from the mechanism independent GSS-API level
     * header.
     * @return any inner-context token required to be sent to the peer as
     * part of a GSS token. The mechanism should not add the mechanism
     * independent part of the token. The GSS-Framework will add that on
     * the way out.
     * @exception GSSException may be thrown
     */
    public byte[] initSecContext(InputStream is, int mechTokenSize)
        throws GSSException;

    /**
     * Acceptor's context establishment call. This method may be
     * required to be called several times. A CONTINUE_NEEDED return
     * call indicates that more calls are needed after the next token
     * is received from the peer.
     * <p>
     * This method is called by the GSS-Framework when the application
     * calls the acceptSecContext method on the GSSContext implementation
     * that it has a reference to.
     * <p>
     * All overloaded forms of GSSContext.acceptSecContext() can be handled
     * with this mechanism level acceptSecContext. Since the output token
     * from this method is a fixed size, not exceedingly large, and a one
     * time deal, an overloaded form that takes an OutputStream has not
     * been defined. The GSS-Framework can write the returned byte[] to any
     * application provided OutputStream. Similarly, any application input
     * in the form of byte arrays will be wrapped in an input stream by the
     * GSS-Framework and then passed here.
     * <p>
     * <strong>
     * The GSS-Framework will strip off the leading mechanism independent
     * GSS-API header. In other words, only the mechanism specific
     * inner-context token of RFC 2743 section 3.1 will be available on the
     * InputStream.
     * </strong>
     *
     * @param is contains the inner context token portion of the GSS token
     * received from the peer.
     * @param mechTokenSize the size of the inner context token as read by
     * the GSS-Framework from the mechanism independent GSS-API level
     * header.
     * @return any inner-context token required to be sent to the peer as
     * part of a GSS token. The mechanism should not add the mechanism
     * independent part of the token. The GSS-Framework will add that on
     * the way out.
     * @exception GSSException may be thrown
     */
    public byte[] acceptSecContext(InputStream is, int mechTokenSize)
        throws GSSException;

    /**
     * Queries the context for largest data size to accommodate
     * the specified protection and for the token to remain less than
     * maxTokSize.
     *
     * @param qop the quality of protection that the context will be
     * asked to provide.
     * @param confReq a flag indicating whether confidentiality will be
     * requested or not
     * @param maxTokSize the maximum size of the output token
     * @return the maximum size for the input message that can be
     * provided to the wrap() method in order to guarantee that these
     * requirements are met.
     * @exception GSSException may be thrown
     */
    public int getWrapSizeLimit(int qop, boolean confReq, int maxTokSize)
        throws GSSException;

    /**
     * Provides per-message token encapsulation.
     *
     * @param is the user-provided message to be protected
     * @param os the token to be sent to the peer. It includes
     * the message from <i>is</i> with the requested protection.
     * @param msgProp on input it contains the requested qop and
     * confidentiality state, on output, the applied values
     * @exception GSSException may be thrown
     * @see unwrap
     */
    public void wrap(InputStream is, OutputStream os, MessageProp msgProp)
        throws GSSException;

    /**
     * For apps that want simplicity and don't care about buffer copies.
     */
    public byte[] wrap(byte inBuf[], int offset, int len,
                       MessageProp msgProp) throws GSSException;

    /**
     * For apps that care about buffer copies but either cannot use streams
     * or want to avoid them for whatever reason. (Say, they are using
     * block ciphers.)
     *
     * NOTE: This method is not defined in public class org.ietf.jgss.GSSContext
     *
    public int wrap(byte inBuf[], int inOffset, int len,
                    byte[] outBuf, int outOffset,
                    MessageProp msgProp) throws GSSException;
    */

    /**
     * For apps that want to read from a specific application provided
     * buffer but want to write directly to the network stream.
     */
    /*
     * Can be achieved by converting the input buffer to a
     * ByteInputStream. Provided to keep the API consistent
     * with unwrap.
     *
     * NOTE: This method is not defined in public class org.ietf.jgss.GSSContext
     *
    public void wrap(byte inBuf[], int offset, int len,
                     OutputStream os, MessageProp msgProp)
        throws GSSException;
    */

    /**
     * Retrieves the message token previously encapsulated in the wrap
     * call.
     *
     * @param is the token from the peer
     * @param os unprotected message data
     * @param msgProp will contain the applied qop and confidentiality
     * of the input token and any informatory status values
     * @exception GSSException may be thrown
     * @see wrap
     */
    public void unwrap(InputStream is, OutputStream os,
                       MessageProp msgProp) throws GSSException;

    /**
     * For apps that want simplicity and don't care about buffer copies.
     */
    public byte[] unwrap(byte inBuf[], int offset, int len,
                         MessageProp msgProp) throws GSSException;

    /**
     * For apps that care about buffer copies but either cannot use streams
     * or want to avoid them for whatever reason. (Say, they are using
     * block ciphers.)
     *
     * NOTE: This method is not defined in public class org.ietf.jgss.GSSContext
     *
    public int unwrap(byte inBuf[], int inOffset, int len,
                      byte[] outBuf, int outOffset,
                      MessageProp msgProp) throws GSSException;
    */

    /**
     * For apps that care about buffer copies and want to read
     * straight from the network, but also want the output in a specific
     * application provided buffer, say to reduce buffer allocation or
     * subsequent copy.
     *
     * NOTE: This method is not defined in public class org.ietf.jgss.GSSContext
     *
    public int unwrap(InputStream is,
                      byte[] outBuf, int outOffset,
                      MessageProp msgProp) throws GSSException;
    */

    /**
     * Applies per-message integrity services.
     *
     * @param is the user-provided message
     * @param os the token to be sent to the peer along with the
     * message token. The message token <b>is not</b> encapsulated.
     * @param msgProp on input the desired QOP and output the applied QOP
     * @exception GSSException
     */
    public void getMIC(InputStream is, OutputStream os,
                       MessageProp msgProp) throws GSSException;

    public byte[] getMIC(byte []inMsg, int offset, int len,
                         MessageProp msgProp) throws GSSException;

    /**
     * Checks the integrity of the supplied tokens.
     * This token was previously generated by getMIC.
     *
     * @param is token generated by getMIC
     * @param msgStr the message to check integrity for
     * @param mProp will contain the applied QOP and confidentiality
     * states of the token as well as any informatory status codes
     * @exception GSSException may be thrown
     */
    public void verifyMIC(InputStream is, InputStream msgStr,
                          MessageProp mProp) throws GSSException;

    public void verifyMIC(byte []inTok, int tokOffset, int tokLen,
                          byte[] inMsg, int msgOffset, int msgLen,
                          MessageProp msgProp) throws GSSException;

    /**
     * Produces a token representing this context. After this call
     * the context will no longer be usable until an import is
     * performed on the returned token.
     *
     * @return exported context token
     * @exception GSSException may be thrown
     */
    public byte[] export() throws GSSException;

    /**
     * Releases context resources and terminates the
     * context between two peers.
     *
     * @exception GSSException may be thrown
     */
    public void dispose() throws GSSException;

    /**
     * Return the mechanism-specific attribute associated with {@code type}.
     *
     * @param type the type of the attribute requested
     * @return the attribute
     * @throws GSSException see {@link ExtendedGSSContext#inquireSecContext}
     * for details
     */
    public Object inquireSecContext(InquireType type)
            throws GSSException;
}
package lemming.data;

import org.apache.wicket.extensions.markup.html.repeater.data.table.filter.IFilterStateLocator;
import org.apache.wicket.extensions.markup.html.repeater.util.SortParam;
import org.apache.wicket.extensions.markup.html.repeater.util.SortableDataProvider;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;

import javax.persistence.EntityManager;
import javax.persistence.EntityTransaction;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.*;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Provides data for data table views.
 *
 * <p>Exactly one of three filter modes is active at a time: a filter-form
 * state object, a plain filter string, or a filter string scoped to a single
 * property. The setters below null out the other two modes when one is set.
 *
 * @param <T> data type that is provided
 */
public final class GenericDataProvider<T> extends SortableDataProvider<T, String>
        implements IFilterStateLocator<T> {
    /**
     * The data type that is provided.
     */
    private final Class<T> typeClass;

    /**
     * Defines the default sort order.
     */
    private final SortParam<String> defaultSortParam;

    /**
     * The state defined by a filter form.
     */
    private T state;

    /**
     * The state defined by a string filter.
     */
    private String filter;

    /**
     * The state defined by a filter property.
     */
    private String property;

    /**
     * Creates a data provider.
     *
     * @param typeClass class type that is provided
     * @param defaultSortParam default sort param
     */
    public GenericDataProvider(Class<T> typeClass, SortParam<String> defaultSortParam) {
        this.typeClass = typeClass;
        this.defaultSortParam = defaultSortParam;
    }

    /**
     * Returns an iterator for a subset of total data.
     *
     * <p>Opens a fresh EntityManager per call, runs the paged criteria query
     * in its own transaction, and always closes the EntityManager.
     *
     * @param first first row of data
     * @param count minimum number of rows retrieved
     * @return Iterator capable of iterating over row data.
     */
    @Override
    @SuppressWarnings("unchecked")
    public Iterator<T> iterator(long first, long count) {
        EntityManager entityManager = EntityManagerListener.createEntityManager();
        CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
        CriteriaQuery<T> criteriaQuery = criteriaBuilder.createQuery(typeClass);
        Root<T> root = criteriaQuery.from(typeClass);
        EntityTransaction transaction = entityManager.getTransaction();

        // Lazily fall back to the configured default sort when no column
        // sort has been applied yet.
        if (getSort() == null) {
            setSort(defaultSortParam);
        }

        Selection<T> selection = getSelection(root);
        Map<String, Join<?, ?>> joins = CriteriaHelper.getJoins(root, typeClass);
        Expression<Boolean> restriction = getRestriction(criteriaBuilder, root, joins);
        List<Order> orderList = getOrder(criteriaBuilder, root, joins);
        TypedQuery<T> typedQuery = null;

        if (restriction == null) {
            typedQuery = entityManager.createQuery(criteriaQuery.select(selection).orderBy(orderList))
                    .setFirstResult((int) first).setMaxResults((int) count);
        } else {
            typedQuery = entityManager.createQuery(criteriaQuery.select(selection).where(restriction)
                    .orderBy(orderList)).setFirstResult((int) first).setMaxResults((int) count);
        }

        try {
            transaction.begin();
            // The result list is materialized before commit, so the iterator
            // stays usable after the EntityManager is closed.
            Iterator<T> iterator = typedQuery.getResultList().iterator();
            transaction.commit();
            return iterator;
        } catch (RuntimeException e) {
            // NOTE(review): printStackTrace before rethrow duplicates what the
            // caller will log; consider removing or using a logger.
            e.printStackTrace();

            if (transaction != null && transaction.isActive()) {
                transaction.rollback();
            }

            throw e;
        } finally {
            entityManager.close();
        }
    }

    /**
     * Wraps objects retrieved from an iterator as models.
     *
     * @param object object that needs to be wrapped
     * @return The model representation of an object.
     */
    @Override
    public IModel<T> model(T object) {
        return new AbstractReadOnlyModel<T>() {
            /**
             * Returns the model object.
             *
             * @return The model object.
             */
            public T getObject() {
                return object;
            }
        };
    }

    /**
     * Returns the total number of items in the collection represented by the DataProvider.
     *
     * <p>Runs a COUNT query under the same restriction that {@link #iterator}
     * applies, so the reported size matches the iterated rows.
     *
     * @return Total number of items.
     */
    @Override
    public long size() {
        EntityManager entityManager = EntityManagerListener.createEntityManager();
        CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
        CriteriaQuery<Long> criteriaQuery = criteriaBuilder.createQuery(Long.class);
        Root<T> root = criteriaQuery.from(typeClass);
        EntityTransaction transaction = entityManager.getTransaction();

        if (getSort() == null) {
            setSort(defaultSortParam);
        }

        Map<String, Join<?, ?>> joins = CriteriaHelper.getJoins(root, typeClass);
        Expression<Boolean> restriction = getRestriction(criteriaBuilder, root, joins);
        TypedQuery<Long> typedQuery = null;

        if (restriction == null) {
            typedQuery = entityManager.createQuery(criteriaQuery.select(criteriaBuilder.count(root)));
        } else {
            typedQuery = entityManager.createQuery(criteriaQuery.select(criteriaBuilder.count(root))
                    .where(restriction));
        }

        try {
            transaction.begin();
            Long size = typedQuery.getSingleResult();
            transaction.commit();
            return size;
        } catch (RuntimeException e) {
            e.printStackTrace();

            if (transaction != null && transaction.isActive()) {
                transaction.rollback();
            }

            throw e;
        } finally {
            entityManager.close();
        }
    }

    /**
     * Returns the state defined by a filter form.
     *
     * @return A state object.
     */
    @Override
    public T getFilterState() {
        return state;
    }

    /**
     * Sets the state defined by a filter form.
     *
     * <p>Clears any active string filter so only one filter mode applies.
     *
     * @param state state object
     */
    @Override
    public void setFilterState(T state) {
        this.state = state;
        this.filter = null;
        this.property = null;
    }

    /**
     * Returns automatically created restrictions for a filter state.
     *
     * <p>Reflects over the declared fields of the state object (skipping
     * serialVersionUID, id and uuid); String fields become prefix LIKE
     * matches, all other non-null fields become equality checks.
     *
     * @param criteriaBuilder constructor for criteria queries
     * @param root query root referencing entities
     * @return An expression of type boolean, or null.
     */
    private Expression<Boolean> getFilterStateRestriction(CriteriaBuilder criteriaBuilder, Root<T> root) {
        if (state != null) {
            List<Predicate> predicateList = new ArrayList<>();

            for (Field field : state.getClass().getDeclaredFields()) {
                field.setAccessible(true);

                if (field.getName().equals("serialVersionUID") || field.getName().equals("id")
                        || field.getName().equals("uuid")) {
                    continue;
                }

                Object value = null;

                try {
                    // Best-effort read: an unreadable field is logged and
                    // simply contributes no predicate.
                    value = field.get(state);
                } catch (IllegalArgumentException | IllegalAccessException e) {
                    e.printStackTrace();
                }

                if (value != null) {
                    if (value instanceof String) {
                        Expression<String> expression = root.get(field.getName());
                        String filter = value + "%";
                        predicateList.add(criteriaBuilder.like(expression, filter));
                    } else {
                        Expression<String> expression = root.get(field.getName());
                        predicateList.add(criteriaBuilder.equal(expression, value));
                    }
                }
            }

            if (!predicateList.isEmpty()) {
                // NOTE(review): predicates are combined with OR, so a row
                // matching any single form field matches the filter — confirm
                // this disjunction is intended rather than AND.
                return criteriaBuilder.or(predicateList.toArray(new Predicate[predicateList.size()]));
            }
        }

        return null;
    }

    /**
     * Returns automatically created restrictions for a filter string.
     *
     * @param criteriaBuilder constructor for criteria queries
     * @param root query root referencing entities
     * @return An expression of type boolean, or null.
     */
    private Expression<Boolean> getFilterStringRestriction(CriteriaBuilder criteriaBuilder, Root<T> root,
                                                           Map<String, Join<?, ?>> joins) {
        if (filter != null && property != null) {
            return CriteriaHelper.getFilterStringRestriction(criteriaBuilder, root, joins, filter,
                    property, typeClass);
        } else if (filter != null) {
            return CriteriaHelper.getFilterStringRestriction(criteriaBuilder, root, joins, filter,
                    null, typeClass);
        } else {
            return null;
        }
    }

    /**
     * Updates the string filter of the DataProvider.
     *
     * <p>Clears the filter property and filter-form state.
     *
     * @param filter string filter
     */
    public void updateFilter(String filter) {
        this.filter = filter;
        this.property = null;
        this.state = null;
    }

    /**
     * Updates the string filter and filter property of the DataProvider.
     *
     * <p>Clears any filter-form state.
     *
     * @param filter string filter
     * @param property filter property
     */
    public void updateFilter(String filter, String property) {
        this.filter = filter;
        this.property = property;
        this.state = null;
    }

    /**
     * Return the selection for criteria queries.
     *
     * @param root query root referencing entities
     * @return A selection.
     */
    private Selection<T> getSelection(Root<T> root) {
        return root;
    }

    /**
     * Returns filter string or filter state restrictions.
     *
     * <p>A filter-form state restriction takes precedence over a string
     * filter restriction when both would apply.
     *
     * @param criteriaBuilder constructor for criteria queries
     * @param root query root referencing entities
     * @return An expression of type boolean, or null.
     */
    private Expression<Boolean> getRestriction(CriteriaBuilder criteriaBuilder, Root<T> root,
                                               Map<String, Join<?, ?>> joins) {
        Expression<Boolean> filterStateRestriction = getFilterStateRestriction(criteriaBuilder, root);
        Expression<Boolean> filterStringRestriction = getFilterStringRestriction(criteriaBuilder, root, joins);

        if (filterStateRestriction != null) {
            return filterStateRestriction;
        } else if (filterStringRestriction != null) {
            return filterStringRestriction;
        }

        return null;
    }

    /**
     * Returns a list of orders matching sort properties.
     *
     * @param criteriaBuilder constructor for criteria queries
     * @param root query root referencing entities
     * @param joins map of joins
     * @return A list of order objects.
     */
    private List<Order> getOrder(CriteriaBuilder criteriaBuilder, Root<T> root,
                                 Map<String, Join<?, ?>> joins) {
        String property = getSort().getProperty();
        return CriteriaHelper.getOrder(criteriaBuilder, root, joins, property,
                getSort().isAscending(), typeClass);
    }
}
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.cloudfront.model; import java.io.Serializable; /** * <p> * Summary of the information about a CloudFront origin access identity. * </p> */ public class CloudFrontOriginAccessIdentitySummary implements Serializable, Cloneable { /** * The ID for the origin access identity. For example: E74FTE3AJFJ256A. */ private String id; /** * The Amazon S3 canonical user ID for the origin access identity, which * you use when giving the origin access identity read permission to an * object in Amazon S3. */ private String s3CanonicalUserId; /** * The comment for this origin access identity, as originally specified * when created. */ private String comment; /** * Default constructor for a new CloudFrontOriginAccessIdentitySummary object. Callers should use the * setter or fluent setter (with...) methods to initialize this object after creating it. */ public CloudFrontOriginAccessIdentitySummary() {} /** * The ID for the origin access identity. For example: E74FTE3AJFJ256A. * * @return The ID for the origin access identity. For example: E74FTE3AJFJ256A. */ public String getId() { return id; } /** * The ID for the origin access identity. For example: E74FTE3AJFJ256A. * * @param id The ID for the origin access identity. For example: E74FTE3AJFJ256A. */ public void setId(String id) { this.id = id; } /** * The ID for the origin access identity. For example: E74FTE3AJFJ256A. 
* <p> * Returns a reference to this object so that method calls can be chained together. * * @param id The ID for the origin access identity. For example: E74FTE3AJFJ256A. * * @return A reference to this updated object so that method calls can be chained * together. */ public CloudFrontOriginAccessIdentitySummary withId(String id) { this.id = id; return this; } /** * The Amazon S3 canonical user ID for the origin access identity, which * you use when giving the origin access identity read permission to an * object in Amazon S3. * * @return The Amazon S3 canonical user ID for the origin access identity, which * you use when giving the origin access identity read permission to an * object in Amazon S3. */ public String getS3CanonicalUserId() { return s3CanonicalUserId; } /** * The Amazon S3 canonical user ID for the origin access identity, which * you use when giving the origin access identity read permission to an * object in Amazon S3. * * @param s3CanonicalUserId The Amazon S3 canonical user ID for the origin access identity, which * you use when giving the origin access identity read permission to an * object in Amazon S3. */ public void setS3CanonicalUserId(String s3CanonicalUserId) { this.s3CanonicalUserId = s3CanonicalUserId; } /** * The Amazon S3 canonical user ID for the origin access identity, which * you use when giving the origin access identity read permission to an * object in Amazon S3. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param s3CanonicalUserId The Amazon S3 canonical user ID for the origin access identity, which * you use when giving the origin access identity read permission to an * object in Amazon S3. * * @return A reference to this updated object so that method calls can be chained * together. 
*/ public CloudFrontOriginAccessIdentitySummary withS3CanonicalUserId(String s3CanonicalUserId) { this.s3CanonicalUserId = s3CanonicalUserId; return this; } /** * The comment for this origin access identity, as originally specified * when created. * * @return The comment for this origin access identity, as originally specified * when created. */ public String getComment() { return comment; } /** * The comment for this origin access identity, as originally specified * when created. * * @param comment The comment for this origin access identity, as originally specified * when created. */ public void setComment(String comment) { this.comment = comment; } /** * The comment for this origin access identity, as originally specified * when created. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param comment The comment for this origin access identity, as originally specified * when created. * * @return A reference to this updated object so that method calls can be chained * together. */ public CloudFrontOriginAccessIdentitySummary withComment(String comment) { this.comment = comment; return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getId() != null) sb.append("Id: " + getId() + ","); if (getS3CanonicalUserId() != null) sb.append("S3CanonicalUserId: " + getS3CanonicalUserId() + ","); if (getComment() != null) sb.append("Comment: " + getComment() ); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getId() == null) ? 0 : getId().hashCode()); hashCode = prime * hashCode + ((getS3CanonicalUserId() == null) ? 
0 : getS3CanonicalUserId().hashCode()); hashCode = prime * hashCode + ((getComment() == null) ? 0 : getComment().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CloudFrontOriginAccessIdentitySummary == false) return false; CloudFrontOriginAccessIdentitySummary other = (CloudFrontOriginAccessIdentitySummary)obj; if (other.getId() == null ^ this.getId() == null) return false; if (other.getId() != null && other.getId().equals(this.getId()) == false) return false; if (other.getS3CanonicalUserId() == null ^ this.getS3CanonicalUserId() == null) return false; if (other.getS3CanonicalUserId() != null && other.getS3CanonicalUserId().equals(this.getS3CanonicalUserId()) == false) return false; if (other.getComment() == null ^ this.getComment() == null) return false; if (other.getComment() != null && other.getComment().equals(this.getComment()) == false) return false; return true; } @Override public CloudFrontOriginAccessIdentitySummary clone() { try { return (CloudFrontOriginAccessIdentitySummary) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.yarn;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.test.testdata.WordCountData;
import org.apache.flink.testutils.logging.TestLoggerResource;
import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
import org.apache.flink.yarn.configuration.YarnConfigOptions;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.junit.After;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;

import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static org.apache.flink.yarn.util.TestUtils.getTestJarPath;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasItem;
import static org.junit.Assert.assertThat;

/**
 * This test starts a MiniYARNCluster with a FIFO scheduler. There are no queues for that scheduler.
 */
public class YARNSessionFIFOITCase extends YarnTestBase {
    private static final Logger LOG = LoggerFactory.getLogger(YARNSessionFIFOITCase.class);

    // Captures WARN-level log output of YarnClusterDescriptor so tests can
    // assert on emitted resource warnings.
    @Rule
    public final TestLoggerResource yarTestLoggerResource =
            new TestLoggerResource(YarnClusterDescriptor.class, Level.WARN);

    /* Override init with FIFO scheduler. */
    @BeforeClass
    public static void setup() {
        YARN_CONFIGURATION.setClass(
                YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class);
        YARN_CONFIGURATION.setInt(YarnConfiguration.NM_PMEM_MB, 768);
        YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
        YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-fifo");
        startYARNWithConfig(YARN_CONFIGURATION);
    }

    // Runs after every test: fail if any prohibited string (minus the
    // whitelist) appears in the YARN log files.
    @After
    public void checkForProhibitedLogContents() {
        ensureNoProhibitedStringInLogFiles(PROHIBITED_STRINGS, WHITELISTED_STRINGS);
    }

    @Test(timeout = 60000)
    public void testDetachedMode() throws Exception {
        runTest(() -> runDetachedModeTest(Collections.emptyMap()));
    }

    /**
     * Test regular operation, including command line parameter parsing.
     *
     * <p>Starts a detached YARN session, submits a detached WordCount job,
     * waits for two containers (JM + TM), then kills the application through
     * the YARN client and cleans up the yarn-properties file.
     *
     * @param securityProperties extra {@code -D} properties to pass to the
     *     session CLI; may be empty or null
     */
    void runDetachedModeTest(Map<String, String> securityProperties) throws Exception {
        runTest(
                () -> {
                    LOG.info("Starting testDetachedMode()");
                    File exampleJarLocation = getTestJarPath("StreamingWordCount.jar");
                    // get temporary file for reading input data for wordcount example
                    File tmpInFile = tmp.newFile();
                    FileUtils.writeStringToFile(tmpInFile, WordCountData.TEXT);

                    ArrayList<String> args = new ArrayList<>();
                    args.add("-j");
                    args.add(flinkUberjar.getAbsolutePath());

                    args.add("-t");
                    args.add(flinkLibFolder.getAbsolutePath());

                    args.add("-jm");
                    args.add("768m");

                    args.add("-tm");
                    args.add("1024m");

                    if (securityProperties != null) {
                        for (Map.Entry<String, String> property : securityProperties.entrySet()) {
                            args.add("-D" + property.getKey() + "=" + property.getValue());
                        }
                    }

                    args.add("--name");
                    args.add("MyCustomName");

                    args.add("--applicationType");
                    args.add("Apache Flink 1.x");

                    args.add("--detached");

                    Runner clusterRunner =
                            startWithArgs(
                                    args.toArray(new String[args.size()]),
                                    "JobManager Web Interface:",
                                    RunTypes.YARN_SESSION);

                    // before checking any strings outputted by the CLI, first give it time to
                    // return
                    clusterRunner.join();

                    // actually run a program, otherwise we wouldn't necessarily see any
                    // TaskManagers
                    // be brought up
                    Runner jobRunner =
                            startWithArgs(
                                    new String[] {
                                        "run",
                                        "--detached",
                                        exampleJarLocation.getAbsolutePath(),
                                        "--input",
                                        tmpInFile.getAbsoluteFile().toString()
                                    },
                                    "Job has been submitted with JobID",
                                    RunTypes.CLI_FRONTEND);

                    jobRunner.join();

                    // in "new" mode we can only wait after the job is submitted, because TMs
                    // are spun up lazily
                    LOG.info("Waiting until two containers are running");
                    // wait until two containers are running
                    while (getRunningContainers() < 2) {
                        sleep(500);
                    }

                    // make sure we have two TMs running in either mode
                    // poll the JobManager log for job completion, but give up after 10s
                    long startTime = System.nanoTime();
                    while (System.nanoTime() - startTime
                                    < TimeUnit.NANOSECONDS.convert(10, TimeUnit.SECONDS)
                            && !(verifyStringsInNamedLogFiles(
                                    new String[] {"switched from state RUNNING to FINISHED"},
                                    "jobmanager.log"))) {
                        LOG.info("Still waiting for cluster to finish job...");
                        sleep(500);
                    }

                    LOG.info("Two containers are running. Killing the application");

                    // kill application "externally".
                    try {
                        YarnClient yc = YarnClient.createYarnClient();
                        yc.init(YARN_CONFIGURATION);
                        yc.start();
                        List<ApplicationReport> apps =
                                yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING));
                        Assert.assertEquals(1, apps.size()); // Only one running
                        ApplicationReport app = apps.get(0);

                        Assert.assertEquals("MyCustomName", app.getName());
                        Assert.assertEquals("Apache Flink 1.x", app.getApplicationType());
                        ApplicationId id = app.getApplicationId();
                        yc.killApplication(id);

                        // wait for the kill to take effect (either KILLED or FINISHED)
                        while (yc.getApplications(EnumSet.of(YarnApplicationState.KILLED)).size()
                                        == 0
                                && yc.getApplications(EnumSet.of(YarnApplicationState.FINISHED))
                                                .size()
                                        == 0) {
                            sleep(500);
                        }
                    } catch (Throwable t) {
                        LOG.warn("Killing failed", t);
                        Assert.fail();
                    } finally {

                        // cleanup the yarn-properties file
                        String confDirPath = System.getenv("FLINK_CONF_DIR");
                        File configDirectory = new File(confDirPath);
                        LOG.info(
                                "testDetachedPerJobYarnClusterInternal: Using configuration directory "
                                        + configDirectory.getAbsolutePath());

                        // load the configuration
                        LOG.info(
                                "testDetachedPerJobYarnClusterInternal: Trying to load configuration file");
                        Configuration configuration =
                                GlobalConfiguration.loadConfiguration(
                                        configDirectory.getAbsolutePath());

                        try {
                            File yarnPropertiesFile =
                                    FlinkYarnSessionCli.getYarnPropertiesLocation(
                                            configuration.getString(
                                                    YarnConfigOptions.PROPERTIES_FILE_LOCATION));
                            if (yarnPropertiesFile.exists()) {
                                LOG.info(
                                        "testDetachedPerJobYarnClusterInternal: Cleaning up temporary Yarn address reference: {}",
                                        yarnPropertiesFile.getAbsolutePath());
                                yarnPropertiesFile.delete();
                            }
                        } catch (Exception e) {
                            LOG.warn(
                                    "testDetachedPerJobYarnClusterInternal: Exception while deleting the JobManager address file",
                                    e);
                        }
                    }

                    LOG.info("Finished testDetachedMode()");
                });
    }

    /**
     * Test querying the YARN cluster.
     *
     * <p>This test validates through 666*2 cores in the "cluster".
     */
    @Test
    public void testQueryCluster() throws Exception {
        runTest(
                () -> {
                    LOG.info("Starting testQueryCluster()");
                    runWithArgs(
                            new String[] {"-q"},
                            "Summary: totalMemory 8192 totalCores 1332",
                            null,
                            RunTypes.YARN_SESSION,
                            0); // we have 666*2 cores.
                    LOG.info("Finished testQueryCluster()");
                });
    }

    /**
     * The test cluster has the following resources: - 2 Nodes with 4096 MB each. -
     * RM_SCHEDULER_MINIMUM_ALLOCATION_MB is 512
     *
     * <p>We allocate: 1 JobManager with 256 MB (will be automatically upgraded to 512 due to min
     * alloc mb) 5 TaskManagers with 1585 MB
     *
     * <p>user sees a total request of: 8181 MB (fits) system sees a total request of: 8437 (doesn't
     * fit due to min alloc mb)
     */
    @Ignore("The test is too resource consuming (8.5 GB of memory)")
    @Test
    public void testResourceComputation() throws Exception {
        runTest(
                () -> {
                    LOG.info("Starting testResourceComputation()");
                    runWithArgs(
                            new String[] {
                                "-j",
                                flinkUberjar.getAbsolutePath(),
                                "-t",
                                flinkLibFolder.getAbsolutePath(),
                                "-jm",
                                "256m",
                                "-tm",
                                "1585m"
                            },
                            "Number of connected TaskManagers changed to",
                            null,
                            RunTypes.YARN_SESSION,
                            0);
                    LOG.info("Finished testResourceComputation()");
                    assertThat(
                            yarTestLoggerResource.getMessages(),
                            hasItem(
                                    containsString(
                                            "This YARN session requires 8437MB of memory in the cluster. There are currently only 8192MB available.")));
                });
    }

    /**
     * The test cluster has the following resources: - 2 Nodes with 4096 MB each. -
     * RM_SCHEDULER_MINIMUM_ALLOCATION_MB is 512
     *
     * <p>We allocate: 1 JobManager with 256 MB (will be automatically upgraded to 512 due to min
     * alloc mb) 2 TaskManagers with 3840 MB
     *
     * <p>the user sees a total request of: 7936 MB (fits) the system sees a request of: 8192 MB
     * (fits) HOWEVER: one machine is going to need 3840 + 512 = 4352 MB, which doesn't fit.
     *
     * <p>--> check if the system properly rejects allocating this session.
     */
    @Ignore("The test is too resource consuming (8 GB of memory)")
    @Test
    public void testfullAlloc() throws Exception {
        runTest(
                () -> {
                    LOG.info("Starting testfullAlloc()");
                    runWithArgs(
                            new String[] {
                                "-j",
                                flinkUberjar.getAbsolutePath(),
                                "-t",
                                flinkLibFolder.getAbsolutePath(),
                                "-jm",
                                "256m",
                                "-tm",
                                "3840m"
                            },
                            "Number of connected TaskManagers changed to",
                            null,
                            RunTypes.YARN_SESSION,
                            0);
                    LOG.info("Finished testfullAlloc()");
                    assertThat(
                            yarTestLoggerResource.getMessages(),
                            hasItem(
                                    containsString(
                                            "There is not enough memory available in the YARN cluster. The TaskManager(s) require 3840MB each. NodeManagers available: [4096, 4096]\n"
                                                    + "After allocating the JobManager (512MB) and (1/2) TaskManagers, the following NodeManagers are available: [3584, 256]")));
                });
    }
}
/*
 * Copyright 2019 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.server.scheduling;

import com.thoughtworks.go.config.CaseInsensitiveString;
import com.thoughtworks.go.config.PipelineConfig;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.config.materials.Materials;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterial;
import com.thoughtworks.go.config.remote.ConfigOrigin;
import com.thoughtworks.go.config.remote.RepoConfigOrigin;
import com.thoughtworks.go.domain.EnvironmentVariables;
import com.thoughtworks.go.domain.MaterialRevisions;
import com.thoughtworks.go.domain.buildcause.BuildCause;
import com.thoughtworks.go.domain.materials.Material;
import com.thoughtworks.go.domain.materials.MaterialConfig;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.materials.*;
import com.thoughtworks.go.server.perf.SchedulingPerformanceLogger;
import com.thoughtworks.go.server.persistence.MaterialRepository;
import com.thoughtworks.go.server.service.*;
import com.thoughtworks.go.server.service.result.OperationResult;
import com.thoughtworks.go.server.service.result.ServerHealthStateOperationResult;
import com.thoughtworks.go.serverhealth.HealthStateScope;
import com.thoughtworks.go.serverhealth.HealthStateType;
import com.thoughtworks.go.serverhealth.ServerHealthService;
import com.thoughtworks.go.serverhealth.ServerHealthState;
import com.thoughtworks.go.util.SystemEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import static java.lang.String.format;

/**
 * Produces {@link BuildCause}s for auto-, manual- and timer-triggered pipelines and hands them
 * to the {@link PipelineScheduleQueue}. Scheduling failures are surfaced through
 * {@link ServerHealthService} scoped to the pipeline.
 */
@Service
public class BuildCauseProducerService {
    private static final Logger LOGGER = LoggerFactory.getLogger(BuildCauseProducerService.class);

    private SchedulingCheckerService schedulingChecker;
    private ServerHealthService serverHealthService;
    private PipelineScheduleQueue pipelineScheduleQueue;
    private GoConfigService goConfigService;
    private MaterialChecker materialChecker;
    private MaterialUpdateStatusNotifier materialUpdateStatusNotifier;
    private final MaterialUpdateService materialUpdateService;
    private final SpecificMaterialRevisionFactory specificMaterialRevisionFactory;
    private final PipelineService pipelineService;
    private TriggerMonitor triggerMonitor;
    private final SystemEnvironment systemEnvironment;
    private final MaterialConfigConverter materialConfigConverter;
    private final MaterialExpansionService materialExpansionService;
    private SchedulingPerformanceLogger schedulingPerformanceLogger;

    @Autowired
    public BuildCauseProducerService(
            SchedulingCheckerService schedulingChecker,
            ServerHealthService serverHealthService,
            PipelineScheduleQueue pipelineScheduleQueue,
            GoConfigService goConfigService,
            MaterialRepository materialRepository,
            MaterialUpdateStatusNotifier materialUpdateStatusNotifier,
            MaterialUpdateService materialUpdateService,
            SpecificMaterialRevisionFactory specificMaterialRevisionFactory,
            TriggerMonitor triggerMonitor,
            PipelineService pipelineService,
            SystemEnvironment systemEnvironment,
            MaterialConfigConverter materialConfigConverter,
            MaterialExpansionService materialExpansionService,
            SchedulingPerformanceLogger schedulingPerformanceLogger) {
        this.schedulingChecker = schedulingChecker;
        this.serverHealthService = serverHealthService;
        this.pipelineScheduleQueue = pipelineScheduleQueue;
        this.goConfigService = goConfigService;
        this.materialUpdateStatusNotifier = materialUpdateStatusNotifier;
        this.materialUpdateService = materialUpdateService;
        this.specificMaterialRevisionFactory = specificMaterialRevisionFactory;
        this.pipelineService = pipelineService;
        this.systemEnvironment = systemEnvironment;
        this.materialConfigConverter = materialConfigConverter;
        this.materialExpansionService = materialExpansionService;
        this.schedulingPerformanceLogger = schedulingPerformanceLogger;
        // The checker is built here (rather than injected) on top of the injected repository.
        this.materialChecker = new MaterialChecker(materialRepository);
        this.triggerMonitor = triggerMonitor;
    }

    /**
     * Produces a build cause for an automatically triggered pipeline run.
     * Timing is recorded against {@code trackingId} via the performance logger.
     */
    public void autoSchedulePipeline(String pipelineName, OperationResult result, long trackingId) {
        schedulingPerformanceLogger.autoSchedulePipelineStart(trackingId, pipelineName);
        try {
            PipelineConfig pipelineConfig = goConfigService.pipelineConfigNamed(new CaseInsensitiveString(pipelineName));
            newProduceBuildCause(pipelineConfig, new AutoBuild(goConfigService, pipelineService, pipelineName, systemEnvironment, materialChecker), result, trackingId);
        } finally {
            schedulingPerformanceLogger.autoSchedulePipelineFinish(trackingId, pipelineName);
        }
    }

    /**
     * Produces a build cause for a user-triggered run. Unlike auto/timer scheduling this may
     * first wait for material updates (see {@link WaitForPipelineMaterialUpdate}).
     */
    public void manualSchedulePipeline(Username username, CaseInsensitiveString pipelineName, ScheduleOptions scheduleOptions, OperationResult result) {
        long trackingId = schedulingPerformanceLogger.manualSchedulePipelineStart(pipelineName.toString());
        try {
            WaitForPipelineMaterialUpdate update = new WaitForPipelineMaterialUpdate(pipelineName, new ManualBuild(username), scheduleOptions);
            update.start(result);
        } finally {
            schedulingPerformanceLogger.manualSchedulePipelineFinish(trackingId, pipelineName.toString());
        }
    }

    /** Produces a build cause for a timer-triggered run of the given pipeline. */
    public void timerSchedulePipeline(PipelineConfig pipelineConfig, ServerHealthStateOperationResult result) {
        long trackingId = schedulingPerformanceLogger.timerSchedulePipelineStart(CaseInsensitiveString.str(pipelineConfig.name()));
        try {
            newProduceBuildCause(pipelineConfig, new TimedBuild(), result, trackingId);
        } finally {
            schedulingPerformanceLogger.timerSchedulePipelineFinish(trackingId, CaseInsensitiveString.str(pipelineConfig.name()));
        }
    }

    // Returns false if the pipeline was already marked as triggered (guards double-triggering).
    boolean markPipelineAsAlreadyTriggered(PipelineConfig pipelineConfig) {
        return triggerMonitor.markPipelineAsAlreadyTriggered(pipelineConfig);
    }

    void markPipelineAsCanBeTriggered(PipelineConfig pipelineConfig) {
        triggerMonitor.markPipelineAsCanBeTriggered(pipelineConfig);
    }

    // Convenience overload: schedule with empty variables/revisions.
    ServerHealthState newProduceBuildCause(PipelineConfig pipelineConfig, BuildType buildType, OperationResult result, long trackingId) {
        final HashMap<String, String> stringStringHashMap = new HashMap<>();
        final HashMap<String, String> stringStringHashMap1 = new HashMap<>();
        return newProduceBuildCause(pipelineConfig, buildType, new ScheduleOptions(stringStringHashMap, stringStringHashMap1, new HashMap<>()), result, trackingId);
    }

    /**
     * Core scheduling routine: computes the build cause for {@code pipelineConfig} and, when
     * there is a good reason, enqueues it on the schedule queue.
     *
     * <p>Two paths are taken: if the pipeline has never run or its material configuration
     * changed, the latest revisions are used outright; otherwise revisions are diffed against
     * the most recently scheduled build cause.
     *
     * @return a success health state, or an error state (also registered with the health
     *         service) when scheduling failed.
     */
    ServerHealthState newProduceBuildCause(PipelineConfig pipelineConfig, BuildType buildType, ScheduleOptions scheduleOptions, OperationResult result, long trackingId) {
        buildType.canProduce(pipelineConfig, schedulingChecker, serverHealthService, result);
        if (!result.canContinue()) {
            return result.getServerHealthState();
        }
        String pipelineName = CaseInsensitiveString.str(pipelineConfig.name());
        LOGGER.debug("start producing build cause:{}", pipelineName);
        try {
            // Revisions explicitly pinned by the caller (e.g. trigger-with-options).
            MaterialRevisions peggedRevisions = specificMaterialRevisionFactory.create(pipelineName, scheduleOptions.getSpecifiedRevisions());
            BuildCause previousBuild = pipelineScheduleQueue.mostRecentScheduled(pipelineConfig.name());

            Materials materials = materialConfigConverter.toMaterials(pipelineConfig.materialConfigs());
            MaterialConfigs expandedMaterialConfigs = materialExpansionService.expandMaterialConfigsForScheduling(pipelineConfig.materialConfigs());
            Materials expandedMaterials = materialConfigConverter.toMaterials(expandedMaterialConfigs);
            BuildCause buildCause = null;
            boolean materialConfigurationChanged = hasConfigChanged(previousBuild, expandedMaterials);
            if (previousBuild.hasNeverRun() || materialConfigurationChanged) {
                // First run (or materials changed): take the latest revisions as-is.
                LOGGER.debug("Using latest modifications from repository for {}", pipelineConfig.name());
                MaterialRevisions revisions = materialChecker.findLatestRevisions(peggedRevisions, materials);
                if (!revisions.isMissingModifications()) {
                    buildCause = buildType.onModifications(revisions, materialConfigurationChanged, null);
                    if (buildCause != null) {
                        if (!buildCause.materialsMatch(expandedMaterialConfigs)) {
                            LOGGER.warn("Error while scheduling pipeline: {}. Possible Reasons: (1) Upstream pipelines have not been built yet. (2) Materials do not match between configuration and build-cause.", pipelineName);
                            return ServerHealthState.success(HealthStateType.general(HealthStateScope.forPipeline(pipelineName)));
                        }
                    }
                }
            } else {
                // Subsequent run: only schedule if revisions changed since the previous build
                // cause (or the build type insists on re-running with the latest materials).
                LOGGER.debug("Checking if materials are different for {}", pipelineConfig.name());
                MaterialRevisions latestRevisions = materialChecker.findLatestRevisions(peggedRevisions, materials);
                if (!latestRevisions.isMissingModifications()) {
                    MaterialRevisions original = previousBuild.getMaterialRevisions();
                    MaterialRevisions revisions = materialChecker.findRevisionsSince(peggedRevisions, expandedMaterials, original, latestRevisions);
                    if (!revisions.hasChangedSince(original) || (buildType.shouldCheckWhetherOlderRunsHaveRunWithLatestMaterials() && materialChecker.hasPipelineEverRunWith(pipelineName, latestRevisions))) {
                        LOGGER.debug("Repository for [{}] not modified", pipelineName);
                        buildCause = buildType.onEmptyModifications(pipelineConfig, latestRevisions);
                    } else {
                        LOGGER.debug("Repository for [{}] modified; scheduling...", pipelineName);
                        buildCause = buildType.onModifications(revisions, materialConfigurationChanged, original);
                    }
                }
            }
            if (buildCause != null) {
                buildCause.addOverriddenVariables(EnvironmentVariables.toEnvironmentVariables(scheduleOptions.getVariables()));
                updateChangedRevisions(pipelineConfig.name(), buildCause);
            }
            if (isGoodReasonToSchedule(pipelineConfig, buildCause, buildType, materialConfigurationChanged)) {
                pipelineScheduleQueue.schedule(pipelineConfig.name(), buildCause);
                schedulingPerformanceLogger.sendingPipelineToTheToBeScheduledQueue(trackingId, pipelineName);
                LOGGER.debug("scheduling pipeline {} with build-cause {}; config origin {}", pipelineName, buildCause, pipelineConfig.getOrigin());
            } else {
                buildType.notifyPipelineNotScheduled(pipelineConfig);
            }
            // A successful pass clears any previous scheduling error for this pipeline.
            serverHealthService.removeByScope(HealthStateScope.forPipeline(pipelineName));
            LOGGER.debug("finished producing buildcause for {}", pipelineName);
            return ServerHealthState.success(HealthStateType.general(HealthStateScope.forPipeline(pipelineName)));
        } catch (NoCompatibleUpstreamRevisionsException ncure) {
            String message = "Error while scheduling pipeline: " + pipelineName + " as no compatible revisions were identified.";
            LOGGER.debug(message, ncure);
            return showError(pipelineName, message, ncure.getMessage());
        } catch (NoModificationsPresentForDependentMaterialException e) {
            // Not a hard failure: the dependent material simply has nothing to build yet.
            LOGGER.error(e.getMessage(), e);
            return ServerHealthState.success(HealthStateType.general(HealthStateScope.forPipeline(pipelineName)));
        } catch (Exception e) {
            String message = "Error while scheduling pipeline: " + pipelineName;
            LOGGER.error(message, e);
            result.unprocessibleEntity(message, e.getMessage() != null ? e.getMessage() : message, HealthStateType.general(HealthStateScope.forPipeline(pipelineName)));
            return showError(pipelineName, message, e.getMessage());
        }
    }

    // True if any dependency (upstream) material's pipeline shares this pipeline's config origin.
    private boolean isConfigOriginSameAsUpstream(PipelineConfig pipelineConfig, BuildCause buildCause) {
        if (buildCause.hasDependencyMaterials()) {
            for (DependencyMaterial material : buildCause.getDependencyMaterials()) {
                PipelineConfig upstreamConfig = goConfigService.pipelineConfigNamed(material.getPipelineName());
                if (pipelineConfig.hasSameConfigOrigin(upstreamConfig)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Decides whether the computed build cause should actually be enqueued. For pipelines whose
     * configuration comes from one of their own materials, config and material revisions must be
     * consistent — unless the build was forced, or the upstream already carries the same origin.
     */
    private boolean isGoodReasonToSchedule(PipelineConfig pipelineConfig, BuildCause buildCause, BuildType buildType, boolean materialConfigurationChanged) {
        if (buildCause == null)
            return false;

        boolean validCause = buildType.isValidBuildCause(pipelineConfig, buildCause);
        if (pipelineConfig.isConfigOriginSameAsOneOfMaterials()) {
            if (buildCause.isForced()) {
                // build is manual - skip scm-config consistency
                return validCause;
            }
            if (this.isConfigOriginSameAsUpstream(pipelineConfig, buildCause)) {
                // configuration is up to date - skip scm-config consistency check
                return validCause;
            }
            // then we need config and material revisions to be consistent
            if (!buildCause.pipelineConfigAndMaterialRevisionMatch(pipelineConfig)) {
                return false;
            }
            return validCause;
        } else {
            return materialConfigurationChanged || validCause;
        }
    }

    private void updateChangedRevisions(CaseInsensitiveString pipelineName, BuildCause buildCause) {
        materialChecker.updateChangedRevisions(pipelineName, buildCause);
    }

    // Registers (and returns) an error health state scoped to the pipeline.
    private ServerHealthState showError(String pipelineName, String message, String desc) {
        if (desc == null) {
            desc = "Details not available, please check server logs.";
        }
        ServerHealthState serverHealthState = ServerHealthState.error(message, desc, HealthStateType.general(HealthStateScope.forPipeline(pipelineName)));
        serverHealthService.update(serverHealthState);
        return serverHealthState;
    }

    // True when the configured materials differ from those of the previously scheduled cause.
    private boolean hasConfigChanged(BuildCause previous, Materials materials) {
        return !materials.equals(previous.getMaterialRevisions().getMaterials());
    }

    /**
     * Listener used for manual triggers: waits until all pending material updates complete,
     * then produces the build cause. If the pipeline's configuration is itself defined in a
     * material (remote config repo), that material is updated first and the config refreshed.
     */
    private class WaitForPipelineMaterialUpdate implements MaterialUpdateStatusListener {
        private PipelineConfig pipelineConfig;
        private final BuildType buildType;
        // Materials still awaiting an update, keyed by fingerprint.
        private final ConcurrentMap<String, Material> pendingMaterials;
        // Non-null only when the pipeline's config is defined remotely.
        private Material configMaterial;
        private boolean failed;
        private ScheduleOptions scheduleOptions;

        private WaitForPipelineMaterialUpdate(CaseInsensitiveString pipelineName, BuildType buildType, ScheduleOptions scheduleOptions) {
            this.pipelineConfig = goConfigService.pipelineConfigNamed(pipelineName);
            this.buildType = buildType;
            this.scheduleOptions = scheduleOptions;
            pendingMaterials = new ConcurrentHashMap<>();
            if (this.pipelineConfig.isConfigDefinedRemotely()) {
                // Then we must update config first and then continue as usual.
                // it is also possible that config will disappear at update
                RepoConfigOrigin configRepo = (RepoConfigOrigin) this.pipelineConfig.getOrigin();
                MaterialConfig materialConfig = configRepo.getMaterial();
                configMaterial = materialConfigConverter.toMaterial(materialConfig);
                pendingMaterials.putIfAbsent(materialConfig.getFingerprint(), configMaterial);
            }
            if (scheduleOptions.shouldPerformMDUBeforeScheduling()) {
                for (MaterialConfig materialConfig : pipelineConfig.materialConfigs()) {
                    pendingMaterials.putIfAbsent(materialConfig.getFingerprint(), materialConfigConverter.toMaterial(materialConfig));
                }
            }
        }

        /**
         * Kicks off the trigger: marks the pipeline as triggered, registers this listener and
         * either schedules immediately (nothing pending) or requests updates for each pending
         * material. On runtime failure the trigger mark and listener are rolled back.
         */
        public void start(OperationResult result) {
            try {
                buildType.canProduce(pipelineConfig, schedulingChecker, serverHealthService, result);
                if (!result.canContinue()) {
                    return;
                }
                if (!markPipelineAsAlreadyTriggered(pipelineConfig)) {
                    result.conflict("Failed to force pipeline: " + pipelineConfig.name(), "Pipeline already forced", HealthStateType.general(HealthStateScope.forPipeline(CaseInsensitiveString.str(pipelineConfig.name()))));
                    return;
                }
                materialUpdateStatusNotifier.registerListenerFor(pipelineConfig, this);
                if (pendingMaterials.isEmpty()) {
                    produceBuildCauseForPipeline(result, 0);
                    if (!result.canContinue()) {
                        return;
                    }
                } else {
                    for (Material material : pendingMaterials.values()) {
                        materialUpdateService.updateMaterial(material);
                    }
                }
                result.accepted(format("Request to schedule pipeline %s accepted", pipelineConfig.name()), "", HealthStateType.general(HealthStateScope.forPipeline(CaseInsensitiveString.str(pipelineConfig.name()))));
            } catch (RuntimeException e) {
                markPipelineAsCanBeTriggered(pipelineConfig);
                materialUpdateStatusNotifier.removeListenerFor(pipelineConfig);
                throw e;
            }
        }

        /**
         * Invoked when one material finishes updating. Removes it from the pending set and
         * schedules once the set is empty. Special handling for the config material: refresh the
         * pipeline config and start updates for any newly-added materials.
         */
        @Override
        public void onMaterialUpdate(MaterialUpdateCompletedMessage message) {
            Material material = message.getMaterial();
            if (message instanceof MaterialUpdateFailedMessage) {
                String failureReason = ((MaterialUpdateFailedMessage) message).getReason();
                LOGGER.error("not scheduling pipeline {} after manual-trigger because update of material failed with reason {}", pipelineConfig.name(), failureReason);
                showError(CaseInsensitiveString.str(pipelineConfig.name()), format("Could not trigger pipeline '%s'", pipelineConfig.name()), format("Material update failed for material '%s' because: %s", material.getDisplayName(), failureReason));
                failed = true;
            } else if (this.configMaterial != null && material.isSameFlyweight(this.configMaterial)) {
                // Then we have just updated configuration material.
                // A chance to refresh our config instance.
                // This does not guarantee that this config is from newest revision:
                // - it might have been invalid, then this instance is still like last time.
                // We have protection (higher) against that so this will eventually not schedule
                if (!goConfigService.hasPipelineNamed(this.pipelineConfig.name())) {
                    // pipeline we just triggered got removed from configuration
                    LOGGER.error("not scheduling pipeline {} after manual-trigger because pipeline's {} configuration was removed from origin repository", pipelineConfig.name(), pipelineConfig.name());
                    showError(CaseInsensitiveString.str(pipelineConfig.name()), format("Could not trigger pipeline '%s'", pipelineConfig.name()), format("Pipeline '%s' configuration has been removed from %s", pipelineConfig.name(), configMaterial.getDisplayName()));
                    failed = true;
                } else {
                    //TODO #1133 we could also check if last parsing in the origin repository failed
                    PipelineConfig newPipelineConfig = goConfigService.pipelineConfigNamed(this.pipelineConfig.name());
                    ConfigOrigin oldOrigin = this.pipelineConfig.getOrigin();
                    ConfigOrigin newOrigin = newPipelineConfig.getOrigin();
                    if (!oldOrigin.equals(newOrigin)) {
                        LOGGER.debug("Configuration of manually-triggered pipeline {} has been updated.", pipelineConfig.name());
                        // if all seems good:
                        // In case materials have changed, we should poll new ones as well
                        for (MaterialConfig materialConfig : newPipelineConfig.materialConfigs()) {
                            if (!this.pipelineConfig.materialConfigs().hasMaterialWithFingerprint(materialConfig)) {
                                // this is a material added in recent commit, it wasn't in previous config
                                // wait for it
                                Material newMaterial = materialConfigConverter.toMaterial(materialConfig);
                                pendingMaterials.putIfAbsent(materialConfig.getFingerprint(), newMaterial);
                                // and force update of it
                                materialUpdateService.updateMaterial(newMaterial);
                                LOGGER.info("new material {} in {} was added after manual-trigger. Scheduled update for it.", newMaterial.getDisplayName(), pipelineConfig.name());
                            }
                        }
                        this.pipelineConfig = newPipelineConfig;
                    }
                }
            }
            pendingMaterials.remove(material.getFingerprint());
            if (pendingMaterials.isEmpty()) {
                produceBuildCauseForPipeline(new ServerHealthStateOperationResult(), message.trackingId());
            }
        }

        // Deregisters, unmarks the trigger and (unless a material update failed) schedules.
        private void produceBuildCauseForPipeline(OperationResult result, long trackingId) {
            materialUpdateStatusNotifier.removeListenerFor(pipelineConfig);
            markPipelineAsCanBeTriggered(pipelineConfig);
            if (!failed) {
                newProduceBuildCause(pipelineConfig, buildType, scheduleOptions, result, trackingId);
            }
        }

        @Override
        public boolean isListeningFor(Material material) {
            return pendingMaterials.containsKey(material.getFingerprint());
        }
    }
}
package org.docksidestage.hangar.dbflute.cbean.cq.bs;

import java.util.Map;

import org.dbflute.cbean.*;
import org.dbflute.cbean.chelper.*;
import org.dbflute.cbean.coption.*;
import org.dbflute.cbean.cvalue.ConditionValue;
import org.dbflute.cbean.sqlclause.SqlClause;
import org.dbflute.exception.IllegalConditionBeanOperationException;
import org.docksidestage.hangar.dbflute.cbean.cq.ciq.*;
import org.docksidestage.hangar.dbflute.cbean.*;
import org.docksidestage.hangar.dbflute.cbean.cq.*;

/**
 * The base condition-query of WHITE_SELF_REFERENCE.
 * (auto-generated by DBFlute; do not edit by hand — regenerate instead)
 * @author DBFlute(AutoGenerator)
 */
public class BsWhiteSelfReferenceCQ extends AbstractBsWhiteSelfReferenceCQ {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    protected WhiteSelfReferenceCIQ _inlineQuery; // lazily created by inline()

    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    public BsWhiteSelfReferenceCQ(ConditionQuery referrerQuery, SqlClause sqlClause, String aliasName, int nestLevel) {
        super(referrerQuery, sqlClause, aliasName, nestLevel);
    }

    // ===================================================================================
    //                                                                 InlineView/OrClause
    //                                                                 ===================
    /**
     * Prepare InlineView query. <br>
     * {select ... from ... left outer join (select * from WHITE_SELF_REFERENCE) where FOO = [value] ...}
     * <pre>
     * cb.query().queryMemberStatus().<span style="color: #CC4747">inline()</span>.setFoo...;
     * </pre>
     * @return The condition-query for InlineView query. (NotNull)
     */
    public WhiteSelfReferenceCIQ inline() {
        if (_inlineQuery == null) { _inlineQuery = xcreateCIQ(); }
        _inlineQuery.xsetOnClause(false); return _inlineQuery;
    }

    // Creates the inline query and binds it to the base condition-bean.
    protected WhiteSelfReferenceCIQ xcreateCIQ() {
        WhiteSelfReferenceCIQ ciq = xnewCIQ();
        ciq.xsetBaseCB(_baseCB);
        return ciq;
    }

    protected WhiteSelfReferenceCIQ xnewCIQ() {
        return new WhiteSelfReferenceCIQ(xgetReferrerQuery(), xgetSqlClause(), xgetAliasName(), xgetNestLevel(), this);
    }

    /**
     * Prepare OnClause query. <br>
     * {select ... from ... left outer join WHITE_SELF_REFERENCE on ... and FOO = [value] ...}
     * <pre>
     * cb.query().queryMemberStatus().<span style="color: #CC4747">on()</span>.setFoo...;
     * </pre>
     * @return The condition-query for OnClause query. (NotNull)
     * @throws IllegalConditionBeanOperationException When this condition-query is base query.
     */
    public WhiteSelfReferenceCIQ on() {
        if (isBaseQuery()) { throw new IllegalConditionBeanOperationException("OnClause for local table is unavailable!"); }
        WhiteSelfReferenceCIQ inlineQuery = inline();
        inlineQuery.xsetOnClause(true);
        return inlineQuery;
    }

    // ===================================================================================
    //                                                                               Query
    //                                                                               =====
    protected ConditionValue _selfReferenceId; // lazily created holder for SELF_REFERENCE_ID conditions

    public ConditionValue xdfgetSelfReferenceId() {
        if (_selfReferenceId == null) { _selfReferenceId = nCV(); }
        return _selfReferenceId;
    }

    protected ConditionValue xgetCValueSelfReferenceId() { return xdfgetSelfReferenceId(); }

    // xdfget*/keep* pairs: the framework stores sub-queries under the fixed string key
    // and retrieves them when building SQL; keys must stay exactly as generated.
    public Map<String, WhiteSelfReferenceCQ> xdfgetSelfReferenceId_ExistsReferrer_WhiteSelfReferenceSelfList() {
        return xgetSQueMap("selfReferenceId_ExistsReferrer_WhiteSelfReferenceSelfList");
    }

    public String keepSelfReferenceId_ExistsReferrer_WhiteSelfReferenceSelfList(WhiteSelfReferenceCQ sq) {
        return xkeepSQue("selfReferenceId_ExistsReferrer_WhiteSelfReferenceSelfList", sq);
    }

    public Map<String, WhiteSelfReferenceCQ> xdfgetSelfReferenceId_NotExistsReferrer_WhiteSelfReferenceSelfList() {
        return xgetSQueMap("selfReferenceId_NotExistsReferrer_WhiteSelfReferenceSelfList");
    }

    public String keepSelfReferenceId_NotExistsReferrer_WhiteSelfReferenceSelfList(WhiteSelfReferenceCQ sq) {
        return xkeepSQue("selfReferenceId_NotExistsReferrer_WhiteSelfReferenceSelfList", sq);
    }

    public Map<String, WhiteSelfReferenceCQ> xdfgetSelfReferenceId_SpecifyDerivedReferrer_WhiteSelfReferenceSelfList() {
        return xgetSQueMap("selfReferenceId_SpecifyDerivedReferrer_WhiteSelfReferenceSelfList");
    }

    public String keepSelfReferenceId_SpecifyDerivedReferrer_WhiteSelfReferenceSelfList(WhiteSelfReferenceCQ sq) {
        return xkeepSQue("selfReferenceId_SpecifyDerivedReferrer_WhiteSelfReferenceSelfList", sq);
    }

    public Map<String, WhiteSelfReferenceCQ> xdfgetSelfReferenceId_QueryDerivedReferrer_WhiteSelfReferenceSelfList() {
        return xgetSQueMap("selfReferenceId_QueryDerivedReferrer_WhiteSelfReferenceSelfList");
    }

    public String keepSelfReferenceId_QueryDerivedReferrer_WhiteSelfReferenceSelfList(WhiteSelfReferenceCQ sq) {
        return xkeepSQue("selfReferenceId_QueryDerivedReferrer_WhiteSelfReferenceSelfList", sq);
    }

    public Map<String, Object> xdfgetSelfReferenceId_QueryDerivedReferrer_WhiteSelfReferenceSelfListParameter() {
        return xgetSQuePmMap("selfReferenceId_QueryDerivedReferrer_WhiteSelfReferenceSelfList");
    }

    public String keepSelfReferenceId_QueryDerivedReferrer_WhiteSelfReferenceSelfListParameter(Object pm) {
        return xkeepSQuePm("selfReferenceId_QueryDerivedReferrer_WhiteSelfReferenceSelfList", pm);
    }

    /**
     * Add order-by as ascend. <br>
     * SELF_REFERENCE_ID: {PK, NotNull, INTEGER(10)}
     * @return this. (NotNull)
     */
    public BsWhiteSelfReferenceCQ addOrderBy_SelfReferenceId_Asc() { regOBA("SELF_REFERENCE_ID"); return this; }

    /**
     * Add order-by as descend. <br>
     * SELF_REFERENCE_ID: {PK, NotNull, INTEGER(10)}
     * @return this. (NotNull)
     */
    public BsWhiteSelfReferenceCQ addOrderBy_SelfReferenceId_Desc() { regOBD("SELF_REFERENCE_ID"); return this; }

    protected ConditionValue _selfReferenceName; // lazily created holder for SELF_REFERENCE_NAME conditions

    public ConditionValue xdfgetSelfReferenceName() {
        if (_selfReferenceName == null) { _selfReferenceName = nCV(); }
        return _selfReferenceName;
    }

    protected ConditionValue xgetCValueSelfReferenceName() { return xdfgetSelfReferenceName(); }

    /**
     * Add order-by as ascend. <br>
     * SELF_REFERENCE_NAME: {NotNull, VARCHAR(200)}
     * @return this. (NotNull)
     */
    public BsWhiteSelfReferenceCQ addOrderBy_SelfReferenceName_Asc() { regOBA("SELF_REFERENCE_NAME"); return this; }

    /**
     * Add order-by as descend. <br>
     * SELF_REFERENCE_NAME: {NotNull, VARCHAR(200)}
     * @return this. (NotNull)
     */
    public BsWhiteSelfReferenceCQ addOrderBy_SelfReferenceName_Desc() { regOBD("SELF_REFERENCE_NAME"); return this; }

    protected ConditionValue _parentId; // lazily created holder for PARENT_ID conditions

    public ConditionValue xdfgetParentId() {
        if (_parentId == null) { _parentId = nCV(); }
        return _parentId;
    }

    protected ConditionValue xgetCValueParentId() { return xdfgetParentId(); }

    /**
     * Add order-by as ascend. <br>
     * PARENT_ID: {IX, INTEGER(10), FK to WHITE_SELF_REFERENCE}
     * @return this. (NotNull)
     */
    public BsWhiteSelfReferenceCQ addOrderBy_ParentId_Asc() { regOBA("PARENT_ID"); return this; }

    /**
     * Add order-by as descend. <br>
     * PARENT_ID: {IX, INTEGER(10), FK to WHITE_SELF_REFERENCE}
     * @return this. (NotNull)
     */
    public BsWhiteSelfReferenceCQ addOrderBy_ParentId_Desc() { regOBD("PARENT_ID"); return this; }

    // ===================================================================================
    //                                                             SpecifiedDerivedOrderBy
    //                                                             =======================
    /**
     * Add order-by for specified derived column as ascend.
     * <pre>
     * cb.specify().derivedPurchaseList().max(new SubQuery&lt;PurchaseCB&gt;() {
     *     public void query(PurchaseCB subCB) {
     *         subCB.specify().columnPurchaseDatetime();
     *     }
     * }, <span style="color: #CC4747">aliasName</span>);
     * <span style="color: #3F7E5E">// order by [alias-name] asc</span>
     * cb.<span style="color: #CC4747">addSpecifiedDerivedOrderBy_Asc</span>(<span style="color: #CC4747">aliasName</span>);
     * </pre>
     * @param aliasName The alias name specified at (Specify)DerivedReferrer. (NotNull)
     * @return this. (NotNull)
     */
    public BsWhiteSelfReferenceCQ addSpecifiedDerivedOrderBy_Asc(String aliasName) { registerSpecifiedDerivedOrderBy_Asc(aliasName); return this; }

    /**
     * Add order-by for specified derived column as descend.
     * <pre>
     * cb.specify().derivedPurchaseList().max(new SubQuery&lt;PurchaseCB&gt;() {
     *     public void query(PurchaseCB subCB) {
     *         subCB.specify().columnPurchaseDatetime();
     *     }
     * }, <span style="color: #CC4747">aliasName</span>);
     * <span style="color: #3F7E5E">// order by [alias-name] desc</span>
     * cb.<span style="color: #CC4747">addSpecifiedDerivedOrderBy_Desc</span>(<span style="color: #CC4747">aliasName</span>);
     * </pre>
     * @param aliasName The alias name specified at (Specify)DerivedReferrer. (NotNull)
     * @return this. (NotNull)
     */
    public BsWhiteSelfReferenceCQ addSpecifiedDerivedOrderBy_Desc(String aliasName) { registerSpecifiedDerivedOrderBy_Desc(aliasName); return this; }

    // ===================================================================================
    //                                                                         Union Query
    //                                                                         ===========
    // Mirrors the relation set-up of the base query onto the union query.
    public void reflectRelationOnUnionQuery(ConditionQuery bqs, ConditionQuery uqs) {
        WhiteSelfReferenceCQ bq = (WhiteSelfReferenceCQ)bqs;
        WhiteSelfReferenceCQ uq = (WhiteSelfReferenceCQ)uqs;
        if (bq.hasConditionQueryWhiteSelfReferenceSelf()) {
            uq.queryWhiteSelfReferenceSelf().reflectRelationOnUnionQuery(bq.queryWhiteSelfReferenceSelf(), uq.queryWhiteSelfReferenceSelf());
        }
    }

    // ===================================================================================
    //                                                                       Foreign Query
    //                                                                       =============
    /**
     * Get the condition-query for relation table. <br>
     * WHITE_SELF_REFERENCE by my PARENT_ID, named 'whiteSelfReferenceSelf'.
     * @return The instance of condition-query. (NotNull)
     */
    public WhiteSelfReferenceCQ queryWhiteSelfReferenceSelf() {
        return xdfgetConditionQueryWhiteSelfReferenceSelf();
    }

    // Lazily registers the relation query and its outer join on first access.
    public WhiteSelfReferenceCQ xdfgetConditionQueryWhiteSelfReferenceSelf() {
        String prop = "whiteSelfReferenceSelf";
        if (!xhasQueRlMap(prop)) { xregQueRl(prop, xcreateQueryWhiteSelfReferenceSelf()); xsetupOuterJoinWhiteSelfReferenceSelf(); }
        return xgetQueRlMap(prop);
    }

    protected WhiteSelfReferenceCQ xcreateQueryWhiteSelfReferenceSelf() {
        String nrp = xresolveNRP("WHITE_SELF_REFERENCE", "whiteSelfReferenceSelf");
        String jan = xresolveJAN(nrp, xgetNNLvl());
        return xinitRelCQ(new WhiteSelfReferenceCQ(this, xgetSqlClause(), jan, xgetNNLvl()), _baseCB, "whiteSelfReferenceSelf", nrp);
    }

    protected void xsetupOuterJoinWhiteSelfReferenceSelf() { xregOutJo("whiteSelfReferenceSelf"); }

    public boolean hasConditionQueryWhiteSelfReferenceSelf() { return xhasQueRlMap("whiteSelfReferenceSelf"); }

    // No fixed-condition dynamic parameters exist for this table's relations.
    protected Map<String, Object> xfindFixedConditionDynamicParameterMap(String property) { return null; }

    // ===================================================================================
    //                                                                     ScalarCondition
    //                                                                     ===============
    public Map<String, WhiteSelfReferenceCQ> xdfgetScalarCondition() { return xgetSQueMap("scalarCondition"); }
    public String keepScalarCondition(WhiteSelfReferenceCQ sq) { return xkeepSQue("scalarCondition", sq); }

    // ===================================================================================
    //                                                                       MyselfDerived
    //                                                                       =============
    public Map<String, WhiteSelfReferenceCQ> xdfgetSpecifyMyselfDerived() { return xgetSQueMap("specifyMyselfDerived"); }
    public String keepSpecifyMyselfDerived(WhiteSelfReferenceCQ sq) { return xkeepSQue("specifyMyselfDerived", sq); }

    public Map<String, WhiteSelfReferenceCQ> xdfgetQueryMyselfDerived() { return xgetSQueMap("queryMyselfDerived"); }
    public String keepQueryMyselfDerived(WhiteSelfReferenceCQ sq) { return xkeepSQue("queryMyselfDerived", sq); }
    public Map<String, Object> xdfgetQueryMyselfDerivedParameter() { return xgetSQuePmMap("queryMyselfDerived"); }
    public String keepQueryMyselfDerivedParameter(Object pm) { return xkeepSQuePm("queryMyselfDerived", pm); }

    // ===================================================================================
    //                                                                        MyselfExists
    //                                                                        ============
    protected Map<String, WhiteSelfReferenceCQ> _myselfExistsMap; // generated field; storage goes through xgetSQueMap below
    public Map<String, WhiteSelfReferenceCQ> xdfgetMyselfExists() { return xgetSQueMap("myselfExists"); }
    public String keepMyselfExists(WhiteSelfReferenceCQ sq) { return xkeepSQue("myselfExists", sq); }

    // ===================================================================================
    //                                                                       MyselfInScope
    //                                                                       =============
    public Map<String, WhiteSelfReferenceCQ> xdfgetMyselfInScope() { return xgetSQueMap("myselfInScope"); }
    public String keepMyselfInScope(WhiteSelfReferenceCQ sq) { return xkeepSQue("myselfInScope", sq); }

    // ===================================================================================
    //                                                                       Very Internal
    //                                                                       =============
    // very internal (for suppressing warn about 'Not Use Import')
    protected String xCB() { return WhiteSelfReferenceCB.class.getName(); }
    protected String xCQ() { return WhiteSelfReferenceCQ.class.getName(); }
    protected String xCHp() { return HpQDRFunction.class.getName(); }
    protected String xCOp() { return ConditionOption.class.getName(); }
    protected String xMap() { return Map.class.getName(); }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.datalakestore.implementation;

import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.util.CoreUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import reactor.core.publisher.Flux;

/**
 * Generated helper utilities for parsing values out of ARM resource IDs and for
 * adapting {@link PagedIterable} element types. Do not hand-edit beyond comments:
 * this file is produced by the AutoRest code generator.
 */
final class Utils {
    /**
     * Returns the path segment that immediately follows the segment matching
     * {@code name} (case-insensitive) in a '/'-separated resource {@code id},
     * or {@code null} when {@code id} is null, {@code name} is not found, or
     * {@code name} is the last segment.
     */
    static String getValueFromIdByName(String id, String name) {
        if (id == null) {
            return null;
        }
        Iterator<String> itr = Arrays.stream(id.split("/")).iterator();
        while (itr.hasNext()) {
            String part = itr.next();
            // skip empty segments produced by leading/consecutive slashes
            if (part != null && !part.trim().isEmpty()) {
                if (part.equalsIgnoreCase(name)) {
                    if (itr.hasNext()) {
                        return itr.next();
                    } else {
                        return null;
                    }
                }
            }
        }
        return null;
    }

    /**
     * Extracts the value bound to {@code {parameterName}} by aligning the resource
     * {@code id} against {@code pathTemplate} from the END of both paths backwards.
     * When the parameter is the template's first (leftmost) segment, all remaining
     * leading id segments are re-joined and returned (multi-segment values);
     * otherwise the single matching id segment is returned. Returns {@code null}
     * when no match is found or either input is null.
     */
    static String getValueFromIdByParameterName(String id, String pathTemplate, String parameterName) {
        if (id == null || pathTemplate == null) {
            return null;
        }
        String parameterNameParentheses = "{" + parameterName + "}";
        List<String> idSegmentsReverted = Arrays.asList(id.split("/"));
        List<String> pathSegments = Arrays.asList(pathTemplate.split("/"));
        // walk the id from its last segment; Arrays.asList is fixed-size but
        // Collections.reverse only swaps in place, which is permitted
        Collections.reverse(idSegmentsReverted);
        Iterator<String> idItrReverted = idSegmentsReverted.iterator();
        int pathIndex = pathSegments.size();
        while (idItrReverted.hasNext() && pathIndex > 0) {
            String idSegment = idItrReverted.next();
            String pathSegment = pathSegments.get(--pathIndex);
            if (!CoreUtils.isNullOrEmpty(idSegment) && !CoreUtils.isNullOrEmpty(pathSegment)) {
                if (pathSegment.equalsIgnoreCase(parameterNameParentheses)) {
                    // pathIndex 0 (or 1 with an empty leading segment from a
                    // template starting with '/') means the parameter is the
                    // template's first real segment: collect everything left.
                    if (pathIndex == 0 || (pathIndex == 1 && pathSegments.get(0).isEmpty())) {
                        List<String> segments = new ArrayList<>();
                        segments.add(idSegment);
                        idItrReverted.forEachRemaining(segments::add);
                        Collections.reverse(segments);
                        if (segments.size() > 0 && segments.get(0).isEmpty()) {
                            segments.remove(0);
                        }
                        return String.join("/", segments);
                    } else {
                        return idSegment;
                    }
                }
            }
        }
        return null;
    }

    /** Lazily maps each element of {@code pageIterable} through {@code mapper}, page by page. */
    static <T, S> PagedIterable<S> mapPage(PagedIterable<T> pageIterable, Function<T, S> mapper) {
        return new PagedIterableImpl<T, S>(pageIterable, mapper);
    }

    /**
     * {@link PagedIterable} adapter that applies a per-element mapper to every
     * access path (stream, iterator, by-page variants) of a wrapped iterable.
     */
    private static final class PagedIterableImpl<T, S> extends PagedIterable<S> {
        private final PagedIterable<T> pagedIterable;
        private final Function<T, S> mapper;
        private final Function<PagedResponse<T>, PagedResponse<S>> pageMapper;

        private PagedIterableImpl(PagedIterable<T> pagedIterable, Function<T, S> mapper) {
            // the PagedFlux passed to super is a mapped view over the same source;
            // the overrides below bypass it and delegate to pagedIterable directly
            super(
                PagedFlux
                    .create(
                        () ->
                            (continuationToken, pageSize) ->
                                Flux.fromStream(pagedIterable.streamByPage().map(getPageMapper(mapper)))));
            this.pagedIterable = pagedIterable;
            this.mapper = mapper;
            this.pageMapper = getPageMapper(mapper);
        }

        /** Builds a function that maps a whole page, preserving request/status/headers/continuation. */
        private static <T, S> Function<PagedResponse<T>, PagedResponse<S>> getPageMapper(Function<T, S> mapper) {
            return page ->
                new PagedResponseBase<Void, S>(
                    page.getRequest(),
                    page.getStatusCode(),
                    page.getHeaders(),
                    page.getElements().stream().map(mapper).collect(Collectors.toList()),
                    page.getContinuationToken(),
                    null);
        }

        @Override
        public Stream<S> stream() {
            return pagedIterable.stream().map(mapper);
        }

        @Override
        public Stream<PagedResponse<S>> streamByPage() {
            return pagedIterable.streamByPage().map(pageMapper);
        }

        @Override
        public Stream<PagedResponse<S>> streamByPage(String continuationToken) {
            return pagedIterable.streamByPage(continuationToken).map(pageMapper);
        }

        @Override
        public Stream<PagedResponse<S>> streamByPage(int preferredPageSize) {
            return pagedIterable.streamByPage(preferredPageSize).map(pageMapper);
        }

        @Override
        public Stream<PagedResponse<S>> streamByPage(String continuationToken, int preferredPageSize) {
            return pagedIterable.streamByPage(continuationToken, preferredPageSize).map(pageMapper);
        }

        @Override
        public Iterator<S> iterator() {
            return new IteratorImpl<T, S>(pagedIterable.iterator(), mapper);
        }

        @Override
        public Iterable<PagedResponse<S>> iterableByPage() {
            return new IterableImpl<PagedResponse<T>, PagedResponse<S>>(pagedIterable.iterableByPage(), pageMapper);
        }

        @Override
        public Iterable<PagedResponse<S>> iterableByPage(String continuationToken) {
            return new IterableImpl<PagedResponse<T>, PagedResponse<S>>(
                pagedIterable.iterableByPage(continuationToken), pageMapper);
        }

        @Override
        public Iterable<PagedResponse<S>> iterableByPage(int preferredPageSize) {
            return new IterableImpl<PagedResponse<T>, PagedResponse<S>>(
                pagedIterable.iterableByPage(preferredPageSize), pageMapper);
        }

        @Override
        public Iterable<PagedResponse<S>> iterableByPage(String continuationToken, int preferredPageSize) {
            return new IterableImpl<PagedResponse<T>, PagedResponse<S>>(
                pagedIterable.iterableByPage(continuationToken, preferredPageSize), pageMapper);
        }
    }

    /** Iterator adapter: applies {@code mapper} to each element of the wrapped iterator. */
    private static final class IteratorImpl<T, S> implements Iterator<S> {
        private final Iterator<T> iterator;
        private final Function<T, S> mapper;

        private IteratorImpl(Iterator<T> iterator, Function<T, S> mapper) {
            this.iterator = iterator;
            this.mapper = mapper;
        }

        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        @Override
        public S next() {
            return mapper.apply(iterator.next());
        }

        @Override
        public void remove() {
            // remove is forwarded so mutations reach the underlying collection
            iterator.remove();
        }
    }

    /** Iterable adapter: each call to iterator() yields a freshly mapped iterator. */
    private static final class IterableImpl<T, S> implements Iterable<S> {
        private final Iterable<T> iterable;
        private final Function<T, S> mapper;

        private IterableImpl(Iterable<T> iterable, Function<T, S> mapper) {
            this.iterable = iterable;
            this.mapper = mapper;
        }

        @Override
        public Iterator<S> iterator() {
            return new IteratorImpl<T, S>(iterable.iterator(), mapper);
        }
    }
}
/* * most of the code is written by the author so blame him all i added was the * wrapper class to the code to seperate it from the application code * edited by marsh poulson 10/26/2013 */ package poulproj08; /* * this class contains all of the different implementations of sorting * algorithms */ public class sorts { public static <T extends Comparable<? super T>> void selectionSort( T[] theArray, int n) { // -------// --------------------------------------------- // Sorts the items in an array into ascending order. // Precondition: theArray is an array of n items. // Postcondition: theArray is sorted into // ascending order. // Calls: indexOfLargest. // --------------------------------------------------- // last = index of the last item in the subarray of // items yet to be sorted // largest = index of the largest item found for (int last = n - 1; last >= 1; last--) { // Invariant: theArray[last+1..n-1] is sorted // and > theArray[0..last] // select largest item in theArray[0..last] int largest = indexOfLargest(theArray, last + 1); // swap largest item theArray[largest] with // theArray[last] T temp = theArray[largest]; theArray[largest] = theArray[last]; theArray[last] = temp; } // end for } // end selectionSort private static <T extends Comparable<? super T>> int indexOfLargest( T[] theArray, int size) { // --------------------------------------------------- // Finds the largest item in an array. // Precondition: theArray is an array of size items; // size >= 1. // Postcondition: Returns the index of the largest // item in the array. 
// --------------------------------------------------- int indexSoFar = 0; // index of largest item found so far // Invariant: theArray[indexSoFar]>=theArray[0..currIndex-1] for (int currIndex = 1; currIndex < size; ++currIndex) { if (theArray[currIndex].compareTo(theArray[indexSoFar]) > 0) { indexSoFar = currIndex; } // end if } // end for return indexSoFar; // index of largest item } // end indexOfLargest public static <T extends Comparable<? super T>> void bubbleSort( T[] theArray, int n) { // --------------------------------------------------- // Sorts the items in an array into ascending order. // Precondition: theArray is an array of n items. // Postcondition: theArray is sorted into ascending // order. // --------------------------------------------------- boolean sorted = false; // false when swaps occur for (int pass = 1; (pass < n) && !sorted; ++pass) { // Invariant: theArray[n+1-pass..n-1] is sorted // and > theArray[0..n-pass] sorted = true; // assume sorted for (int index = 0; index < n - pass; ++index) { // Invariant: theArray[0..index-1] <= theArray[index] int nextIndex = index + 1; if (theArray[index].compareTo(theArray[nextIndex]) > 0) { // exchange items T temp = theArray[index]; theArray[index] = theArray[nextIndex]; theArray[nextIndex] = temp; sorted = false; // signal exchange } // end if } // end for // Assertion: theArray[0..n-pass-1] < theArray[n-pass] } // end for } // end bubbleSort public static <T extends Comparable<? super T>> void insertionSort( T[] theArray, int n) { // --------------------------------------------------- // Sorts the items in an array into ascending order. // Precondition: theArray is an array of n items. // Postcondition: theArray is sorted into ascending // order. 
// --------------------------------------------------- // unsorted = first index of the unsorted region, // loc = index of insertion in the sorted region, // nextItem = next item in the unsorted region // initially, sorted region is theArray[0], // unsorted region is theArray[1..n-1]; // in general, sorted region is theArray[0..unsorted-1], // unsorted region is theArray[unsorted..n-1] for (int unsorted = 1; unsorted < n; ++unsorted) { // Invariant: theArray[0..unsorted-1] is sorted // find the right position (loc) in // theArray[0..unsorted] for theArray[unsorted], // which is the first item in the unsorted // region; shift, if necessary, to make room T nextItem = theArray[unsorted]; int loc = unsorted; while ((loc > 0) && (theArray[loc - 1].compareTo(nextItem) > 0)) { // shift theArray[loc-1] to the right theArray[loc] = theArray[loc - 1]; loc--; } // end while // Assertion: theArray[loc] is where nextItem belongs // insert nextItem into sorted region theArray[loc] = nextItem; } // end for } // end insertionSort public static <T extends Comparable<? super T>> void mergesort(T[] theArray) { // Declare temporary array used for merge, must do // unchecked cast from Comparable<?>[] to T[] T[] tempArray = (T[]) new Comparable<?>[theArray.length]; mergesort(theArray, tempArray, 0, theArray.length - 1); } // end mergesort private static <T extends Comparable<? super T>> void merge( T[] theArray, T[] tempArray, int first, int mid, int last) { // --------------------------------------------------------- // Merges two sorted array segments theArray[first..mid] and // theArray[mid+1..last] into one sorted array. // Precondition: first <= mid <= last. The subarrays // theArray[first..mid] and theArray[mid+1..last] are // each sorted in increasing order. // Postcondition: theArray[first..last] is sorted. // Implementation note: This method merges the two // subarrays into a temporary array and copies the result // into the original array theArray. 
// --------------------------------------------------------- // initialize the local indexes to indicate the subarrays int first1 = first; // beginning of first subarray int last1 = mid; // end of first subarray int first2 = mid + 1; // beginning of second subarray int last2 = last; // end of second subarray // while both subarrays are not empty, copy the // smaller item into the temporary array int index = first1; // next available location in // tempArray while ((first1 <= last1) && (first2 <= last2)) { // Invariant: tempArray[first1..index-1] is in order if (theArray[first1].compareTo(theArray[first2]) < 0) { tempArray[index] = theArray[first1]; first1++; } else { tempArray[index] = theArray[first2]; first2++; } // end if index++; } // end while // finish off the nonempty subarray // finish off the first subarray, if necessary while (first1 <= last1) { // Invariant: tempArray[first1..index-1] is in order tempArray[index] = theArray[first1]; first1++; index++; } // end while // finish off the second subarray, if necessary while (first2 <= last2) { // Invariant: tempArray[first1..index-1] is in order tempArray[index] = theArray[first2]; first2++; index++; } // end while // copy the result back into the original array for (index = first; index <= last; ++index) { theArray[index] = tempArray[index]; } // end for } // end merge public static <T extends Comparable<? super T>> void mergesort( T[] theArray, T[] tempArray, int first, int last) { // --------------------------------------------------------- // Sorts the items in an array into ascending order. // Precondition: theArray[first..last] is an array. // Postcondition: theArray[first..last] is sorted in // ascending order. // Calls: merge. 
// --------------------------------------------------------- if (first < last) { // sort each half int mid = (first + last) / 2; // index of midpoint // sort left half theArray[first..mid] mergesort(theArray, tempArray, first, mid); // sort right half theArray[mid+1..last] mergesort(theArray, tempArray, mid + 1, last); // merge the two halves merge(theArray, tempArray, first, mid, last); } // end if } // end mergesort private static <T extends Comparable<? super T>> void choosePivot( T[] theArray, int first, int last) { // --------------------------------------------------------- // Chooses a pivot for quicksort's partition algorithm and // swaps it with the first item in an array. // Precondition: theArray[first..last] where first <= last. // Postcondition: theArray[first] is the pivot. // --------------------------------------------------------- // Implementation left as an exercise. } // end choosePivot private static <T extends Comparable<? super T>> int partition( T[] theArray, int first, int last) { // --------------------------------------------------------- // Partitions an array for quicksort. // Precondition: theArray[first..last] where first <= last. // Postcondition: Returns the index of the pivot element of // theArray[first..last]. Upon completion of the method, // this will be the index value lastS1 such that // S1 = theArray[first..lastS1-1] < pivot // theArray[lastS1] == pivot // S2 = theArray[lastS1+1..last] >= pivot // Calls: choosePivot. 
// --------------------------------------------------------- // tempItem is used to swap elements in the array T tempItem; // place pivot in theArray[first] choosePivot(theArray, first, last); T pivot = theArray[first]; // reference pivot // initially, everything but pivot is in unknown int lastS1 = first; // index of last item in S1 // move one item at a time until unknown region is empty // firstUnknown is the index of first item in unknown region for (int firstUnknown = first + 1; firstUnknown <= last; ++firstUnknown) { // Invariant: theArray[first+1..lastS1] < pivot // theArray[lastS1+1..firstUnknown-1] >= pivot // move item from unknown to proper region if (theArray[firstUnknown].compareTo(pivot) < 0) { // item from unknown belongs in S1 ++lastS1; tempItem = theArray[firstUnknown]; theArray[firstUnknown] = theArray[lastS1]; theArray[lastS1] = tempItem; } // end if // else item from unknown belongs in S2 } // end for // place pivot in proper position and mark its location tempItem = theArray[first]; theArray[first] = theArray[lastS1]; theArray[lastS1] = tempItem; return lastS1; } // end partition public static <T extends Comparable<? super T>> void quickSort( T[] theArray, int first, int last) { // --------------------------------------------------------- // Sorts the items in an array into ascending order. // Precondition: theArray[first..last] is an array. // Postcondition: theArray[first..last] is sorted. // Calls: partition. // --------------------------------------------------------- int pivotIndex; if (first < last) { // create the partition: S1, Pivot, S2 pivotIndex = partition(theArray, first, last); // sort regions S1 and S2 quickSort(theArray, first, pivotIndex - 1); quickSort(theArray, pivotIndex + 1, last); } // end if } // end quickSort }
/**
 * Copyright (C) 2006-2017 Apache Software Foundation (https://sourceforge.net/p/webdav-servlet, https://github.com/Commonjava/webdav-handler)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.sf.webdav.methods;

import java.io.ByteArrayInputStream;
import java.io.PrintWriter;

import net.sf.webdav.StoredObject;
import net.sf.webdav.WebdavStatus;
import net.sf.webdav.locking.ResourceLocks;
import net.sf.webdav.spi.IMimeTyper;
import net.sf.webdav.spi.ITransaction;
import net.sf.webdav.spi.IWebdavStore;
import net.sf.webdav.spi.WebdavRequest;
import net.sf.webdav.spi.WebdavResponse;
import net.sf.webdav.testutil.MockTest;

import org.jmock.Expectations;
import org.junit.Test;

/**
 * jMock-based unit tests for the WebDAV PROPPATCH handler ({@code DoProppatch}).
 * Each test scripts the exact store/request/response interactions it expects and
 * then verifies them with {@code _mockery.assertIsSatisfied()}. Note that jMock
 * expectations are order- and count-sensitive: the duplicated request_uri /
 * getPathInfo expectations below mirror DoProppatch reading the path twice.
 */
public class DoProppatchTest extends MockTest {
    static IWebdavStore mockStore;

    static IMimeTyper mockMimeTyper;

    static WebdavRequest mockReq;

    static WebdavResponse mockRes;

    static ITransaction mockTransaction;

    // literal "<hello/>" payload used as fake resource content
    static byte[] resourceContent = new byte[] { '<', 'h', 'e', 'l', 'l', 'o', '/', '>' };

    static ByteArrayInputStream bais = new ByteArrayInputStream( resourceContent );

    // static DelegatingServletInputStream dsis = new DelegatingServletInputStream(
    // bais);

    /** Creates fresh mocks before each test (jMock context from MockTest). */
    @Override
    public void setupFixtures()
        throws Exception
    {
        mockStore = _mockery.mock( IWebdavStore.class );
        mockMimeTyper = _mockery.mock( IMimeTyper.class );
        mockReq = _mockery.mock( WebdavRequest.class );
        mockRes = _mockery.mock( WebdavResponse.class );
        mockTransaction = _mockery.mock( ITransaction.class );
    }

    /** A read-only handler must reject PROPPATCH with 403 Forbidden before touching the store. */
    @Test
    public void doProppatchIfReadOnly()
        throws Exception
    {
        _mockery.checking( new Expectations()
        {
            {
                one( mockRes ).sendError( WebdavStatus.SC_FORBIDDEN );
            }
        } );

        final DoProppatch doProppatch = new DoProppatch( mockStore, new ResourceLocks(), readOnly );

        doProppatch.execute( mockTransaction, mockReq, mockRes );

        _mockery.assertIsSatisfied();
    }

    /** PROPPATCH on a path with no stored object must answer 404 Not Found. */
    @Test
    public void doProppatchOnNonExistingResource()
        throws Exception
    {
        final String path = "/notExists";

        _mockery.checking( new Expectations()
        {
            {
                one( mockReq ).getAttribute( "javax.servlet.include.request_uri" );
                will( returnValue( null ) );

                one( mockReq ).getPathInfo();
                will( returnValue( path ) );

                final StoredObject notExistingSo = null;

                one( mockStore ).getStoredObject( mockTransaction, path );
                will( returnValue( notExistingSo ) );

                one( mockRes ).sendError( WebdavStatus.SC_NOT_FOUND );
            }
        } );

        final DoProppatch doProppatch = new DoProppatch( mockStore, new ResourceLocks(), !readOnly );

        doProppatch.execute( mockTransaction, mockReq, mockRes );

        _mockery.assertIsSatisfied();
    }

    /** A PROPPATCH request with an empty body (content length 0) must answer 500. */
    @Test
    public void doProppatchOnRequestWithNoContent()
        throws Exception
    {
        final String path = "/testFile";

        _mockery.checking( new Expectations()
        {
            {
                // path is resolved twice by the handler, hence duplicate expectations
                one( mockReq ).getAttribute( "javax.servlet.include.request_uri" );
                will( returnValue( null ) );

                one( mockReq ).getPathInfo();
                will( returnValue( path ) );

                final StoredObject testFileSo = initFileStoredObject( resourceContent );

                one( mockStore ).getStoredObject( mockTransaction, path );
                will( returnValue( testFileSo ) );

                one( mockReq ).getAttribute( "javax.servlet.include.request_uri" );
                will( returnValue( null ) );

                one( mockReq ).getPathInfo();
                will( returnValue( path ) );

                one( mockReq ).getContentLength();
                will( returnValue( 0 ) );

                one( mockRes ).sendError( WebdavStatus.SC_INTERNAL_SERVER_ERROR );
            }
        } );

        final DoProppatch doProppatch = new DoProppatch( mockStore, new ResourceLocks(), !readOnly );

        doProppatch.execute( mockTransaction, mockReq, mockRes );

        _mockery.assertIsSatisfied();
    }

    /**
     * Happy path: PROPPATCH on an existing file with an XML body must answer
     * 207 Multi-Status with a text/xml response.
     * NOTE(review): the response is written to the hard-coded path
     * "/tmp/XMLTestFile" — not portable to non-Unix systems; consider
     * a temporary file via JUnit's TemporaryFolder instead (left unchanged here).
     */
    @Test
    public void doProppatchOnResource()
        throws Exception
    {
        final String path = "/testFile";
        final PrintWriter pw = new PrintWriter( "/tmp/XMLTestFile" );

        _mockery.checking( new Expectations()
        {
            {
                one( mockReq ).getAttribute( "javax.servlet.include.request_uri" );
                will( returnValue( null ) );

                one( mockReq ).getPathInfo();
                will( returnValue( path ) );

                final StoredObject testFileSo = initFileStoredObject( resourceContent );

                one( mockStore ).getStoredObject( mockTransaction, path );
                will( returnValue( testFileSo ) );

                one( mockReq ).getAttribute( "javax.servlet.include.request_uri" );
                will( returnValue( null ) );

                one( mockReq ).getPathInfo();
                will( returnValue( path ) );

                one( mockReq ).getContentLength();
                will( returnValue( 8 ) );

                one( mockReq ).getInputStream();
                will( returnValue( bais ) );

                one( mockRes ).setStatus( WebdavStatus.SC_MULTI_STATUS );

                one( mockRes ).setContentType( "text/xml; charset=UTF-8" );

                one( mockRes ).getWriter();
                will( returnValue( pw ) );

                one( mockReq ).getContextPath();
                will( returnValue( "" ) );
            }
        } );

        final DoProppatch doProppatch = new DoProppatch( mockStore, new ResourceLocks(), !readOnly );

        doProppatch.execute( mockTransaction, mockReq, mockRes );

        _mockery.assertIsSatisfied();
    }
}
/*
 * Copyright (C) SparseWare Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.sparseware.bellavista.external.fhir;

import com.appnativa.rare.net.ActionLink;
import com.appnativa.rare.scripting.Functions;
import com.appnativa.util.CharArray;
import com.appnativa.util.json.JSONArray;
import com.appnativa.util.json.JSONObject;
import com.appnativa.util.json.JSONWriter;
import com.sparseware.bellavista.ActionPath;
import com.sparseware.bellavista.external.fhir.FHIRServer.FHIRResource;
import com.sparseware.bellavista.service.HttpHeaders;
import com.sparseware.bellavista.service.iHttpConnection;

import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * FHIR remote-service endpoint for patient documents. By default it operates on
 * the FHIR "DocumentReference" resource (docReferences mode); subclasses or the
 * String constructor may target other resources, in which case entries are
 * handled as Composition-style documents with "section" processing.
 * Multi-part output uses the fixed MIME boundary constants below; tabular output
 * is written caret-delimited in the order of COLUMN_NAMES.
 */
public class Documents extends aFHIRemoteService {
  // fixed multipart boundary shared by all multi-attachment responses
  public final static String BOUNDARY       = "__FF00_SIRF_MULTIPART_BOUNDARY_PART_OOFF__";
  public final static String BOUNDARY_START = "\r\n--__FF00_SIRF_MULTIPART_BOUNDARY_PART_OOFF__\r\n";
  public final static String BOUNDARY_END   = "\r\n--__FF00_SIRF_MULTIPART_BOUNDARY_PART_OOFF__--\r\n";

  // whether the server exposes the configured document resource (set once in ctor)
  protected static boolean    hasDocuments;
  // column order for list output; initialized lazily by the first instance
  protected static String[]   COLUMN_NAMES;
  // cached synthetic "see attachments" part (lazy; see getSeeAttachmentsObject)
  protected static JSONObject setAttachments;
  // true when operating on DocumentReference resources (vs Composition sections)
  boolean                     docReferences;

  public Documents() {
    this("DocumentReference");
    docReferences = true;
  }

  public Documents(String resourceName) {
    super(resourceName);

    if (COLUMN_NAMES == null) {
      COLUMN_NAMES = new String[] { "date", "title", "author", "status", "type", "has_attachments", "parent_id",
                                    "data_url" };
      FHIRResource r = server.getResource(resourceName);

      hasDocuments = r != null;
      // NOTE(review): config path uses the "vitals" key inside a Documents
      // service — looks copy-pasted from the Vitals service; confirm intended key.
      searchParams = (String) server.getServerConfig().opt("fhir", "vitals", "search_params");
    }

    columnNames = COLUMN_NAMES;
  }

  /** Document categories are not supported by this backend; reports "no data". */
  public void categories(iHttpConnection conn, ActionPath path, InputStream data, HttpHeaders headers)
          throws IOException {
    noData(conn, path, false, headers);
  }

  /** Reads a single document resource (next path segment is the id) and streams it to the client. */
  public void document(iHttpConnection conn, ActionPath path, InputStream data, HttpHeaders headers)
          throws IOException {
    ActionLink l = createReadLink(path.shift());

    try {
      read(l.getReader(), conn.getContentWriter(), headers);
    } finally {
      l.close();
    }
  }

  /**
   * Fetches attachment content directly from a URL given in the query string
   * ("url" and optional "content_type" parameters). text/plain and image/*
   * payloads are rewrapped as HTML; FHIR JSON is parsed via read(); anything
   * else is piped through to the client connection unmodified.
   */
  public void document_direct(iHttpConnection conn, ActionPath path, InputStream data, HttpHeaders headers)
          throws IOException {
    Map<String, String> map          = getQueryParams(conn.getURL(), data);
    String              url          = map.get("url");
    String              content_type = map.get("content_type");
    ActionLink          l            = server.createLink(url);

    if (content_type != null) {
      l.setRequestHeader("Accept", content_type);

      if (content_type.startsWith("text/plain")) {
        sendTextAsHTML(conn, headers, "", FHIRUtils.getDataAsString(url, content_type));

        return;
      }

      if (content_type.startsWith("image/")) {
        sendImageAsHTML(conn, headers, "", content_type, FHIRUtils.getDataAsBase64String(url, content_type));

        return;
      }

      if (content_type.startsWith("application/json+fhir")) {
        try {
          read(l.getReader(), conn.getContentWriter(), headers);
        } finally {
          l.close();
        }

        return;
      }
    }

    // unknown content type: pipe the raw upstream connection to the client
    conn.setConnectionPipe((HttpURLConnection) l.getConnection().getConnectionObject());
  }

  /** Lists the current patient's documents; reports "data not available" when the resource is absent. */
  public void list(iHttpConnection conn, ActionPath path, InputStream data, HttpHeaders headers) throws IOException {
    if (!hasDocuments) {
      dataNotAvailable(conn, path, true, headers, columnNames, 1);

      return;
    }

    ActionLink l = createSearchLink("patient", server.getPatientID());

    try {
      Object w = FHIRUtils.createWriter(path, conn.getContentWriter(), headers, true);

      search(l.getReader(), w, headers);
    } finally {
      l.close();
    }
  }

  /**
   * Converts one bundle entry into a row, either as JSON (jw non-null) or as a
   * caret-delimited line on w (columns in COLUMN_NAMES order; linked-data ids
   * are prefixed with '|'). Entries whose resourceType does not match this
   * service's resource are skipped.
   */
  @Override
  public void processEntry(JSONObject entry, JSONWriter jw, Writer w, CharArray ca, Object... params)
          throws IOException {
    Object     v;
    JSONObject o;
    boolean    parsed = false;

    if (!entry.optString("resourceType").equals(resourceName)) {
      return;
    }

    do {
      String dateld          = server.getID(entry.optString("id"), false);
      String date            = entry.optString(docReferences ? "created" : "date");
      String title           = entry.optString(docReferences ? "description" : "title");
      String authorld        = null;
      String author          = null;
      String status          = null;
      String type            = null;
      String data_url        = null;
      String has_attachments = null;

      // String parent_id=null;
      v = entry.opt("type");

      if ((title == null) || (title.length() == 0)) {
        title = MISSING_INVALID;
      }

      if (v instanceof String) {
        type = (String) v;
      } else if (v instanceof JSONObject) {
        type = FHIRUtils.getBestMedicalText((JSONObject) v);
      }

      JSONArray a = entry.optJSONArray("content");

      if (docReferences) {
        if ((a != null) && !a.isEmpty()) {
          a = resolveContentArray(a, ca);

          if (a.length() > 1) {
            has_attachments = "true";
          } else {
            o = a.getJSONObject(0).getJSONObject("attachment");

            String s = o.optString("contentType");

            // only directly viewable types get an inline data link
            if (s.startsWith("text/") || s.startsWith("image/")) {
              data_url = createAttachmentHREF(dateld, o, ca);
            }
          }
        }
      }

      v = entry.opt("status");

      if (v instanceof String) {
        status = (String) v;
      } else if (v instanceof JSONObject) {
        // NOTE(review): v is a JSONObject in this branch but is cast to
        // JSONArray — this looks like it would throw ClassCastException;
        // verify against FHIRUtils.getBestMedicalText overloads.
        status = FHIRUtils.getBestMedicalText((JSONArray) v);
      }

      o = entry.optJSONObject("author");

      if (o != null) {
        authorld = server.getID(o.optString("reference"), false);
        author   = o.optString("display");
      }

      if (jw != null) {
        // JSON output: objects with optional linkedData wrappers
        jw.object();

        if (date != null) {
          if (dateld != null) {
            jw.key("date").object();
            jw.key("linkedData").value(dateld).key("value").value(date);
            jw.endObject();
          } else {
            jw.key("date").value(date);
          }
        }

        if (title != null) {
          jw.key("title").value(title);
        }

        if (author != null) {
          if (authorld != null) {
            jw.key("author").object();
            jw.key("linkedData").value(authorld).key("value").value(author);
            jw.endObject();
          } else {
            jw.key("author").value(author);
          }
        }

        if (status != null) {
          jw.key("status").value(status);
        }

        if (type != null) {
          jw.key("type").value(type);
        }

        if (has_attachments != null) {
          jw.key("has_attachments").value(has_attachments);
        }

        // if(parent_id!=null) {
        // jw.key("parent_id").value(parent_id);
        // }
        if (data_url != null) {
          jw.key("data_url").value(data_url);
        }

        jw.endObject();
      } else {
        // caret-delimited output; '|' separates a linked-data id from its value
        if (date != null) {
          if (dateld != null) {
            w.write(dateld);
            w.write((char) '|');
          }

          FHIRUtils.writeQuotedStringIfNecessary(w, date, ca);
        }

        w.write((char) '^');

        if (title != null) {
          FHIRUtils.writeQuotedStringIfNecessary(w, title, ca);
        }

        w.write((char) '^');

        if (author != null) {
          if (authorld != null) {
            w.write(authorld);
            w.write((char) '|');
          }

          FHIRUtils.writeQuotedStringIfNecessary(w, author, ca);
        }

        w.write((char) '^');

        if (status != null) {
          FHIRUtils.writeQuotedStringIfNecessary(w, status, ca);
        }

        w.write((char) '^');

        if (type != null) {
          FHIRUtils.writeQuotedStringIfNecessary(w, type, ca);
        }

        w.write((char) '^');

        if (has_attachments != null) {
          FHIRUtils.writeQuotedStringIfNecessary(w, has_attachments, ca);
        }

        w.write((char) '^');

        // if(parent_id!=null) {
        // FHIRUtils.writeQuotedStringIfNecessary(w, parent_id, ca);
        // }
        w.write((char) '^');

        if (data_url != null) {
          w.write(data_url);
        }

        w.write((char) '^');
        w.write((char) '\n');
      }

      parsed = true;
    } while(false);

    if (!parsed) {
      debugLog("Could not parse entry:\n" + entry.toString(2));
    }
  }

  /**
   * Renders a single fetched document entry to the client. Multi-attachment
   * entries become a multipart response (with a synthetic "see attachments"
   * lead part when the first attachment is not textual); single attachments
   * become plain HTML. Non-Composition entries are treated as
   * DocumentReference-style... NOTE(review): the guard below sets
   * docReferences = false when the entry is NOT a Composition, which looks
   * inverted relative to that intent — confirm against the original source.
   */
  @Override
  public void readEntry(JSONObject entry, JSONWriter jw, Writer w, Object... params) throws IOException {
    if (!entry.optString("resourceType").equals("Composition")) {
      docReferences = false;
    }

    HttpHeaders headers = (HttpHeaders) params[0];

    headers.setDefaultResponseHeaders();

    // NOTE(review): "sanction" is an unusual element name here — possibly a
    // garbled "section" (Composition entries are processed via processSection
    // below); verify against the FHIR Composition schema.
    JSONArray a = entry.optJSONArray(docReferences ? "content" : "sanction");

    if (docReferences) {
      a = resolveContentArray(a, null);
    }

    if ((a != null) && (a.length() > 1)) {
      headers.mimeMultipart(BOUNDARY);

      if (docReferences) {
        JSONObject ao   = a.getJSONObject(0).getJSONObject("attachment");
        String     type = ao.optString("contentType", "text/plain");

        if (!type.startsWith("text/")) {
          a.add(0, getSeeAttachmentsObject());
        }
      }
    } else {
      headers.mimeHtml();
    }

    if (docReferences) {
      processContent(a, w);
    } else {
      processSection(entry, w, false);
    }
  }

  /**
   * Writes a bare attachment array as a document response: multipart (with
   * index and optional synthetic lead part) when there are several
   * attachments, plain HTML otherwise.
   */
  public static void writeAttachmentsAsDocument(JSONArray a, Writer w, HttpHeaders headers) throws IOException {
    JSONObject ao   = a.getJSONObject(0);
    String     type = ao.optString("contentType", "text/plain");

    if (a.length() > 1) {
      headers.mimeMultipart(BOUNDARY);

      if (!type.startsWith("text/")) {
        a.add(0, getSeeAttachmentsObject());
      }

      processAttachments(a, w);
    } else {
      headers.mimeHtml();
      FHIRUtils.writeAttachment(ao, w, false);
    }
  }

  /**
   * Writes one caret-delimited index line for an attachment:
   * type^title^creation^isMain^url. The url column is only populated for
   * non-main entries (the main part's body follows inline).
   */
  public static void writeAttachmentDocumentIndex(String id, JSONObject attachment, Writer w, boolean main,
          CharArray ca)
          throws IOException {
    String title = attachment.optString("title", null);

    if (title == null) {
      title = FHIRServer.getInstance().getResourceAsString("bv.text.attachment");
    }

    String mime = attachment.optString("contentType", "text/plain");
    String type;

    if (mime.startsWith("text/")) {
      type = "document";
    } else if (mime.startsWith("image/")) {
      type = "image";
    } else {
      // anything else is labeled with its raw MIME type
      type = ca.set("custom:").append(mime).toString();
    }

    w.append(type).append('^').append(title);
    w.append("^").append(attachment.optString("creation")).append("^").append(main ? "true" : "false");
    w.append("^");

    if (!main) {
      String url = createAttachmentHREF(id, attachment, ca);

      if (url != null) {
        w.write(url);
      }
    }

    w.append("\n");
  }

  /**
   * Writes a resolved DocumentReference content array: with several parts, an
   * index of all parts followed by one inline attachment body and a closing
   * boundary. NOTE(review): after the index loop, {@code content} refers to the
   * LAST element, so the inline body is the last attachment — processAttachments
   * below writes element 0 instead; confirm which is intended.
   */
  public static void processContent(JSONArray a, Writer w) throws IOException {
    int len = a.length();

    if (len > 0) {
      CharArray  ca      = new CharArray();
      JSONObject content = a.getJSONObject(0);

      if (len > 1) {
        writeAttachmentDocumentIndex(content.optString("id", null), content.getJSONObject("attachment"), w, true, ca);

        for (int i = 1; i < len; i++) {
          content = a.getJSONObject(i);
          writeAttachmentDocumentIndex(content.optString("id", null), content.getJSONObject("attachment"), w, false,
                                       ca);
        }
      }

      FHIRUtils.writeAttachment(content.getJSONObject("attachment"), w, len > 1);

      if (len > 1) {
        w.write(BOUNDARY_END);
      }
    }
  }

  /**
   * Writes a bare attachment array (objects ARE the attachments, unlike
   * processContent where they are wrapped in "attachment"): optional index,
   * then the first attachment's body, then a closing boundary when multipart.
   */
  public static void processAttachments(JSONArray a, Writer w) throws IOException {
    int       len = a.length();
    CharArray ca  = new CharArray();

    if (len > 1) {
      writeAttachmentDocumentIndex(null, a.getJSONObject(0), w, true, ca);

      for (int i = 1; i < len; i++) {
        JSONObject o = a.getJSONObject(i);

        writeAttachmentDocumentIndex(o.optString("id"), o, w, false, ca);
      }
    }

    FHIRUtils.writeAttachment(a.getJSONObject(0), w, len > 1);

    if (len > 1) {
      w.write(BOUNDARY_END);
    }
  }

  /** Parses a single resource from the reader and renders it via readEntry. */
  @Override
  protected void read(Reader r, Object writer, HttpHeaders headers, Object... params) throws IOException {
    JSONObject entry = getReadEntry(r);

    readEntry(entry, null, (Writer) writer, headers);
  }

  /**
   * Lazily builds (and caches in {@code setAttachments}) a synthetic HTML
   * attachment telling the user to see the report's attachments; used as the
   * lead part when the real first attachment is not text.
   */
  static JSONObject getSeeAttachmentsObject() {
    if (setAttachments == null) {
      String s = FHIRServer.getInstance().getResourceAsString("bv.text.see_report_attachments");

      s = Functions.base64NOLF(s);

      JSONObject content    = new JSONObject();
      JSONObject attachment = new JSONObject().put("contentType", "text/html").put("data", s);

      content.put("attachment", attachment);
      setAttachments = content;
    }

    return setAttachments;
  }

  /**
   * De-duplicates and prioritizes a DocumentReference "content" array.
   * Entries are keyed by format-code + url; among duplicates of the same key,
   * richer image types win (svg over jpeg over png). The "best" entry
   * (preferring text/html, then any text/*) is moved to index 0 of the
   * returned array; relative order of the rest is preserved (LinkedHashMap).
   * Also stamps each entry with its contentType under "_type".
   */
  @SuppressWarnings("resource")
  public static JSONArray resolveContentArray(JSONArray a, CharArray ca) {
    int                               len     = a.length();
    String                            type    = null;
    JSONObject                        best    = null;
    String                            bestKey = null;
    String                            s;
    LinkedHashMap<String, JSONObject> map     = new LinkedHashMap<String, JSONObject>(len);

    if (ca == null) {
      ca = new CharArray();
    }

    for (int i = 0; i < len; i++) {
      JSONObject content = a.getJSONObject(i);
      JSONObject o       = content.optJSONObject("format");

      s = (o == null) ? null : FHIRUtils.getBestMedicalCode(o);
      ca.set((s == null) ? "" : s);
      o = content.getJSONObject("attachment");
      // key = formatCode^url (hashCode stands in for a missing url)
      s = o.optString("url", String.valueOf(o.hashCode()));
      ca.append("^").append((s == null) ? "" : s);
      s    = ca.toString();
      type = o.optString("contentType", null);

      if (type != null) {
        content.put("_type", type);

        if (type.startsWith("text/html")) {
          best    = content;
          bestKey = s;
        } else if ((best == null) && type.startsWith("text/")) {
          best    = content;
          bestKey = s;
        }
      } else if (best == null) {
        best    = content;
        bestKey = s;
      }

      JSONObject econtent = map.get(s);

      if (econtent != null) {
        // duplicate key: prefer the "better" image representation
        if ((type != null) && type.startsWith("image/")) {
          String etype = econtent.getJSONObject("attachment").optString("contentType", null);

          if (etype != null) {
            if (type.startsWith("image/svg")) {
              map.put(s, content);
            } else if (type.startsWith("image/jpeg") && !etype.startsWith("image/svg")) {
              map.put(s, content);
            } else if (type.startsWith("image/png") && !etype.startsWith("image/jpeg")
                       && !etype.startsWith("image/svg")) {
              map.put(s, content);
            }
          } else {
            map.put(s, content);
          }
        }
      } else {
        map.put(s, content);
      }

      // NOTE(review): this compares the composite KEY (formatCode^url) against
      // "text/" — a key would rarely start with "text/"; possibly meant to test
      // the format code alone. Left as-is; verify against upstream history.
      if ((s != null) && s.startsWith("text/")) {
        if (type == null) {
          type = s;
          best = content;
        } else if (s.startsWith("text/html")) {
          type = s;
          best = content;
        }
      }
    }

    a = new JSONArray();

    if (bestKey != null) {
      map.remove(bestKey);
      a.add(best);
    }

    Iterator<JSONObject> it = map.values().iterator();

    while(it.hasNext()) {
      a.add(it.next());
    }

    return a;
  }

  /** Writes one index line for a Composition section: type^title^date^isMain^ (no url column). */
  public static void writeSectionDocumentIndex(String type, JSONObject section, Writer w, boolean main)
          throws IOException {
    w.append(type).append('^').append(section.optString("title"));
    w.append("^").append(section.optString("date")).append("^").append(main ? "true" : "false");
    w.append("^\n");
  }

  /**
   * Builds a document_direct link for an attachment's external url, carrying
   * the content type as a query parameter. Returns null for inline "data:"
   * urls or when the attachment has no url.
   */
  public static String createAttachmentHREF(String contenid, JSONObject attachment, CharArray ca) {
    String url = attachment.optString("url", null);

    if ((url != null) && url.startsWith("data:")) {
      url = null;
    }

    if (url != null) {
      if (url.startsWith("/")) {
        url = url.substring(1);
      }

      String s = attachment.optString("contentType", null);

      ca.set("/hub/main/documents/document_direct?url=");
      ca.append(Functions.encodeUrl(url));

      if (s != null) {
        ca.append("&content_type=").append(Functions.encode(s));
      }

      url = ca.toString();
    }

    return url;
  }

  /**
   * Renders a Composition section. Sections with sub-sections emit an index
   * plus multipart parts (recursing via processSections); child sections are
   * emitted as individual multipart parts; leaf top-level sections are plain.
   */
  static void processSection(JSONObject section, Writer w, boolean child) throws IOException {
    String    date     = section.optString("date");
    JSONArray sections = section.optJSONArray("section");
    int       len      = (sections == null) ? 0 : sections.length();

    if (len > 0) {
      writeSectionDocumentIndex("document", section, w, true);

      for (int i = 0; i < len; i++) {
        JSONObject o = sections.getJSONObject(i);

        writeSectionDocumentIndex("document", o, w, false);
      }

      w.write(BOUNDARY_START);
      w.write("Content-Type: text/html\r\n\r\n");
    } else if (child) {
      w.write(BOUNDARY_START);
      w.write("Content-Type: text/html\r\n\r\n");
    }

    writeSection(section, w);

    if (len > 0) {
      processSections(date, sections, w);
      w.write(BOUNDARY_END);
    }
  }

  /** Renders each sub-section, inheriting the parent's date when a section has none. */
  static void processSections(String date, JSONArray sections, Writer w) throws IOException {
    int len = sections.length();

    for (int i = 0; i < len; i++) {
      JSONObject o = sections.getJSONObject(i);

      if (!o.containsKey("date")) {
        o.put("date", date);
      }

      processSection(o, w, true);
    }
  }

  // Renders a single section as an HTML page.
  // NOTE(review): the h1 emits the literal string "title" rather than the
  // section's title value — looks like a missing section.optString("title");
  // verify upstream. (Method is truncated at the end of this chunk.)
  static void writeSection(JSONObject section, Writer w) throws IOException {
    w.append("<html><head><title>").append(section.optString("title"));
    w.append("</title></head><body>\n");
    w.append("<h1 class='fhir_title'>").append("title").append("</h1>\n");
    w.append("<h3 class='fhir_section'>").append(section.optString("title")).append("</h3>\n");
    w.append("<div class='fhir_section_div'>\n");

    Object o = section.opt("text");
    String s = "";

    if
(o instanceof String) { s = (String) o; } else if (o instanceof JSONObject) { s = ((JSONObject) o).optString("div"); } w.append(s); w.append("\n</div>\n"); w.append("\n</body></html>"); } }
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.codeInsight.intentions;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nonnull;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.NotNullFunction;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.PyTokenTypes;
import com.jetbrains.python.psi.LanguageLevel;
import com.jetbrains.python.psi.PyBinaryExpression;
import com.jetbrains.python.psi.PyElementGenerator;
import com.jetbrains.python.psi.PyElementType;
import com.jetbrains.python.psi.PyExpression;
import com.jetbrains.python.psi.PyExpressionStatement;
import com.jetbrains.python.psi.PyFile;
import com.jetbrains.python.psi.PyStringLiteralExpression;
import com.jetbrains.python.psi.PyStringLiteralUtil;
import com.jetbrains.python.psi.impl.PyBuiltinCache;
import com.jetbrains.python.psi.types.PyClassTypeImpl;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.PyTypeChecker;
import com.jetbrains.python.psi.types.TypeEvalContext;

/**
 * Intention action that converts a chain of Python string concatenations
 * ({@code "a" + b + "c"}) into a single format expression: {@code str.format()}
 * on Python 2.7+, the {@code %} operator on earlier versions.
 *
 * @author Alexey.Ivanov
 */
public class PyStringConcatenationToFormatIntention extends PyBaseIntentionAction {
	@Nonnull
	public String getFamilyName() {
		return PyBundle.message("INTN.string.concatenation.to.format");
	}

	/**
	 * Available only when the caret is inside a binary-expression chain whose
	 * every operator is {@code +} and whose every operand is a string literal
	 * or an expression whose inferred type matches {@code str}.
	 */
	public boolean isAvailable(@Nonnull Project project, Editor editor, PsiFile file) {
		if(!(file instanceof PyFile)) {
			return false;
		}
		PsiElement element = PsiTreeUtil.getParentOfType(file.findElementAt(editor.getCaretModel().getOffset()), PyBinaryExpression.class, false);
		if(element == null) {
			return false;
		}
		// Climb to the topmost binary expression so the entire chain is inspected.
		while(element.getParent() instanceof PyBinaryExpression) {
			element = element.getParent();
		}
		final Collection<PyElementType> operators = getOperators((PyBinaryExpression) element);
		for(PyElementType operator : operators) {
			if(operator != PyTokenTypes.PLUS) {
				return false;
			}
		}
		final Collection<PyExpression> expressions = getSimpleExpressions((PyBinaryExpression) element);
		if(expressions.size() == 0) {
			return false;
		}
		final PyBuiltinCache cache = PyBuiltinCache.getInstance(element);
		for(PyExpression expression : expressions) {
			if(expression == null) {
				return false;
			}
			if(expression instanceof PyStringLiteralExpression) {
				continue;
			}
			final PyType type = TypeEvalContext.codeAnalysis(file.getProject(), file).getType(expression);
			// NOTE(review): "type != null" is checked AFTER match(...) has already
			// run on a possibly-null type — presumably match() tolerates null; confirm.
			final boolean isStringReference = PyTypeChecker.match(cache.getStringType(LanguageLevel.forElement(expression)), type, TypeEvalContext.codeAnalysis(file.getProject(), file)) && type != null;
			if(!isStringReference) {
				return false;
			}
		}
		// Advertise the flavor of replacement the doInvoke() pass will produce.
		if(LanguageLevel.forElement(element).isAtLeast(LanguageLevel.PYTHON27)) {
			setText(PyBundle.message("INTN.replace.plus.with.str.format"));
		}
		else {
			setText(PyBundle.message("INTN.replace.plus.with.format.operator"));
		}
		return true;
	}

	/** Flattens a (possibly nested) binary expression into its leaf operands, left to right. */
	private static Collection<PyExpression> getSimpleExpressions(@Nonnull PyBinaryExpression expression) {
		List<PyExpression> res = new ArrayList<>();
		if(expression.getLeftExpression() instanceof PyBinaryExpression) {
			res.addAll(getSimpleExpressions((PyBinaryExpression) expression.getLeftExpression()));
		}
		else {
			res.add(expression.getLeftExpression());
		}
		if(expression.getRightExpression() instanceof PyBinaryExpression) {
			res.addAll(getSimpleExpressions((PyBinaryExpression) expression.getRightExpression()));
		}
		else {
			res.add(expression.getRightExpression());
		}
		return res;
	}

	/** Collects every operator appearing anywhere in a (possibly nested) binary expression. */
	private static Collection<PyElementType> getOperators(@Nonnull PyBinaryExpression expression) {
		List<PyElementType> res = new ArrayList<>();
		if(expression.getLeftExpression() instanceof PyBinaryExpression) {
			res.addAll(getOperators((PyBinaryExpression) expression.getLeftExpression()));
		}
		if(expression.getRightExpression() instanceof PyBinaryExpression) {
			res.addAll(getOperators((PyBinaryExpression) expression.getRightExpression()));
		}
		res.add(expression.getOperator());
		return res;
	}

	/**
	 * Rebuilds the concatenation chain as one format expression and replaces the
	 * original PSI element. String-literal operands are folded into the format
	 * string (escaped, with {@code %} doubled for %-formatting); all other
	 * operands become positional format arguments.
	 */
	public void doInvoke(@Nonnull Project project, Editor editor, PsiFile file) throws IncorrectOperationException {
		PsiElement element = PsiTreeUtil.getTopmostParentOfType(file.findElementAt(editor.getCaretModel().getOffset()), PyBinaryExpression.class);
		if(element == null) {
			return;
		}
		final LanguageLevel languageLevel = LanguageLevel.forElement(element);
		final boolean useFormatMethod = languageLevel.isAtLeast(LanguageLevel.PYTHON27);
		NotNullFunction<String, String> escaper = StringUtil.escaper(false, "\"\'\\");
		StringBuilder stringLiteral = new StringBuilder();
		List<String> parameters = new ArrayList<>();
		Pair<String, String> quotes = Pair.create("\"", "\"");
		boolean quotesDetected = false;
		final TypeEvalContext context = TypeEvalContext.userInitiated(file.getProject(), file);
		int paramCount = 0;
		boolean isUnicode = false;
		final PyClassTypeImpl unicodeType = PyBuiltinCache.getInstance(element).getObjectType("unicode");
		for(PyExpression expression : getSimpleExpressions((PyBinaryExpression) element)) {
			if(expression instanceof PyStringLiteralExpression) {
				final PyType type = context.getType(expression);
				if(type != null && type.equals(unicodeType)) {
					isUnicode = true;
				}
				// Reuse the quote style of the first literal in the chain.
				if(!quotesDetected) {
					quotes = PyStringLiteralUtil.getQuotes(expression.getText());
					quotesDetected = true;
				}
				String value = ((PyStringLiteralExpression) expression).getStringValue();
				if(!useFormatMethod) {
					// Literal percent signs must be doubled for the % operator.
					value = value.replace("%", "%%");
				}
				stringLiteral.append(escaper.fun(value));
			}
			else {
				addParamToString(stringLiteral, paramCount, useFormatMethod);
				parameters.add(expression.getText());
				++paramCount;
			}
		}
		if(quotes == null) {
			quotes = Pair.create("\"", "\"");
		}
		stringLiteral.insert(0, quotes.getFirst());
		// Prefix with "u" when any operand was unicode and the quotes lack the prefix.
		if(isUnicode && !quotes.getFirst().toLowerCase().contains("u")) {
			stringLiteral.insert(0, "u");
		}
		stringLiteral.append(quotes.getSecond());
		PyElementGenerator elementGenerator = PyElementGenerator.getInstance(project);
		if(!parameters.isEmpty()) {
			if(useFormatMethod) {
				stringLiteral.append(".format(").append(StringUtil.join(parameters, ",")).append(")");
			}
			else {
				// A single argument needs no parenthesized tuple: "..." % x
				final String paramString = parameters.size() > 1 ? "(" + StringUtil.join(parameters, ",") + ")" : StringUtil.join(parameters, ",");
				stringLiteral.append(" % ").append(paramString);
			}
			final PyExpression expression = elementGenerator.createFromText(LanguageLevel.getDefault(), PyExpressionStatement.class, stringLiteral.toString()).getExpression();
			element.replace(expression);
		}
		else {
			PyStringLiteralExpression stringLiteralExpression = elementGenerator.createStringLiteralAlreadyEscaped(stringLiteral.toString());
			element.replace(stringLiteralExpression);
		}
	}

	/** Appends the next placeholder: "{i}" for str.format, "%s" for the % operator. */
	private static void addParamToString(StringBuilder stringLiteral, int i, boolean useFormatMethod) {
		if(useFormatMethod) {
			stringLiteral.append("{").append(i).append("}");
		}
		else {
			stringLiteral.append("%s");
		}
	}
}
/**********************************************************************************
 * $URL$
 * $Id$
 ***********************************************************************************
 *
 * Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.opensource.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **********************************************************************************/

package org.sakaiproject.portal.api;

import java.io.IOException;
import java.util.Map;

import javax.servlet.ServletContext;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.tool.api.ActiveTool;
import org.sakaiproject.tool.api.Placement;
import org.sakaiproject.tool.api.Session;
import org.sakaiproject.tool.api.ToolException;

/**
 * This interface represents a portal and is used mainly by portal handlers that
 * will not know the details of the portal implementation.
 *
 * @author ieb
 * @since Sakai 2.4
 * @version $Rev$
 */
public interface Portal
{
	/** Error response mode: render the error in site context. */
	public static final int ERROR_SITE = 0;

	/** Error response mode: render the error in gallery context. */
	public static final int ERROR_GALLERY = 1;

	/** Error response mode: render the error in worksite context. */
	public static final int ERROR_WORKSITE = 2;

	/**
	 * Parameter value to allow anonymous users of gallery mode to be sent to
	 * the gateway site as anonymous user (like the /portal URL) instead of
	 * making them log in (like worksite, site, and tool URLs).
	 */
	public static final String PARAM_FORCE_LOGIN = "force.login";

	/** Parameter value that forces a logout before processing the request. */
	public static final String PARAM_FORCE_LOGOUT = "force.logout";

	/**
	 * ThreadLocal attribute set while we are processing an error.
	 */
	public static final String ATTR_ERROR = "org.sakaiproject.portal.error";

	/**
	 * Session attribute root for storing a site's last page visited - just
	 * append the site id.
	 */
	public static final String ATTR_SITE_PAGE = "sakai.portal.site.";

	/**
	 * The default portal name if none is specified.
	 */
	public static final String DEFAULT_PORTAL_CONTEXT = "charon";

	/**
	 * Configuration option to enable/disable state reset on navigation change.
	 */
	public static final String CONFIG_AUTO_RESET = "portal.experimental.auto.reset";

	/**
	 * Configuration option for default number of site tabs to display to users.
	 */
	public static final String CONFIG_DEFAULT_TABS = "portal.default.tabs";

	/**
	 * Names of tool config/registration attributes that control the rendering
	 * of the tool's titlebar.
	 */
	public static final String TOOLCONFIG_SHOW_RESET_BUTTON = "reset.button";

	public static final String TOOLCONFIG_SHOW_HELP_BUTTON = "help.button";

	public static final String TOOLCONFIG_HELP_DOCUMENT_ID = "help.id";

	public static final String TOOLCONFIG_HELP_DOCUMENT_URL = "help.url";

	/**
	 * Tool property used to indicate if JSR_168 tools are to be pre-rendered
	 * as they are being placed in the context.
	 */
	public static final String JSR_168_PRE_RENDER = "sakai:portlet-pre-render";

	/**
	 * Tool property to allow the enabling/disabling of the direct url linking UI.
	 */
	public static final String TOOL_DIRECTURL_ENABLED_PROP = "sakai:tool-directurl-enabled";

	/**
	 * Prepare the response and send it to the render engine.
	 *
	 * @param rcontext the populated render context
	 * @param res the response to write to
	 * @param template the template name to render with
	 * @param contentType the content type to set on the response
	 * @throws IOException if the response cannot be written
	 */
	void sendResponse(PortalRenderContext rcontext, HttpServletResponse res, String template, String contentType) throws IOException;

	/**
	 * Get the placement for the request.
	 *
	 * @param req the current request
	 * @param res the current response
	 * @param session the current session
	 * @param placementId id of the placement to resolve
	 * @param doPage true when the placement should be resolved at page level
	 * @return the resolved placement identifier
	 * @throws ToolException if the placement cannot be resolved
	 */
	String getPlacement(HttpServletRequest req, HttpServletResponse res, Session session, String placementId, boolean doPage) throws ToolException;

	/**
	 * Perform login.
	 *
	 * @param req the current request
	 * @param res the current response
	 * @param session the current session
	 * @param returnPath path to redirect to after login
	 * @param skipContainer true to bypass container-managed authentication
	 * @throws ToolException if login processing fails
	 */
	void doLogin(HttpServletRequest req, HttpServletResponse res, Session session, String returnPath, boolean skipContainer) throws ToolException;

	/**
	 * Process a logout.
	 *
	 * @param req
	 *        Request object
	 * @param res
	 *        Response object
	 * @param session
	 *        Current session
	 * @param returnPath
	 *        if not null, the path to use for the end-user browser redirect
	 *        after the logout is complete. Leave null to use the configured
	 *        logged out URL.
	 * @throws ToolException if logout processing fails
	 */
	void doLogout(HttpServletRequest req, HttpServletResponse res, Session session, String returnPath) throws ToolException;

	/**
	 * Get a new render context from the render engine.
	 *
	 * @param siteType the type of the site being rendered
	 * @param title the page title
	 * @param skin the skin to apply
	 * @param request the current request
	 * @return a fresh render context
	 */
	PortalRenderContext startPageContext(String siteType, String title, String skin, HttpServletRequest request);

	/**
	 * Perform a redirect if logged out.
	 *
	 * @param res the current response
	 * @return true if a redirect was issued
	 * @throws IOException if the redirect cannot be sent
	 */
	boolean redirectIfLoggedOut(HttpServletResponse res) throws IOException;

	/**
	 * Get the portal page URL based on the tool supplied.
	 *
	 * @param siteTool the tool placement to locate
	 * @return the portal page URL for the tool's page
	 */
	String getPortalPageUrl(ToolConfiguration siteTool);

	/**
	 * Populate the model with error status.
	 *
	 * @param req the current request
	 * @param res the current response
	 * @param session the current session
	 * @param mode one of the ERROR_* constants on this interface
	 * @throws ToolException if error rendering fails
	 * @throws IOException if the response cannot be written
	 */
	void doError(HttpServletRequest req, HttpServletResponse res, Session session, int mode) throws ToolException, IOException;

	/**
	 * Forward to a portal url.
	 *
	 * @param tool the active tool
	 * @param req the current request
	 * @param res the current response
	 * @param siteTool the tool placement
	 * @param skin the skin to apply
	 * @param toolContextPath context path of the tool
	 * @param toolPathInfo extra path info for the tool
	 * @throws IOException if the forward fails with an I/O error
	 * @throws ToolException if the tool cannot process the forward
	 */
	void forwardPortal(ActiveTool tool, HttpServletRequest req, HttpServletResponse res, ToolConfiguration siteTool, String skin, String toolContextPath, String toolPathInfo) throws ToolException, IOException;

	/**
	 * Setup in preparation for a forward.
	 *
	 * @param req the current request
	 * @param res the current response
	 * @param p the placement being forwarded to
	 * @param skin the skin to apply
	 */
	void setupForward(HttpServletRequest req, HttpServletResponse res, Placement p, String skin) throws ToolException;

	/**
	 * Include the model section that relates to the bottom of the page.
	 *
	 * @param rcontext the render context to populate
	 */
	void includeBottom(PortalRenderContext rcontext);

	/**
	 * Work out the type of the site based on the site id.
	 *
	 * @param siteId the site id
	 * @return the site type
	 */
	String calcSiteType(String siteId);

	/**
	 * Include the part of the view tree needed to render login.
	 *
	 * @param rcontext the render context to populate
	 * @param req the current request
	 * @param session the current session
	 */
	void includeLogin(PortalRenderContext rcontext, HttpServletRequest req, Session session);

	/**
	 * Forward the request to a tool.
	 *
	 * @param tool the active tool
	 * @param req the current request
	 * @param res the current response
	 * @param placement the tool placement (javadoc previously said "siteTool")
	 * @param skin the skin to apply
	 * @param toolContextPath context path of the tool
	 * @param toolPathInfo extra path info for the tool
	 * @throws ToolException if the tool cannot process the forward
	 */
	void forwardTool(ActiveTool tool, HttpServletRequest req, HttpServletResponse res, Placement placement, String skin, String toolContextPath, String toolPathInfo) throws ToolException;

	/**
	 * Get the site id for the user.
	 *
	 * @param userId the user id
	 * @return the user's EID-based site id (e.g. the "My Workspace" site)
	 */
	String getUserEidBasedSiteId(String userId);

	/**
	 * Populate the view tree for the model.
	 *
	 * @param req the current request
	 * @param res the current response
	 * @param session the current session
	 * @param siteId the site to render
	 * @param toolId the tool to render
	 * @param toolContextPath context path of the tool
	 * @param prefix URL prefix
	 * @param doPages true to render at page granularity
	 * @param resetTools true to reset tool state on navigation
	 * @param includeSummary true to include the site summary
	 * @param expandSite true to expand the site in the view tree
	 * @return the populated render context
	 * @throws ToolException if tool rendering fails
	 * @throws IOException if the response cannot be written
	 */
	PortalRenderContext includePortal(HttpServletRequest req, HttpServletResponse res, Session session, String siteId, String toolId, String toolContextPath, String prefix, boolean doPages, boolean resetTools, boolean includeSummary, boolean expandSite) throws ToolException, IOException;

	/**
	 * Include the tool part of the view tree.
	 *
	 * @param res the current response
	 * @param req the current request
	 * @param placement the tool placement to include
	 * @return a map of model entries for the tool
	 * @throws IOException if the tool content cannot be read
	 */
	Map includeTool(HttpServletResponse res, HttpServletRequest req, ToolConfiguration placement) throws IOException;

	/**
	 * Include the tool part of the view tree.
	 *
	 * @param res the current response
	 * @param req the current request
	 * @param placement the tool placement to include
	 * @param inlineTool true to render the tool inline rather than in an iframe
	 * @return a map of model entries for the tool
	 * @throws IOException if the tool content cannot be read
	 */
	Map includeTool(HttpServletResponse res, HttpServletRequest req, ToolConfiguration placement, boolean inlineTool) throws IOException;

	/**
	 * Get the context name of the portal. This is the name used to identify the
	 * portal implementation in the portal service and to other parts of the
	 * system. Typically portals will be registered with the portal service
	 * using a name and render engines and PortalHandlers will connect to named
	 * portals.
	 *
	 * @return the portal context name
	 */
	String getPortalContext();

	/**
	 * Get the servlet context associated with the portal.
	 *
	 * @return the servlet context
	 */
	ServletContext getServletContext();

	/**
	 * Include the sub sites below a particular site in the view tree.
	 *
	 * @param rcontext the render context to populate
	 * @param req the current request
	 * @param session the current session
	 * @param siteId the parent site id
	 * @param toolContextPath context path of the tool
	 * @param prefix URL prefix
	 * @param resetTools true to reset tool state on navigation
	 */
	void includeSubSites(PortalRenderContext rcontext, HttpServletRequest req, Session session, String siteId, String toolContextPath, String prefix, boolean resetTools );

	/**
	 * Get the page Filter Implementation.
	 *
	 * @return the current page filter
	 */
	PageFilter getPageFilter();

	/**
	 * Set the page Filter.
	 */
	void setPageFilter(PageFilter pageFilter);

	/**
	 * @return the site helper for this portal
	 */
	PortalSiteHelper getSiteHelper();

	/**
	 * @return the site neighbourhood service for this portal
	 */
	SiteNeighbourhoodService getSiteNeighbourhoodService();

	/**
	 * Indicate if a placement is a JSR-168 placement.
	 *
	 * @return true when the placement hosts a JSR-168 portlet
	 */
	public boolean isPortletPlacement(Placement placement);

	/**
	 * Find a cookie by this name from the request.
	 *
	 * @param req
	 *        The servlet request.
	 * @param name
	 *        The cookie name
	 * @return The cookie of this name in the request, or null if not found.
	 */
	public Cookie findCookie(HttpServletRequest req, String name);
}
/* * Copyright 2014 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.ssl.util; import io.netty.buffer.Unpooled; import io.netty.handler.codec.base64.Base64; import io.netty.util.CharsetUtil; import io.netty.util.internal.logging.InternalLogger; import io.netty.util.internal.logging.InternalLoggerFactory; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.security.NoSuchAlgorithmException; import java.security.PrivateKey; import java.security.SecureRandom; import java.security.cert.CertificateEncodingException; import java.security.cert.CertificateException; import java.security.cert.CertificateFactory; import java.security.cert.X509Certificate; import java.util.Date; /** * Generates a temporary self-signed certificate for testing purposes. * <p> * <strong>NOTE:</strong> * Never use the certificate and private key generated by this class in production. * It is purely for testing purposes, and thus it is very insecure. * It even uses an insecure pseudo-random generator for faster generation internally. 
* </p><p> * A X.509 certificate file and a RSA private key file are generated in a system's temporary directory using * {@link java.io.File#createTempFile(String, String)}, and they are deleted when the JVM exits using * {@link java.io.File#deleteOnExit()}. * </p><p> * At first, this method tries to use OpenJDK's X.509 implementation (the {@code sun.security.x509} package). * If it fails, it tries to use <a href="http://www.bouncycastle.org/">Bouncy Castle</a> as a fallback. * </p> */ public final class SelfSignedCertificate { private static final InternalLogger logger = InternalLoggerFactory.getInstance(SelfSignedCertificate.class); /** Current time minus 1 year, just in case software clock goes back due to time synchronization */ static final Date NOT_BEFORE = new Date(System.currentTimeMillis() - 86400000L * 365); /** The maximum possible value in X.509 specification: 9999-12-31 23:59:59 */ static final Date NOT_AFTER = new Date(253402300799000L); private final File certificate; private final File privateKey; private final X509Certificate cert; private final PrivateKey key; /** * Creates a new instance. */ public SelfSignedCertificate() throws CertificateException { this("example.com"); } /** * Creates a new instance. * * @param fqdn a fully qualified domain name */ public SelfSignedCertificate(String fqdn) throws CertificateException { // Bypass entrophy collection by using insecure random generator. // We just want to generate it without any delay because it's for testing purposes only. this(fqdn, ThreadLocalInsecureRandom.current(), 1024); } /** * Creates a new instance. * * @param fqdn a fully qualified domain name * @param random the {@link java.security.SecureRandom} to use * @param bits the number of bits of the generated private key */ public SelfSignedCertificate(String fqdn, SecureRandom random, int bits) throws CertificateException { // Generate an RSA key pair. 
final KeyPair keypair; try { KeyPairGenerator keyGen = KeyPairGenerator.getInstance("RSA"); keyGen.initialize(bits, random); keypair = keyGen.generateKeyPair(); } catch (NoSuchAlgorithmException e) { // Should not reach here because every Java implementation must have RSA key pair generator. throw new Error(e); } String[] paths; try { // Try the OpenJDK's proprietary implementation. paths = OpenJdkSelfSignedCertGenerator.generate(fqdn, keypair, random); } catch (Throwable t) { logger.debug("Failed to generate a self-signed X.509 certificate using sun.security.x509:", t); try { // Try Bouncy Castle if the current JVM didn't have sun.security.x509. paths = BouncyCastleSelfSignedCertGenerator.generate(fqdn, keypair, random); } catch (Throwable t2) { logger.debug("Failed to generate a self-signed X.509 certificate using Bouncy Castle:", t2); throw new CertificateException( "No provider succeeded to generate a self-signed certificate. " + "See debug log for the root cause."); } } certificate = new File(paths[0]); privateKey = new File(paths[1]); key = keypair.getPrivate(); try { cert = (X509Certificate) CertificateFactory.getInstance("X509").generateCertificate( new FileInputStream(certificate)); } catch (Exception e) { throw new CertificateEncodingException(e); } } /** * Returns the generated X.509 certificate file in PEM format. */ public File certificate() { return certificate; } /** * Returns the generated RSA private key file in PEM format. */ public File privateKey() { return privateKey; } /** * Returns the generated X.509 certificate. */ public X509Certificate cert() { return cert; } /** * Returns the generated RSA private key. */ public PrivateKey key() { return key; } /** * Deletes the generated X.509 certificate file and RSA private key file. 
*/ public void delete() { safeDelete(certificate); safeDelete(privateKey); } static String[] newSelfSignedCertificate( String fqdn, PrivateKey key, X509Certificate cert) throws IOException, CertificateEncodingException { // Encode the private key into a file. String keyText = "-----BEGIN PRIVATE KEY-----\n" + Base64.encode(Unpooled.wrappedBuffer(key.getEncoded()), true).toString(CharsetUtil.US_ASCII) + "\n-----END PRIVATE KEY-----\n"; File keyFile = File.createTempFile("keyutil_" + fqdn + '_', ".key"); keyFile.deleteOnExit(); OutputStream keyOut = new FileOutputStream(keyFile); try { keyOut.write(keyText.getBytes(CharsetUtil.US_ASCII)); keyOut.close(); keyOut = null; } finally { if (keyOut != null) { safeClose(keyFile, keyOut); safeDelete(keyFile); } } // Encode the certificate into a CRT file. String certText = "-----BEGIN CERTIFICATE-----\n" + Base64.encode(Unpooled.wrappedBuffer(cert.getEncoded()), true).toString(CharsetUtil.US_ASCII) + "\n-----END CERTIFICATE-----\n"; File certFile = File.createTempFile("keyutil_" + fqdn + '_', ".crt"); certFile.deleteOnExit(); OutputStream certOut = new FileOutputStream(certFile); try { certOut.write(certText.getBytes(CharsetUtil.US_ASCII)); certOut.close(); certOut = null; } finally { if (certOut != null) { safeClose(certFile, certOut); safeDelete(certFile); safeDelete(keyFile); } } return new String[] { certFile.getPath(), keyFile.getPath() }; } private static void safeDelete(File certFile) { if (!certFile.delete()) { logger.warn("Failed to delete a file: " + certFile); } } private static void safeClose(File keyFile, OutputStream keyOut) { try { keyOut.close(); } catch (IOException e) { logger.warn("Failed to close a file: " + keyFile, e); } } }
package edu.turtlekit2.ui.utils;

import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Insets;
import java.awt.Rectangle;
import java.util.ArrayList;

import javax.swing.JSeparator;
import javax.swing.JToolBar;
import javax.swing.SwingConstants;
import javax.swing.UIManager;

/**
 * Extended from AKDockLayout
 * http://forums.sun.com/thread.jspa?threadID=439235&range=15&start=0&forumID=57#1991118
 */
public class ToolBarLayout extends BorderLayout {

    private static final long serialVersionUID = 1L;

    // One bucket of docked components per border side; CENTER is a single slot.
    private ArrayList<Component> north = new ArrayList<Component>(1);
    private ArrayList<Component> south = new ArrayList<Component>(1);
    private ArrayList<Component> east = new ArrayList<Component>(1);
    private ArrayList<Component> west = new ArrayList<Component>(1);
    private Component center = null;

    // Cached extents of each side's strip; recomputed (and grown on wrap) during
    // every layout pass.
    private int northHeight, southHeight, eastWidth, westWidth;

    // Exposes the four side buckets by index: 0=north, 1=south, 2=west, 3=east.
    public Object[] curImbeddedTBRs = new Object[4];

    public static final int TOP = SwingConstants.TOP;
    public static final int BOTTOM = SwingConstants.BOTTOM;
    public static final int LEFT = SwingConstants.LEFT;
    public static final int RIGHT = SwingConstants.RIGHT;

    public ToolBarLayout() {
        curImbeddedTBRs[0] = north;
        curImbeddedTBRs[1] = south;
        curImbeddedTBRs[2] = west;
        curImbeddedTBRs[3] = east;
    }

    // Routes a component to the side bucket named by its BorderLayout constraint.
    public void addLayoutComponent(Component c, Object con) {
        synchronized (c.getTreeLock()) {
            if (con != null) {
                String s = con.toString();
                c.setVisible(true);
                if (s.equals(NORTH)) {
                    north.add(c);
                } else if (s.equals(SOUTH)) {
                    south.add(c);
                } else if (s.equals(EAST)) {
                    east.add(c);
                } else if (s.equals(WEST)) {
                    west.add(c);
                } else if (s.equals(CENTER)) {
                    center = c;
                }
                // NOTE(review): assumes the component already has a parent; this
                // would NPE if the layout is driven before the component is added
                // to a container — TODO confirm intended usage.
                c.getParent().validate();
            }
        }
    }

    public void removeLayoutComponent(Component c) {
        north.remove(c);
        south.remove(c);
        east.remove(c);
        west.remove(c);
        if (c == center) {
            center = null;
        }
        flipSeparators(c, SwingConstants.VERTICAL);
        c.getParent().validate();
    }

    // Lays out the four toolbar strips first (north, south, east, west), then
    // gives the center component whatever space remains.
    public void layoutContainer(Container target) {
        synchronized (target.getTreeLock()) {
            Insets insets = target.getInsets();
            int top = insets.top;
            int bottom = target.getHeight() - insets.bottom;
            int left = insets.left;
            int right = target.getWidth() - insets.right;

            northHeight = getPreferredDimension(north).height;
            southHeight = getPreferredDimension(south).height;
            eastWidth = getPreferredDimension(east).width;
            westWidth = getPreferredDimension(west).width;

            placeComponents(target, north, left, top, right - left, northHeight, TOP);
            top += (northHeight + getVgap());
            placeComponents(target, south, left, bottom - southHeight, right - left, southHeight, BOTTOM);
            bottom -= (southHeight + getVgap());
            placeComponents(target, east, right - eastWidth, top, eastWidth, bottom - top, RIGHT);
            right -= (eastWidth + getHgap());
            placeComponents(target, west, left, top, westWidth, bottom - top, LEFT);
            left += (westWidth + getHgap());

            if (center != null) {
                center.setBounds(left, top, right - left, bottom - top);
            }
        }
    }

    // Returns the ideal width for a vertically oriented toolbar
    // and the ideal height for a horizontally oriented toolbar:
    private Dimension getPreferredDimension(ArrayList<Component> comps) {
        int w = 0, h = 0;
        for (int i = 0; i < comps.size(); i++) {
            Component c = (comps.get(i));
            Dimension d = c.getPreferredSize();
            w = Math.max(w, d.width);
            h = Math.max(h, d.height);
        }
        return new Dimension(w, h);
    }

    // Places one side strip's components, wrapping onto an additional row (or
    // column) when they do not fit, and stretching the last component of each
    // row/column to fill the leftover space. Side effect: grows
    // northHeight/southHeight/westWidth/eastWidth on every wrap.
    private void placeComponents(Container target, ArrayList<Component> comps, int x, int y, int w, int h,
            int orientation) {
        int offset = 0;
        Component c = null;
        if (orientation == TOP || orientation == BOTTOM) {
            offset = x;
            int totalWidth = 0;
            int cwidth = 0;
            int num = comps.size();
            for (int i = 0; i < num; i++) {
                c = (comps.get(i));
                flipSeparators(c, SwingConstants.VERTICAL);
                // Remember the running totals from before this component so the
                // previous row's last component can be stretched on wrap.
                int widthSwap = totalWidth;
                int cwidthSwap = cwidth;
                cwidth = c.getPreferredSize().width;
                totalWidth += cwidth;
                if (w < totalWidth && i != 0) {
                    // Wrap: stretch the previous component to the row edge ...
                    Component c0 = (comps.get(i - 1));
                    Rectangle rec = c0.getBounds();
                    c0.setBounds(rec.x, rec.y, w - widthSwap + cwidthSwap, rec.height);
                    offset = x;
                    // ... then move down (TOP) or up (BOTTOM) by one strip height.
                    if (orientation == TOP) {
                        y += h;
                        northHeight += h;
                    } else if (orientation == BOTTOM) {
                        southHeight += h;
                        y -= h;
                    }
                    totalWidth = cwidth;
                }
                if (i + 1 == num) {
                    // Last component fills the remainder of its row.
                    c.setBounds(x + offset, y, w - totalWidth + cwidth, h);
                } else {
                    c.setBounds(x + offset, y, cwidth, h);
                    offset += cwidth;
                }
            }
            flipSeparators(c, SwingConstants.VERTICAL);
        } else {
            int totalHeight = 0;
            int cheight = 0;
            int num = comps.size();
            for (int i = 0; i < num; i++) {
                c = (comps.get(i));
                int heightSwap = totalHeight;
                int cheightSwap = cheight;
                cheight = c.getPreferredSize().height;
                totalHeight += cheight;
                if (h < totalHeight && i != 0) {
                    // Wrap: stretch the previous component to the column edge,
                    // then shift the column sideways.
                    Component c0 = (comps.get(i - 1));
                    Rectangle rec = c0.getBounds();
                    c0.setBounds(rec.x, rec.y, rec.width, h - heightSwap + cheightSwap);
                    // offset = y;
                    if (orientation == LEFT) {
                        x += w;
                        westWidth += w;
                    } else if (orientation == RIGHT) {
                        eastWidth += w;
                        x -= w;
                    }
                    totalHeight = cheight;
                    offset = 0;
                }
                if (totalHeight > h) {
                    // cheight = cheight-2;
                }
                if (i + 1 == num) {
                    c.setBounds(x, y + offset, w, h - totalHeight + cheight);
                } else {
                    c.setBounds(x, y + offset, w, cheight);
                    offset += cheight;
                }
            }
            flipSeparators(c, SwingConstants.HORIZONTAL);
        }
    }

    // On the Windows look-and-feel, toolbar separators do not follow the bar's
    // orientation, so each JSeparator is rebuilt with the requested orientation.
    private void flipSeparators(Component c, int orientn) {
        if (c != null && c instanceof JToolBar
                && UIManager.getLookAndFeel().getName().toLowerCase().indexOf("windows") != -1) {
            JToolBar jtb = (JToolBar) c;
            Component comps[] = jtb.getComponents();
            if (comps != null && comps.length > 0) {
                for (int i = 0; i < comps.length; i++) {
                    try {
                        Component component = comps[i];
                        if (component != null) {
                            if (component instanceof JSeparator) {
                                boolean isVisi = component.isVisible();
                                // Replace the separator in place (same index i).
                                jtb.remove(component);
                                JSeparator separ = new JSeparator();
                                separ.setVisible(isVisi);
                                if (orientn == SwingConstants.VERTICAL) {
                                    separ.setOrientation(SwingConstants.VERTICAL);
                                    separ.setMinimumSize(new Dimension(2, 6));
                                    separ.setPreferredSize(new Dimension(2, 6));
                                    separ.setMaximumSize(new Dimension(2, 100));
                                } else {
                                    separ.setOrientation(SwingConstants.HORIZONTAL);
                                    separ.setMinimumSize(new Dimension(6, 2));
separ.setPreferredSize(new Dimension(6, 2)); separ.setMaximumSize(new Dimension(100, 2)); } jtb.add(separ, i); } } } catch (Exception e) { e.printStackTrace(); } } } } //Gen.propChngSup.firePropertyChange("flipem","",new Double(Math.random())); } @SuppressWarnings("unchecked") public boolean containsImbeddedComp(Component c){ for(int i=0; i<this.curImbeddedTBRs.length; i++){ if( ((ArrayList)curImbeddedTBRs[i]).contains(c)) return true; } return false; } /**(SwingConstants top,left,bottom,right): * top:1, left:2, bottom:3, right:4 */ @SuppressWarnings("unchecked") public boolean containsImbeddedComp(Component c, int inx){ if(inx>0 && inx<5 && ((ArrayList)curImbeddedTBRs[inx+1]).contains(c)) return true; return false; } }
package edu.brown.cs.systems.retro.throttling.throttlingqueues;

/*
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/*
 * This file is available under and governed by the GNU General Public
 * License version 2 only, as published by the Free Software Foundation.
 * However, the following notice accompanied the original version of this
 * file:
 *
 * Written by Doug Lea with assistance from members of JCP JSR-166
 * Expert Group and released to the public domain, as explained at
 * http://creativecommons.org/publicdomain/zero/1.0/
 */

import java.util.AbstractQueue;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

import org.apache.commons.lang3.NotImplementedException;

import edu.brown.cs.systems.resourcethrottling.ThrottlingMessages.ThrottlingPointSpecification;
import edu.brown.cs.systems.retro.Retro;
import edu.brown.cs.systems.retro.aggregation.LocalResources;
import edu.brown.cs.systems.retro.aggregation.aggregators.ThrottlingPointAggregator;
import edu.brown.cs.systems.retro.throttling.ClassChecker;
import edu.brown.cs.systems.retro.throttling.ThrottlingQueue;

/**
 * An unbounded {@linkplain BlockingQueue blocking queue} of <tt>Delayed</tt>
 * elements, in which an element can only be taken when its delay has expired.
 * The <em>head</em> of the queue is that <tt>Delayed</tt> element whose delay
 * expired furthest in the past. If no delay has expired there is no head and
 * <tt>poll</tt> will return <tt>null</tt>. Expiration occurs when an element's
 * <tt>getDelay(TimeUnit.NANOSECONDS)</tt> method returns a value less than or
 * equal to zero. Even though unexpired elements cannot be removed using
 * <tt>take</tt> or <tt>poll</tt>, they are otherwise treated as normal
 * elements. For example, the <tt>size</tt> method returns the count of both
 * expired and unexpired elements. This queue does not permit null elements.
 *
 * <p>
 * This class and its iterator implement all of the <em>optional</em> methods of
 * the {@link Collection} and {@link Iterator} interfaces.
 *
 * <p>
 * This class is a member of the <a href="{@docRoot}
 * /../technotes/guides/collections/index.html"> Java Collections Framework</a>.
 *
 * @since 1.5
 * @author Doug Lea
 * @param <E>
 *            the type of elements held in this collection
 */
public class ThrottlingLockingQueue<E> extends AbstractQueue<E> implements ThrottlingQueue<E> {

    private final ClassChecker hadoop_ipc_class_checker_hack = new ClassChecker();

    // Total number of queued items across all tenant queues. Mutated by the
    // per-tenant offer/poll/clear, which are always invoked under this.lock.
    public volatile int size = 0;

    // Grace-period window in nanoseconds: how far into the past "next" is
    // allowed to fall before it is clamped forward.
    private final long window;
    private final ThrottlingPointAggregator taggregator;

    public ThrottlingLockingQueue(String queueid) {
        this(queueid, 1000000000); // default 1 second history (1 billion
                                   // nanoseconds)
    }

    /**
     * @param historySize
     *            up to historySizeNanos nanoseconds grace period
     */
    public ThrottlingLockingQueue(String queueid, long historySizeNanos) {
        this.window = historySizeNanos;
        this.taggregator = LocalResources.getThrottlingPointAggregator(queueid);
    }

    private long now() {
        return System.nanoTime();
    }

    // One FIFO of pending items per tenant, plus that tenant's rate-limit state.
    private class TenantQueue {
        public final int tenantId;
        // Minimum spacing between dequeues in nanoseconds (0 = unthrottled).
        public volatile long delay_ns = 0;
        // Earliest nanoTime at which the head of this queue may be dequeued.
        public volatile long next = 0;
        public final Queue<Item> elements = new ArrayDeque<Item>();

        // Wraps a queued element with its enqueue/availability timestamps.
        public class Item implements Comparable<Item> {
            public final E element;
            public final long enqueue;
            public long available;
            public final TenantQueue q = TenantQueue.this;

            public Item(E element) {
                this.element = element;
                this.enqueue = System.nanoTime();
            }

            @Override
            public int compareTo(Item other) {
                return Long.compare(this.enqueue, other.enqueue);
            }

            // NOTE(review): deliberately delegates to other.equals(element) so
            // an Item can compare equal to its wrapped element. This is
            // asymmetric and violates the equals() contract, and there is no
            // matching hashCode() — verify how collection lookups depend on it.
            @Override
            public boolean equals(Object other) {
                return other == null ? false : other.equals(element);
            }
        }

        public TenantQueue(int tenantId) {
            this.tenantId = tenantId;
        }

        public boolean offer(E e, long t) {
            elements.add(new Item(e));
            size++;
            // First element after idle: clamp "next" so at most one window of
            // accumulated credit can be spent immediately.
            if (elements.size() == 1)
                next = Math.max(t - window, next);
            return true;
        }

        public E peek() {
            Item peeked = elements.peek();
            return peeked == null ? null : peeked.element;
        }

        public Item poll(long t) {
            Item polled = elements.poll();
            if (polled == null)
                return null;
            size--;
            polled.available = next; // record when the element was actually
                                     // available for deq
            // Advance the earliest-dequeue time by this tenant's delay, again
            // clamped so no more than one window of history accumulates.
            next = Math.max(t - window, next + delay_ns);
            return polled;
        }

        public void clear() {
            size -= elements.size();
            elements.clear();
        }

        public void setRate(double rate_per_second) {
            long new_delay_ns = (long) Math.floor(1000 * 1000 * 1000 / rate_per_second);
            // Re-base "next" onto the new delay without granting extra credit.
            next = next - delay_ns + new_delay_ns;
            delay_ns = new_delay_ns;
        }

        public void clearRate() {
            next -= delay_ns;
            this.delay_ns = 0;
        }

        /**
         * returns when the queue will next be available to pull from, or if it
         * can be pulled from now, when the available element was enqueued
         */
        public long available(long t) {
            return (elements.size() == 0) ? Long.MAX_VALUE : ((next <= t) ? elements.peek().enqueue : next);
        }

        public Object[] toArray() {
            // NOTE(review): this returns the Item wrappers, not the raw E
            // elements; Itr later casts each entry to (E) — confirm intended.
            return elements.toArray();
        }

        public boolean remove(Object o) {
            return elements.remove(o);
        }
    }

    private transient final ReentrantLock lock = new ReentrantLock();

    // Tenant id -> that tenant's queue; populated lazily, guarded by lock.
    private final Map<Integer, TenantQueue> qs = new HashMap<Integer, TenantQueue>();

    public int tenant() {
        return Retro.getTenant();
    }

    // Returns the tenant's queue, creating it on first use. Caller holds lock.
    private TenantQueue queue(int tenantId) {
        TenantQueue q = qs.get(tenantId);
        if (q == null)
            qs.put(tenantId, q = new TenantQueue(tenantId));
        return q;
    }

    // Picks the tenant queue with the smallest availability time (the queue
    // whose head should be served next). Caller holds lock.
    private TenantQueue nextQueue(long now) {
        long available = Long.MAX_VALUE;
        TenantQueue next = null;
        for (TenantQueue q : qs.values()) {
            long qavailable = q.available(now);
            if (qavailable < available) {
                available = qavailable;
                next = q;
            }
        }
        return next;
    }

    private E peekNext(long now) {
        TenantQueue nextQueue = nextQueue(now);
        return nextQueue == null ? null : nextQueue.peek();
    }

    private boolean hasNext() {
        return size > 0;
    }

    // Post-dequeue bookkeeping: restores the tenant context, records the
    // throttling stats, and back-dates enqueue times on Hadoop/HBase call
    // objects so downstream accounting sees the throttled availability time.
    private E done(TenantQueue.Item item, long t) {
        if (item == null)
            return null;
        Retro.setTenant(item.q.tenantId);
        taggregator.throttled(item.q.tenantId, 0L); // TODO track latency
        if (hadoop_ipc_class_checker_hack.isHadoopIPCCall(item.element))
            hadoop_ipc_class_checker_hack.setCallEnqueue(item.element, Math.max(item.enqueue, item.available));
        if (hadoop_ipc_class_checker_hack.isHBaseCallRunner(item.element))
            hadoop_ipc_class_checker_hack.setCallRunnerEnqueue(item.element, Math.max(item.enqueue, item.available));
        return item.element;
    }

    /**
     * Thread designated to wait for the element at the head of the queue. This
     * variant of the Leader-Follower pattern
     * (http://www.cs.wustl.edu/~schmidt/POSA/POSA2/) serves to minimize
     * unnecessary timed waiting. When a thread becomes the leader, it waits
     * only for the next delay to elapse, but other threads await indefinitely.
     * The leader thread must signal some other thread before returning from
     * take() or poll(...), unless some other thread becomes leader in the
     * interim. Whenever the head of the queue is replaced with an element with
     * an earlier expiration time, the leader field is invalidated by being
     * reset to null, and some waiting thread, but not necessarily the current
     * leader, is signalled. So waiting threads must be prepared to acquire and
     * lose leadership while waiting.
     */
    private Thread leader = null;

    /**
     * Condition signalled when a newer element becomes available at the head of
     * the queue or a new thread may need to become leader.
     */
    private final Condition available = lock.newCondition();

    /**
     * Inserts the specified element into this delay queue.
     *
     * @param e
     *            the element to add
     * @return <tt>true</tt> (as specified by {@link Collection#add})
     * @throws NullPointerException
     *             if the specified element is null
     */
    public boolean add(E e) {
        return offer(e);
    }

    /**
     * Inserts the specified element into this delay queue.
     *
     * @param e
     *            the element to add
     * @return <tt>true</tt>
     * @throws NullPointerException
     *             if the specified element is null
     */
    public boolean offer(E e) {
        final ReentrantLock lock = this.lock;
        int tenant = tenant();
        lock.lock();
        try {
            long t = now();
            queue(tenant).offer(e, t);
            // If the new element became the queue's head, wake a waiter so it
            // can re-evaluate its wait time.
            if (peekNext(t) == e) {
                leader = null;
                available.signal();
            }
            return true;
        } finally {
            lock.unlock();
            taggregator.throttling(tenant);
        }
    }

    /**
     * Inserts the specified element into this delay queue. As the queue is
     * unbounded this method will never block.
     *
     * @param e
     *            the element to add
     * @throws NullPointerException
     *             {@inheritDoc}
     */
    public void put(E e) {
        offer(e);
    }

    /**
     * Inserts the specified element into this delay queue. As the queue is
     * unbounded this method will never block.
     *
     * @param e
     *            the element to add
     * @param timeout
     *            This parameter is ignored as the method never blocks
     * @param unit
     *            This parameter is ignored as the method never blocks
     * @return <tt>true</tt>
     * @throws NullPointerException
     *             {@inheritDoc}
     */
    public boolean offer(E e, long timeout, TimeUnit unit) {
        return offer(e);
    }

    /**
     * Retrieves and removes the head of this queue, or returns <tt>null</tt> if
     * this queue has no elements with an expired delay.
     *
     * @return the head of this queue, or <tt>null</tt> if this queue has no
     *         elements with an expired delay
     */
    public E poll() {
        final ReentrantLock lock = this.lock;
        lock.lock();
        long t = now();
        TenantQueue.Item item = null;
        try {
            TenantQueue q = nextQueue(t);
            if (q == null || q.next > t)
                return null;
            else {
                item = q.poll(t);
                return item == null ? null : item.element;
            }
        } finally {
            lock.unlock();
            // done() runs after unlock: it only touches the immutable item and
            // thread-local tenant state.
            done(item, t);
        }
    }

    /**
     * Retrieves and removes the head of this queue, waiting if necessary until
     * an element with an expired delay is available on this queue.
     *
     * @return the head of this queue
     * @throws InterruptedException
     *             {@inheritDoc}
     */
    public E take() throws InterruptedException {
        final ReentrantLock lock = this.lock;
        lock.lockInterruptibly();
        long t = now();
        TenantQueue.Item item = null;
        try {
            for (;;) {
                TenantQueue q = nextQueue(t);
                if (q == null)
                    // Queue is empty: wait until something is offered.
                    available.await();
                else {
                    long delay = q.next - t;
                    if (delay <= 0) {
                        item = q.poll(t);
                        return item == null ? null : item.element;
                    } else if (leader != null)
                        // Another thread is already timing the head's delay.
                        available.await();
                    else {
                        // Become leader and wait only for the head's delay.
                        Thread thisThread = Thread.currentThread();
                        leader = thisThread;
                        try {
                            available.awaitNanos(delay);
                        } finally {
                            if (leader == thisThread)
                                leader = null;
                        }
                    }
                }
                t = System.nanoTime();
            }
        } finally {
            // Hand leadership to another waiter if elements remain.
            if (leader == null && hasNext())
                available.signal();
            lock.unlock();
            done(item, t);
        }
    }

    /**
     * Retrieves and removes the head of this queue, waiting if necessary until
     * an element with an expired delay is available on this queue, or the
     * specified wait time expires.
     *
     * @return the head of this queue, or <tt>null</tt> if the specified waiting
     *         time elapses before an element with an expired delay becomes
     *         available
     * @throws InterruptedException
     *             {@inheritDoc}
     */
    public E poll(long timeout, TimeUnit unit) throws InterruptedException {
        long nanos = unit.toNanos(timeout);
        final ReentrantLock lock = this.lock;
        lock.lockInterruptibly();
        long t = now();
        TenantQueue.Item item = null;
        try {
            for (;;) {
                TenantQueue q = nextQueue(t);
                if (q == null) {
                    if (nanos <= 0)
                        return null;
                    else
                        nanos = available.awaitNanos(nanos);
                } else {
                    long delay = q.next - t;
                    if (delay <= 0) {
                        item = q.poll(t);
                        return item == null ? null : item.element;
                    }
                    if (nanos <= 0)
                        return null;
                    if (nanos < delay || leader != null)
                        // Either our timeout expires before the head is ready,
                        // or someone else is already timing it.
                        nanos = available.awaitNanos(nanos);
                    else {
                        Thread thisThread = Thread.currentThread();
                        leader = thisThread;
                        try {
                            long timeLeft = available.awaitNanos(delay);
                            nanos -= delay - timeLeft;
                        } finally {
                            if (leader == thisThread)
                                leader = null;
                        }
                    }
                }
                t = System.nanoTime();
            }
        } finally {
            if (leader == null && hasNext())
                available.signal();
            lock.unlock();
            done(item, t);
        }
    }

    /**
     * Retrieves, but does not remove, the head of this queue, or returns
     * <tt>null</tt> if this queue is empty. Unlike <tt>poll</tt>, if no expired
     * elements are available in the queue, this method returns the element that
     * will expire next, if one exists.
     *
     * @return the head of this queue, or <tt>null</tt> if this queue is empty.
     */
    public E peek() {
        final ReentrantLock lock = this.lock;
        lock.lock();
        try {
            return peekNext(now());
        } finally {
            lock.unlock();
        }
    }

    public int size() {
        final ReentrantLock lock = this.lock;
        lock.lock();
        try {
            return size;
        } finally {
            lock.unlock();
        }
    }

    /**
     * @throws UnsupportedOperationException
     *             {@inheritDoc}
     * @throws ClassCastException
     *             {@inheritDoc}
     * @throws NullPointerException
     *             {@inheritDoc}
     * @throws IllegalArgumentException
     *             {@inheritDoc}
     */
    public int drainTo(Collection<? super E> c) {
        // if (c == null)
        // throw new NullPointerException();
        // if (c == this)
        // throw new IllegalArgumentException();
        // final ReentrantLock lock = this.lock;
        // lock.lock();
        // TenantQueue.Item item = null;
        // try {
        // int n = 0;
        // long t = now();
        // for (;;) {
        // TenantQueue q = nextQueue(t);
        // if (q == null || q.next > t)
        // break;
        // c.add(q.poll(t));
        // ++n;
        // }
        // return n;
        // } finally {
        // lock.unlock();
        // }
        throw new NotImplementedException("");
    }

    /**
     * @throws UnsupportedOperationException
     *             {@inheritDoc}
     * @throws ClassCastException
     *             {@inheritDoc}
     * @throws NullPointerException
     *             {@inheritDoc}
     * @throws IllegalArgumentException
     *             {@inheritDoc}
     */
    public int drainTo(Collection<? super E> c, int maxElements) {
        // if (c == null)
        // throw new NullPointerException();
        // if (c == this)
        // throw new IllegalArgumentException();
        // if (maxElements <= 0)
        // return 0;
        // final ReentrantLock lock = this.lock;
        // lock.lock();
        // TenantQueue.Item item = null;
        // try {
        // int n = 0;
        // long t = now();
        // while (n < maxElements) {
        // TenantQueue q = nextQueue(t);
        // if (q == null || q.next > t)
        // break;
        // c.add(q.poll(t));
        // ++n;
        // }
        // return n;
        // } finally {
        // lock.unlock();
        // }
        throw new NotImplementedException("");
    }

    /**
     * Atomically removes all of the elements from this delay queue. The queue
     * will be empty after this call returns. Elements with an unexpired delay
     * are not waited for; they are simply discarded from the queue.
     */
    public void clear() {
        final ReentrantLock lock = this.lock;
        lock.lock();
        try {
            for (TenantQueue q : qs.values()) {
                q.clear();
            }
        } finally {
            lock.unlock();
        }
    }

    /**
     * Always returns <tt>Integer.MAX_VALUE</tt> because a <tt>DelayQueue</tt>
     * is not capacity constrained.
     *
     * @return <tt>Integer.MAX_VALUE</tt>
     */
    public int remainingCapacity() {
        return Integer.MAX_VALUE;
    }

    /**
     * Returns an array containing all of the elements in this queue. The
     * returned array elements are in no particular order.
     *
     * <p>
     * The returned array will be "safe" in that no references to it are
     * maintained by this queue. (In other words, this method must allocate a
     * new array). The caller is thus free to modify the returned array.
     *
     * <p>
     * This method acts as bridge between array-based and collection-based APIs.
     *
     * @return an array containing all of the elements in this queue
     */
    public Object[] toArray() {
        final ReentrantLock lock = this.lock;
        lock.lock();
        try {
            Object[] arr = new Object[size];
            int i = 0;
            // NOTE(review): the snapshot contains TenantQueue.Item wrappers
            // (see TenantQueue.toArray()), not the raw E elements — confirm
            // that all callers expect this.
            for (TenantQueue q : qs.values()) {
                Object[] qarr = q.toArray();
                System.arraycopy(qarr, 0, arr, i, qarr.length);
                i += qarr.length;
            }
            return arr;
        } finally {
            lock.unlock();
        }
    }

    /**
     * Returns an array containing all of the elements in this queue; the
     * runtime type of the returned array is that of the specified array. The
     * returned array elements are in no particular order. If the queue fits in
     * the specified array, it is returned therein. Otherwise, a new array is
     * allocated with the runtime type of the specified array and the size of
     * this queue.
     *
     * <p>
     * If this queue fits in the specified array with room to spare (i.e., the
     * array has more elements than this queue), the element in the array
     * immediately following the end of the queue is set to <tt>null</tt>.
     *
     * <p>
     * Like the {@link #toArray()} method, this method acts as bridge between
     * array-based and collection-based APIs. Further, this method allows
     * precise control over the runtime type of the output array, and may, under
     * certain circumstances, be used to save allocation costs.
     *
     * <p>
     * The following code can be used to dump a delay queue into a newly
     * allocated array of <tt>Delayed</tt>:
     *
     * <pre>
     * Delayed[] a = q.toArray(new Delayed[0]);
     * </pre>
     *
     * Note that <tt>toArray(new Object[0])</tt> is identical in function to
     * <tt>toArray()</tt>.
* * @param a * the array into which the elements of the queue are to be * stored, if it is big enough; otherwise, a new array of the * same runtime type is allocated for this purpose * @return an array containing all of the elements in this queue * @throws ArrayStoreException * if the runtime type of the specified array is not a supertype * of the runtime type of every element in this queue * @throws NullPointerException * if the specified array is null */ public <T> T[] toArray(T[] a) { final ReentrantLock lock = this.lock; lock.lock(); try { return (T[]) toArray(a); } finally { lock.unlock(); } } /** * Removes a single instance of the specified element from this queue, if it * is present, whether or not it has expired. */ public boolean remove(Object o) { final ReentrantLock lock = this.lock; lock.lock(); try { for (TenantQueue q : qs.values()) { if (q.remove(o)) return true; } return false; } finally { lock.unlock(); } } /** * Returns an iterator over all the elements (both expired and unexpired) in * this queue. The iterator does not return the elements in any particular * order. * * <p> * The returned iterator is a "weakly consistent" iterator that will never * throw {@link java.util.ConcurrentModificationException * ConcurrentModificationException}, and guarantees to traverse elements as * they existed upon construction of the iterator, and may (but is not * guaranteed to) reflect any modifications subsequent to construction. * * @return an iterator over the elements in this queue */ public Iterator<E> iterator() { return new Itr(toArray()); } /** * Snapshot iterator that works off copy of underlying q array. 
private class Itr implements Iterator<E> {
    final Object[] array; // Array of all elements
    int cursor; // index of next element to return;
    int lastRet; // index of last element, or -1 if no such

    Itr(Object[] array) {
        lastRet = -1;
        this.array = array;
    }

    public boolean hasNext() {
        return cursor < array.length;
    }

    @SuppressWarnings("unchecked")
    public E next() {
        if (cursor >= array.length)
            throw new NoSuchElementException();
        lastRet = cursor;
        return (E) array[cursor++];
    }

    public void remove() {
        if (lastRet < 0)
            throw new IllegalStateException();
        Object x = array[lastRet];
        lastRet = -1;
        // Remove from the live queue, not just from this snapshot.
        lock.lock();
        try {
            ThrottlingLockingQueue.this.remove(x);
        } finally {
            lock.unlock();
        }
    }
}

/**
 * Sets the throttling rate (dequeues per second) for a single tenant.
 */
public void setRate(int tenantId, double rate) {
    final ReentrantLock lock = this.lock;
    lock.lock();
    try {
        queue(tenantId).setRate(rate);
    } finally {
        lock.unlock();
    }
}

@Override
public void update(ThrottlingPointSpecification spec) {
    // Pull out the tenant IDs for tenants that currently are rate limited
    HashSet<Integer> remainingTenants = new HashSet<Integer>(qs.keySet());

    // Update the limits as specified.
    for (int i = 0; i < spec.getTenantIDCount(); i++) {
        int tenantId = spec.getTenantID(i);
        setRate(tenantId, spec.getThrottlingRate(i));
        remainingTenants.remove(tenantId);
    }

    // If a tenant does not have a rate specified, then it is no longer rate
    // limited
    for (Integer tenantId : remainingTenants) {
        clearRate(tenantId);
    }
}

/**
 * Removes the throttling rate for a single tenant.
 */
public void clearRate(int tenantId) {
    final ReentrantLock lock = this.lock;
    lock.lock();
    try {
        queue(tenantId).clearRate();
    } finally {
        lock.unlock();
    }
}

@Override
public void clearRates() {
    final ReentrantLock lock = this.lock;
    lock.lock();
    try {
        for (TenantQueue q : qs.values()) {
            q.clearRate();
        }
    } finally {
        lock.unlock();
    }
}
}
/*
 * <copyright>
 * Copyright 2010 BBN Technologies
 * </copyright>
 */
package com.bbn.openmap.layer.vpf;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.bbn.openmap.io.FormatException;

/**
 * This parser class takes a string representing a logic statement and parses it
 * into objects that can be used for evaluating attributes of features. It's
 * based on expressions specified in the GeoSym Handbook MIL-HDBK-857A.
 * <P>
 * It can parse the expression given per the specification, such as
 * 42|1|bfc|1|0|1, or the strings specified in the symbol tables in the
 * MIL-DTL-89045A, like bfc=81 AND sta<>0and<>1and<>2and<>3and<>5and<>6and<>11.
 * <P>
 *
 * @author dietrick
 */
public class GeoSymAttExpression {

    private static Logger logger = Logger.getLogger("com.bbn.openmap.layer.vpf.GeoSymAttExpr");

    // Comparison-operator ids; each doubles as an index into the ops array
    // below, so the ordering must not change.
    public final static int NO_OP = 0;
    public final static int EQUALS_OP = 1;
    public final static int NOT_EQUALS_OP = 2;
    public final static int LESS_THAN_OP = 3;
    public final static int GREATER_THAN_OP = 4;
    public final static int LT_EQUALS_OP = 5;
    public final static int GT_EQUALS_OP = 6;

    // Logical-connector ids; upper case joins different attributes, lower case
    // joins multiple tests of the same attribute.
    public final static int NONE_CONN = 0;
    public final static int or_CONN = 1; // same attribute can be this or that
    public final static int AND_CONN = 2; // different attributes must all be
                                          // this and this
    public final static int and_CONN = 3; // same attribute must be this and that
    public final static int OR_CONN = 4; // one attribute can be this or a
                                         // different attribute can be that

    protected VPFAutoFeatureGraphicWarehouse warehouse;
    // Root of the parsed expression tree; null when parsing found nothing.
    protected Expression exp;

    /**
     * The NOOP goes first to preserve the index value of each operator, as
     * specifed in the GeoSym spec.
     */
    protected static String[] ops = new String[] { "", "=", "<>", "<", ">", "<=", ">=" };

    /**
     * Create the expression object given a text representation of it.
     *
     * @param source
     * @param warehouse used to resolve the ECDIS variables.
     */
    public GeoSymAttExpression(String source, VPFAutoFeatureGraphicWarehouse warehouse) {
        // Warehouse must be set first.
        this.warehouse = warehouse;
        exp = findExpression(source);

        if (logger.isLoggable(Level.FINER)) {
            logger.finer("Parsing: " + source);
            logger.finer(this.toString());
        }
    }

    /**
     * Finds the right-most top-level connector (upper-case AND/OR) in the
     * source string, or returns null when neither is present.
     */
    protected Connector findOp(String source) {
        int ANDIndex = source.lastIndexOf("AND");
        int ORIndex = source.lastIndexOf("OR");

        if (ANDIndex == ORIndex) {
            if (logger.isLoggable(Level.FINER)) {
                logger.finer("connector not found in " + source);
            }
            // both -1;
            return null;
        }

        // The right-most connector wins, so the expression tree is built from
        // the tail of the string backwards.
        if (ANDIndex > ORIndex) {
            if (logger.isLoggable(Level.FINER)) {
                logger.finer("found AND in " + source);
            }
            return new Connector(AND_CONN, ANDIndex);
        } else {
            if (logger.isLoggable(Level.FINER)) {
                logger.finer("found OR in " + source);
            }
            return new Connector(OR_CONN, ORIndex);
        }
    }

    public String toString() {
        if (exp != null) {
            return exp.toString();
        } else {
            return "No Expression Defined";
        }
    }

    /**
     * Finds the right-most lower-case connector (and/or), which joins several
     * tests of the same attribute; returns null when neither is present.
     */
    protected Connector findMiniOp(String source) {
        int ANDIndex = source.lastIndexOf("and");
        int ORIndex = source.lastIndexOf("or");

        if (ANDIndex == ORIndex) {
            if (logger.isLoggable(Level.FINER)) {
                logger.finer("connector not found in " + source);
            }
            // both -1;
            return null;
        }

        if (ANDIndex > ORIndex) {
            if (logger.isLoggable(Level.FINER)) {
                logger.finer("found and in " + source);
            }
            return new Connector(and_CONN, ANDIndex);
        } else {
            if (logger.isLoggable(Level.FINER)) {
                logger.finer("found or in " + source);
            }
            return new Connector(or_CONN, ORIndex);
        }
    }

    /**
     * Parses a single comparison (e.g. bfc=81) into a leaf Expression, picking
     * the comparison operator and deciding between value, ECDIS-variable and
     * string comparisons.
     */
    protected Expression findMathOp(String source) {
        int opIndex = 1, locIndex = -1;
        Expression exp = null;
        // Need to make sure that the finding one op doesn't obscure another,
        // i.e. finding = but missing <=.
while (opIndex < 7) {
            locIndex = source.indexOf(ops[opIndex]);
            if (locIndex >= 0) {
                // "=", "<" and ">" are substrings of "<=" / ">=", so only
                // accept them when the longer operators are absent.
                if (opIndex == 1 || opIndex == 3 || opIndex == 4) {
                    if (source.contains("<=") || source.contains(">=")) {
                        opIndex++;
                        continue;
                    } else {
                        break;
                    }
                } else {
                    break;
                }
            }
            opIndex++;
        }

        if (locIndex != -1) {
            // Check out right side. If string, then create CompareExpression. If
            // number, ValueExpression
            String rightSide = source.substring(locIndex + ops[opIndex].length());
            String leftSide = null;
            if (locIndex > 0) {
                leftSide = source.substring(0, locIndex);
            }

            if (logger.isLoggable(Level.FINER)) {
                logger.finer("got left side: " + leftSide + " op: " + ops[opIndex] + " and right side: " + rightSide);
            }

            /**
             * So here, We need to make a determination of whether the the left
             * side is a column name from the data, as specified in the FCI, or if
             * it's a ECDIS check. If the right side is a numerical value, we're
             * just looking to test attribute data against hard numbers. We're
             * going to push this decision into the ValueExpression, let it figure
             * out what it should do.
             */
            try {
                Double val = Double.parseDouble(rightSide);

                /**
                 * We need to check the length of this String to see if it's 4,
                 * which means it's an ECDIS variable, set by the user. On the left
                 * side it's just a straight number value comparison that will be
                 * provided for the right side.
                 */
                if (leftSide != null && leftSide.length() == 4) {
                    exp = new ECDISExpression(leftSide, val, opIndex, warehouse);
                } else {
                    exp = new ValueExpression(leftSide, val, opIndex);
                }
            } catch (NumberFormatException nfe) {
                /**
                 * This expression gets set up here for when a table value is
                 * compared against an ECDIS value.
                 *
                 * Turns out, there's never a need for the ColumnExpression because
                 * any time right side is text, it's actually referring to the value
                 * of the ECDIS External Attribute Name, which can be looked up and
                 * set as a variable.
                 *
                 * exp = new ColumnExpression(leftSide, rightSide, opIndex);
                 */
                // TODO Need to handle UNK and NULL!
                double val = warehouse.getExternalAttribute(rightSide);
                if (val < 0) {
                    // Not a known ECDIS variable:
                    // try to handle some string arguments
                    if (rightSide.equalsIgnoreCase("NULL")) {
                        exp = new StringExpression(leftSide, null, opIndex);
                    } else {
                        exp = new StringExpression(leftSide, rightSide, opIndex);
                    }
                } else {
                    exp = new ValueExpression(leftSide, val, opIndex);
                }
            }
        }

        return exp;
    }

    /**
     * Recursive parsing statement. Keys on Connectors (AND, OR) and builds
     * Expressions based on those. Then looks for mini connectors (and, or) and
     * builds on those. Of course, there might just be one expression here, one
     * that is separated by an operator.
     *
     * @param source
     * @return Expression tree
     */
    protected Expression findExpression(String source) {
        if (source != null && source.length() > 0) {
            source = source.trim();
            if (source.length() == 0) {
                return null;
            }

            String leftSide = source;
            String rightSide = null;

            // Split on the right-most top-level connector and recurse on both
            // halves.
            Connector op = findOp(leftSide);
            if (op != null) {
                rightSide = op.getRightSide(leftSide);
                leftSide = leftSide.substring(0, op.sourceLoc);

                Expression leftExpression = findExpression(leftSide);
                Expression rightExpression = findExpression(rightSide);

                if (leftExpression != null) {
                    op.addExpr(leftExpression);
                }
                if (rightExpression != null) {
                    op.addExpr(rightExpression);
                }
                return op;
            }

            // Look for mini ops
            op = findMiniOp(leftSide);
            if (op != null) {
                rightSide = op.getRightSide(leftSide);
                leftSide = leftSide.substring(0, op.sourceLoc);

                Expression leftExpression = findExpression(leftSide);
                Expression rightExpression = findExpression(rightSide);

                if (leftExpression != null) {
                    op.addExpr(leftExpression);
                }
                if (rightExpression != null) {
                    op.addExpr(rightExpression);
                }
                return op;
            }

            // OK, here we are with the base expressions...
            if (logger.isLoggable(Level.FINER)) {
                logger.finer("need to break up: " + source);
            }
            return findMathOp(source);
        }
        return null;
    }

    /**
     * Does the feature in row of fci pass the conditions of this expression.
     *
     * @param fci
     * @param row
     * @return true if row contents passes evaluation
     */
    public boolean evaluate(FeatureClassInfo fci, int row) {
        // A null/empty expression evaluates to true (no constraint).
        boolean ret = true;
        StringBuffer reasoning = null;
        if (logger.isLoggable(Level.FINE)) {
            reasoning = new StringBuffer();
        }

        if (exp != null) {
            ret = exp.evaluate(fci, row, reasoning);
        }

        if (reasoning != null) {
            reasoning.append("\n--------");
            logger.fine(reasoning.toString());
        }
        return ret;
    }

    /**
     * This one is used by the CoverageTable. Does the feature in row of fci pass
     * the conditions of this expression.
     *
     * @param fci
     * @param row
     * @return true if row passes evaluation
     */
    public boolean evaluate(FeatureClassInfo fci, List<Object> row) {
        boolean ret = true;
        StringBuffer reasoning = null;
        if (logger.isLoggable(Level.FINE)) {
            reasoning = new StringBuffer();
            logger.fine(toString());
        }

        if (exp != null) {
            ret = exp.evaluate(fci, row, reasoning);
        }

        if (reasoning != null) {
            reasoning.append("\n--------");
            logger.fine(reasoning.toString());
        }
        return ret;
    }

    /**
     * Connector class is the part of the expression that contains the logic
     * operation, AND, OR, and and or.
* * @author dietrick */ public static class Connector implements Expression { List<Expression> exp; int op; int sourceLoc; public Connector(int op, int sLoc) { this.op = op; this.sourceLoc = sLoc; } public void addExpr(Expression expr) { if (exp == null) { exp = new LinkedList<Expression>(); } if (expr != null) { exp.add(expr); updateColumnNamesIfNeeded(); } } protected void updateColumnNamesIfNeeded() { String colName = null; for (Expression e : exp) { if (e instanceof CompareExpression) { String cName = ((CompareExpression) e).colName; if (cName != null) { colName = cName; break; } } } if (colName != null) { for (Expression e : exp) { if (e instanceof CompareExpression) { if (((CompareExpression) e).colName == null) { ((CompareExpression) e).colName = colName; break; } } } } } public String getRightSide(String source) { switch (op) { case NONE_CONN: break; case and_CONN: case AND_CONN: return source.substring(sourceLoc + 3).trim(); case or_CONN: case OR_CONN: return source.substring(sourceLoc + 2).trim(); default: } return null; } public boolean evaluate(FeatureClassInfo fci, int row, StringBuffer reasoning) { boolean ret = false; switch (op) { case NONE_CONN: break; case or_CONN: break; case AND_CONN: ret = true; for (Expression e : exp) { ret = e.evaluate(fci, row, reasoning); if (!ret) { break; } } break; case and_CONN: break; case OR_CONN: for (Expression e : exp) { ret = ret || e.evaluate(fci, row, reasoning); if (ret) { break; } } break; default: } if (reasoning != null) { reasoning.append("\n-> " + toString() + ": evaluates " + ret); } return ret; } public boolean evaluate(FeatureClassInfo fci, List<Object> row, StringBuffer reasoning) { boolean ret = false; switch (op) { case NONE_CONN: break; case AND_CONN: case and_CONN: ret = true; for (Expression e : exp) { ret = e.evaluate(fci, row, reasoning); if (!ret) { break; } } break; case or_CONN: case OR_CONN: for (Expression e : exp) { ret = e.evaluate(fci, row, reasoning); if (ret) { break; } } break; 
default: } if (reasoning != null) { reasoning.append("\n-> " + toString() + ": evaluates " + ret); } return ret; } public String toString() { StringBuffer sb = new StringBuffer("Connector["); boolean addConn = false; String conn = " AND "; if (op == OR_CONN) { conn = " OR "; } for (Expression e : exp) { if (addConn) { sb.append(conn); } sb.append(e.toString()); addConn = true; } sb.append("]"); return sb.toString(); } } /** * The ECDISExpression checks the warehouse for user set values when * evaluating. * * @author dietrick */ public static class StringExpression extends CompareExpression { protected String val; public StringExpression(String colName, String val, int op) { super(colName, op); if (val == null) { val = ""; } this.val = val; } /** * */ public boolean evaluate(FeatureClassInfo fci, int row, StringBuffer reasoning) { // Pre-cache column index so we don't have to do lookup for each entry. if (colIndex == -1 || this.fci != fci) { setIndexes(fci); } List<Object> fcirow = new ArrayList<Object>(); try { if (fci.getRow(fcirow, row)) { if (colIndex < 0) { if (reasoning != null) { reasoning.append("\n col ").append(colName).append(" not found in FCI[").append(fci.columnNameString()).append("]"); } logger.info("col " + colName + " not found in FCI[" + fci.columnNameString() + "]"); return false; } String realVal = fcirow.get(colIndex).toString().trim(); return test(realVal, val, reasoning); } else { if (reasoning != null) { reasoning.append("\n Can't read row ").append(row); } } } catch (FormatException fe) { if (reasoning != null) { reasoning.append("\n FormatException reading row ").append(row); } } return false; } /** * For ECDISExpressions, none of the arguments matter. */ public boolean evaluate(FeatureClassInfo fci, List<Object> row, StringBuffer reasoning) { // Pre-cache column index so we don't have to do lookup for each entry. 
if (colIndex == -1 || this.fci != fci) { setIndexes(fci); } // The columns aren't found if (colIndex == -1) { logger.finer("col " + colName + " not found in FCI[" + fci.columnNameString() + "]"); return false; } Object realVal = row.get(colIndex); if (realVal == null) { realVal = ""; } return test(realVal.toString().trim(), val, reasoning); } /** * The basic test for the operator, returning val1 op val2. * * @param val1 NOT NULL * @param val2 NOT NULL * @param buf * @return true if operation passes */ protected boolean test(String val1, String val2, StringBuffer buf) { boolean ret = false; switch (op) { case 1: ret = val1.equals(val2); break; case 2: ret = !val1.equals(val2); break; } if (buf != null) { String operation = null; switch (op) { case 1: operation = (ret + "=" + val1 + "==" + val2); break; case 2: operation = (ret + "=" + val1 + "!=" + val2); break; } buf.append("\n " + toString() + ":" + operation); } return ret; } public String toString() { return "StringExpression[" + colName + " " + ops[op] + " " + val + "]"; } } /** * The ECDISExpression checks the warehouse for user set values when * evaluating. * * @author dietrick */ public static class ECDISExpression extends ValueExpression { VPFAutoFeatureGraphicWarehouse warehouse = null; public ECDISExpression(String colName, double val, int op, VPFAutoFeatureGraphicWarehouse warehouse) { super(colName, val, op); this.warehouse = warehouse; } /** * For ECDISExpressions, none of the arguments matter. */ public boolean evaluate(FeatureClassInfo fci, int row, StringBuffer reasoning) { return evaluate(reasoning); } /** * For ECDISExpressions, none of the arguments matter. 
*/ public boolean evaluate(FeatureClassInfo fci, List<Object> row, StringBuffer reasoning) { return evaluate(reasoning); } public boolean evaluate(StringBuffer reasoning) { double realVal = warehouse.getExternalAttribute(colName); return test(realVal, val, reasoning); } public String toString() { return "ECDISExpression[" + colName + " " + ops[op] + " " + val + "]"; } } /** * The ValueExpression is a comparison of a FCI value to a numerical value. * * @author dietrick */ public static class ValueExpression extends CompareExpression { double val; public ValueExpression(String colName, double val, int op) { super(colName, op); this.val = val; } public boolean evaluate(FeatureClassInfo fci, int row, StringBuffer reasoning) { // Pre-cache column index so we don't have to do lookup for each entry. if (colIndex == -1 || this.fci != fci) { setIndexes(fci); } List<Object> fcirow = new ArrayList<Object>(); try { if (fci.getRow(fcirow, row)) { if (colIndex < 0) { if (reasoning != null) { reasoning.append("\n col ").append(colName).append(" not found in FCI[").append(fci.columnNameString()).append("]"); } return false; } Double realVal = Double.parseDouble(fcirow.get(colIndex).toString()); return test(realVal, val, reasoning); } else { if (reasoning != null) { reasoning.append("\n Can't read row ").append(row); } } } catch (FormatException fe) { if (reasoning != null) { reasoning.append("\n FormatException reading row ").append(row); } } catch (NumberFormatException nfe) { if (reasoning != null) { reasoning.append("\n NumberFormatException reading ").append(fcirow.get(colIndex)); } } return false; } public boolean evaluate(FeatureClassInfo fci, List<Object> row, StringBuffer reasoning) { // Pre-cache column index so we don't have to do lookup for each entry. 
if (colIndex == -1 || this.fci != fci) { setIndexes(fci); } try { if (colIndex < 0) { if (reasoning != null) { reasoning.append("\n col ").append(colName).append(" not found in FCI[").append(fci.columnNameString()).append("]"); } return false; } Double realVal = Double.parseDouble(row.get(colIndex).toString()); return test(realVal, val, reasoning); } catch (NumberFormatException nfe) { if (reasoning != null) { reasoning.append("\n NumberFormatException reading ").append(row.get(colIndex)); } } return false; } public String toString() { return "ValueExpression[" + colName + " " + ops[op] + " " + val + "]"; } } /** * A ColumnExpression is the comparison of an FCI column value against * another column value. * * @author dietrick */ public static class ColumnExpression extends CompareExpression implements Expression { protected String otherColName; protected int otherColIndex = -1; public ColumnExpression(String colName, String otherName, int op) { super(colName, op); this.otherColName = otherName; } protected void setIndexes(FeatureClassInfo fci) { this.fci = fci; int columnCount = fci.getColumnCount(); colIndex = -1; otherColIndex = -1; for (int column = 0; column < columnCount; column++) { if (fci.getColumnName(column).equalsIgnoreCase(colName)) { colIndex = column; } if (fci.getColumnName(column).equalsIgnoreCase(otherColName)) { otherColIndex = column; } } } public boolean evaluate(FeatureClassInfo fci, int row, StringBuffer reasoning) { // Pre-cache column index so we don't have to do lookup for each entry. 
if (colIndex == -1 || otherColIndex == -1 || this.fci != fci) { setIndexes(fci); } // The columns aren't found if (colIndex == -1 || otherColIndex == -1) { logger.finer("col " + colName + " or " + otherColName + " not found in FCI[" + fci.columnNameString() + "]"); return false; } List<Object> fcirow = new ArrayList<Object>(); try { if (fci.getRow(fcirow, row)) { Double realVal1 = Double.parseDouble(fcirow.get(colIndex).toString()); Double realVal2 = Double.parseDouble(fcirow.get(otherColIndex).toString()); return test(realVal1, realVal2, reasoning); } } catch (FormatException fe) { } catch (NumberFormatException nfe) { } return false; } public boolean evaluate(FeatureClassInfo fci, List<Object> row, StringBuffer reasoning) { // Pre-cache column index so we don't have to do lookup for each entry. if (colIndex == -1 || otherColIndex == -1 || this.fci != fci) { setIndexes(fci); } // The columns aren't found if (colIndex == -1 || otherColIndex == -1) { logger.finer("col " + colName + " or " + otherColName + " not found in FCI[" + fci.columnNameString() + "]"); return false; } try { Double realVal1 = Double.parseDouble(row.get(colIndex).toString()); Double realVal2 = Double.parseDouble(row.get(otherColIndex).toString()); return test(realVal1, realVal2, reasoning); } catch (NumberFormatException nfe) { } return false; } public String toString() { return "ValueExpression[" + colName + " " + ops[op] + " " + otherColName + "]"; } } public static abstract class CompareExpression implements Expression { protected int op; protected FeatureClassInfo fci = null; protected String colName; protected int colIndex = -1; protected CompareExpression(String colName, int op) { this.colName = colName; this.op = op; } protected void setIndexes(FeatureClassInfo fci) { this.fci = fci; colIndex = -1; int columnCount = fci.getColumnCount(); for (int column = 0; column < columnCount; column++) { if (fci.getColumnName(column).equalsIgnoreCase(colName)) { colIndex = column; break; } } } /** * 
The basic test for the operator, returning val1 op val2. * * @param val1 * @param val2 * @param buf * @return true of operation passes. */ protected boolean test(double val1, double val2, StringBuffer buf) { boolean ret = false; switch (op) { case 1: ret = val1 == val2; break; case 2: ret = val1 != val2; break; case 3: ret = val1 < val2; break; case 4: ret = val1 > val2; break; case 5: ret = val1 <= val2; break; case 6: ret = val1 >= val2; } if (buf != null) { String operation = null; switch (op) { case 1: operation = (ret + "=" + val1 + "==" + val2); break; case 2: operation = (ret + "=" + val1 + "!=" + val2); break; case 3: operation = (ret + "=" + val1 + "<" + val2); break; case 4: operation = (ret + "=" + val1 + ">" + val2); break; case 5: operation = (ret + "=" + val1 + "<=" + val2); break; case 6: operation = (ret + "=" + val1 + ">=" + val2); } buf.append("\n " + toString() + ":" + operation); } return ret; } } /** * The Expression interface allows for the recursive queries of Connectors * and Value/CompareExpressions. * * @author dietrick */ public interface Expression { public boolean evaluate(FeatureClassInfo fci, int row, StringBuffer reasoning); public boolean evaluate(FeatureClassInfo fci, List<Object> row, StringBuffer reasoning); } public static void main(String[] args) { new GeoSymAttExpression("mac=2 AND idsm=0 AND hdp>=msscand<ssdc AND isdm=0", new VPFAutoFeatureGraphicWarehouse()); } }
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.identity.mgt.impl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.wso2.carbon.caching.CarbonCachingService; import org.wso2.carbon.identity.mgt.AuthenticationContext; import org.wso2.carbon.identity.mgt.Group; import org.wso2.carbon.identity.mgt.IdentityStore; import org.wso2.carbon.identity.mgt.User; import org.wso2.carbon.identity.mgt.bean.GroupBean; import org.wso2.carbon.identity.mgt.bean.UserBean; import org.wso2.carbon.identity.mgt.claim.Claim; import org.wso2.carbon.identity.mgt.claim.MetaClaim; import org.wso2.carbon.identity.mgt.exception.AuthenticationFailure; import org.wso2.carbon.identity.mgt.exception.GroupNotFoundException; import org.wso2.carbon.identity.mgt.exception.IdentityStoreException; import org.wso2.carbon.identity.mgt.exception.UserNotFoundException; import org.wso2.carbon.identity.mgt.impl.config.CacheConfig; import org.wso2.carbon.identity.mgt.impl.internal.IdentityMgtDataHolder; import org.wso2.carbon.identity.mgt.impl.util.CacheHelper; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.cache.Cache; import javax.cache.CacheManager; import javax.security.auth.callback.Callback; import static org.wso2.carbon.kernel.utils.StringUtils.isNullOrEmpty; /** * Virtual identity store with the caching. 
* * @since 1.0.0 */ public class CacheBackedIdentityStore implements IdentityStore { private static Logger log = LoggerFactory.getLogger(CacheBackedIdentityStore.class); private static final String UNIQUE_USER_CACHE = "uniqueUserCache"; private static final String UNIQUE_GROUP_CACHE = "uniqueGroupCache"; private IdentityStore identityStore; private Map<String, Boolean> cacheStatus = new HashMap<>(); private CacheManager cacheManager; public CacheBackedIdentityStore(Map<String, CacheConfig> cacheConfigs, List<Domain> domains) throws IdentityStoreException { CarbonCachingService carbonCachingService = IdentityMgtDataHolder.getInstance().getCarbonCachingService(); identityStore = new IdentityStoreImpl(domains); cacheManager = carbonCachingService.getCachingProvider().getCacheManager(); // Initialize all caches. if (CacheHelper.isCacheEnabled(cacheConfigs, UNIQUE_USER_CACHE)) { CacheHelper.createCache(UNIQUE_USER_CACHE, String.class, User.class, CacheHelper.MEDIUM_EXPIRE_TIME, cacheConfigs, cacheManager); cacheStatus.put(UNIQUE_USER_CACHE, true); } else { cacheStatus.put(UNIQUE_USER_CACHE, false); } if (log.isDebugEnabled()) { log.debug("Cache backed identity store successfully initialized."); } } @Override public User getUser(String uniqueUserId) throws IdentityStoreException, UserNotFoundException { if (cacheStatus.get(UNIQUE_USER_CACHE) && isNullOrEmpty(uniqueUserId)) { return doGetUser(uniqueUserId, identityStore.getPrimaryDomainName()); } User user = identityStore.getUser(uniqueUserId); user.setIdentityStore(this); return user; } @Override public User getUser(Claim claim) throws IdentityStoreException, UserNotFoundException { return identityStore.getUser(claim); } @Override public User getUser(Claim claim, String domainName) throws IdentityStoreException, UserNotFoundException { return identityStore.getUser(claim); } @Override public List<User> listUsers(int offset, int length) throws IdentityStoreException { return identityStore.listUsers(offset, length); } 
@Override public List<User> listUsers(int offset, int length, String domainName) throws IdentityStoreException { return identityStore.listUsers(offset, length, domainName); } @Override public List<User> listUsers(Claim claim, int offset, int length) throws IdentityStoreException { return identityStore.listUsers(claim, offset, length); } @Override public List<User> listUsers(Claim claim, int offset, int length, String domainName) throws IdentityStoreException { return identityStore.listUsers(claim, offset, length, domainName); } @Override public List<User> listUsers(MetaClaim metaClaim, String filterPattern, int offset, int length) throws IdentityStoreException { return identityStore.listUsers(metaClaim, filterPattern, offset, length); } @Override public List<User> listUsers(MetaClaim metaClaim, String filterPattern, int offset, int length, String domainName) throws IdentityStoreException { return identityStore.listUsers(metaClaim, filterPattern, offset, length, domainName); } @Override public Group getGroup(String uniqueGroupId) throws IdentityStoreException, GroupNotFoundException { if (cacheStatus.get(UNIQUE_GROUP_CACHE) && isNullOrEmpty(uniqueGroupId)) { return doGetGroup(uniqueGroupId, identityStore.getPrimaryDomainName()); } Group group = identityStore.getGroup(uniqueGroupId); group.setIdentityStore(this); return group; } @Override public Group getGroup(Claim claim) throws IdentityStoreException, GroupNotFoundException { return identityStore.getGroup(claim); } @Override public Group getGroup(Claim claim, String domainName) throws IdentityStoreException, GroupNotFoundException { return identityStore.getGroup(claim, domainName); } @Override public List<Group> listGroups(int offset, int length) throws IdentityStoreException { return identityStore.listGroups(offset, length); } @Override public List<Group> listGroups(int offset, int length, String domainName) throws IdentityStoreException { return identityStore.listGroups(offset, length, domainName); } @Override 
public List<Group> listGroups(Claim claim, int offset, int length) throws IdentityStoreException { return identityStore.listGroups(claim, offset, length); } @Override public List<Group> listGroups(Claim claim, int offset, int length, String domainName) throws IdentityStoreException { return identityStore.listGroups(claim, offset, length, domainName); } @Override public List<Group> listGroups(MetaClaim metaClaim, String filterPattern, int offset, int length) throws IdentityStoreException { return identityStore.listGroups(metaClaim, filterPattern, offset, length); } @Override public List<Group> listGroups(MetaClaim metaClaim, String filterPattern, int offset, int length, String domainName) throws IdentityStoreException { return identityStore.listGroups(metaClaim, filterPattern, offset, length, domainName); } @Override public List<Group> getGroupsOfUser(String uniqueUserId) throws IdentityStoreException, UserNotFoundException { return identityStore.getGroupsOfUser(uniqueUserId); } @Override public List<User> getUsersOfGroup(String uniqueGroupId) throws IdentityStoreException, GroupNotFoundException { return identityStore.getUsersOfGroup(uniqueGroupId); } @Override public boolean isUserInGroup(String uniqueUserId, String uniqueGroupId) throws IdentityStoreException, UserNotFoundException, GroupNotFoundException { return identityStore.isUserInGroup(uniqueUserId, uniqueGroupId); } @Override public List<Claim> getClaimsOfUser(String uniqueUserId) throws IdentityStoreException, UserNotFoundException { return identityStore.getClaimsOfUser(uniqueUserId); } @Override public List<Claim> getClaimsOfUser(String uniqueUserId, List<MetaClaim> metaClaims) throws IdentityStoreException, UserNotFoundException { return identityStore.getClaimsOfUser(uniqueUserId, metaClaims); } @Override public List<Claim> getClaimsOfGroup(String uniqueGroupId) throws IdentityStoreException, GroupNotFoundException { return identityStore.getClaimsOfGroup(uniqueGroupId); } @Override public List<Claim> 
getClaimsOfGroup(String uniqueGroupId, List<MetaClaim> metaClaims) throws IdentityStoreException, GroupNotFoundException { return identityStore.getClaimsOfGroup(uniqueGroupId, metaClaims); } @Override public User addUser(UserBean userBean) throws IdentityStoreException { return identityStore.addUser(userBean); } @Override public User addUser(UserBean userBean, String domainName) throws IdentityStoreException { return identityStore.addUser(userBean, domainName); } @Override public List<User> addUsers(List<UserBean> userBeen) throws IdentityStoreException { return identityStore.addUsers(userBeen); } @Override public List<User> addUsers(List<UserBean> userBeen, String domainName) throws IdentityStoreException { return identityStore.addUsers(userBeen, domainName); } @Override public void updateUserClaims(String uniqueUserId, List<Claim> claims) throws IdentityStoreException, UserNotFoundException { identityStore.updateUserClaims(uniqueUserId, claims); } @Override public void updateUserClaims(String uniqueUserId, List<Claim> claimsToAdd, List<Claim> claimsToRemove) throws IdentityStoreException, UserNotFoundException { identityStore.updateUserClaims(uniqueUserId, claimsToAdd, claimsToRemove); } @Override public void updateUserCredentials(String uniqueUserId, List<Callback> credentials) throws IdentityStoreException, UserNotFoundException { identityStore.updateUserCredentials(uniqueUserId, credentials); } @Override public void updateUserCredentials(String uniqueUserId, List<Callback> credentialsToAdd, List<Callback> credentialsToRemove) throws IdentityStoreException, UserNotFoundException { identityStore.updateUserCredentials(uniqueUserId, credentialsToAdd, credentialsToRemove); } @Override public void deleteUser(String uniqueUserId) throws IdentityStoreException, UserNotFoundException { identityStore.deleteUser(uniqueUserId); doDeleteUser(uniqueUserId, identityStore.getPrimaryDomainName()); } @Override public void updateGroupsOfUser(String uniqueUserId, List<String> 
uniqueGroupIds) throws IdentityStoreException { identityStore.updateGroupsOfUser(uniqueUserId, uniqueGroupIds); } @Override public void updateGroupsOfUser(String uniqueUserId, List<String> uniqueGroupIdsToAdd, List<String> uniqueGroupIdsToRemove) throws IdentityStoreException { identityStore.updateGroupsOfUser(uniqueUserId, uniqueGroupIdsToAdd, uniqueGroupIdsToRemove); } @Override public Group addGroup(GroupBean groupBean) throws IdentityStoreException { return identityStore.addGroup(groupBean); } @Override public Group addGroup(GroupBean groupBean, String domainName) throws IdentityStoreException { return identityStore.addGroup(groupBean, domainName); } @Override public List<Group> addGroups(List<GroupBean> groupBeen) throws IdentityStoreException { return identityStore.addGroups(groupBeen); } @Override public List<Group> addGroups(List<GroupBean> groupBeen, String domainName) throws IdentityStoreException { return identityStore.addGroups(groupBeen, domainName); } @Override public void updateGroupClaims(String uniqueGroupId, List<Claim> claims) throws IdentityStoreException, GroupNotFoundException { identityStore.updateGroupClaims(uniqueGroupId, claims); } @Override public void updateGroupClaims(String uniqueGroupId, List<Claim> claimsToAdd, List<Claim> claimsToRemove) throws IdentityStoreException, GroupNotFoundException { identityStore.updateGroupClaims(uniqueGroupId, claimsToAdd, claimsToRemove); } @Override public void deleteGroup(String uniqueGroupId) throws IdentityStoreException, GroupNotFoundException { identityStore.deleteGroup(uniqueGroupId); doDeleteGroup(uniqueGroupId, identityStore.getPrimaryDomainName()); } @Override public void updateUsersOfGroup(String uniqueGroupId, List<String> uniqueUserIds) throws IdentityStoreException { identityStore.updateUsersOfGroup(uniqueGroupId, uniqueUserIds); } @Override public void updateUsersOfGroup(String uniqueGroupId, List<String> uniqueUserIdsToAdd, List<String> uniqueUserIdsToRemove) throws 
IdentityStoreException { identityStore.updateUsersOfGroup(uniqueGroupId, uniqueUserIdsToAdd, uniqueUserIdsToRemove); } @Override public AuthenticationContext authenticate(Claim claim, Callback[] credentials, String domainName) throws AuthenticationFailure, IdentityStoreException { return identityStore.authenticate(claim, credentials, domainName); } @Override public String getPrimaryDomainName() throws IdentityStoreException { return identityStore.getPrimaryDomainName(); } @Override public Set<String> getDomainNames() throws IdentityStoreException { return identityStore.getDomainNames(); } private User doGetUser(String uniqueUserId, String domainName) throws IdentityStoreException, UserNotFoundException { Cache<String, User> userCache = cacheManager.getCache(UNIQUE_USER_CACHE, String.class, User.class); User user = userCache.get(uniqueUserId.hashCode() + ":" + domainName.hashCode()); if (user == null) { user = identityStore.getUser(uniqueUserId); userCache.put(user.getUniqueUserId().hashCode() + ":" + user.getDomainName().hashCode(), user); user.setIdentityStore(this); return user; } user.setIdentityStore(this); return user; } private Group doGetGroup(String uniqueGroupId, String domainName) throws IdentityStoreException, GroupNotFoundException { Cache<String, Group> groupCache = cacheManager.getCache(UNIQUE_GROUP_CACHE, String.class, Group.class); Group group = groupCache.get(uniqueGroupId.hashCode() + ":" + domainName.hashCode()); if (group == null) { group = identityStore.getGroup(uniqueGroupId); groupCache.put(group.getUniqueGroupId().hashCode() + ":" + group.getDomainName().hashCode(), group); group.setIdentityStore(this); return group; } group.setIdentityStore(this); return group; } private void doDeleteUser(String uniqueUserId, String domainName) { Cache<String, User> userCache = cacheManager.getCache(UNIQUE_USER_CACHE, String.class, User.class); userCache.remove(uniqueUserId.hashCode() + ":" + domainName.hashCode()); } private void doDeleteGroup(String 
uniqueGroupId, String domainName) { Cache<String, Group> groupCache = cacheManager.getCache(UNIQUE_GROUP_CACHE, String.class, Group.class); groupCache.remove(uniqueGroupId.hashCode() + ":" + domainName.hashCode()); } }
package org.drools.rule; /* * Copyright 2005 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.io.Externalizable; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.IOException; import org.drools.RuntimeDroolsException; import org.drools.common.InternalFactHandle; import org.drools.common.InternalWorkingMemory; import org.drools.reteoo.LeftTuple; import org.drools.spi.CompiledInvoker; import org.drools.spi.InternalReadAccessor; import org.drools.spi.PredicateExpression; import org.drools.spi.Restriction; import org.drools.spi.Wireable; /** * A predicate can be written as a top level constraint or be nested * inside inside a field constraint (and as so, must implement the * Restriction interface). 
 *
 * @author etirelli
 */
public class PredicateConstraint extends MutableTypeConstraint
    implements
    Restriction,
    Wireable,
    Externalizable {

    /** Serialization version marker. */
    private static final long serialVersionUID = 400L;

    /** The predicate to evaluate; may be wired in later via wire(Object). */
    private PredicateExpression expression;

    // requiredDeclarations is always previousDeclarations followed by
    // localDeclarations (see the main constructor below).
    private Declaration[] requiredDeclarations;

    private Declaration[] previousDeclarations;

    private Declaration[] localDeclarations;

    private String[] requiredGlobals;

    // Clones of this constraint that must be re-wired whenever this one is.
    private List<PredicateConstraint> cloned = Collections.<PredicateConstraint> emptyList();

    private static final Declaration[] EMPTY_DECLARATIONS = new Declaration[0];
    private static final String[] EMPTY_GLOBALS = new String[0];

    public PredicateConstraint() {
        this( null );
    }

    public PredicateConstraint(final PredicateExpression evaluator) {
        this( evaluator,
              null,
              null,
              null );
    }

    public PredicateConstraint(final Declaration[] previousDeclarations,
                               final Declaration[] localDeclarations) {
        this( null,
              previousDeclarations,
              localDeclarations,
              null );
    }

    /**
     * Main constructor: null arrays are normalized to shared empty arrays,
     * and requiredDeclarations is built as previous + local.
     */
    public PredicateConstraint(final PredicateExpression expression,
                               final Declaration[] previousDeclarations,
                               final Declaration[] localDeclarations,
                               final String[] requiredGlobals) {
        this.expression = expression;

        if ( previousDeclarations == null ) {
            this.previousDeclarations = PredicateConstraint.EMPTY_DECLARATIONS;
        } else {
            this.previousDeclarations = previousDeclarations;
        }

        if ( localDeclarations == null ) {
            this.localDeclarations = PredicateConstraint.EMPTY_DECLARATIONS;
        } else {
            this.localDeclarations = localDeclarations;
        }

        if ( requiredGlobals == null ) {
            this.requiredGlobals = PredicateConstraint.EMPTY_GLOBALS;
        } else {
            this.requiredGlobals = requiredGlobals;
        }

        this.requiredDeclarations = new Declaration[this.previousDeclarations.length + this.localDeclarations.length];
        System.arraycopy( this.previousDeclarations,
                          0,
                          this.requiredDeclarations,
                          0,
                          this.previousDeclarations.length );
        System.arraycopy( this.localDeclarations,
                          0,
                          this.requiredDeclarations,
                          this.previousDeclarations.length,
                          this.localDeclarations.length );
    }

    public void readExternal(ObjectInput in) throws IOException,
                                            ClassNotFoundException {
        super.readExternal( in );
        this.expression = (PredicateExpression) in.readObject();
        this.requiredDeclarations = (Declaration[]) in.readObject();
        this.previousDeclarations = (Declaration[]) in.readObject();
        this.localDeclarations = (Declaration[]) in.readObject();
        this.requiredGlobals = (String[]) in.readObject();
        // Unchecked cast: the stream is expected to contain the list written
        // by writeExternal below.
        this.cloned = (List<PredicateConstraint>) in.readObject();
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        super.writeExternal( out );
        // Compiled invokers are runtime artifacts and are re-wired on load,
        // so serialize null in their place.
        if ( this.expression instanceof CompiledInvoker ) {
            out.writeObject( null );
        } else {
            out.writeObject( this.expression );
        }
        out.writeObject( this.requiredDeclarations );
        out.writeObject( this.previousDeclarations );
        out.writeObject( this.localDeclarations );
        out.writeObject( this.requiredGlobals );
        out.writeObject( this.cloned );
    }

    public Declaration[] getRequiredDeclarations() {
        return this.requiredDeclarations;
    }

    public Declaration[] getPreviousDeclarations() {
        return this.previousDeclarations;
    }

    public Declaration[] getLocalDeclarations() {
        return this.localDeclarations;
    }

    /**
     * Replace every occurrence of oldDecl in all three declaration arrays
     * with newDecl.
     */
    public void replaceDeclaration(Declaration oldDecl,
                                   Declaration newDecl) {
        for ( int i = 0; i < this.requiredDeclarations.length; i++ ) {
            if ( this.requiredDeclarations[i].equals( oldDecl ) ) {
                this.requiredDeclarations[i] = newDecl;
            }
        }
        for ( int i = 0; i < this.previousDeclarations.length; i++ ) {
            if ( this.previousDeclarations[i].equals( oldDecl ) ) {
                this.previousDeclarations[i] = newDecl;
            }
        }
        for ( int i = 0; i < this.localDeclarations.length; i++ ) {
            if ( this.localDeclarations[i].equals( oldDecl ) ) {
                this.localDeclarations[i] = newDecl;
            }
        }
    }

    /**
     * Wire the compiled predicate into this constraint and every clone.
     */
    public void wire(Object object) {
        setPredicateExpression( (PredicateExpression) object );
        for ( PredicateConstraint clone : this.cloned ) {
            clone.wire( object );
        }
    }

    public void setPredicateExpression(final PredicateExpression expression) {
        this.expression = expression;
    }

    public PredicateExpression
getPredicateExpression() { return this.expression; } public String toString() { return "[PredicateConstraint previousDeclarations=" + this.previousDeclarations + " localDeclarations=" + this.localDeclarations + "]"; } public int hashCode() { return this.expression.hashCode(); } public boolean equals(final Object object) { if ( object == this ) { return true; } if ( object == null || object.getClass() != PredicateConstraint.class ) { return false; } final PredicateConstraint other = (PredicateConstraint) object; if ( this.previousDeclarations.length != other.previousDeclarations.length ) { return false; } if ( this.localDeclarations.length != other.localDeclarations.length ) { return false; } if ( this.requiredGlobals.length != other.requiredGlobals.length ) { return false; } for ( int i = 0, length = this.previousDeclarations.length; i < length; i++ ) { if ( this.previousDeclarations[i].getPattern().getOffset() != other.previousDeclarations[i].getPattern().getOffset() ) { return false; } if ( !this.previousDeclarations[i].getExtractor().equals( other.previousDeclarations[i].getExtractor() ) ) { return false; } } for ( int i = 0, length = this.localDeclarations.length; i < length; i++ ) { if ( this.localDeclarations[i].getPattern().getOffset() != other.localDeclarations[i].getPattern().getOffset() ) { return false; } if ( !this.localDeclarations[i].getExtractor().equals( other.localDeclarations[i].getExtractor() ) ) { return false; } } if ( !Arrays.equals( this.requiredGlobals, other.requiredGlobals ) ) { return false; } return this.expression.equals( other.expression ); } public ContextEntry createContextEntry() { PredicateContextEntry ctx = new PredicateContextEntry(); ctx.dialectContext = this.expression.createContext(); return ctx; } public boolean isAllowed(final InternalFactHandle handle, final InternalWorkingMemory workingMemory, final ContextEntry ctx) { try { return this.expression.evaluate( handle.getObject(), null, this.previousDeclarations, 
this.localDeclarations, workingMemory, ((PredicateContextEntry) ctx).dialectContext ); } catch ( final Exception e ) { throw new RuntimeDroolsException( "Exception executing predicate " + this.expression, e ); } } public boolean isAllowed(InternalReadAccessor extractor, InternalFactHandle handle, InternalWorkingMemory workingMemory, ContextEntry context) { throw new UnsupportedOperationException( "Method not supported. Please contact development team." ); } public boolean isAllowedCachedLeft(final ContextEntry context, final InternalFactHandle handle) { try { final PredicateContextEntry ctx = (PredicateContextEntry) context; return this.expression.evaluate( handle.getObject(), ctx.leftTuple, this.previousDeclarations, this.localDeclarations, ctx.workingMemory, ctx.dialectContext ); } catch ( final Exception e ) { throw new RuntimeDroolsException( "Exception executing predicate " + this.expression, e ); } } public boolean isAllowedCachedRight(final LeftTuple tuple, final ContextEntry context) { try { final PredicateContextEntry ctx = (PredicateContextEntry) context; return this.expression.evaluate( ctx.rightObject, tuple, this.previousDeclarations, this.localDeclarations, ctx.workingMemory, ctx.dialectContext ); } catch ( final Exception e ) { throw new RuntimeDroolsException( "Exception executing predicate " + this.expression, e ); } } public Object clone() { Declaration[] previous = new Declaration[this.previousDeclarations.length]; for ( int i = 0; i < previous.length; i++ ) { previous[i] = (Declaration) this.previousDeclarations[i].clone(); } Declaration[] local = new Declaration[this.localDeclarations.length]; for ( int i = 0; i < local.length; i++ ) { local[i] = (Declaration) this.localDeclarations[i].clone(); } PredicateConstraint clone = new PredicateConstraint( this.expression, previous, local, this.requiredGlobals ); if ( this.cloned == Collections.EMPTY_LIST ) { this.cloned = new ArrayList<PredicateConstraint>( 1 ); } this.cloned.add( clone ); return 
clone; } public static class PredicateContextEntry implements ContextEntry { private static final long serialVersionUID = 400L; public LeftTuple leftTuple; public Object rightObject; public InternalWorkingMemory workingMemory; public Object dialectContext; private ContextEntry entry; public PredicateContextEntry() { } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { leftTuple = (LeftTuple) in.readObject(); rightObject = in.readObject(); workingMemory = (InternalWorkingMemory) in.readObject(); dialectContext = in.readObject(); entry = (ContextEntry) in.readObject(); } public void writeExternal(ObjectOutput out) throws IOException { out.writeObject( leftTuple ); out.writeObject( rightObject ); out.writeObject( workingMemory ); out.writeObject( dialectContext ); out.writeObject( entry ); } public ContextEntry getNext() { return this.entry; } public void setNext(final ContextEntry entry) { this.entry = entry; } public void updateFromFactHandle(final InternalWorkingMemory workingMemory, final InternalFactHandle handle) { this.workingMemory = workingMemory; this.rightObject = handle.getObject(); } public void updateFromTuple(final InternalWorkingMemory workingMemory, final LeftTuple tuple) { this.workingMemory = workingMemory; this.leftTuple = tuple; } public void resetTuple() { this.leftTuple = null; } public void resetFactHandle() { this.rightObject = null; } } }
package biz.paluch.logging.gelf.log4j; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.MDC; import org.apache.log4j.NDC; import org.junit.Test; import biz.paluch.logging.gelf.GelfTestSender; import biz.paluch.logging.gelf.GelfUtil; import biz.paluch.logging.gelf.LogMessageField; import biz.paluch.logging.gelf.MdcGelfMessageAssembler; import biz.paluch.logging.gelf.intern.GelfMessage; /** * @author <a href="mailto:mpaluch@paluch.biz">Mark Paluch</a> * @since 27.09.13 08:16 */ public abstract class AbstractGelfLogAppenderTest { public static final String LOG_MESSAGE = "foo bar test log message"; public static final String EXPECTED_LOG_MESSAGE = LOG_MESSAGE; @Test public void testSimpleInfo() throws Exception { Logger logger = Logger.getLogger(getClass()); NDC.clear(); NDC.push("ndc message"); logger.info(LOG_MESSAGE); NDC.clear(); assertEquals(1, GelfTestSender.getMessages().size()); GelfMessage gelfMessage = GelfTestSender.getMessages().get(0); assertEquals(EXPECTED_LOG_MESSAGE, gelfMessage.getFullMessage()); assertEquals(EXPECTED_LOG_MESSAGE, gelfMessage.getShortMessage()); assertEquals("ndc message", gelfMessage.getField("NDC")); assertNotNull(gelfMessage.getField("MyTime")); assertEquals("6", gelfMessage.getLevel()); assertEquals(8192, gelfMessage.getMaximumMessageSize()); // this is because of default-logstash-fields.properties assertEquals("INFO", gelfMessage.getAdditonalFields().get("MySeverity")); } @Test public void testSimpleWarn() throws Exception { Logger logger = Logger.getLogger(getClass()); String expectedMessage = "foo bar test log message"; logger.warn(expectedMessage); GelfMessage gelfMessage = GelfTestSender.getMessages().get(0); assertEquals("4", gelfMessage.getLevel()); 
} @Test public void testSimpleError() throws Exception { Logger logger = Logger.getLogger(getClass()); String expectedMessage = "foo bar test log message"; logger.error(expectedMessage); GelfMessage gelfMessage = GelfTestSender.getMessages().get(0); assertEquals("3", gelfMessage.getLevel()); } @Test public void testSimpleFatal() throws Exception { Logger logger = Logger.getLogger(getClass()); String expectedMessage = "foo bar test log message"; logger.fatal(expectedMessage); GelfMessage gelfMessage = GelfTestSender.getMessages().get(0); assertEquals("2", gelfMessage.getLevel()); } @Test public void testSimpleDebug() throws Exception { Logger logger = Logger.getLogger(getClass()); logger.setLevel(Level.ALL); String expectedMessage = "foo bar test log message"; logger.debug(expectedMessage); GelfMessage gelfMessage = GelfTestSender.getMessages().get(0); assertEquals("7", gelfMessage.getLevel()); } @Test public void testSimpleTrace() throws Exception { Logger logger = Logger.getLogger(getClass()); logger.setLevel(Level.ALL); String expectedMessage = "foo bar test log message"; logger.trace(expectedMessage); GelfMessage gelfMessage = GelfTestSender.getMessages().get(0); assertEquals("7", gelfMessage.getLevel()); } @Test public void testException() throws Exception { Logger logger = Logger.getLogger(getClass()); logger.info(LOG_MESSAGE, new Exception("this is an exception")); assertEquals(1, GelfTestSender.getMessages().size()); GelfMessage gelfMessage = GelfTestSender.getMessages().get(0); assertEquals(LOG_MESSAGE, gelfMessage.getFullMessage()); assertEquals(AbstractGelfLogAppenderTest.class.getName(), gelfMessage.getField(LogMessageField.NamedLogField.SourceClassName.getFieldName())); assertEquals("testException", gelfMessage.getField(LogMessageField.NamedLogField.SourceMethodName.getFieldName())); assertThat(gelfMessage.getField(MdcGelfMessageAssembler.FIELD_STACK_TRACE), containsString("this is an exception")); 
assertThat(gelfMessage.getField(MdcGelfMessageAssembler.FIELD_STACK_TRACE), containsString("skipped")); assertThat(gelfMessage.getField(MdcGelfMessageAssembler.FIELD_STACK_TRACE), containsString("skipped")); } @Test public void testFields() throws Exception { Logger logger = Logger.getLogger(getClass()); MDC.put("mdcField1", "a value"); MDC.remove(GelfUtil.MDC_REQUEST_START_MS); logger.info(LOG_MESSAGE); assertEquals(1, GelfTestSender.getMessages().size()); GelfMessage gelfMessage = GelfTestSender.getMessages().get(0); assertEquals("fieldValue1", gelfMessage.getField("fieldName1")); assertEquals("fieldValue2", gelfMessage.getField("fieldName2")); assertEquals("a value", gelfMessage.getField("mdcField1")); assertNull(gelfMessage.getField("mdcField2")); assertNull(gelfMessage.getField(GelfUtil.MDC_REQUEST_DURATION)); assertNull(gelfMessage.getField(GelfUtil.MDC_REQUEST_END)); } @Test public void testProfiling() throws Exception { Logger logger = Logger.getLogger(getClass()); MDC.put(GelfUtil.MDC_REQUEST_START_MS, "" + System.currentTimeMillis()); logger.info(LOG_MESSAGE); assertEquals(1, GelfTestSender.getMessages().size()); GelfMessage gelfMessage = GelfTestSender.getMessages().get(0); assertNotNull(gelfMessage.getField(GelfUtil.MDC_REQUEST_DURATION)); assertNotNull(gelfMessage.getField(GelfUtil.MDC_REQUEST_END)); } @Test public void testLongProfiling() throws Exception { Logger logger = Logger.getLogger(getClass()); MDC.put(GelfUtil.MDC_REQUEST_START_MS, "" + (System.currentTimeMillis() - 2000)); logger.info(LOG_MESSAGE); assertEquals(1, GelfTestSender.getMessages().size()); GelfMessage gelfMessage = GelfTestSender.getMessages().get(0); assertNotNull(gelfMessage.getField(GelfUtil.MDC_REQUEST_DURATION)); assertNotNull(gelfMessage.getField(GelfUtil.MDC_REQUEST_END)); } @Test public void testProfilingWrongStart() throws Exception { Logger logger = Logger.getLogger(getClass()); MDC.put(GelfUtil.MDC_REQUEST_START_MS, ""); logger.info(LOG_MESSAGE); assertEquals(1, 
GelfTestSender.getMessages().size()); GelfMessage gelfMessage = GelfTestSender.getMessages().get(0); assertNull(gelfMessage.getField(GelfUtil.MDC_REQUEST_DURATION)); assertNull(gelfMessage.getField(GelfUtil.MDC_REQUEST_END)); } }
/*
Copyright 2014-2016 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package apple.intents;

import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;

/**
 * NOTE(review): machine-generated Multi-OS Engine (NatJ) binding for the
 * Objective-C class {@code INRelativeReferenceResolutionResult} of the Intents
 * framework. Every {@code native} method below maps one-to-one onto the ObjC
 * selector named in its {@code @Selector} annotation. Do not edit by hand;
 * regenerate with the MOE binding generator instead.
 */
@Generated
@Library("Intents")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class INRelativeReferenceResolutionResult extends INIntentResolutionResult {
    static {
        // registers this binding class with the NatJ runtime on first load
        NatJ.register();
    }

    @Generated
    protected INRelativeReferenceResolutionResult(Pointer peer) {
        super(peer);
    }

    // ---- NSObject class-level machinery inherited from the ObjC runtime ----

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native INRelativeReferenceResolutionResult alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native INRelativeReferenceResolutionResult allocWithZone(VoidPtr zone);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("confirmationRequiredWithValueToConfirm:")
    public static native INRelativeReferenceResolutionResult confirmationRequiredWithValueToConfirm(
            @NInt long valueToConfirm);

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    // ---- resolution-result factory methods ----

    @Generated
    @Selector("needsValue")
    public static native INRelativeReferenceResolutionResult needsValue();

    @Generated
    @Owned
    @Selector("new")
    public static native INRelativeReferenceResolutionResult new_objc();

    @Generated
    @Selector("notRequired")
    public static native INRelativeReferenceResolutionResult notRequired();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("successWithResolvedValue:")
    public static native INRelativeReferenceResolutionResult successWithResolvedValue(@NInt long resolvedValue);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("unsupported")
    public static native INRelativeReferenceResolutionResult unsupported();

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();

    @Generated
    @Selector("init")
    public native INRelativeReferenceResolutionResult init();

    /**
     * This resolution result is to ask Siri to confirm if this is the value with which the user wants to continue.
     */
    @Generated
    @Selector("confirmationRequiredWithRelativeReferenceToConfirm:")
    public static native INRelativeReferenceResolutionResult confirmationRequiredWithRelativeReferenceToConfirm(
            @NInt long relativeReferenceToConfirm);

    /**
     * This resolution result is for when the app extension wants to tell Siri to proceed, with a given
     * INRelativeReference. The resolvedValue can be different than the original INRelativeReference.
     * This allows app extensions to apply business logic constraints.
     * Use +notRequired to continue with a 'nil' value.
     */
    @Generated
    @Selector("successWithResolvedRelativeReference:")
    public static native INRelativeReferenceResolutionResult successWithResolvedRelativeReference(
            @NInt long resolvedRelativeReference);

    @Generated
    @Selector("confirmationRequiredWithItemToConfirm:forReason:")
    public static native INRelativeReferenceResolutionResult confirmationRequiredWithItemToConfirmForReason(
            @Mapped(ObjCObjectMapper.class) Object itemToConfirm, @NInt long reason);

    @Generated
    @Selector("unsupportedWithReason:")
    public static native INRelativeReferenceResolutionResult unsupportedWithReason(@NInt long reason);
}
package cgeo.geocaching.maps;

import cgeo.geocaching.CachePopup;
import cgeo.geocaching.storage.DataStore;
import cgeo.geocaching.models.Geocache;
import cgeo.geocaching.models.IWaypoint;
import cgeo.geocaching.R;
import cgeo.geocaching.WaypointPopup;
import cgeo.geocaching.activity.Progress;
import cgeo.geocaching.connector.gc.GCMap;
import cgeo.geocaching.enumerations.CacheType;
import cgeo.geocaching.enumerations.LoadFlags;
import cgeo.geocaching.location.Geopoint;
import cgeo.geocaching.maps.interfaces.CachesOverlayItemImpl;
import cgeo.geocaching.maps.interfaces.GeoPointImpl;
import cgeo.geocaching.maps.interfaces.ItemizedOverlayImpl;
import cgeo.geocaching.maps.interfaces.MapItemFactory;
import cgeo.geocaching.maps.interfaces.MapProjectionImpl;
import cgeo.geocaching.maps.interfaces.MapProvider;
import cgeo.geocaching.maps.interfaces.MapViewImpl;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.utils.Log;

import org.apache.commons.lang3.StringUtils;
import org.eclipse.jdt.annotation.NonNull;

import android.content.Context;
import android.content.res.Resources.NotFoundException;
import android.graphics.Canvas;
import android.graphics.DashPathEffect;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.PaintFlagsDrawFilter;
import android.graphics.Point;
import android.location.Location;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Map overlay holding the cache/waypoint markers and, optionally, the 161 m
 * distance circles around them. Item updates and drawing are serialized via the
 * overlay implementation's lock.
 */
public class CachesOverlay extends AbstractItemizedOverlay {

    private List<CachesOverlayItemImpl> items = new ArrayList<>();
    private Context context = null;
    // whether the 161 m distance circles are currently drawn
    private boolean displayCircles = false;
    private final Progress progress = new Progress();
    // lazily created drawing objects, see lazyInitializeDrawingObjects()
    private Paint blockedCircle = null;
    private PaintFlagsDrawFilter setFilter = null;
    private PaintFlagsDrawFilter removeFilter = null;
    private MapItemFactory mapItemFactory = null;

    public CachesOverlay(final ItemizedOverlayImpl ovlImpl, final Context contextIn) {
        super(ovlImpl);
        populate();

        context = contextIn;

        final MapProvider mapProvider = Settings.getMapProvider();
        mapItemFactory = mapProvider.getMapItemFactory();
    }

    /** Convenience overload replacing the overlay content with a single item. */
    void updateItems(final CachesOverlayItemImpl item) {
        final List<CachesOverlayItemImpl> itemsPre = new ArrayList<>();
        itemsPre.add(item);
        updateItems(itemsPre);
    }

    /** Replaces the overlay content; a null list is ignored. */
    void updateItems(final List<CachesOverlayItemImpl> itemsPre) {
        if (itemsPre == null) {
            return;
        }

        for (final CachesOverlayItemImpl item : itemsPre) {
            item.setMarker(boundCenterBottom(item.getMarker(0)));
        }

        // ensure no interference between the draw and content changing routines
        getOverlayImpl().lock();
        try {
            items = new ArrayList<>(itemsPre);

            setLastFocusedItemIndex(-1); // to reset tap during data change
            populate();
        } finally {
            getOverlayImpl().unlock();
        }
    }

    boolean getCircles() {
        return displayCircles;
    }

    /** Toggles drawing of the distance circles. */
    void switchCircles() {
        displayCircles = !displayCircles;
    }

    @Override
    public void draw(final Canvas canvas, final MapViewImpl mapView, final boolean shadow) {

        drawInternal(canvas, mapView.getMapProjection());

        super.draw(canvas, mapView, false);
    }

    @Override
    public void drawOverlayBitmap(final Canvas canvas, final Point drawPosition,
            final MapProjectionImpl projection, final byte drawZoomLevel) {

        drawInternal(canvas, projection);

        super.drawOverlayBitmap(canvas, drawPosition, projection, drawZoomLevel);
    }

    /** Draws the distance circles for all visible items that apply the distance rule. */
    private void drawInternal(final Canvas canvas, final MapProjectionImpl projection) {
        if (!displayCircles || items.isEmpty()) {
            return;
        }

        // prevent content changes
        getOverlayImpl().lock();
        try {
            lazyInitializeDrawingObjects();
            canvas.setDrawFilter(setFilter);
            final int height = canvas.getHeight();
            final int width = canvas.getWidth();
            final int radius = calculateDrawingRadius(projection);
            final Point center = new Point();

            for (final CachesOverlayItemImpl item : items) {
                if (item.applyDistanceRule()) {
                    final Geopoint itemCoord = item.getCoord().getCoords();
                    final GeoPointImpl itemGeo = mapItemFactory.getGeoPointBase(itemCoord);
                    projection.toPixels(itemGeo, center);
                    // only draw circles that intersect the visible canvas
                    if (center.x > -radius && center.y > -radius && center.x < width + radius && center.y < height + radius) {

                        // dashed circle around the waypoint
                        blockedCircle.setColor(0x66BB0000);
                        blockedCircle.setStyle(Style.STROKE);
                        canvas.drawCircle(center.x, center.y, radius, blockedCircle);

                        // filling the circle area with a transparent color
                        blockedCircle.setColor(0x44BB0000);
                        blockedCircle.setStyle(Style.FILL);
                        canvas.drawCircle(center.x, center.y, radius, blockedCircle);
                    }
                }
            }
            canvas.setDrawFilter(removeFilter);
        } finally {
            getOverlayImpl().unlock();
        }
    }

    /**
     * calculate the radius of the circle to be drawn for the first item only. Those circles are only 161 meters in
     * reality and therefore the minor changes due to the projection will not make any visible difference at the zoom
     * levels which are used to see the circles.
     *
     */
    private int calculateDrawingRadius(final MapProjectionImpl projection) {
        final float[] distanceArray = new float[1];
        final Geopoint itemCoord = items.get(0).getCoord().getCoords();

        // meters per degree of longitude at the item's latitude
        Location.distanceBetween(itemCoord.getLatitude(), itemCoord.getLongitude(),
                itemCoord.getLatitude(), itemCoord.getLongitude() + 1, distanceArray);
        final float longitudeLineDistance = distanceArray[0];

        final GeoPointImpl itemGeo = mapItemFactory.getGeoPointBase(itemCoord);

        // a point 161 m west of the item, converted to pixels, gives the radius
        final Geopoint leftCoords = new Geopoint(itemCoord.getLatitude(),
                itemCoord.getLongitude() - 161 / longitudeLineDistance);
        final GeoPointImpl leftGeo = mapItemFactory.getGeoPointBase(leftCoords);

        final Point center = new Point();
        projection.toPixels(itemGeo, center);

        final Point left = new Point();
        projection.toPixels(leftGeo, left);

        return center.x - left.x;
    }

    private void lazyInitializeDrawingObjects() {
        if (blockedCircle == null) {
            blockedCircle = new Paint();
            blockedCircle.setAntiAlias(true);
            blockedCircle.setStrokeWidth(2.0f);
            blockedCircle.setARGB(127, 0, 0, 0);
            blockedCircle.setPathEffect(new DashPathEffect(new float[] { 3, 2 }, 0));
        }

        if (setFilter == null) {
            setFilter = new PaintFlagsDrawFilter(0, Paint.FILTER_BITMAP_FLAG);
        }

        if (removeFilter == null) {
            removeFilter = new PaintFlagsDrawFilter(Paint.FILTER_BITMAP_FLAG, 0);
        }
    }

    /**
     * Handles a tap on the item at {@code index}: opens the cache or waypoint popup,
     * showing a progress dialog while details are loaded.
     *
     * @return true when the tap was consumed
     */
    @Override
    public boolean onTap(final int index) {

        try {
            if (items.size() <= index) {
                return false;
            }

            progress.show(context, context.getResources().getString(R.string.map_live),
                    context.getResources().getString(R.string.cache_dialog_loading_details), true, null);

            // prevent concurrent changes
            getOverlayImpl().lock();
            CachesOverlayItemImpl item = null;
            try {
                if (index < items.size()) {
                    item = items.get(index);
                }
            } finally {
                getOverlayImpl().unlock();
            }

            if (item == null) {
                // FIX: dismiss the progress dialog shown above; it previously
                // stayed on screen forever when the item vanished concurrently
                progress.dismiss();
                return false;
            }

            final IWaypoint coordinate = item.getCoord();
            final String coordType = coordinate.getCoordType();

            if (StringUtils.equalsIgnoreCase(coordType, "cache") && StringUtils.isNotBlank(coordinate.getGeocode())) {
                final Geocache cache = DataStore.loadCache(coordinate.getGeocode(), LoadFlags.LOAD_CACHE_OR_DB);
                if (cache != null) {
                    final RequestDetailsThread requestDetailsThread = new RequestDetailsThread(cache);
                    if (!requestDetailsThread.requestRequired()) {
                        // don't show popup if we have enough details
                        progress.dismiss();
                    }
                    requestDetailsThread.start();
                    return true;
                }
                progress.dismiss();
                return false;
            }

            if (StringUtils.equalsIgnoreCase(coordType, "waypoint") && coordinate.getId() >= 0) {
                CGeoMap.markCacheAsDirty(coordinate.getGeocode());
                WaypointPopup.startActivity(context, coordinate.getId(), coordinate.getGeocode());
            } else {
                progress.dismiss();
                return false;
            }

            progress.dismiss();
        } catch (final NotFoundException e) {
            Log.e("CachesOverlay.onTap", e);
            progress.dismiss();
        }

        return true;
    }

    @Override
    public CachesOverlayItemImpl createItem(final int index) {
        try {
            return items.get(index);
        } catch (final Exception e) {
            Log.e("CachesOverlay.createItem", e);
        }

        return null;
    }

    @Override
    public int size() {
        try {
            return items.size();
        } catch (final Exception e) {
            Log.e("CachesOverlay.size", e);
        }

        return 0;
    }

    /**
     * Loads full cache details in the background (if required), then opens the
     * cache popup and dismisses the progress dialog.
     */
    private class RequestDetailsThread extends Thread {

        private final @NonNull Geocache cache;

        public RequestDetailsThread(final @NonNull Geocache cache) {
            this.cache = cache;
        }

        /** True when the cached data lacks details worth fetching. */
        public boolean requestRequired() {
            return CacheType.UNKNOWN == cache.getType() || cache.getDifficulty() == 0;
        }

        @Override
        public void run() {
            if (requestRequired()) {
                /* final SearchResult search = */GCMap.searchByGeocodes(Collections.singleton(cache.getGeocode()));
            }
            CGeoMap.markCacheAsDirty(cache.getGeocode());
            CachePopup.startActivity(context, cache.getGeocode());
            progress.dismiss();
        }
    }
}
package com.gfk.athena.domain; import com.fasterxml.jackson.annotation.JsonIgnore; import org.hibernate.validator.constraints.Email; import javax.persistence.*; import org.hibernate.annotations.Type; import javax.validation.constraints.NotNull; import javax.validation.constraints.Pattern; import javax.validation.constraints.Size; import java.io.Serializable; import java.util.HashSet; import java.util.Set; import org.joda.time.DateTime; /** * A user. */ @Entity @Table(name = "JHI_USER") public class User extends AbstractAuditingEntity implements Serializable { @Id @GeneratedValue(strategy = GenerationType.AUTO) private Long id; @NotNull @Pattern(regexp = "^[a-z0-9]*$") @Size(min = 1, max = 50) @Column(length = 50, unique = true, nullable = false) private String login; @JsonIgnore @NotNull @Size(min = 60, max = 60) @Column(length = 60) private String password; @Size(max = 50) @Column(name = "first_name", length = 50) private String firstName; @Size(max = 50) @Column(name = "last_name", length = 50) private String lastName; @Email @Size(max = 100) @Column(length = 100, unique = true) private String email; @Column(nullable = false) private boolean activated = true; @Size(min = 2, max = 5) @Column(name = "lang_key", length = 5) private String langKey; @Size(max = 20) @Column(name = "activation_key", length = 20) @JsonIgnore private String activationKey; @Size(max = 20) @Column(name = "reset_key", length = 20) private String resetKey; @Type(type = "org.jadira.usertype.dateandtime.joda.PersistentDateTime") @Column(name = "reset_date", nullable = true) private DateTime resetDate = null; @JsonIgnore @ManyToMany @JoinTable( name = "JHI_USER_AUTHORITY", joinColumns = {@JoinColumn(name = "user_id", referencedColumnName = "id")}, inverseJoinColumns = {@JoinColumn(name = "authority_name", referencedColumnName = "name")}) private Set<Authority> authorities = new HashSet<>(); public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getLogin() { 
return login; } public void setLogin(String login) { this.login = login; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getFirstName() { return firstName; } public void setFirstName(String firstName) { this.firstName = firstName; } public String getLastName() { return lastName; } public void setLastName(String lastName) { this.lastName = lastName; } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public boolean getActivated() { return activated; } public void setActivated(boolean activated) { this.activated = activated; } public String getActivationKey() { return activationKey; } public void setActivationKey(String activationKey) { this.activationKey = activationKey; } public String getResetKey() { return resetKey; } public void setResetKey(String resetKey) { this.resetKey = resetKey; } public DateTime getResetDate() { return resetDate; } public void setResetDate(DateTime resetDate) { this.resetDate = resetDate; } public String getLangKey() { return langKey; } public void setLangKey(String langKey) { this.langKey = langKey; } public Set<Authority> getAuthorities() { return authorities; } public void setAuthorities(Set<Authority> authorities) { this.authorities = authorities; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } User user = (User) o; if (!login.equals(user.login)) { return false; } return true; } @Override public int hashCode() { return login.hashCode(); } @Override public String toString() { return "User{" + "login='" + login + '\'' + ", password='" + password + '\'' + ", firstName='" + firstName + '\'' + ", lastName='" + lastName + '\'' + ", email='" + email + '\'' + ", activated='" + activated + '\'' + ", langKey='" + langKey + '\'' + ", activationKey='" + activationKey + '\'' + "}"; } }
/*L * Copyright SAIC * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/stats-analysis/LICENSE.txt for details. */ package gov.nih.nci.codegen.core.transformer; import gov.nih.nci.codegen.core.BaseArtifact; import gov.nih.nci.codegen.core.ConfigurationException; import gov.nih.nci.codegen.core.XMLConfigurable; import gov.nih.nci.codegen.core.filter.UML13ClassifierFilter; import gov.nih.nci.codegen.core.filter.UML13ModelElementFilter; import gov.nih.nci.codegen.core.util.UML13Utils; import gov.nih.nci.codegen.core.util.XMLUtils; import gov.nih.nci.codegen.framework.FilteringException; import gov.nih.nci.codegen.framework.TransformationException; import gov.nih.nci.codegen.framework.Transformer; import gov.nih.nci.common.util.Constant; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.StringTokenizer; import java.util.Vector; import javax.jmi.reflect.RefObject; import org.apache.log4j.Logger; import org.omg.uml.foundation.core.Classifier; import org.omg.uml.modelmanagement.Model; import org.omg.uml.modelmanagement.UmlPackage; /** * Produces an XML file that contains object-relational mapping configuration * information for use by the OJB tool ( <a href="http://db.apache.org/ojb/" * target="_blank">http://db.apache.org/ojb/ </a>). In particular, it produces * class-descriptor elements from a set classes defined in a UML 1.3 model. * <p> * In order to use this transformer, the supplied UML model must contain certain * information, in the form of tagged values and stereotypes. This section * describes the control file configuration and how it relates to the code. It * does not describe how the UML model must be annotated (see the User's Guide * for that). 
* <p> * The content model for this transformer's configuration element is as follows: * <p> * <code> * <pre> * * * * &lt;!ELEMENT transformer (param, filter)&gt; * &lt;!ATTLIST transformer * name CDATA #REQUIRED * className CDATA #FIXED gov.nih.nci.codegen.core.transformer.OJBRepTransformer&gt; * &lt;!ELEMENT param EMPTY&gt; * &lt;!ATTLIST param * name CDATA #FIXED packageName * value CDATA #REQUIRED&gt; * &lt;!ELEMENT filter ... see {@link gov.nih.nci.codegen.core.filter.UML13ClassifierFilter#configure(org.w3c.dom.Element)} ... * * * * </pre> * </code> * <p> * As you can see, this transformer expects a nested filter element. The reason * is that this transformer produces a single Artifact (an XML file) from a * collection of model elements. * <p> * UML13OJBRepTransformer expects to be passed an instance of * org.omg.uml.modelmanagement.Model. It uses UML13ModelElementFilter to obtain * all model elements in the model. Then it use UML13Classifier to obtain the * classifiers selected by the contents of the nested filter element. Then it * iterates through these classifiers, building the class-descriptor elements. * <p> * A Collection containing a single Artifact is returned by this transformer's * execute method. The name attribute of the Artifact is set to "ojb_repository" * and its source attribute is set to the String that represents the XML * document. 
* <p> * * @author caBIO Team * @version 1.0 */ public class UML13WSDDTransformer implements Transformer , XMLConfigurable { private static Logger log = Logger.getLogger(UML13CommonRoleUtilTransformer.class); private UML13ClassifierFilter _classifierFilt; private String _pkgName, _omPkg, _svcName; String cache = ""; /** * */ public UML13WSDDTransformer() { super(); } /** * @see gov.nih.nci.codegen.framework.Transformer#execute(javax.jmi.reflect.RefObject, * java.util.Collection) */ public Collection execute(RefObject modelElement, Collection artifacts) throws TransformationException { if (modelElement == null) { log.error("model element is null"); throw new TransformationException("model element is null"); } if (!(modelElement instanceof Model)) { log.error("model element not instance of Model"); throw new TransformationException( "model element not instance of Model"); } ArrayList newArtifacts = new ArrayList(); UML13ModelElementFilter meFilt = new UML13ModelElementFilter(); ArrayList umlExtentCol = new ArrayList(); umlExtentCol.add(modelElement.refOutermostPackage()); Collection classifiers = null; try { classifiers = _classifierFilt.execute(meFilt.execute(umlExtentCol)); } catch (FilteringException ex) { log.error("couldn't filter model elements " + ex.getMessage()); throw new TransformationException("couldn't filter model elements", ex); } String methodList = generateConfig(classifiers); newArtifacts.add(new BaseArtifact("ehcache", modelElement, methodList )); return newArtifacts; } /** * @param classifiers * @return */ private String generateConfig(Collection classifiers) { cache = cache + "<deployment xmlns=\"http://xml.apache.org/axis/wsdd/\""; cache = cache + "\n"; cache = cache + " xmlns:java=\"http://xml.apache.org/axis/wsdd/providers/java\">"; cache = cache + "\n"; cache = cache + "<handler name=\"CSMSOAPHandler\" type=\"java:gov.nih.nci.system.webservice.CSMSOAPHandler\"/>"; cache = cache + "\n"; cache = cache + "<service name=\"" + _svcName + "\" 
style=\"java:RPC\" use=\"literal\">"; cache = cache + "\n"; cache = cache + " <requestFlow>"; cache = cache + "\n"; cache = cache + " <handler name=\"CSMSOAPHandler\" type=\"java:gov.nih.nci.system.webservice.CSMSOAPHandler\"/>"; cache = cache + "\n"; cache = cache + " </requestFlow>"; cache = cache + "\n"; cache = cache + " <parameter name=\"className\" value=\"gov.nih.nci.system.webservice.WSQuery\"/>"; cache = cache + "\n"; cache = cache + " <parameter name=\"allowedMethods\" value=\"*\"/>"; cache = cache + "\n"; cache = cache + " <parameter name=\"extraClasses\""; cache = cache + "\n"; cache = cache + " value=\""; String nn1 = new String(); for (Iterator i = classifiers.iterator(); i.hasNext();) { Classifier klass = (Classifier) i.next(); UmlPackage classPkg = null; if (_pkgName != null) { classPkg = UML13Utils.getPackage(UML13Utils.getModel(klass),_pkgName); } else { classPkg = UML13Utils.getModel(klass); } String name = UML13Utils.getNamespaceName(classPkg, klass); nn1 = name; nn1 = nn1 + ".ws."; nn1 = nn1 + klass.getName(); nn1 = nn1 + Constant.COMMA; nn1 = nn1 + name; //UML13Utils.getNamespaceName(classPkg, klass); nn1 = nn1 + ".ws."; nn1 = nn1 + klass.getName(); nn1 = nn1 + "Impl"; if (i.hasNext()) { nn1 += Constant.COMMA; } //fill in fullyqualified object name cache = cache + nn1; //cache = cache + "\n"; } /*StringBuffer tmpStringBuffer = new StringBuffer(nn1); //System.out.println("String before: " + tmpStringBuffer.toString() + "\n"); int length = tmpStringBuffer.lastIndexOf(","); String finalString = tmpStringBuffer.substring(0,length-1); //System.out.println("String before: " + finalString + "\n"); */ cache = cache + "\"/>" + "\n"; for (Iterator i = classifiers.iterator(); i.hasNext();) { Classifier klass = (Classifier) i.next(); UmlPackage classPkg = null; if (_pkgName != null) { classPkg = UML13Utils.getPackage(UML13Utils.getModel(klass),_pkgName); } else { classPkg = UML13Utils.getModel(klass); } String tmp1_1 = "<beanMapping xmlns:myNS=\"urn:ws."; 
tmp1_1 = tmp1_1 + reversePackageName(UML13Utils.getNamespaceName(classPkg, klass)).replaceAll(":impl.",":ws."); tmp1_1 = tmp1_1 + "\" "; tmp1_1 = tmp1_1 + " qname=\"myNS:"; tmp1_1 = tmp1_1 + klass.getName(); tmp1_1 = tmp1_1 + "\" "; tmp1_1 = tmp1_1 + "languageSpecificType=\"java:"; String nn2_1 = UML13Utils.getNamespaceName(classPkg, klass); nn2_1 = nn2_1 + ".ws."; nn2_1 = nn2_1 + klass.getName(); tmp1_1 = tmp1_1 + nn2_1; tmp1_1 = tmp1_1 + "\" />"; tmp1_1 = tmp1_1 + "\n"; cache = cache + tmp1_1; String tmp1 = "<beanMapping xmlns:myNS=\"urn:ws."; tmp1 = tmp1 + reversePackageName(UML13Utils.getNamespaceName(classPkg, klass)).replaceAll(":impl.",":ws."); tmp1 = tmp1 + "\" "; //tmp1= tmp1 + "\n"; tmp1 = tmp1 + " qname=\"myNS:"; tmp1 = tmp1 + klass.getName(); tmp1 = tmp1 + "Impl"; tmp1 = tmp1 + "\" "; //tmp1= tmp1 + "\n"; tmp1 = tmp1 + "languageSpecificType=\"java:"; String nn2 = UML13Utils.getNamespaceName(classPkg, klass); nn2 = nn2 + ".ws."; nn2 = nn2 + klass.getName(); nn2 = nn2 + "Impl"; tmp1 = tmp1 + nn2; tmp1 = tmp1 + "\" />"; tmp1= tmp1 + "\n"; cache = cache + tmp1; } cache = cache + "</service>"; cache = cache + "\n"; cache = cache + "</deployment>"; cache = cache + "\n"; String setUp = ""; setUp = setUp + cache;; //System.out.println("The wsdd: " + setUp + "\n"); return setUp; } /** * @see gov.nih.nci.codegen.core.JDOMConfigurable#configure(org.jdom.Element) */ public void configure(org.w3c.dom.Element config) throws ConfigurationException { org.w3c.dom.Element filterEl = XMLUtils.getChild(config, "filter"); if (filterEl == null) { log.error("no child filter element found"); throw new ConfigurationException("no child filter element found"); } String className = filterEl.getAttribute("className"); if (className == null) { log.error("no filter class name specified"); throw new ConfigurationException("no filter class name specified"); } _pkgName = getParameter(config, "basePackage"); log.debug("basePackage: " + _pkgName); _svcName = getParameter(config, 
"webserviceName"); try { _classifierFilt = (UML13ClassifierFilter) Class.forName(className) .newInstance(); } catch (Exception ex) { log.error("Couldn't instantiate " + className); throw new ConfigurationException("Couldn't instantiate " + className); } _classifierFilt.configure(filterEl); } public String capFirst(String s){ return s.substring(0, 1).toUpperCase() + s.substring(1); } public String reversePackageName(String s) { StringTokenizer st = new StringTokenizer(s,"."); Vector myVector = new Vector(); StringBuffer myStringBuffer = new StringBuffer(); while (st.hasMoreTokens()) { String t = st.nextToken(); myVector.add(t); } for (int i = myVector.size(); i>0; i--) { myStringBuffer.append(myVector.elementAt(i-1)); myStringBuffer.append(Constant.DOT); } int length1 = myStringBuffer.length(); String finalString1 = myStringBuffer.substring(0,length1-1); return finalString1; } private String getParameter(org.w3c.dom.Element config, String paramName) { String param = null; List params = XMLUtils.getChildren(config, "param"); for (Iterator i = params.iterator(); i.hasNext();) { org.w3c.dom.Element paramEl = (org.w3c.dom.Element) i.next(); if (paramName.equals(paramEl.getAttribute("name"))) { param = paramEl.getAttribute("value"); break; } } return param; } }
/*
 * Copyright 2015-2020 OpenCB
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.opencb.opencga.analysis.variant.knockout;

import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.RegexFileFilter;
import org.apache.commons.lang3.StringUtils;
import org.opencb.biodata.models.clinical.interpretation.DiseasePanel;
import org.opencb.biodata.models.core.Gene;
import org.opencb.biodata.models.core.Transcript;
import org.opencb.cellbase.core.api.GeneDBAdaptor;
import org.opencb.cellbase.core.variant.annotation.VariantAnnotationUtils;
import org.opencb.commons.datastore.core.Query;
import org.opencb.commons.datastore.core.QueryOptions;
import org.opencb.opencga.analysis.tools.OpenCgaToolScopeStudy;
import org.opencb.opencga.analysis.variant.knockout.result.KnockoutByGene;
import org.opencb.opencga.analysis.variant.knockout.result.KnockoutBySample;
import org.opencb.opencga.catalog.db.api.IndividualDBAdaptor;
import org.opencb.opencga.core.api.ParamConstants;
import org.opencb.opencga.core.common.JacksonUtils;
import org.opencb.opencga.core.models.common.Enums;
import org.opencb.opencga.core.models.family.Family;
import org.opencb.opencga.core.models.individual.Individual;
import org.opencb.opencga.core.models.panel.Panel;
import org.opencb.opencga.core.models.sample.Sample;
import org.opencb.opencga.core.models.variant.KnockoutAnalysisParams;
import org.opencb.opencga.core.tools.annotations.Tool;
import org.opencb.opencga.storage.core.metadata.models.Trio;
import org.opencb.opencga.storage.core.utils.CellBaseUtils;
import org.opencb.opencga.storage.core.variant.query.VariantQueryUtils;

import java.io.File;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;

/**
 * Variant analysis tool that computes, per sample, the list of knocked-out
 * genes. Runs as a multi-step pipeline: resolve the target gene lists, resolve
 * family trios for the selected samples, execute the knockout computation, and
 * finally enrich the produced JSON files with catalog metadata.
 */
@Tool(id= KnockoutAnalysis.ID, description = KnockoutAnalysis.DESCRIPTION, resource = Enums.Resource.VARIANT)
public class KnockoutAnalysis extends OpenCgaToolScopeStudy {
    public static final String ID = "knockout";
    public static final String DESCRIPTION = "Obtains the list of knocked out genes for each sample.";

    // Tool parameters, populated from the generic params map in check().
    private KnockoutAnalysisParams analysisParams = new KnockoutAnalysisParams();
    // Fully qualified study id resolved in check().
    private String studyFqn;

    @Override
    protected List<String> getSteps() {
        return Arrays.asList(
                "list-genes",
                "list-families",
                getId(),
                "add-metadata-to-output-files");
    }

    /**
     * Validates and normalizes the analysis parameters:
     * defaults the sample list to every indexed sample, the biotype to
     * protein_coding (when neither genes nor panels were given), and the
     * consequence type to loss-of-function.
     */
    @Override
    protected void check() throws Exception {
        analysisParams.updateParams(params);
        studyFqn = getStudyFqn();

        executorParams.put("executionMethod", params.getString("executionMethod", "auto"));

        // "ALL" (or an empty list) means: run over every indexed sample of the study.
        if (CollectionUtils.isEmpty(analysisParams.getSample())
                || analysisParams.getSample().size() == 1 && analysisParams.getSample().get(0).equals(ParamConstants.ALL)) {
            analysisParams.setSample(new ArrayList<>(getVariantStorageManager().getIndexedSamples(studyFqn, getToken())));
        }

        if (StringUtils.isEmpty(analysisParams.getBiotype())) {
            if (CollectionUtils.isEmpty(analysisParams.getGene()) && CollectionUtils.isEmpty(analysisParams.getPanel())) {
                analysisParams.setBiotype(VariantAnnotationUtils.PROTEIN_CODING);
            }
        }
        if (StringUtils.isEmpty(analysisParams.getConsequenceType())) {
            analysisParams.setConsequenceType(VariantQueryUtils.LOF);
        }

        super.check();
    }

    @Override
    protected void run() throws Exception {
        // Target genes, split by whether they have a protein_coding transcript.
        List<String> proteinCodingGenes = new LinkedList<>();
        List<String> otherGenes = new LinkedList<>();

        // Step 1: resolve the gene lists from biotype, explicit genes and/or panels.
        step("list-genes", () -> {
            CellBaseUtils cellBaseUtils = getVariantStorageManager().getCellBaseUtils(studyFqn, token);
            QueryOptions queryOptions = new QueryOptions(QueryOptions.INCLUDE, "id,name,chromosome,start,end,biotype,transcripts.biotype");
            boolean allProteinCoding = false;
            if (CollectionUtils.isEmpty(analysisParams.getGene()) && CollectionUtils.isEmpty(analysisParams.getPanel())) {
                // No genes or panel given.
                // Get genes by biotype
                List<String> biotypes = new ArrayList<>(Arrays.asList(analysisParams.getBiotype().split(",")));
                if (biotypes.contains(VariantAnnotationUtils.PROTEIN_CODING)) {
                    // protein_coding means "all genes": marked with the ALL wildcard
                    // instead of enumerating them.
                    allProteinCoding = true;
                    proteinCodingGenes.add(VariantQueryUtils.ALL);
                    biotypes.remove(VariantAnnotationUtils.PROTEIN_CODING);
                }
                if (!biotypes.isEmpty()) {
                    Query query = new Query(GeneDBAdaptor.QueryParams.TRANSCRIPT_BIOTYPE.key(), String.join(",", biotypes));
                    for (Gene gene : cellBaseUtils.getCellBaseClient().getGeneClient().search(query, queryOptions).allResults()) {
                        otherGenes.add(gene.getName());
                    }
                }
            } else {
                // Gather explicit genes plus genes from the requested panels,
                // then classify them by transcript biotype.
                Set<String> genes = new HashSet<>();
                Predicate<String> biotypeFilter;
                if (StringUtils.isNotEmpty(analysisParams.getBiotype())) {
                    biotypeFilter = new HashSet<>(Arrays.asList(analysisParams.getBiotype().split(",")))::contains;
                } else {
                    biotypeFilter = s -> true;
                }
                if (CollectionUtils.isNotEmpty(analysisParams.getGene())) {
                    genes.addAll(analysisParams.getGene());
                }
                if (CollectionUtils.isNotEmpty(analysisParams.getPanel())) {
                    List<Panel> panels = getCatalogManager()
                            .getPanelManager()
                            .get(studyFqn, analysisParams.getPanel(), new QueryOptions(), getToken())
                            .getResults();
                    for (Panel panel : panels) {
                        for (DiseasePanel.GenePanel gene : panel.getGenes()) {
                            genes.add(gene.getName());
                        }
                    }
                }
                for (Gene gene : cellBaseUtils.getCellBaseClient().getGeneClient().get(new ArrayList<>(genes), queryOptions).allResults()) {
                    Set<String> biotypes = gene.getTranscripts().stream()
                            .map(Transcript::getBiotype)
                            .filter(biotypeFilter)
                            .collect(Collectors.toSet());
                    if (biotypes.contains(VariantAnnotationUtils.PROTEIN_CODING)) {
                        proteinCodingGenes.add(gene.getName());
                    }
                    // A gene may land in both lists when it has protein_coding
                    // and other matching transcript biotypes.
                    if (biotypes.size() == 1 && !biotypes.contains(VariantAnnotationUtils.PROTEIN_CODING) || biotypes.size() > 1) {
                        otherGenes.add(gene.getName());
                    }
                }
            }
            if (allProteinCoding) {
                addAttribute("proteinCodingGenesCount", VariantQueryUtils.ALL);
            } else {
                addAttribute("proteinCodingGenesCount", proteinCodingGenes.size());
            }
            addAttribute("otherGenesCount", otherGenes.size());
//            addAttribute("proteinCodingGenes", proteinCodingGenes);
//            addAttribute("otherGenes", otherGenes);
        });

        // child sample -> trio (family id, father, mother, child)
        Map<String, Trio> triosMap = new HashMap<>(analysisParams.getSample().size());

        // Step 2: resolve family trios, needed for compound-heterozygous detection.
        step("list-families", () -> {
            Query familyQuery = new Query(IndividualDBAdaptor.QueryParams.SAMPLES.key(), analysisParams.getSample());
            for (Family family : getCatalogManager().getFamilyManager()
                    .search(studyFqn, familyQuery, new QueryOptions(), getToken()).getResults()) {
                if (family == null || StringUtils.isEmpty(family.getId())) {
                    continue;
                }
                List<List<String>> trios = variantStorageManager.getTriosFromFamily(studyFqn, family, true, getToken());
                for (List<String> trio : trios) {
                    String child = trio.get(2);
                    if (analysisParams.getSample().contains(child)) {
                        triosMap.put(child, new Trio(family.getId(), trio.get(0), trio.get(1), child));
                    }
                }
            }
            List<String> samplesWithoutFamily = new LinkedList<>();
            for (String sample : analysisParams.getSample()) {
                if (!triosMap.containsKey(sample)) {
                    samplesWithoutFamily.add(sample);
                }
            }
            // Only enumerate the affected samples in the warning when the list is short.
            if (samplesWithoutFamily.size() < 10) {
                samplesWithoutFamily.sort(String::compareTo);
                String warning = "Missing family for samples " + samplesWithoutFamily + ". Unable to get compoundHeterozygous.";
                logger.warn(warning);
                addWarning(warning);
            } else {
                String warning = "Missing family for " + samplesWithoutFamily.size() + " samples. Unable to get compoundHeterozygous.";
                logger.warn(warning);
                addWarning(warning);
            }
        });

        // Step 3 (main step, named after the tool id): run the knockout executor,
        // which writes one JSON file per sample and per gene into the output dir.
        step(() -> {
            setUpStorageEngineExecutor(studyFqn);

//            MappingIterator<String> objectMappingIterator = JacksonUtils.getDefaultObjectMapper().reader().readValues(genesFile);
//            List<String> genes = objectMappingIterator.readAll(new ArrayList<>());
            getToolExecutor(KnockoutAnalysisExecutor.class)
                    .setStudy(studyFqn)
                    .setSamples(analysisParams.getSample())
                    .setSampleFileNamePattern(getOutDir().resolve("knockout.sample.{sample}.json").toString())
                    .setGeneFileNamePattern(getOutDir().resolve("knockout.gene.{gene}.json").toString())
                    .setProteinCodingGenes(new HashSet<>(proteinCodingGenes))
                    .setOtherGenes(new HashSet<>(otherGenes))
                    .setBiotype(analysisParams.getBiotype())
                    .setFilter(analysisParams.getFilter())
                    .setQual(analysisParams.getQual())
                    .setCt(analysisParams.getConsequenceType())
                    .setTrios(triosMap)
                    .execute();
        });

        // Step 4: re-read each produced JSON file and enrich it with catalog
        // metadata (sample/individual for per-sample files, gene annotation for
        // per-gene files), rewriting the files in place.
        step("add-metadata-to-output-files", () -> {
            ObjectReader reader = JacksonUtils.getDefaultObjectMapper().readerFor(KnockoutBySample.class);
            ObjectWriter writer = JacksonUtils.getDefaultObjectMapper().writerWithDefaultPrettyPrinter().forType(KnockoutBySample.class);
            for (File file : FileUtils.listFiles(getOutDir().toFile(), new RegexFileFilter("knockout.sample..*.json"), null)) {
                KnockoutBySample knockoutBySample = reader.readValue(file);
                Sample sample = catalogManager
                        .getSampleManager()
                        .get(studyFqn, knockoutBySample.getSample().getId(), new QueryOptions(), getToken())
                        .first();
                sample.getAttributes().remove("OPENCGA_INDIVIDUAL");
                knockoutBySample.setSample(sample);
                if (StringUtils.isNotEmpty(sample.getIndividualId())) {
                    Individual individual = catalogManager
                            .getIndividualManager()
                            .get(studyFqn, sample.getIndividualId(),
                                    new QueryOptions(QueryOptions.EXCLUDE, IndividualDBAdaptor.QueryParams.SAMPLES.key()), getToken())
                            .first();
                    // Drop placeholder parents that carry no id.
                    if (individual.getFather() != null && individual.getFather().getId() == null) {
                        individual.setFather(null);
                    }
                    if (individual.getMother() != null && individual.getMother().getId() == null) {
                        individual.setMother(null);
                    }
                    knockoutBySample.setIndividual(individual);
                }
                writer.writeValue(file, knockoutBySample);
            }

            reader = JacksonUtils.getDefaultObjectMapper().readerFor(KnockoutByGene.class);
            writer = JacksonUtils.getDefaultObjectMapper().writerWithDefaultPrettyPrinter().forType(KnockoutByGene.class);
            for (File file : FileUtils.listFiles(getOutDir().toFile(), new RegexFileFilter("knockout.gene..*.json"), null)) {
                KnockoutByGene knockoutByGene = reader.readValue(file);
                QueryOptions queryOptions = new QueryOptions(QueryOptions.EXCLUDE, "annotation.expression");
                Gene gene = getVariantStorageManager().getCellBaseUtils(studyFqn, getToken()).getCellBaseClient().getGeneClient()
                        .search(new Query(GeneDBAdaptor.QueryParams.NAME.key(), knockoutByGene.getName()), queryOptions).firstResult();
                knockoutByGene.setGene(gene);
                writer.writeValue(file, knockoutByGene);
            }
        });
    }
}
/** * Copyright 2010 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.util; import java.io.IOException; import java.math.BigInteger; import java.util.LinkedList; import java.util.Set; import java.util.TreeMap; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HServerAddress; import org.apache.hadoop.hbase.HTableDescriptor; import 
org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.regionserver.Store; import org.apache.hadoop.hbase.regionserver.StoreFile; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; /** * The {@link RegionSplitter} class provides several utilities to help in the * administration lifecycle for developers who choose to manually split regions * instead of having HBase handle that automatically. The most useful utilities * are: * <p> * <ul> * <li>Create a table with a specified number of pre-split regions * <li>Execute a rolling split of all regions on an existing table * </ul> * <p> * Both operations can be safely done on a live server. * <p> * <b>Question:</b> How do I turn off automatic splitting? <br> * <b>Answer:</b> Automatic splitting is determined by the configuration value * <i>"hbase.hregion.max.filesize"</i>. It is not recommended that you set this * to Long.MAX_VALUE in case you forget about manual splits. A suggested setting * is 100GB, which would result in > 1hr major compactions if reached. * <p> * <b>Question:</b> Why did the original authors decide to manually split? <br> * <b>Answer:</b> Specific workload characteristics of our use case allowed us * to benefit from a manual split system. * <p> * <ul> * <li>Data (~1k) that would grow instead of being replaced * <li>Data growth was roughly uniform across all regions * <li>OLTP workload. Data loss is a big deal. * </ul> * <p> * <b>Question:</b> Why is manual splitting good for this workload? <br> * <b>Answer:</b> Although automated splitting is not a bad option, there are * benefits to manual splitting. * <p> * <ul> * <li>With growing amounts of data, splits will continually be needed. Since * you always know exactly what regions you have, long-term debugging and * profiling is much easier with manual splits. 
It is hard to trace the logs to * understand region level problems if it keeps splitting and getting renamed. * <li>Data offlining bugs + unknown number of split regions == oh crap! If an * HLog or StoreFile was mistakenly unprocessed by HBase due to a weird bug and * you notice it a day or so later, you can be assured that the regions * specified in these files are the same as the current regions and you have * less headaches trying to restore/replay your data. * <li>You can finely tune your compaction algorithm. With roughly uniform data * growth, it's easy to cause split / compaction storms as the regions all * roughly hit the same data size at the same time. With manual splits, you can * let staggered, time-based major compactions spread out your network IO load. * </ul> * <p> * <b>Question:</b> What's the optimal number of pre-split regions to create? <br> * <b>Answer:</b> Mileage will vary depending upon your application. * <p> * The short answer for our application is that we started with 10 pre-split * regions / server and watched our data growth over time. It's better to err on * the side of too little regions and rolling split later. * <p> * The more complicated answer is that this depends upon the largest storefile * in your region. With a growing data size, this will get larger over time. You * want the largest region to be just big enough that the {@link Store} compact * selection algorithm only compacts it due to a timed major. If you don't, your * cluster can be prone to compaction storms as the algorithm decides to run * major compactions on a large series of regions all at once. Note that * compaction storms are due to the uniform data growth, not the manual split * decision. * <p> * If you pre-split your regions too thin, you can increase the major compaction * interval by configuring HConstants.MAJOR_COMPACTION_PERIOD. If your data size * grows too large, use this script to perform a network IO safe rolling split * of all regions. 
 */
public class RegionSplitter {
  static final Log LOG = LogFactory.getLog(RegionSplitter.class);

  /**
   * A generic interface for the RegionSplitter code to use for all its
   * functionality. Note that the original authors of this code use
   * {@link MD5StringSplit} to partition their table and set it as default, but
   * provided this for your custom algorithm. To use, create a new derived class
   * from this interface and call the RegionSplitter class with the argument: <br>
   * <b>-D split.algorithm=&lt;your_class_path&gt;</b>
   */
  public static interface SplitAlgorithm {
    /**
     * Split a pre-existing region into 2 regions.
     *
     * @param start
     *          row
     * @param end
     *          row
     * @return the split row to use
     */
    byte[] split(byte[] start, byte[] end);

    /**
     * Split an entire table.
     *
     * @param numberOfSplits
     *          number of regions to split the table into
     *
     * @return array of split keys for the initial regions of the table
     */
    byte[][] split(int numberOfSplits);

    /**
     * In HBase, the first row is represented by an empty byte array. This might
     * cause problems with your split algorithm or row printing. All your APIs
     * will be passed firstRow() instead of empty array.
     *
     * @return your representation of your first row
     */
    byte[] firstRow();

    /**
     * In HBase, the last row is represented by an empty byte array. This might
     * cause problems with your split algorithm or row printing. All your APIs
     * will be passed firstRow() instead of empty array.
     *
     * @return your representation of your last row
     */
    byte[] lastRow();

    /**
     * @param input
     *          user or file input for row
     * @return byte array representation of this row for HBase
     */
    byte[] strToRow(String input);

    /**
     * @param row
     *          byte array representing a row in HBase
     * @return String to use for debug &amp; file printing
     */
    String rowToStr(byte[] row);

    /**
     * @return the separator character to use when storing / printing the row
     */
    String separator();
  }

  /**
   * The main function for the RegionSplitter application. Common uses:
   * <p>
   * <ul>
   * <li>create a table named 'myTable' with 60 pre-split regions containing 2
   * column families 'test' &amp; 'rs' bin/hbase
   * <ul>
   * <li>org.apache.hadoop.hbase.util.RegionSplitter -c 60 -f test:rs myTable
   * </ul>
   * <li>perform a rolling split of 'myTable' (i.e. 60 =&gt; 120 regions), # 2
   * outstanding splits at a time bin/hbase
   * <ul>
   * <li>org.apache.hadoop.hbase.util.RegionSplitter -r -o 2 myTable
   * </ul>
   * </ul>
   *
   * @param args
   *          Usage: RegionSplitter &lt;TABLE&gt; &lt;-c &lt;# regions&gt; -f
   *          &lt;family:family:...&gt; | -r [-o &lt;# outstanding
   *          splits&gt;]&gt; [-D &lt;conf.param=value&gt;]
   * @throws IOException
   *           HBase IO problem
   * @throws InterruptedException
   *           user requested exit
   * @throws ParseException
   *           problem parsing user input
   */
  @SuppressWarnings("static-access")
  public static void main(String[] args) throws IOException,
      InterruptedException, ParseException {
    Configuration conf = HBaseConfiguration.create();

    // parse user input
    Options opt = new Options();
    opt.addOption(OptionBuilder.withArgName("property=value").hasArg()
        .withDescription("Override HBase Configuration Settings").create("D"));
    opt.addOption(OptionBuilder.withArgName("region count").hasArg()
        .withDescription(
            "Create a new table with a pre-split number of regions")
        .create("c"));
    opt.addOption(OptionBuilder.withArgName("family:family:...").hasArg()
        .withDescription(
            "Column Families to create with new table. Required with -c")
        .create("f"));
    opt.addOption("h", false, "Print this usage help");
    opt.addOption("r", false, "Perform a rolling split of an existing region");
    opt.addOption(OptionBuilder.withArgName("count").hasArg().withDescription(
        "Max outstanding splits that have unfinished major compactions")
        .create("o"));
    opt.addOption(null, "risky", false,
        "Skip verification steps to complete quickly."
            + "STRONGLY DISCOURAGED for production systems. ");
    CommandLine cmd = new GnuParser().parse(opt, args);

    // apply any -D key=value overrides directly to the Configuration
    if (cmd.hasOption("D")) {
      for (String confOpt : cmd.getOptionValues("D")) {
        String[] kv = confOpt.split("=", 2);
        if (kv.length == 2) {
          conf.set(kv[0], kv[1]);
          LOG.debug("-D configuration override: " + kv[0] + "=" + kv[1]);
        } else {
          throw new ParseException("-D option format invalid: " + confOpt);
        }
      }
    }

    // --risky disables the online/compaction verification loops below
    if (cmd.hasOption("risky")) {
      conf.setBoolean("split.verify", false);
    }

    // exactly one of create (-c with -f) or rolling split (-r) must be chosen
    boolean createTable = cmd.hasOption("c") && cmd.hasOption("f");
    boolean rollingSplit = cmd.hasOption("r");
    boolean oneOperOnly = createTable ^ rollingSplit;

    if (1 != cmd.getArgList().size() || !oneOperOnly || cmd.hasOption("h")) {
      new HelpFormatter().printHelp("RegionSplitter <TABLE>", opt);
      return;
    }
    String tableName = cmd.getArgs()[0];

    if (createTable) {
      conf.set("split.count", cmd.getOptionValue("c"));
      createPresplitTable(tableName, cmd.getOptionValue("f").split(":"), conf);
    }

    if (rollingSplit) {
      if (cmd.hasOption("o")) {
        conf.set("split.outstanding", cmd.getOptionValue("o"));
      }
      rollingSplit(tableName, conf);
    }
  }

  /**
   * Creates a new pre-split table using the configured {@code split.algorithm}
   * (default {@link MD5StringSplit}) and {@code split.count} initial regions.
   *
   * @param tableName      name of the table to create; must not already exist
   * @param columnFamilies one or more column family names for the new table
   * @param conf           configuration carrying split.count / split.algorithm
   * @throws IOException          HBase IO problem or bad split algorithm class
   * @throws InterruptedException interrupted while sleeping between polls
   */
  static void createPresplitTable(String tableName, String[] columnFamilies,
      Configuration conf) throws IOException, InterruptedException {
    Class<? extends SplitAlgorithm> splitClass = conf.getClass(
        "split.algorithm", MD5StringSplit.class, SplitAlgorithm.class);
    SplitAlgorithm splitAlgo;
    try {
      splitAlgo = splitClass.newInstance();
    } catch (Exception e) {
      throw new IOException("Problem loading split algorithm: ", e);
    }
    final int splitCount = conf.getInt("split.count", 0);
    Preconditions.checkArgument(splitCount > 1, "Split count must be > 1");
    Preconditions.checkArgument(columnFamilies.length > 0,
        "Must specify at least one column family. ");
    LOG.debug("Creating table " + tableName + " with " + columnFamilies.length
        + " column families. Presplitting to " + splitCount + " regions");

    HTableDescriptor desc = new HTableDescriptor(tableName);
    for (String cf : columnFamilies) {
      desc.addFamily(new HColumnDescriptor(Bytes.toBytes(cf)));
    }
    HBaseAdmin admin = new HBaseAdmin(conf);
    Preconditions.checkArgument(!admin.tableExists(tableName),
        "Table already exists: " + tableName);
    admin.createTable(desc, splitAlgo.split(splitCount));
    LOG.debug("Table created! Waiting for regions to show online in META...");

    // NOTE(review): the '!' below looks inverted — this polls for all regions
    // to come online only when split.verify is DISABLED (--risky). Upstream
    // versions of this tool poll when verification is enabled; confirm intent.
    if (!conf.getBoolean("split.verify", true)) {
      // NOTE: createTable is synchronous on the table, but not on the regions
      HTable table = new HTable(tableName);
      int onlineRegions = 0;
      while (onlineRegions < splitCount) {
        onlineRegions = table.getRegionsInfo().size();
        LOG.debug(onlineRegions + " of " + splitCount + " regions online...");
        if (onlineRegions < splitCount) {
          Thread.sleep(10 * 1000); // sleep
        }
      }
    }

    LOG.debug("Finished creating table with " + splitCount + " regions");
  }

  /**
   * Performs a network-IO-safe rolling split of every region in the table:
   * computes (or replays from the persisted "_balancedSplit" file) the set of
   * daughter splits, buckets them by regionserver, then round-robins split
   * requests across servers, throttled so that at most MAX_OUTSTANDING splits
   * are awaiting major compaction at any time. Progress is journaled into the
   * split file so an interrupted run can resume where it left off.
   *
   * @param tableName table whose regions are to be split
   * @param conf      configuration (split.outstanding, split.verify, ...)
   * @throws IOException          HBase/HDFS IO problem
   * @throws InterruptedException interrupted while sleeping between polls
   */
  static void rollingSplit(String tableName, Configuration conf)
      throws IOException, InterruptedException {
    Class<? extends SplitAlgorithm> splitClass = conf.getClass(
        "split.algorithm", MD5StringSplit.class, SplitAlgorithm.class);
    SplitAlgorithm splitAlgo;
    try {
      splitAlgo = splitClass.newInstance();
    } catch (Exception e) {
      throw new IOException("Problem loading split algorithm: ", e);
    }
    final int minOS = conf.getInt("split.outstanding", 2);

    HTable table = new HTable(conf, tableName);

    // max outstanding splits. default == 50% of servers
    final int MAX_OUTSTANDING = Math.max(table.getCurrentNrHRS() / 2, minOS);

    Path hbDir = new Path(conf.get(HConstants.HBASE_DIR));
    Path tableDir = HTableDescriptor.getTableDir(hbDir, table.getTableName());
    Path splitFile = new Path(tableDir, "_balancedSplit");
    FileSystem fs = FileSystem.get(conf);

    // get a list of daughter regions to create
    LinkedList<Pair<byte[], byte[]>> tmpRegionSet = getSplits(table, splitAlgo);
    LinkedList<Pair<byte[], byte[]>> outstanding = Lists.newLinkedList();
    int splitCount = 0;
    final int origCount = tmpRegionSet.size();

    // all splits must compact & we have 1 compact thread, so 2 split
    // requests to the same RS can stall the outstanding split queue.
    // To fix, group the regions into an RS pool and round-robin through it
    LOG.debug("Bucketing regions by regionserver...");
    TreeMap<HServerAddress, LinkedList<Pair<byte[], byte[]>>> daughterRegions = Maps
        .newTreeMap();
    for (Pair<byte[], byte[]> dr : tmpRegionSet) {
      HServerAddress rsLocation = table.getRegionLocation(dr.getSecond())
          .getServerAddress();
      if (!daughterRegions.containsKey(rsLocation)) {
        LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
        daughterRegions.put(rsLocation, entry);
      }
      daughterRegions.get(rsLocation).add(dr);
    }
    LOG.debug("Done with bucketing. Split time!");
    long startTime = System.currentTimeMillis();

    // open the split file and modify it as splits finish
    // NOTE(review): available() is not guaranteed to equal the full file
    // length on all FileSystem implementations — confirm for the HDFS
    // versions in use.
    FSDataInputStream tmpIn = fs.open(splitFile);
    byte[] rawData = new byte[tmpIn.available()];
    tmpIn.readFully(rawData);
    tmpIn.close();
    FSDataOutputStream splitOut = fs.create(splitFile);
    splitOut.write(rawData);

    try {
      // *** split code ***
      while (!daughterRegions.isEmpty()) {
        LOG.debug(daughterRegions.size() + " RS have regions to splt.");
        // round-robin through the RS list
        for (HServerAddress rsLoc = daughterRegions.firstKey(); rsLoc != null;
            rsLoc = daughterRegions.higherKey(rsLoc)) {
          Pair<byte[], byte[]> dr = null;

          // find a region in the RS list that hasn't been moved
          LOG.debug("Finding a region on " + rsLoc);
          LinkedList<Pair<byte[], byte[]>> regionList = daughterRegions
              .get(rsLoc);
          while (!regionList.isEmpty()) {
            dr = regionList.pop();

            // get current region info
            byte[] split = dr.getSecond();
            HRegionLocation regionLoc = table.getRegionLocation(split);

            // if this region moved locations
            HServerAddress newRs = regionLoc.getServerAddress();
            if (newRs.compareTo(rsLoc) != 0) {
              LOG.debug("Region with " + splitAlgo.rowToStr(split)
                  + " moved to " + newRs + ". Relocating...");
              // relocate it, don't use it right now
              if (!daughterRegions.containsKey(newRs)) {
                LinkedList<Pair<byte[], byte[]>> entry = Lists.newLinkedList();
                daughterRegions.put(newRs, entry);
              }
              daughterRegions.get(newRs).add(dr);
              dr = null;
              continue;
            }

            // make sure this region wasn't already split
            byte[] sk = regionLoc.getRegionInfo().getStartKey();
            if (sk.length != 0) {
              if (Bytes.equals(split, sk)) {
                LOG.debug("Region already split on "
                    + splitAlgo.rowToStr(split)
                    + ". Skipping this region...");
                dr = null;
                continue;
              }
              byte[] start = dr.getFirst();
              Preconditions.checkArgument(Bytes.equals(start, sk), splitAlgo
                  .rowToStr(start) + " != " + splitAlgo.rowToStr(sk));
            }

            // passed all checks! found a good region
            break;
          }
          if (regionList.isEmpty()) {
            daughterRegions.remove(rsLoc);
          }
          if (dr == null)
            continue;

          // we have a good region, time to split!

          byte[] start = dr.getFirst();
          byte[] split = dr.getSecond();

          // request split
          LOG.debug("Splitting at " + splitAlgo.rowToStr(split));
          HBaseAdmin admin = new HBaseAdmin(table.getConfiguration());
          admin.split(table.getTableName(), split);

          if (conf.getBoolean("split.verify", true)) {
            // wait for one of the daughter regions to come online
            boolean daughterOnline = false;
            int daughterSleep = 5; // seconds
            while (!daughterOnline) {
              LOG.debug("Waiting for daughter region to come online...");
              table.clearRegionCache();
              HRegionInfo hri = table.getRegionLocation(split).getRegionInfo();
              // daughter is online once META maps 'split' to a live region
              // that starts exactly at the split point
              daughterOnline = Bytes.equals(hri.getStartKey(), split)
                  && !hri.isOffline();
              // exponential backoff, capped at 60s
              daughterSleep = Math.min(daughterSleep * 2, 60);
              Thread.sleep(daughterSleep * 1000); // sleep
            }
            LOG.debug("Daughter region is online.");
          }

          // mark the region as successfully split.
          // NOTE: split done, but daughter regions still need to major compact
          splitOut.writeChars("- " + splitAlgo.rowToStr(dr.getFirst()) + " "
              + splitAlgo.rowToStr(dr.getSecond()) + "\n");
          splitCount++;
          if (splitCount % 10 == 0) {
            long tDiff = (System.currentTimeMillis() - startTime) / splitCount;
            LOG.debug("STATUS UPDATE: " + splitCount + " / " + origCount
                + ". Avg Time / Split = "
                + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
          }

          if (conf.getBoolean("split.verify", true)) {
            // if we have too many outstanding splits, wait for oldest ones to
            // finish
            outstanding.addLast(Pair.newPair(start, split));
            if (outstanding.size() > MAX_OUTSTANDING) {
              Pair<byte[], byte[]> reg = outstanding.removeFirst();
              String outStart = splitAlgo.rowToStr(reg.getFirst());
              String outSplit = splitAlgo.rowToStr(reg.getSecond());
              LOG.debug("Waiting for " + outStart + " , " + outSplit
                  + " to finish compaction");
              // when a daughter region is opened, a compaction is triggered
              // wait until compaction completes for both daughter regions
              LinkedList<HRegionInfo> check = Lists.newLinkedList();
              // figure out where this region should be in HDFS
              check
                  .add(table.getRegionLocation(reg.getFirst()).getRegionInfo());
              check.add(table.getRegionLocation(reg.getSecond())
                  .getRegionInfo());
              while (!check.isEmpty()) {
                // compaction is completed when all reference files are gone
                for (HRegionInfo hri : check.toArray(new HRegionInfo[] {})) {
                  boolean refFound = false;
                  byte[] sk = hri.getStartKey();
                  if (sk.length == 0)
                    sk = splitAlgo.firstRow();
                  String startKey = splitAlgo.rowToStr(sk);
                  // check every Column Family for that region
                  for (HColumnDescriptor c : hri.getTableDesc().getFamilies()) {
                    Path cfDir = Store.getStoreHomedir(tableDir, hri
                        .getEncodedName(), c.getName());
                    if (fs.exists(cfDir)) {
                      for (FileStatus file : fs.listStatus(cfDir)) {
                        refFound |= StoreFile.isReference(file.getPath());
                        if (refFound) {
                          LOG.debug("Reference still exists for " + startKey
                              + " at " + file.getPath());
                          break;
                        }
                      }
                    }
                    if (refFound)
                      break;
                  }
                  if (!refFound) {
                    check.remove(hri);
                    LOG.debug("- finished compaction of " + startKey);
                  }
                }

                // sleep in between requests
                if (!check.isEmpty()) {
                  LOG.debug("Waiting for " + check.size() + " compactions");
                  Thread.sleep(30 * 1000);
                }
              }
            }
          }
        }
      }
      LOG.debug("All regions have been sucesfully split!");
    } finally {
      // NOTE(review): if we fail before the first split completes,
      // splitCount == 0 and the average below throws ArithmeticException,
      // masking the original error — confirm whether that is acceptable.
      long tDiff = System.currentTimeMillis() - startTime;
      LOG.debug("TOTAL TIME = "
          + org.apache.hadoop.util.StringUtils.formatTime(tDiff));
      LOG.debug("Splits = " + splitCount);
      LOG.debug("Avg Time / Split = "
          + org.apache.hadoop.util.StringUtils.formatTime(tDiff / splitCount));
      splitOut.close();
    }
    // journal is only deleted on a fully successful run so a crashed run can
    // be resumed by replaying the remaining entries
    fs.delete(splitFile, false);
  }

  /**
   * Determines the set of (start, splitPoint) daughter pairs still to be
   * split. On a fresh run it queries META for all regions, computes a split
   * point for each via the algorithm, and journals them ("+" lines) into the
   * "_balancedSplit" file (written atomically via a _prepare rename). On a
   * resumed run it replays that file: "+" adds a pending split, "-" marks one
   * completed and removes it.
   *
   * @param table     the table being split
   * @param splitAlgo algorithm used for split points and row formatting
   * @return pending (start row, split point) pairs
   * @throws IOException HBase/HDFS IO problem
   */
  static LinkedList<Pair<byte[], byte[]>> getSplits(HTable table,
      SplitAlgorithm splitAlgo) throws IOException {
    Path hbDir = new Path(table.getConfiguration().get(HConstants.HBASE_DIR));
    Path tableDir = HTableDescriptor.getTableDir(hbDir, table.getTableName());
    Path splitFile = new Path(tableDir, "_balancedSplit");
    FileSystem fs = FileSystem.get(table.getConfiguration());

    // using strings because (new byte[]{0}).equals(new byte[]{0}) == false
    Set<Pair<String, String>> daughterRegions = Sets.newHashSet();

    // does a split file exist?
    if (!fs.exists(splitFile)) {
      // NO = fresh start. calculate splits to make
      LOG.debug("No _balancedSplit file. Calculating splits...");

      // query meta for all regions in the table
      Set<Pair<byte[], byte[]>> rows = Sets.newHashSet();
      Pair<byte[][], byte[][]> tmp = table.getStartEndKeys();
      Preconditions.checkArgument(
          tmp.getFirst().length == tmp.getSecond().length,
          "Start and End rows should be equivalent");
      for (int i = 0; i < tmp.getFirst().length; ++i) {
        byte[] start = tmp.getFirst()[i], end = tmp.getSecond()[i];
        // substitute the algorithm's sentinels for HBase's empty-row markers
        if (start.length == 0)
          start = splitAlgo.firstRow();
        if (end.length == 0)
          end = splitAlgo.lastRow();
        rows.add(Pair.newPair(start, end));
      }
      LOG.debug("Table " + Bytes.toString(table.getTableName()) + " has "
          + rows.size() + " regions that will be split.");

      // prepare the split file
      Path tmpFile = new Path(tableDir, "_balancedSplit_prepare");
      FSDataOutputStream tmpOut = fs.create(tmpFile);

      // calculate all the splits == [daughterRegions] = [(start, splitPoint)]
      for (Pair<byte[], byte[]> r : rows) {
        byte[] splitPoint = splitAlgo.split(r.getFirst(), r.getSecond());
        String startStr = splitAlgo.rowToStr(r.getFirst());
        String splitStr = splitAlgo.rowToStr(splitPoint);
        daughterRegions.add(Pair.newPair(startStr, splitStr));
        LOG.debug("Will Split [" + startStr + " , "
            + splitAlgo.rowToStr(r.getSecond()) + ") at " + splitStr);
        tmpOut.writeChars("+ " + startStr + splitAlgo.separator() + splitStr
            + "\n");
      }
      tmpOut.close();
      // rename makes the prepared file visible atomically
      fs.rename(tmpFile, splitFile);
    } else {
      LOG.debug("_balancedSplit file found. Replay log to restore state...");
      FSUtils.recoverFileLease(fs, splitFile, table.getConfiguration());

      // parse split file and process remaining splits
      FSDataInputStream tmpIn = fs.open(splitFile);
      StringBuilder sb = new StringBuilder(tmpIn.available());
      while (tmpIn.available() > 0) {
        sb.append(tmpIn.readChar());
      }
      tmpIn.close();
      for (String line : sb.toString().split("\n")) {
        String[] cmd = line.split(splitAlgo.separator());
        Preconditions.checkArgument(3 == cmd.length);
        byte[] start = splitAlgo.strToRow(cmd[1]);
        String startStr = splitAlgo.rowToStr(start);
        byte[] splitPoint = splitAlgo.strToRow(cmd[2]);
        String splitStr = splitAlgo.rowToStr(splitPoint);
        Pair<String, String> r = Pair.newPair(startStr, splitStr);
        if (cmd[0].equals("+")) {
          LOG.debug("Adding: " + r);
          daughterRegions.add(r);
        } else {
          LOG.debug("Removing: " + r);
          Preconditions.checkArgument(cmd[0].equals("-"),
              "Unknown option: " + cmd[0]);
          Preconditions.checkState(daughterRegions.contains(r),
              "Missing row: " + r);
          daughterRegions.remove(r);
        }
      }
      LOG.debug("Done reading. " + daughterRegions.size() + " regions left.");
    }

    // convert the string-keyed set back to byte[] pairs for the caller
    LinkedList<Pair<byte[], byte[]>> ret = Lists.newLinkedList();
    for (Pair<String, String> r : daughterRegions) {
      ret.add(Pair.newPair(splitAlgo.strToRow(r.getFirst()), splitAlgo
          .strToRow(r.getSecond())));
    }
    return ret;
  }

  /**
   * MD5StringSplit is the default {@link SplitAlgorithm} for creating pre-split
   * tables. The format of MD5StringSplit is the ASCII representation of an MD5
   * checksum. Rows are long values in the range <b>"00000000" =&gt; "7FFFFFFF"</b>
   * and are left-padded with zeros to keep the same order lexicographically as if
   * they were binary.
   */
  public static class MD5StringSplit implements SplitAlgorithm {
    final static String MAXMD5 = "7FFFFFFF";
    final static BigInteger MAXMD5_INT = new BigInteger(MAXMD5, 16);
    final static int rowComparisonLength = MAXMD5.length();

    // midpoint of [start, end) in the hex keyspace
    public byte[] split(byte[] start, byte[] end) {
      BigInteger s = convertToBigInteger(start);
      BigInteger e = convertToBigInteger(end);
      Preconditions.checkArgument(!e.equals(BigInteger.ZERO));
      return convertToByte(split2(s, e));
    }

    // n-1 evenly spaced boundary keys partitioning [0, MAXMD5] into n regions
    public byte[][] split(int n) {
      BigInteger[] splits = new BigInteger[n - 1];
      BigInteger sizeOfEachSplit = MAXMD5_INT.divide(BigInteger.valueOf(n));
      for (int i = 1; i < n; i++) {
        // NOTE: this means the last region gets all the slop.
        // This is not a big deal if we're assuming n << MAXMD5
        splits[i - 1] = sizeOfEachSplit.multiply(BigInteger.valueOf(i));
      }
      return convertToBytes(splits);
    }

    public byte[] firstRow() {
      return convertToByte(BigInteger.ZERO);
    }

    public byte[] lastRow() {
      return convertToByte(MAXMD5_INT);
    }

    public byte[] strToRow(String in) {
      return convertToByte(new BigInteger(in, 16));
    }

    public String rowToStr(byte[] row) {
      return Bytes.toStringBinary(row);
    }

    public String separator() {
      return " ";
    }

    // arithmetic midpoint; BigInteger avoids the (min+max)/2 overflow pitfall
    static BigInteger split2(BigInteger minValue, BigInteger maxValue) {
      return maxValue.add(minValue).divide(BigInteger.valueOf(2));
    }

    /**
     * Returns an array of bytes corresponding to an array of BigIntegers
     *
     * @param bigIntegers
     * @return bytes corresponding to the bigIntegers
     */
    static byte[][] convertToBytes(BigInteger[] bigIntegers) {
      byte[][] returnBytes = new byte[bigIntegers.length][];
      for (int i = 0; i < bigIntegers.length; i++) {
        returnBytes[i] = convertToByte(bigIntegers[i]);
      }
      return returnBytes;
    }

    /**
     * Returns the bytes corresponding to the BigInteger, as a zero-padded
     * fixed-width hex string so byte order matches numeric order.
     *
     * @param bigInteger
     * @return byte corresponding to input BigInteger
     */
    static byte[] convertToByte(BigInteger bigInteger) {
      String bigIntegerString = bigInteger.toString(16);
      bigIntegerString = StringUtils.leftPad(bigIntegerString,
          rowComparisonLength, '0');
      return Bytes.toBytes(bigIntegerString);
    }

    /**
     * Returns the BigInteger represented by the byte array; an empty row
     * (HBase's first-row marker) maps to zero.
     *
     * @param row
     * @return the corresponding BigInteger
     */
    static BigInteger convertToBigInteger(byte[] row) {
      return (row.length > 0) ? new BigInteger(Bytes.toString(row), 16)
          : BigInteger.ZERO;
    }
  }
}
package com.rabbitframework.dbase.reflect;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ReflectPermission;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import com.rabbitframework.commons.exceptions.ReflectionException;
import com.rabbitframework.commons.reflect.invoker.GetFieldInvoker;
import com.rabbitframework.commons.reflect.invoker.Invoker;
import com.rabbitframework.commons.reflect.invoker.MethodInvoker;
import com.rabbitframework.commons.reflect.invoker.SetFieldInvoker;
import com.rabbitframework.commons.reflect.property.PropertyNamer;

/**
 * Caches reflection metadata for a single class: its default constructor,
 * getter/setter {@link Invoker}s, property types, and a case-insensitive
 * property-name index. Instances are built once per class via
 * {@link #forClass(Class)} and cached in a ConcurrentHashMap when class
 * caching is enabled.
 */
public class Reflector {
    // global switch for the per-class cache used by forClass()
    // NOTE(review): read/written without synchronization or volatile; stale
    // reads are possible after setClassCacheEnabled — confirm acceptable.
    private static boolean classCacheEnabled = true;
    private static final String[] EMPTY_STRING_ARRAY = new String[0];
    private static final Map<Class<?>, Reflector> REFLECTOR_MAP = new ConcurrentHashMap<Class<?>, Reflector>();

    private Class<?> type;
    private String[] readablePropertyNames = EMPTY_STRING_ARRAY;
    private String[] writeablePropertyNames = EMPTY_STRING_ARRAY;
    // property name -> invoker / declared type, for setters and getters
    private Map<String, Invoker> setMethods = new HashMap<String, Invoker>();
    private Map<String, Invoker> getMethods = new HashMap<String, Invoker>();
    private Map<String, Class<?>> setTypes = new HashMap<String, Class<?>>();
    private Map<String, Class<?>> getTypes = new HashMap<String, Class<?>>();
    private Constructor<?> defaultConstructor;
    // UPPERCASE property name -> canonical property name
    private Map<String, String> caseInsensitivePropertyMap = new HashMap<String, String>();

    /**
     * Builds the full metadata set for {@code clazz}: default constructor,
     * getters, setters, then fields (fields only fill gaps the accessor scan
     * left), and finally the case-insensitive name index.
     */
    private Reflector(Class<?> clazz) {
        type = clazz;
        addDefaultConstructor(clazz);
        addGetMethods(clazz);
        addSetMethods(clazz);
        addFields(clazz);
        readablePropertyNames = getMethods.keySet().toArray(
                new String[getMethods.keySet().size()]);
        writeablePropertyNames = setMethods.keySet().toArray(
                new String[setMethods.keySet().size()]);
        for (String propName : readablePropertyNames) {
            caseInsensitivePropertyMap.put(
                    propName.toUpperCase(Locale.ENGLISH), propName);
        }
        for (String propName : writeablePropertyNames) {
            caseInsensitivePropertyMap.put(
                    propName.toUpperCase(Locale.ENGLISH), propName);
        }
    }

    // Records the no-arg constructor, if one exists and is (or can be made)
    // accessible under the current SecurityManager.
    private void addDefaultConstructor(Class<?> clazz) {
        Constructor<?>[] consts = clazz.getDeclaredConstructors();
        for (Constructor<?> constructor : consts) {
            if (constructor.getParameterTypes().length == 0) {
                if (canAccessPrivateMethods()) {
                    try {
                        constructor.setAccessible(true);
                    } catch (Exception e) {
                        // Ignored. This is only a final precaution, nothing we
                        // can do.
                    }
                }
                if (constructor.isAccessible()) {
                    this.defaultConstructor = constructor;
                }
            }
        }
    }

    // Collects zero-arg getX()/isX() methods; name collisions (e.g. from
    // covariant overrides) are deferred to resolveGetterConflicts.
    private void addGetMethods(Class<?> cls) {
        Map<String, List<Method>> conflictingGetters = new HashMap<String, List<Method>>();
        Method[] methods = getClassMethods(cls);
        for (Method method : methods) {
            String name = method.getName();
            if (name.startsWith("get") && name.length() > 3) {
                if (method.getParameterTypes().length == 0) {
                    name = PropertyNamer.methodToProperty(name);
                    addMethodConflict(conflictingGetters, name, method);
                }
            } else if (name.startsWith("is") && name.length() > 2) {
                if (method.getParameterTypes().length == 0) {
                    name = PropertyNamer.methodToProperty(name);
                    addMethodConflict(conflictingGetters, name, method);
                }
            }
        }
        resolveGetterConflicts(conflictingGetters);
    }

    // When multiple getters share a property name, picks the one with the
    // most specific (most derived) return type; identical or unrelated return
    // types are an ambiguity and are rejected.
    private void resolveGetterConflicts(
            Map<String, List<Method>> conflictingGetters) {
        for (String propName : conflictingGetters.keySet()) {
            List<Method> getters = conflictingGetters.get(propName);
            Iterator<Method> iterator = getters.iterator();
            Method firstMethod = iterator.next();
            if (getters.size() == 1) {
                addGetMethod(propName, firstMethod);
            } else {
                Method getter = firstMethod;
                Class<?> getterType = firstMethod.getReturnType();
                while (iterator.hasNext()) {
                    Method method = iterator.next();
                    Class<?> methodType = method.getReturnType();
                    if (methodType.equals(getterType)) {
                        // same return type twice (e.g. getX() and isX()) is
                        // irresolvably ambiguous
                        throw new ReflectionException(
                                "Illegal overloaded getter method with ambiguous type for property "
                                        + propName
                                        + " in class "
                                        + firstMethod.getDeclaringClass()
                                        + ". This breaks the JavaBeans "
                                        + "specification and can cause unpredicatble results.");
                    } else if (methodType.isAssignableFrom(getterType)) {
                        // OK getter type is descendant
                    } else if (getterType.isAssignableFrom(methodType)) {
                        getter = method;
                        getterType = methodType;
                    } else {
                        throw new ReflectionException(
                                "Illegal overloaded getter method with ambiguous type for property "
                                        + propName
                                        + " in class "
                                        + firstMethod.getDeclaringClass()
                                        + ". This breaks the JavaBeans "
                                        + "specification and can cause unpredicatble results.");
                    }
                }
                addGetMethod(propName, getter);
            }
        }
    }

    private void addGetMethod(String name, Method method) {
        if (isValidPropertyName(name)) {
            getMethods.put(name, new MethodInvoker(method));
            getTypes.put(name, method.getReturnType());
        }
    }

    // Collects one-arg setX() methods; collisions deferred to
    // resolveSetterConflicts.
    private void addSetMethods(Class<?> cls) {
        Map<String, List<Method>> conflictingSetters = new HashMap<String, List<Method>>();
        Method[] methods = getClassMethods(cls);
        for (Method method : methods) {
            String name = method.getName();
            if (name.startsWith("set") && name.length() > 3) {
                if (method.getParameterTypes().length == 1) {
                    name = PropertyNamer.methodToProperty(name);
                    addMethodConflict(conflictingSetters, name, method);
                }
            }
        }
        resolveSetterConflicts(conflictingSetters);
    }

    // Appends method to the per-name candidate list, creating it on demand.
    private void addMethodConflict(
            Map<String, List<Method>> conflictingMethods, String name,
            Method method) {
        List<Method> list = conflictingMethods.get(name);
        if (list == null) {
            list = new ArrayList<Method>();
            conflictingMethods.put(name, list);
        }
        list.add(method);
    }

    // When multiple setters share a property name, picks the one whose single
    // parameter exactly matches the getter's return type (getters must have
    // been processed first); anything else is ambiguous.
    private void resolveSetterConflicts(
            Map<String, List<Method>> conflictingSetters) {
        for (String propName : conflictingSetters.keySet()) {
            List<Method> setters = conflictingSetters.get(propName);
            Method firstMethod = setters.get(0);
            if (setters.size() == 1) {
                addSetMethod(propName, firstMethod);
            } else {
                Class<?> expectedType = getTypes.get(propName);
                if (expectedType == null) {
                    throw new ReflectionException(
                            "Illegal overloaded setter method with ambiguous type for property "
                                    + propName
                                    + " in class "
                                    + firstMethod.getDeclaringClass()
                                    + ". This breaks the JavaBeans "
                                    + "specification and can cause unpredicatble results.");
                } else {
                    Iterator<Method> methods = setters.iterator();
                    Method setter = null;
                    while (methods.hasNext()) {
                        Method method = methods.next();
                        if (method.getParameterTypes().length == 1
                                && expectedType.equals(method
                                        .getParameterTypes()[0])) {
                            setter = method;
                            break;
                        }
                    }
                    if (setter == null) {
                        throw new ReflectionException(
                                "Illegal overloaded setter method with ambiguous type for property "
                                        + propName
                                        + " in class "
                                        + firstMethod.getDeclaringClass()
                                        + ". This breaks the JavaBeans "
                                        + "specification and can cause unpredicatble results.");
                    }
                    addSetMethod(propName, setter);
                }
            }
        }
    }

    private void addSetMethod(String name, Method method) {
        if (isValidPropertyName(name)) {
            setMethods.put(name, new MethodInvoker(method));
            setTypes.put(name, method.getParameterTypes()[0]);
        }
    }

    // Walks the class hierarchy and registers direct field access for any
    // property that has no getter/setter method; final static fields are
    // never writable.
    private void addFields(Class<?> clazz) {
        Field[] fields = clazz.getDeclaredFields();
        for (Field field : fields) {
            if (canAccessPrivateMethods()) {
                try {
                    field.setAccessible(true);
                } catch (Exception e) {
                    // Ignored. This is only a final precaution, nothing we can
                    // do.
                }
            }
            if (field.isAccessible()) {
                if (!setMethods.containsKey(field.getName())) {
                    // issue #379 - removed the check for final because JDK 1.5
                    // allows
                    // modification of final fields through reflection
                    // (JSR-133). (JGB)
                    // pr #16 - final static can only be set by the classloader
                    int modifiers = field.getModifiers();
                    if (!(Modifier.isFinal(modifiers) && Modifier
                            .isStatic(modifiers))) {
                        addSetField(field);
                    }
                }
                if (!getMethods.containsKey(field.getName())) {
                    addGetField(field);
                }
            }
        }
        if (clazz.getSuperclass() != null) {
            addFields(clazz.getSuperclass());
        }
    }

    private void addSetField(Field field) {
        if (isValidPropertyName(field.getName())) {
            setMethods.put(field.getName(), new SetFieldInvoker(field));
            setTypes.put(field.getName(), field.getType());
        }
    }

    private void addGetField(Field field) {
        if (isValidPropertyName(field.getName())) {
            getMethods.put(field.getName(), new GetFieldInvoker(field));
            getTypes.put(field.getName(), field.getType());
        }
    }

    // Filters out synthetic ($-prefixed), serialization, and the implicit
    // "class" property.
    private boolean isValidPropertyName(String name) {
        return !(name.startsWith("$") || "serialVersionUID".equals(name) || "class"
                .equals(name));
    }

    /**
     * This method returns an array containing all methods declared in this
     * class and any superclass. We use this method, instead of the simpler
     * Class.getMethods(), because we want to look for private methods as well.
     *
     * @param cls The class
     *
     * @return An array containing all methods in this class
     */
    private Method[] getClassMethods(Class<?> cls) {
        HashMap<String, Method> uniqueMethods = new HashMap<String, Method>();
        Class<?> currentClass = cls;
        while (currentClass != null) {
            addUniqueMethods(uniqueMethods, currentClass.getDeclaredMethods());

            // we also need to look for interface methods -
            // because the class may be abstract
            Class<?>[] interfaces = currentClass.getInterfaces();
            for (Class<?> anInterface : interfaces) {
                addUniqueMethods(uniqueMethods, anInterface.getMethods());
            }

            currentClass = currentClass.getSuperclass();
        }

        Collection<Method> methods = uniqueMethods.values();

        return methods.toArray(new Method[methods.size()]);
    }

    // De-duplicates by signature so a subclass override shadows the
    // superclass method; bridge methods (compiler-generated) are skipped.
    private void addUniqueMethods(HashMap<String, Method> uniqueMethods,
            Method[] methods) {
        for (Method currentMethod : methods) {
            if (!currentMethod.isBridge()) {
                String signature = getSignature(currentMethod);
                // check to see if the method is already known
                // if it is known, then an extended class must have
                // overridden a method
                if (!uniqueMethods.containsKey(signature)) {
                    if (canAccessPrivateMethods()) {
                        try {
                            currentMethod.setAccessible(true);
                        } catch (Exception e) {
                            // Ignored. This is only a final precaution, nothing
                            // we can do.
                        }
                    }

                    uniqueMethods.put(signature, currentMethod);
                }
            }
        }
    }

    // Builds a key of the form "returnType#name:param1,param2,..." used for
    // override-aware de-duplication in addUniqueMethods.
    private String getSignature(Method method) {
        StringBuilder sb = new StringBuilder();
        Class<?> returnType = method.getReturnType();
        if (returnType != null) {
            sb.append(returnType.getName()).append('#');
        }
        sb.append(method.getName());
        Class<?>[] parameters = method.getParameterTypes();
        for (int i = 0; i < parameters.length; i++) {
            if (i == 0) {
                sb.append(':');
            } else {
                sb.append(',');
            }
            sb.append(parameters[i].getName());
        }
        return sb.toString();
    }

    // True when the SecurityManager (if any) permits suppressAccessChecks,
    // i.e. setAccessible(true) is allowed.
    private static boolean canAccessPrivateMethods() {
        try {
            SecurityManager securityManager = System.getSecurityManager();
            if (null != securityManager) {
                securityManager.checkPermission(new ReflectPermission(
                        "suppressAccessChecks"));
            }
        } catch (SecurityException e) {
            return false;
        }
        return true;
    }

    /**
     * Gets the name of the class the instance provides information for
     *
     * @return The class name
     */
    public Class<?> getType() {
        return type;
    }

    /**
     * @return the accessible no-arg constructor
     * @throws ReflectionException if the class has none
     */
    public Constructor<?> getDefaultConstructor() {
        if (defaultConstructor != null) {
            return defaultConstructor;
        } else {
            throw new ReflectionException(
                    "There is no default constructor for " + type);
        }
    }

    /**
     * @return the setter invoker for the property
     * @throws ReflectionException if the property is not writable
     */
    public Invoker getSetInvoker(String propertyName) {
        Invoker method = setMethods.get(propertyName);
        if (method == null) {
            throw new ReflectionException(
                    "There is no setter for property named '" + propertyName
                            + "' in '" + type + "'");
        }
        return method;
    }

    /**
     * @return the getter invoker for the property
     * @throws ReflectionException if the property is not readable
     */
    public Invoker getGetInvoker(String propertyName) {
        Invoker method = getMethods.get(propertyName);
        if (method == null) {
            throw new ReflectionException(
                    "There is no getter for property named '" + propertyName
                            + "' in '" + type + "'");
        }
        return method;
    }

    /**
     * Gets the type for a property setter
     *
     * @param propertyName - the name of the property
     *
     * @return The Class of the propery setter
     */
    public Class<?> getSetterType(String propertyName) {
        Class<?> clazz = setTypes.get(propertyName);
        if (clazz == null) {
            throw new ReflectionException(
                    "There is no setter for property named '" + propertyName
                            + "' in '" + type + "'");
        }
        return clazz;
    }

    /**
     * Gets the type for a property getter
     *
     * @param propertyName - the name of the property
     *
     * @return The Class of the propery getter
     */
    public Class<?> getGetterType(String propertyName) {
        Class<?> clazz = getTypes.get(propertyName);
        if (clazz == null) {
            throw new ReflectionException(
                    "There is no getter for property named '" + propertyName
                            + "' in '" + type + "'");
        }
        return clazz;
    }

    /**
     * Gets an array of the readable properties for an object
     *
     * @return The array
     */
    public String[] getGetablePropertyNames() {
        return readablePropertyNames;
    }

    /**
     * Gets an array of the writeable properties for an object
     *
     * @return The array
     */
    public String[] getSetablePropertyNames() {
        return writeablePropertyNames;
    }

    /**
     * Check to see if a class has a writeable property by name
     *
     * @param propertyName - the name of the property to check
     *
     * @return True if the object has a writeable property by the name
     */
    public boolean hasSetter(String propertyName) {
        return setMethods.keySet().contains(propertyName);
    }

    /**
     * Check to see if a class has a readable property by name
     *
     * @param propertyName - the name of the property to check
     *
     * @return True if the object has a readable property by the name
     */
    public boolean hasGetter(String propertyName) {
        return getMethods.keySet().contains(propertyName);
    }

    /**
     * Resolves a property name case-insensitively to its canonical form, or
     * null if unknown.
     */
    public String findPropertyName(String name) {
        return caseInsensitivePropertyMap.get(name.toUpperCase(Locale.ENGLISH));
    }

    /**
     * Gets an instance of ClassInfo for the specified class.
     *
     * @param clazz The class for which to lookup the method cache.
     *
     * @return The method cache for the class
     */
    public static Reflector forClass(Class<?> clazz) {
        if (classCacheEnabled) {
            // synchronized (clazz) removed see issue #461
            // NOTE(review): unsynchronized check-then-put means concurrent
            // callers may each build a Reflector for the same class; harmless
            // if construction is idempotent — confirm.
            Reflector cached = REFLECTOR_MAP.get(clazz);
            if (cached == null) {
                cached = new Reflector(clazz);
                REFLECTOR_MAP.put(clazz, cached);
            }
            return cached;
        } else {
            return new Reflector(clazz);
        }
    }

    public static void setClassCacheEnabled(boolean classCacheEnabled) {
        Reflector.classCacheEnabled = classCacheEnabled;
    }

    public static boolean isClassCacheEnabled() {
        return classCacheEnabled;
    }
}
//Copyright 2012 RobustNet Lab, University of Michigan. All Rights Reserved.
package com.mobilyzer.measurements;

import java.io.IOException;
import java.io.InvalidClassException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Map;
import java.util.Random;

import com.mobilyzer.MeasurementDesc;
import com.mobilyzer.MeasurementResult;
import com.mobilyzer.MeasurementTask;
import com.mobilyzer.MeasurementResult.TaskProgress;
import com.mobilyzer.exceptions.MeasurementError;
import com.mobilyzer.util.Logger;
import com.mobilyzer.util.MLabNS;
import com.mobilyzer.util.MeasurementJsonConvertor;
import com.mobilyzer.util.PhoneUtils;

import android.content.Context;
import android.os.Parcel;
import android.os.Parcelable;

/**
 * @author Haokun Luo
 *
 * TCP Throughput is a measurement task for cellular network throughput.
 * 1. Uplink: the mobile device continuously sends packets with consistent
 *    packet size. We send packets for a fixed amount of time, and we sample
 *    each throughput value at a smaller period. We use the median of all the
 *    sampling result as the final measurement result. The result is calculated
 *    at the server side, and sent back to the device.
 * 2. Downlink: similar methodology as uplink. Only difference is that the
 *    device is receiving packets from the server, and calculates the result
 *    locally.
 */
public class TCPThroughputTask extends MeasurementTask {
  // default constant here
  public static final String DESCRIPTOR = "TCP Speed Test";
  public static final int PORT_DOWNLINK = 6001;
  public static final int PORT_UPLINK = 6002;
  public static final int PORT_CONFIG = 6003;
  public static final String TYPE = "tcpthroughput";

  // Timing related
  public final int BUFFER_SIZE = 5000;          // socket read buffer, bytes
  public static final long DURATION_IN_SEC = 15;
  public final int KSEC = 1000;                 // milliseconds per second
  public static final long SAMPLE_PERIOD_IN_SEC = 1;
  public static final long SLOW_START_PERIOD_IN_SEC = 5;
  public static final int TCP_TIMEOUT_IN_SEC = 30;
  // largest non-fragment packet size in LTE (uplink)
  public static final int THROUGHPUT_UP_PKT_SIZE_MAX = 1357;
  public static final int THROUGHPUT_UP_PKT_SIZE_MIN = 700;

  // Data related
  private final int KBYTE = 1024;
  private static final int DATA_LIMIT_MB_UP = 5;
  private static final int DATA_LIMIT_MB_DOWN = 10;
  // data caps are enforced only on cellular; call() clears this flag on Wifi
  private boolean DATA_LIMIT_ON = true;
  private boolean DATA_LIMIT_EXCEEDED = false;
  private static final String UPLINK_FINISH_MSG = "*";
  private Context context = null;

  // helper variables
  // bytes accumulated inside the current sampling window (downlink)
  private int accumulativeSize = 0;
  private Random randStr = new Random();
  // per-sample throughput values kept in ascending order (see insertWithOrder)
  private ArrayList<Double> samplingResults = new ArrayList<Double>();
  //start time of each sampling period
  private long startSampleTime = 0;
  private String serverVersion = "";
  private long taskStartTime = 0;
  private double taskDuration = 0;
  //uplink accumulative data
  private int totalSendSize = 0;
  // downlink accumulative data
  private int totalRevSize = 0;
  private long duration;
  private TaskProgress taskProgress;
  private volatile boolean stopFlag;

  /**
   * Creates a throughput task by re-wrapping the generic descriptor's fields
   * into a TCPThroughputDesc.
   */
  public TCPThroughputTask(MeasurementDesc desc) {
    super(new TCPThroughputDesc(desc.key, desc.startTime, desc.endTime,
        desc.intervalSec, desc.count, desc.priority, desc.contextIntervalSec,
        desc.parameters));
    this.taskProgress=TaskProgress.FAILED;
    this.stopFlag=false;
    // NOTE(review): slow_start_period_sec is added in *seconds* while
    // duration_period_sec is scaled to milliseconds by KSEC (precedence puts
    // only the first term inside the multiplication). The same expression
    // appears in uplink() and downlink() — looks like a unit mix-up; confirm.
    this.duration=(long)(this.KSEC*
        ((TCPThroughputDesc)measurementDesc).duration_period_sec +
        ((TCPThroughputDesc)measurementDesc).slow_start_period_sec);
    Logger.i("Create new throughput task");
  }

  /** Recreates a task from a Parcel; must mirror writeToParcel(). */
  protected TCPThroughputTask(Parcel in) {
    super(in);
    taskProgress = (TaskProgress)in.readSerializable();
    stopFlag = in.readByte() != 0;
    duration = in.readLong();
  }

  public static final Parcelable.Creator<TCPThroughputTask> CREATOR =
      new Parcelable.Creator<TCPThroughputTask>() {
    public TCPThroughputTask createFromParcel(Parcel in) {
      return new TCPThroughputTask(in);
    }

    public TCPThroughputTask[] newArray(int size) {
      return new TCPThroughputTask[size];
    }
  };

  @Override
  public void writeToParcel(Parcel dest, int flags) {
    super.writeToParcel(dest, flags);
    dest.writeSerializable(taskProgress);
    dest.writeByte((byte) (stopFlag ? 1 : 0));
    dest.writeLong(duration);
  }

  /**
   * There are seven parameters specifically for this experiment:
   * 1. data_limit_mb_up: uplink cellular network data limit
   * 2. data_limit_mb_down: downlink cellular network data limit
   * 3. duration_period_sec: downlink maximum experiment duration period
   * 4. pkt_size_up_bytes: the size of each packet in the uplink
   * 5. sample_period_sec: the small interval to calculate current throughput result
   * 6. slow_start_period_sec: waiting period to avoid TCP slow start
   * 7. tcp_timeout_sec: TCP connection timeout
   */
  public static class TCPThroughputDesc extends MeasurementDesc {
    // declared parameters, pre-filled with the compile-time defaults above
    public double data_limit_mb_up = TCPThroughputTask.DATA_LIMIT_MB_UP;
    public double data_limit_mb_down = TCPThroughputTask.DATA_LIMIT_MB_DOWN;
    public boolean dir_up = false;
    public double duration_period_sec = TCPThroughputTask.DURATION_IN_SEC;
    public int pkt_size_up_bytes = TCPThroughputTask.THROUGHPUT_UP_PKT_SIZE_MAX;
    public double sample_period_sec = TCPThroughputTask.SAMPLE_PERIOD_IN_SEC;
    public double slow_start_period_sec = TCPThroughputTask.SLOW_START_PERIOD_IN_SEC;
    public String target = null;
    public double tcp_timeout_sec = TCPThroughputTask.TCP_TIMEOUT_IN_SEC;

    /**
     * Builds a descriptor from the raw parameter map.
     *
     * @throws InvalidParameterException if no target survives initializeParams()
     *         or a parameter value is malformed
     */
    public TCPThroughputDesc(String key, Date startTime, Date endTime,
        double intervalSec, long count, long priority, int contextIntervalSec,
        Map<String, String> params) throws InvalidParameterException {
      super(TCPThroughputTask.TYPE, key, startTime, endTime, intervalSec,
          count, priority, contextIntervalSec, params);
      initializeParams(params);
      if (this.target == null || this.target.length() == 0) {
        throw new InvalidParameterException("TCPThroughputTask null target");
      }
    }

    /** Recreates a descriptor from a Parcel; must mirror writeToParcel(). */
    protected TCPThroughputDesc(Parcel in) {
      super(in);
      data_limit_mb_up = in.readDouble();
      data_limit_mb_down = in.readDouble();
      dir_up = in.readByte() != 0;
      duration_period_sec = in.readDouble();
      pkt_size_up_bytes = in.readInt();
      sample_period_sec = in.readDouble();
      slow_start_period_sec = in.readDouble();
      target = in.readString();
      tcp_timeout_sec = in.readDouble();
    }

    public static final Parcelable.Creator<TCPThroughputDesc> CREATOR =
        new Parcelable.Creator<TCPThroughputDesc>() {
      public TCPThroughputDesc createFromParcel(Parcel in) {
        return new TCPThroughputDesc(in);
      }

      public TCPThroughputDesc[] newArray(int size) {
        return new TCPThroughputDesc[size];
      }
    };

    @Override
    public int describeContents() {
      return super.describeContents();
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
      super.writeToParcel(dest, flags);
      dest.writeDouble(data_limit_mb_up);
      dest.writeDouble(data_limit_mb_down);
      dest.writeByte((byte) (dir_up ? 1 : 0));
      dest.writeDouble(duration_period_sec);
      dest.writeInt(pkt_size_up_bytes);
      dest.writeDouble(sample_period_sec);
      dest.writeDouble(slow_start_period_sec);
      dest.writeString(target);
      dest.writeDouble(tcp_timeout_sec);
    }

    /**
     * Parses the optional parameter map, clamping each value to its
     * compile-time maximum (and minimum for the uplink packet size). Missing
     * or empty entries keep the field defaults declared above.
     *
     * NOTE(review): each guard calls Integer.parseInt on a value that is then
     * read with Double.parseDouble, so fractional inputs such as "2.5" throw
     * NumberFormatException and are rejected as invalid parameters — confirm
     * whether integral-only values are intended.
     */
    @Override
    protected void initializeParams(Map<String, String> params) {
      if (params == null) {
        return;
      }
      if ( (target = params.get("target")) == null ) {
        // fall back to the M-Lab sentinel; call() resolves it via MLabNS.Lookup
        target = MLabNS.TARGET;
      }
      try {
        String readVal = null;
        if ((readVal = params.get("data_limit_mb_down")) != null &&
            readVal.length() > 0 && Integer.parseInt(readVal) > 0) {
          this.data_limit_mb_down = Double.parseDouble(readVal);
          if (this.data_limit_mb_down > TCPThroughputTask.DATA_LIMIT_MB_DOWN) {
            this.data_limit_mb_down = TCPThroughputTask.DATA_LIMIT_MB_DOWN;
          }
        }
        if ((readVal = params.get("data_limit_mb_up")) != null &&
            readVal.length() > 0 && Integer.parseInt(readVal) > 0) {
          this.data_limit_mb_up = Double.parseDouble(readVal);
          if (this.data_limit_mb_up > TCPThroughputTask.DATA_LIMIT_MB_UP) {
            this.data_limit_mb_up = TCPThroughputTask.DATA_LIMIT_MB_UP;
          }
        }
        if ((readVal = params.get("duration_period_sec")) != null &&
            readVal.length() > 0 && Integer.parseInt(readVal) > 0) {
          this.duration_period_sec = Double.parseDouble(readVal);
          if (this.duration_period_sec > TCPThroughputTask.DURATION_IN_SEC) {
            this.duration_period_sec = TCPThroughputTask.DURATION_IN_SEC;
          }
        }
        if ((readVal = params.get("pkt_size_up_bytes")) != null &&
            readVal.length() > 0 && Integer.parseInt(readVal) > 0) {
          this.pkt_size_up_bytes = Integer.parseInt(readVal);
          if (this.pkt_size_up_bytes > TCPThroughputTask.THROUGHPUT_UP_PKT_SIZE_MAX) {
            this.pkt_size_up_bytes = TCPThroughputTask.THROUGHPUT_UP_PKT_SIZE_MAX;
          }
          if (this.pkt_size_up_bytes < TCPThroughputTask.THROUGHPUT_UP_PKT_SIZE_MIN) {
            this.pkt_size_up_bytes = TCPThroughputTask.THROUGHPUT_UP_PKT_SIZE_MIN;
          }
        }
        if ((readVal = params.get("sample_period_sec")) != null &&
            readVal.length() > 0 && Integer.parseInt(readVal) > 0) {
          this.sample_period_sec = Double.parseDouble(readVal);
          if (this.sample_period_sec > TCPThroughputTask.DURATION_IN_SEC/2) {
            this.sample_period_sec = TCPThroughputTask.DURATION_IN_SEC/2;
          }
        }
        if ((readVal = params.get("slow_start_period_sec")) != null &&
            readVal.length() > 0 && Integer.parseInt(readVal) > 0) {
          this.slow_start_period_sec = Double.parseDouble(readVal);
          if (this.slow_start_period_sec > TCPThroughputTask.DURATION_IN_SEC/2) {
            this.slow_start_period_sec = TCPThroughputTask.DURATION_IN_SEC/2;
          }
        }
        if ((readVal = params.get("tcp_timeout_sec")) != null &&
            readVal.length() > 0 && Integer.parseInt(readVal) > 0) {
          // NOTE(review): the value is scaled by 1000 here but then clamped
          // against TCP_TIMEOUT_IN_SEC (30, in seconds), so any explicit
          // timeout collapses to 30; buildUpSocket() multiplies by KSEC
          // again. The *1000 looks like a leftover unit conversion — confirm.
          this.tcp_timeout_sec = Integer.parseInt(readVal)*1000;
          if (this.tcp_timeout_sec > TCPThroughputTask.TCP_TIMEOUT_IN_SEC) {
            this.tcp_timeout_sec = TCPThroughputTask.TCP_TIMEOUT_IN_SEC;
          }
        }
      } catch (NumberFormatException e) {
        throw new InvalidParameterException("TCP Throughput Task invalid parameters.");
      }
      String dir = null;
      if ((dir = params.get("dir_up")) != null && dir.length() > 0) {
        // only the exact spellings "Up" and "true" select uplink mode
        if (dir.compareTo("Up") == 0 || dir.compareTo("true") == 0) {
          this.dir_up = true;
        }
      }
    }

    @Override
    public String getType() {
      return TCPThroughputTask.TYPE;
    }

    /**
     * Find the median value from a TCPThroughput JSON result string (already
     * sorted). Suppose N is the number of results. If N is odd, we pick the
     * result with index (N-1)/2. If N is even, we take the mean value between
     * index N/2 and N/2-1.
     *
     * @param outputInJSON a bracketed, comma-separated list, e.g. "[1.0,2.0]"
     * @return -1 on a malformed/empty input, otherwise the median value
     */
    public double calMedianSpeedFromTCPThroughputOutput(String outputInJSON) {
      if (outputInJSON == null || outputInJSON.equals("") ||
          outputInJSON.equals("[]") || outputInJSON.charAt(0) != '[' ||
          outputInJSON.charAt(outputInJSON.length()-1) != ']') {
        return -1;
      }
      String[] splitResult = outputInJSON.substring(1, outputInJSON.length()-1).split(",");
      int resultLen = splitResult.length;
      if (resultLen <= 0)
        return 0.0;
      double result = 0.0;
      if (resultLen % 2 == 0) {
        result = (Double.parseDouble(splitResult[resultLen / 2]) +
                  Double.parseDouble(splitResult[resultLen / 2 - 1])) / 2;
      } else {
        result = Double.parseDouble(splitResult[(resultLen - 1) / 2]);
      }
      return result;
    }
  }

  /**
   * Make a deep cloning of the task (descriptor fields are re-wrapped into a
   * fresh TCPThroughputDesc).
   */
  @Override
  public MeasurementTask clone() {
    MeasurementDesc desc = this.measurementDesc;
    TCPThroughputDesc newDesc = new TCPThroughputDesc(desc.key,
        desc.startTime, desc.endTime, desc.intervalSec, desc.count,
        desc.priority, desc.contextIntervalSec, desc.parameters);
    return new TCPThroughputTask(newDesc);
  }

  @Override
  public String getType() {
    return TCPThroughputTask.TYPE;
  }

  @Override
  public String getDescriptor() {
    return TCPThroughputTask.DESCRIPTOR;
  }

  /**
   * This will be printed to the device log console. Make sure it's well
   * structured and human readable.
   */
  @Override
  public String toString() {
    TCPThroughputDesc desc = (TCPThroughputDesc) measurementDesc;
    String resp;
    if (desc.dir_up) {
      resp = "[TCP Uplink]\n";
    } else {
      resp = "[TCP Downlink]\n";
    }
    resp += " Target: " + desc.target + "\n Interval (sec): " +
        desc.intervalSec + "\n Next run: " + desc.startTime;
    return resp;
  }

  @SuppressWarnings("rawtypes")
  public static Class getDescClass() throws InvalidClassException {
    return TCPThroughputDesc.class;
  }

  /**
   * Runs the measurement: resolves the M-Lab server, fetches the server
   * config, then performs the uplink or downlink transfer and packages the
   * sampled throughputs into a single MeasurementResult.
   *
   * @throws MeasurementError on lookup failure, I/O failure, or cancellation
   */
  @Override
  public MeasurementResult[] call() throws MeasurementError {
    this.taskProgress=TaskProgress.FAILED;
    TCPThroughputDesc desc = (TCPThroughputDesc)measurementDesc;
    // Apply MLabNS lookup to fetch FQDN; only the MLabNS sentinel target is
    // accepted — any other target is rejected outright.
    if (!desc.target.equals(MLabNS.TARGET)) {
      Logger.i("Not using MLab server!");
      throw new InvalidParameterException("Unknown target " + desc.target +
          " for TCPThroughput");
    }
    try {
      ArrayList<String> mlabResult = MLabNS.Lookup(context, "mobiperf");
      if (mlabResult.size() == 1) {
        desc.target = mlabResult.get(0);
      } else {
        throw new MeasurementError("Invalid MLabNS result");
      }
    } catch (InvalidParameterException e) {
      throw new MeasurementError(e.getMessage());
    }
    Logger.i("Setting target to: " + desc.target);
    PhoneUtils phoneUtils = PhoneUtils.getPhoneUtils();
    // reset the data limit if the phone is under Wifi
    if (phoneUtils.getNetwork().equals(phoneUtils.NETWORK_WIFI)) {
      Logger.i("Detect Wifi network");
      this.DATA_LIMIT_ON = false;
    }
    Logger.i("Running TCPThroughput on " + desc.target);
    try {
      // fetch server information
      if (!acquireServerConfig()) {
        throw new MeasurementError("Fail to acquire server configuration");
      }
      Logger.i("Server version is " + this.serverVersion);
      if (desc.dir_up == true) {
        uplink();
        if(stopFlag){
          throw new MeasurementError("Cancelled");
        }
        Logger.i("Uplink measurement result is:");
      } else {
        this.taskStartTime = System.currentTimeMillis();
        downlink();
        if(stopFlag){
          throw new MeasurementError("Cancelled");
        }
        Logger.i("Downlink measurement result is:");
      }
      this.taskProgress=TaskProgress.COMPLETED;
    } catch (MeasurementError e) {
      throw e;
    } catch (IOException e) {
      Logger.e("Error close the socket for " + desc.type);
      throw new MeasurementError("Error close the socket for " + desc.type);
    } catch (InterruptedException e) {
      Logger.e("Interrupted captured");
      throw new MeasurementError("Task gets interrrupted");
    }
    MeasurementResult result = new MeasurementResult(
        phoneUtils.getDeviceInfo().deviceId,
        phoneUtils.getDeviceProperty(this.getKey()),
        TCPThroughputTask.TYPE,
        System.currentTimeMillis() * 1000,  // timestamp in microseconds
        taskProgress, this.measurementDesc);
    // TODO (Haokun): add more results if necessary
    result.addResult("tcp_speed_results", this.samplingResults);
    result.addResult("data_limit_exceeded", this.DATA_LIMIT_EXCEEDED);
    result.addResult("duration", this.taskDuration);
    result.addResult("server_version", this.serverVersion);
    Logger.i(MeasurementJsonConvertor.toJsonString(result));
    MeasurementResult[] mrArray= new MeasurementResult[1];
    mrArray[0]=result;
    return mrArray;
  }

  /*****************************************************************
   * Core measurement functions definitions
   *****************************************************************/

  /**
   * Acquires server configuration information (the m-lab slice version) from
   * the config port and stores it in serverVersion.
   *
   * NOTE(review): the error message for a failed connect says "uplink socket"
   * although this is the config port; new String(resultMsg) also uses the
   * platform default charset — confirm both are intended.
   *
   * @return true -- successfully acquired data from the M-Lab slice
   * @return false -- failure to acquire data from the M-Lab slice
   */
  private boolean acquireServerConfig() throws MeasurementError, IOException,
                                               InterruptedException {
    Socket tcpSocket = null;
    InputStream iStream = null;
    boolean result = false;
    try {
      tcpSocket = new Socket();
      buildUpSocket(tcpSocket, ((TCPThroughputDesc)measurementDesc).target,
          TCPThroughputTask.PORT_CONFIG);
      iStream = tcpSocket.getInputStream();
    } catch (IOException e) {
      throw new MeasurementError("Error open uplink socket at " +
          ((TCPThroughputDesc)measurementDesc).target +
          " with port " + TCPThroughputTask.PORT_CONFIG);
    }
    try {
      // read from server side configuration
      byte [] resultMsg = new byte[this.BUFFER_SIZE];
      int resultMsgLen = iStream.read(resultMsg, 0, resultMsg.length);
      if (resultMsgLen > 0) {
        // TODO (Haokun): Maybe switch to JSON for multiple acquired data
        // currently use one double number
        this.serverVersion = new String(resultMsg).substring(0, resultMsgLen);
        result = true;
      }
    } catch (IOException e) {
      throw new MeasurementError("Error to acquire configuration from " +
          ((TCPThroughputDesc)measurementDesc).target);
    } finally {
      iStream.close();
      tcpSocket.close();
      Logger.i("Close server Config socket");
    }
    return result;
  }

  /**
   * Uplink measurement task: streams fixed-size random packets for the
   * configured duration (or until the data cap), then sends the finish
   * marker and parses the server-computed per-sample throughputs from a
   * "#"-separated reply.
   *
   * @throws IOException
   * @throws InterruptedException
   */
  private void uplink() throws MeasurementError, IOException,
                               InterruptedException {
    Logger.i("Start uplink task on " +
        ((TCPThroughputDesc)measurementDesc).target);
    Socket tcpSocket = null;
    InputStream iStream = null;
    OutputStream oStream = null;
    try {
      tcpSocket = new Socket();
      buildUpSocket(tcpSocket, ((TCPThroughputDesc)measurementDesc).target,
          TCPThroughputTask.PORT_UPLINK);
      oStream = tcpSocket.getOutputStream();
      iStream = tcpSocket.getInputStream();
    } catch (IOException e){
      e.printStackTrace();
      throw new MeasurementError("Error open uplink socket at " +
          ((TCPThroughputDesc)measurementDesc).target +
          " with port " + TCPThroughputTask.PORT_UPLINK);
    }
    long startTime = System.currentTimeMillis();
    long endTime = startTime;
    int data_limit_byte_up = (int)(((TCPThroughputDesc)measurementDesc).data_limit_mb_up
        *this.KBYTE*this.KBYTE);
    byte[] uplinkBuffer = new byte[((TCPThroughputDesc)measurementDesc).pkt_size_up_bytes];
    this.genRandomByteArray(uplinkBuffer);
    try {
      // NOTE(review): same unit mix-up as in the constructor —
      // slow_start_period_sec is not scaled by KSEC; confirm intended.
      long totalDuration = (long)(this.KSEC*
          ((TCPThroughputDesc)measurementDesc).duration_period_sec +
          ((TCPThroughputDesc)measurementDesc).slow_start_period_sec);
      do {
        if(stopFlag){
          throw new MeasurementError("Cancelled");
        }
        oStream.write(uplinkBuffer, 0, uplinkBuffer.length);
        oStream.flush();
        endTime = System.currentTimeMillis();
        this.totalSendSize += ((TCPThroughputDesc)measurementDesc).pkt_size_up_bytes;
        if (this.DATA_LIMIT_ON && this.totalSendSize >= data_limit_byte_up) {
          Logger.i("Detect uplink exceeding limitation " +
              (double)((TCPThroughputDesc)measurementDesc).data_limit_mb_up + " MB");
          this.DATA_LIMIT_EXCEEDED = true;
          break;
        }
        // propagate every quarter
      } while ((endTime - startTime) < totalDuration);
      // convert into seconds
      this.taskDuration = (double)(endTime - startTime) / 1000.0;
      Logger.i("Uplink total data comsumption is " +
          (double)this.totalSendSize/(1024*1024) + " MB");
      // send last message with special content
      uplinkBuffer = TCPThroughputTask.UPLINK_FINISH_MSG.getBytes();
      oStream.write(uplinkBuffer, 0, uplinkBuffer.length);
      oStream.flush();
      // read from server side results
      byte [] resultMsg = new byte[this.BUFFER_SIZE];
      int resultMsgLen = iStream.read(resultMsg, 0, resultMsg.length);
      if (resultMsgLen > 0) {
        String resultMsgStr = new String(resultMsg).substring(0, resultMsgLen);
        // Sample result string is "1111.11#2222.22#3333.33";
        Logger.i("Uplink result from server is " + resultMsgStr);
        String [] tps_result_str = resultMsgStr.split("#");
        double sampleResult;
        for (int i = 0; i < tps_result_str.length; i++) {
          sampleResult = Double.valueOf(tps_result_str[i]);
          this.samplingResults = this.insertWithOrder(this.samplingResults, sampleResult);
        }
      }
      Logger.i("Total number of sampling result is " +
          this.samplingResults.size());
    } catch (OutOfMemoryError e) {
      throw new MeasurementError("Detect out of memory during Uplink task.");
    } catch (IOException e) {
      throw new MeasurementError("Error to send/receive data to " +
          ((TCPThroughputDesc)measurementDesc).target);
    } finally {
      iStream.close();
      oStream.close();
      tcpSocket.close();
      Logger.i("Close uplink socket");
    }
  }

  /**
   * Downlink measurement task: reads from the server until EOF or the data
   * cap, sampling throughput locally via updateSize().
   *
   * NOTE(review): totalDuration is computed but never referenced — the loop
   * ends only on EOF or the cap (the server presumably enforces the
   * duration); also the final read() of -1 at EOF is still added to
   * totalRevSize and passed to updateSize(). Confirm both are intended.
   */
  private void downlink() throws MeasurementError, IOException {
    Logger.i("Start downlink task on " +
        ((TCPThroughputDesc)measurementDesc).target);
    Socket tcpSocket = null;
    InputStream iStream = null;
    try {
      tcpSocket = new Socket();
      buildUpSocket(tcpSocket, ((TCPThroughputDesc)measurementDesc).target,
          TCPThroughputTask.PORT_DOWNLINK);
      iStream = tcpSocket.getInputStream();
    } catch (IOException i) {
      Logger.e("Downlink socket opening error" + i.getCause().toString());
      throw new MeasurementError("Error to open downlink socket at " +
          ((TCPThroughputDesc)measurementDesc).target +
          " with port " + TCPThroughputTask.PORT_DOWNLINK);
    }
    try {
      int read_bytes = 0;
      int data_limit_byte_down = (int)(this.KBYTE*this.KBYTE*
          ((TCPThroughputDesc)measurementDesc).data_limit_mb_down);
      byte[] buffer = new byte[this.BUFFER_SIZE];
      long totalDuration = (long)(this.KSEC*
          ((TCPThroughputDesc)measurementDesc).duration_period_sec +
          ((TCPThroughputDesc)measurementDesc).slow_start_period_sec);
      do {
        if(stopFlag){
          throw new MeasurementError("Cancelled");
        }
        read_bytes = iStream.read(buffer, 0, buffer.length);
        updateSize(read_bytes);
        this.totalRevSize += read_bytes;
        if (this.DATA_LIMIT_ON && this.totalRevSize >= data_limit_byte_down) {
          Logger.i("Detect downlink data limitation exceed with " +
              ((TCPThroughputDesc)measurementDesc).data_limit_mb_down + " MB");
          this.DATA_LIMIT_EXCEEDED = true;
          break;
        }
      } while (read_bytes >= 0);
      // convert milliseconds to seconds
      this.taskDuration = (System.currentTimeMillis() -
          (double) this.taskStartTime) / 1000.0;
      Logger.i("Total download data is " +
          (double)this.totalRevSize/(1024*1024) + " MB");
      Logger.i("Total number of sampling result is " +
          this.samplingResults.size());
    } catch (OutOfMemoryError e) {
      throw new MeasurementError("Detect out of memory at Downlink task.");
    } catch (IOException e) {
      throw new MeasurementError("Error to receive data from " +
          ((TCPThroughputDesc)measurementDesc).target);
    } finally {
      iStream.close();
      tcpSocket.close();
      Logger.i("Close downlink socket");
    }
  }

  /*****************************************************************
   * Helper functions
   *****************************************************************/

  /**
   * Updates the total received packet size and, when a full sampling window
   * has elapsed, records one throughput sample (in bits per millisecond,
   * i.e. kbps) into samplingResults. Bytes arriving during the slow-start
   * period are ignored.
   *
   * @param delta number of bytes received since the previous call
   */
  private void updateSize(int delta) {
    double gtime = System.currentTimeMillis() - this.taskStartTime;
    //ignore slow start
    if (gtime<((TCPThroughputDesc)measurementDesc).slow_start_period_sec*this.KSEC)
      return;
    if (this.startSampleTime == 0) {
      // first call after slow start: open the first sampling window
      this.startSampleTime = System.currentTimeMillis();
      this.accumulativeSize = 0;
    }
    this.accumulativeSize += delta;
    double time = System.currentTimeMillis() - this.startSampleTime;
    if (time < ((TCPThroughputDesc)measurementDesc).sample_period_sec*this.KSEC) {
      return;
    } else {
      // bits per millisecond == kilobits per second
      double throughput = (double)this.accumulativeSize * 8.0 / time;
      this.samplingResults = this.insertWithOrder(this.samplingResults, throughput);
      this.accumulativeSize = 0;
      this.startSampleTime = System.currentTimeMillis();
    }
  }

  /**
   * Connects the given socket to hostname:portNum with the configured
   * timeout (applied both to connect and reads) and disables Nagle's
   * algorithm.
   */
  private void buildUpSocket(Socket tcpSocket, String hostname, int portNum)
      throws IOException {
    TCPThroughputDesc desc = (TCPThroughputDesc) measurementDesc;
    SocketAddress remoteAddr = new InetSocketAddress(hostname, portNum);
    tcpSocket.connect(remoteAddr, (int)desc.tcp_timeout_sec*this.KSEC);
    tcpSocket.setSoTimeout((int)desc.tcp_timeout_sec*this.KSEC);
    tcpSocket.setTcpNoDelay(true);
  }

  /** Fills the array with random lowercase letters ('a'..'z'). */
  private void genRandomByteArray(byte[] byteArray) {
    for (int i = 0; i < byteArray.length; i++) {
      byteArray[i] = (byte)('a' + randStr.nextInt(26));
    }
  }

  /**
   * Inserts an element keeping the list in ascending order (insertion sort);
   * the list is mutated in place and also returned.
   */
  private ArrayList<Double> insertWithOrder(ArrayList<Double> array,
                                            double item) {
    int i;
    for (i = 0; i < array.size(); i++ ) {
      if (item < array.get(i)) {
        break;
      }
    }
    array.add(i,item);
    return array;
  }

  @Override
  public long getDuration() {
    return this.duration;
  }

  @Override
  public void setDuration(long newDuration) {
    // negative durations are coerced to zero
    if(newDuration<0){
      this.duration=0;
    }else{
      this.duration=newDuration;
    }
  }

  /** Requests cooperative cancellation; the transfer loops poll stopFlag. */
  @Override
  public boolean stop() {
    stopFlag=true;
    return true;
  }

  /**
   * Returns the measured total data sent plus received, in bytes, for
   * reporting as part of the measurement result.
   */
  @Override
  public long getDataConsumed() {
    return totalSendSize + totalRevSize;
  }
}
/*
 * WbFilePicker.java
 *
 * This file is part of SQL Workbench/J, http://www.sql-workbench.net
 *
 * Copyright 2002-2015, Thomas Kellerer
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at.
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * To contact the author please send an email to: support@sql-workbench.net
 *
 */
package workbench.gui.components;

import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;

import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;
import javax.swing.filechooser.FileFilter;

import workbench.log.LogMgr;
import workbench.resource.Settings;

import workbench.gui.WbSwingUtilities;

import workbench.util.ExceptionUtil;
import workbench.util.StringUtil;

/**
 * A small panel combining a text field with a "..." button that opens a
 * file chooser; the chosen path(s) are written into the text field and a
 * "filename" property change is fired.
 *
 * @author Thomas Kellerer
 */
public class WbFilePicker
  extends javax.swing.JPanel
{
  // directory the chooser starts in (persisted via lastDirProperty)
  private String lastDir;
  private FileFilter fileFilter;
  private boolean allowMultiple;
  // files chosen in the last approved dialog; exposed via getSelectedFile(s)
  private File[] selectedFiles;
  // Settings key under which the last used directory is stored
  private String lastDirProperty;
  // when true the chooser selects directories instead of files
  private boolean selectDirectory;

  public WbFilePicker()
  {
    super();
    initComponents();
    WbSwingUtilities.adjustButtonWidth(selectFileButton,22,22);
  }

  /** Switches the chooser between file selection and directory selection. */
  public void setSelectDirectoryOnly(boolean flag)
  {
    selectDirectory = flag;
  }

  // NOTE(review): the flag parameter is unused — this getter ignores its
  // argument and simply returns the current state; confirm the signature
  // is required by callers before cleaning it up.
  public boolean getSelectDirectoryOnly(boolean flag)
  {
    return selectDirectory;
  }

  /** Sets the component name of the embedded text field. */
  public void setTextFieldPropertyName(String name)
  {
    this.tfFilename.setName(name);
  }

  public void setTextfieldTooltip(String text)
  {
    tfFilename.setToolTipText(text);
  }

  public void setButtonTooltip(String text)
  {
    selectFileButton.setToolTipText(text);
  }

  /**
   * Defines the Settings key used to remember the last directory and
   * immediately loads the stored value into lastDir.
   */
  public void setLastDirProperty(String prop)
  {
    this.lastDirProperty = prop;
    this.lastDir = Settings.getInstance().getProperty(prop, null);
  }

  @Override
  public void setEnabled(boolean flag)
  {
    super.setEnabled(flag);
    this.tfFilename.setEnabled(flag);
    this.selectFileButton.setEnabled(flag);
  }

  @Override
  public void setToolTipText(String text)
  {
    super.setToolTipText(text);
    tfFilename.setToolTipText(text);
    selectFileButton.setToolTipText(text);
  }

  /**
   * Adds an ActionListener for the text field.
   * @param l
   */
  public void addActionListener(ActionListener l)
  {
    this.tfFilename.addActionListener(l);
  }

  /** This method is called from within the constructor to
   * initialize the form.
   * WARNING: Do NOT modify this code. The content of this method is
   * always regenerated by the Form Editor.
   */
  // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
  private void initComponents()
  {
    GridBagConstraints gridBagConstraints;

    tfFilename = new StringPropertyEditor();
    selectFileButton = new FlatButton();

    setLayout(new GridBagLayout());

    tfFilename.setColumns(10);
    tfFilename.setHorizontalAlignment(JTextField.LEFT);
    tfFilename.setName("library"); // NOI18N
    tfFilename.addMouseListener(new TextComponentMouseListener());
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraints.anchor = GridBagConstraints.WEST;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.weighty = 1.0;
    add(tfFilename, gridBagConstraints);

    selectFileButton.setText("...");
    selectFileButton.addActionListener(new ActionListener()
    {
      public void actionPerformed(ActionEvent evt)
      {
        selectFileButtonActionPerformed(evt);
      }
    });
    gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.fill = GridBagConstraints.BOTH;
    gridBagConstraints.anchor = GridBagConstraints.WEST;
    gridBagConstraints.weighty = 1.0;
    gridBagConstraints.insets = new Insets(0, 2, 0, 0);
    add(selectFileButton, gridBagConstraints);
  }// </editor-fold>//GEN-END:initComponents

  /**
   * Opens the file chooser and, on approval, writes the selected path(s)
   * into the text field (multiple paths joined by the path separator),
   * persists the parent directory under lastDirProperty, and fires a
   * "filename" property change.
   *
   * Note: lastDir and fileFilter are only applied in file-selection mode,
   * not when selectDirectory is set.
   */
  private void selectFileButtonActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_selectFileButtonActionPerformed
  {//GEN-HEADEREND:event_selectFileButtonActionPerformed
    try
    {
      JFileChooser jf = new WbFileChooser();
      if (selectDirectory)
      {
        jf.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
      }
      else
      {
        jf.setFileSelectionMode(JFileChooser.FILES_ONLY);
        jf.setMultiSelectionEnabled(allowMultiple);
        if (this.lastDir != null)
        {
          jf.setCurrentDirectory(new File(this.lastDir));
        }
        if (this.fileFilter != null)
        {
          jf.setFileFilter(this.fileFilter);
        }
      }
      int answer = jf.showOpenDialog(SwingUtilities.getWindowAncestor(this));
      if (answer == JFileChooser.APPROVE_OPTION)
      {
        if (this.allowMultiple)
        {
          this.selectedFiles = jf.getSelectedFiles();
        }
        else
        {
          this.selectedFiles = new File[1];
          this.selectedFiles[0] = jf.getSelectedFile();
        }
        StringBuilder path = new StringBuilder(this.selectedFiles.length * 100);
        for (int i = 0; i < this.selectedFiles.length; i++)
        {
          if (this.selectedFiles.length > 1 && i > 0)
          {
            path.append(StringUtil.getPathSeparator());
          }
          path.append(this.selectedFiles[i].getAbsolutePath().trim());
        }
        String newValue = path.toString();
        String oldValue = tfFilename.getText();
        this.tfFilename.setText(newValue);
        if (this.lastDirProperty != null)
        {
          Settings.getInstance().setProperty(lastDirProperty, selectedFiles[0].getParent());
        }
        this.firePropertyChange("filename", oldValue, newValue);
      }
    }
    catch (Throwable e)
    {
      LogMgr.logError("WbFilePicker.selectFileButtonActionPerformed()", "Error selecting file", e);
      WbSwingUtilities.showErrorMessage(ExceptionUtil.getDisplay(e));
    }
  }//GEN-LAST:event_selectFileButtonActionPerformed

  /** Returns the current text of the filename field. */
  public String getFilename()
  {
    return tfFilename.getText();
  }

  /** Sets the filename field (null is treated as empty) and resets the caret. */
  public void setFilename(String name)
  {
    this.tfFilename.setText(name != null ? name : "");
    this.tfFilename.setCaretPosition(0);
  }

  /** Returns the first selected file, or null if nothing was selected yet. */
  public File getSelectedFile()
  {
    if (this.selectedFiles == null)
    {
      return null;
    }
    return this.selectedFiles[0];
  }

  /**
   * Returns all selected files, or null when multi-selection is disabled.
   *
   * NOTE(review): this hands out the internal array without a copy, so a
   * caller can mutate the picker's state — confirm before changing.
   */
  public File[] getSelectedFiles()
  {
    if (!this.allowMultiple)
    {
      return null;
    }
    return this.selectedFiles;
  }

  /** Enables or disables multi-selection in the file chooser. */
  public void setAllowMultiple(boolean flag)
  {
    this.allowMultiple = flag;
  }

  /** Sets the filter applied to the chooser in file-selection mode. */
  public void setFileFilter(FileFilter f)
  {
    this.fileFilter = f;
  }

  // Variables declaration - do not modify//GEN-BEGIN:variables
  private JButton selectFileButton;
  private JTextField tfFilename;
  // End of variables declaration//GEN-END:variables
}
package edgarallen.soundmuffler.bauble; import baubles.api.BaubleType; import baubles.api.IBauble; import edgarallen.soundmuffler.SuperSoundMuffler; import edgarallen.soundmuffler.gui.GuiHandler; import net.minecraft.client.gui.GuiScreen; import net.minecraft.client.renderer.block.model.ModelResourceLocation; import net.minecraft.client.resources.I18n; import net.minecraft.client.util.ITooltipFlag; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.entity.EntityLivingBase; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.init.SoundEvents; import net.minecraft.item.IItemPropertyGetter; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.minecraft.util.ActionResult; import net.minecraft.util.EnumActionResult; import net.minecraft.util.EnumHand; import net.minecraft.util.ResourceLocation; import net.minecraft.world.World; import net.minecraftforge.client.model.ModelLoader; import net.minecraftforge.fml.common.Optional; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.List; @Optional.Interface(modid = "baubles", iface = "baubles.api.IBauble") public class ItemSoundMufflerBauble extends Item implements IBauble { public static final String NAME = "sound_muffler_bauble"; public ItemSoundMufflerBauble() { setUnlocalizedName(NAME); setRegistryName(NAME); setNoRepair(); setMaxDamage(0); setMaxStackSize(1); setCreativeTab(CreativeTabs.TOOLS); addPropertyOverride(new ResourceLocation("disabled"), new IItemPropertyGetter() { @SideOnly(Side.CLIENT) public float apply(@Nonnull ItemStack stack, @Nullable World worldIn, @Nullable EntityLivingBase entityIn) { return isDisabled(stack) ? 
1.0f : 0.0F; } }); } @SideOnly(Side.CLIENT) public void registerModels() { ModelLoader.setCustomModelResourceLocation(this, 0, new ModelResourceLocation(getRegistryName().toString(), "inventory")); } @Optional.Method(modid = "baubles") public BaubleType getBaubleType (ItemStack itemstack) { return BaubleType.TRINKET; } @Override public boolean showDurabilityBar(ItemStack stack) { return false; } @Override @Nonnull public ActionResult<ItemStack> onItemRightClick(World worldIn, EntityPlayer playerIn, @Nonnull EnumHand hand) { ItemStack stack = playerIn.getHeldItem(hand); if(playerIn.isSneaking()) { toggleDisabled(playerIn, stack); } else { playerIn.openGui(SuperSoundMuffler.instance, GuiHandler.SOUND_MUFFLER_BAUBLE_GUI_ID, worldIn, (int) playerIn.posX, (int) playerIn.posY, (int) playerIn.posZ); } return ActionResult.newResult(EnumActionResult.SUCCESS, stack); } @Override @SideOnly(Side.CLIENT) public void addInformation(ItemStack stack, @Nullable World worldIn, List<String> tooltip, ITooltipFlag flagIn) { tooltip.add(I18n.format("item.sound_muffler_bauble.tooltip.header")); if(stack.hasTagCompound()) { NBTTagCompound compound = stack.getTagCompound(); boolean showWhiteListTooltip = !compound.hasKey("whiteList") || compound.getBoolean("whiteList"); String key = showWhiteListTooltip ? 
"item.sound_muffler.tooltip.mode.white_list" : "item.sound_muffler.tooltip.mode.black_list"; tooltip.add(I18n.format(key)); if(compound.hasKey("sounds")) { NBTTagList tagList = compound.getTagList("sounds", 10); int count = tagList.tagCount(); tooltip.add(I18n.format("item.sound_muffler.tooltip.sounds.count", count)); if(GuiScreen.isShiftKeyDown()) { for(int i = 0; i < tagList.tagCount(); ++i) { NBTTagCompound sound = tagList.getCompoundTagAt(i); tooltip.add(I18n.format("item.sound_muffler.tooltip.sound", sound.getString("sound"))); } } } else { tooltip.add(I18n.format("item.sound_muffler.tooltip.sounds.count", 0)); } } else { tooltip.add(I18n.format("item.sound_muffler.tooltip.mode.black_list")); tooltip.add(I18n.format("item.sound_muffler.tooltip.sounds.count", 0)); } } public boolean shouldMuffleSound(ItemStack stack, ResourceLocation sound) { if(!stack.hasTagCompound()) { return false; } NBTTagCompound compound = stack.getTagCompound(); if(compound.hasKey("disabled")) { return false; } boolean isWhiteList = compound.hasKey("whiteList") && compound.getBoolean("whiteList"); if(compound.hasKey("sounds")) { NBTTagList tags = compound.getTagList("sounds", 10); if(containsSound(tags, sound)) { return !isWhiteList; } } return isWhiteList; } public void toggleWhiteList(ItemStack stack) { boolean isWhiteList = false; if(stack.hasTagCompound()) { NBTTagCompound compound = stack.getTagCompound(); if(compound.hasKey("whiteList")) { isWhiteList = compound.getBoolean("whiteList"); } compound.setBoolean("whiteList", !isWhiteList); stack.setTagCompound(compound); } else { NBTTagCompound compound = new NBTTagCompound(); compound.setBoolean("whiteList", !isWhiteList); stack.setTagCompound(compound); } } public void muffleSound(ItemStack stack, ResourceLocation sound) { NBTTagCompound compound = stack.hasTagCompound() ? stack.getTagCompound() : new NBTTagCompound(); NBTTagList tags = compound.hasKey("sounds") ? 
compound.getTagList("sounds", 10) : new NBTTagList(); if(containsSound(tags, sound)) { return; } NBTTagCompound tag = new NBTTagCompound(); tag.setString("sound", sound.toString()); tags.appendTag(tag); compound.setTag("sounds", tags); stack.setTagCompound(compound); } public void unmuffleSound(ItemStack stack, ResourceLocation sound) { if(stack.hasTagCompound()) { NBTTagCompound compound = stack.getTagCompound(); if (compound.hasKey("sounds")) { NBTTagList tags = compound.getTagList("sounds", 10); NBTTagList newTags = new NBTTagList(); for(int i = 0; i < tags.tagCount(); ++i) { NBTTagCompound s = tags.getCompoundTagAt(i); String soundLocation = s.getString("sound"); if(!soundLocation.equals(sound.toString())) { newTags.appendTag(s); } } compound.setTag("sounds", newTags); stack.setTagCompound(compound); } } } private boolean containsSound(NBTTagList tags, ResourceLocation sound) { for(int i = 0; i < tags.tagCount(); ++i) { NBTTagCompound s = tags.getCompoundTagAt(i); String soundLocation = s.getString("sound"); if(soundLocation.equals(sound.toString())) { return true; } } return false; } private boolean isDisabled(ItemStack stack) { if(stack.hasTagCompound()) { NBTTagCompound compound = stack.getTagCompound(); return compound.hasKey("disabled"); } return false; } private void toggleDisabled(EntityPlayer playerIn, ItemStack stack) { if(stack.hasTagCompound()) { NBTTagCompound compound = stack.getTagCompound(); if(compound.hasKey("disabled")) { compound.removeTag("disabled"); stack.setTagCompound(compound); playerIn.playSound(SoundEvents.ENTITY_EXPERIENCE_ORB_PICKUP, 0.1F, 1F); } else { compound.setBoolean("disabled", true); stack.setTagCompound(compound); playerIn.playSound(SoundEvents.ENTITY_EXPERIENCE_ORB_PICKUP, 0.1F, 0.8F); } } else { NBTTagCompound compound = new NBTTagCompound(); compound.setBoolean("disabled", true); stack.setTagCompound(compound); playerIn.playSound(SoundEvents.ENTITY_EXPERIENCE_ORB_PICKUP, 0.1F, 0.8F); } } }
// Copyright 2019 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package com.google.devtools.build.lib.bazel.rules.ninja;

import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.Maps;
import com.google.devtools.build.lib.bazel.rules.ninja.file.FileFragment;
import com.google.devtools.build.lib.bazel.rules.ninja.lexer.NinjaLexer;
import com.google.devtools.build.lib.bazel.rules.ninja.parser.NinjaFileParseResult;
import com.google.devtools.build.lib.bazel.rules.ninja.parser.NinjaParserStep;
import com.google.devtools.build.lib.bazel.rules.ninja.parser.NinjaRule;
import com.google.devtools.build.lib.bazel.rules.ninja.parser.NinjaRuleVariable;
import com.google.devtools.build.lib.bazel.rules.ninja.parser.NinjaScope;
import com.google.devtools.build.lib.bazel.rules.ninja.parser.NinjaVariableValue;
import com.google.devtools.build.lib.util.Pair;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.stream.Collectors;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Tests for {@link NinjaScope}.
 *
 * <p>The integer passed alongside each variable/rule is its byte offset in the (notional) Ninja
 * file; lookups via {@code findExpandedVariable}/{@code findRule} take an offset and must return
 * the latest declaration strictly before that offset.
 */
@RunWith(JUnit4.class)
public class NinjaScopeTest {

  /** Declarations added out of order must be sorted by offset after sortResults(). */
  @Test
  public void testSortVariables() {
    NinjaFileParseResult parseResult = new NinjaFileParseResult();
    // Offsets deliberately inserted out of order: 12, 1, 14.
    parseResult.addVariable("abc", 12, NinjaVariableValue.createPlainText("cba"));
    parseResult.addVariable("abc", 1, NinjaVariableValue.createPlainText("cba1"));
    parseResult.addVariable("abc", 14, NinjaVariableValue.createPlainText("cba2"));

    parseResult.sortResults();

    List<Long> offsets =
        parseResult.getVariables().get("abc").stream()
            .map(Pair::getFirst)
            .collect(Collectors.toList());
    assertThat(offsets).isInOrder();
  }

  /** Same ordering guarantee for rule declarations. */
  @Test
  public void testSortRules() {
    // We can just use the same rule value here.
    NinjaRule rule = rule("rule1");

    NinjaFileParseResult parseResult = new NinjaFileParseResult();
    parseResult.addRule(10, rule);
    parseResult.addRule(1115, rule);
    parseResult.addRule(5, rule);

    parseResult.sortResults();

    List<Long> offsets =
        parseResult.getRules().get(rule.getName()).stream()
            .map(Pair::getFirst)
            .collect(Collectors.toList());
    assertThat(offsets).isInOrder();
  }

  /**
   * merge() must union rules and variables from several parse results, and keep each variable's
   * declaration list sorted by offset across the inputs.
   */
  @Test
  public void testMerge() {
    NinjaRule rule1 = rule("rule1");
    NinjaRule rule2 = rule("rule2");

    NinjaFileParseResult parseResult1 = new NinjaFileParseResult();
    parseResult1.addRule(10, rule1);
    parseResult1.addVariable("from1", 7, NinjaVariableValue.createPlainText("111"));
    parseResult1.addVariable("abc", 5, NinjaVariableValue.createPlainText("5"));
    parseResult1.addVariable("abc", 115, NinjaVariableValue.createPlainText("7"));

    NinjaFileParseResult parseResult2 = new NinjaFileParseResult();
    parseResult2.addRule(10, rule2);
    parseResult2.addVariable("from2", 20017, NinjaVariableValue.createPlainText("222"));
    parseResult2.addVariable("abc", 2005, NinjaVariableValue.createPlainText("15"));
    parseResult2.addVariable("abc", 20015, NinjaVariableValue.createPlainText("17"));

    NinjaFileParseResult result =
        NinjaFileParseResult.merge(ImmutableList.of(parseResult1, parseResult2));

    assertThat(result.getRules()).hasSize(2);
    assertThat(result.getRules()).containsKey("rule1");
    assertThat(result.getRules()).containsKey("rule2");

    assertThat(result.getVariables()).hasSize(3);
    assertThat(result.getVariables()).containsKey("from1");
    assertThat(result.getVariables()).containsKey("from2");
    assertThat(result.getVariables()).containsKey("abc");

    // "abc" was declared twice in each input: all four declarations survive, in offset order.
    List<Pair<Long, NinjaVariableValue>> abc = result.getVariables().get("abc");
    assertThat(abc).hasSize(4);
    assertThat(abc.stream().map(Pair::getFirst).collect(Collectors.toList())).isInOrder();
  }

  /** A lookup at offset N resolves to the latest declaration strictly before N. */
  @Test
  public void testFindVariable() throws Exception {
    NinjaFileParseResult parseResult = new NinjaFileParseResult();
    parseResult.addVariable("abc", 12, NinjaVariableValue.createPlainText("cba"));
    parseResult.addVariable("abc", 5, NinjaVariableValue.createPlainText("cba1"));
    parseResult.addVariable("abc", 14, NinjaVariableValue.createPlainText("cba2"));
    parseResult.sortResults();

    NinjaScope scope = new NinjaScope();
    parseResult.expandIntoScope(scope, Maps.newHashMap());

    assertThat(scope.findExpandedVariable(1, "not_there")).isNull();
    // Before the first declaration (offset 5) the variable is not visible.
    assertThat(scope.findExpandedVariable(1, "abc")).isNull();
    assertThat(scope.findExpandedVariable(6, "abc")).isEqualTo("cba1");
    assertThat(scope.findExpandedVariable(13, "abc")).isEqualTo("cba");
    assertThat(scope.findExpandedVariable(130, "abc")).isEqualTo("cba2");
  }

  /** Looking up a variable AT its own declaration offset is a usage error. */
  @Test
  public void testFindVariableErrors() throws Exception {
    NinjaFileParseResult parseResult = new NinjaFileParseResult();
    parseResult.addVariable("abc", 12, NinjaVariableValue.createPlainText("cba"));
    parseResult.addVariable("abc", 5, NinjaVariableValue.createPlainText("cba1"));
    parseResult.addVariable("abc", 14, NinjaVariableValue.createPlainText("cba2"));
    parseResult.sortResults();

    NinjaScope scope = new NinjaScope();
    parseResult.expandIntoScope(scope, Maps.newHashMap());

    // Offset 5 is exactly where a declaration of "abc" sits.
    IllegalStateException exception =
        assertThrows(IllegalStateException.class, () -> scope.findExpandedVariable(5, "abc"));
    assertThat(exception)
        .hasMessageThat()
        .isEqualTo("Trying to interpret declaration as reference.");
  }

  /** Rule lookup follows the same latest-declaration-before-offset semantics as variables. */
  @Test
  public void testFindRule() throws Exception {
    NinjaFileParseResult parseResult = new NinjaFileParseResult();
    // Same rule name, distinguishable by their command text ("10", "1115", "5").
    parseResult.addRule(10, rule("rule1", "10"));
    parseResult.addRule(1115, rule("rule1", "1115"));
    parseResult.addRule(5, rule("rule1", "5"));
    parseResult.sortResults();

    NinjaScope scope = new NinjaScope();
    parseResult.expandIntoScope(scope, Maps.newHashMap());

    assertThat(scope.findRule(1, "non-existent")).isNull();
    assertThat(scope.findRule(1, "rule1")).isNull();

    NinjaRule rule1 = scope.findRule(6, "rule1");
    assertThat(rule1).isNotNull();
    assertThat(rule1.getVariables().get(NinjaRuleVariable.COMMAND).getRawText()).isEqualTo("5");

    rule1 = scope.findRule(15, "rule1");
    assertThat(rule1).isNotNull();
    assertThat(rule1.getVariables().get(NinjaRuleVariable.COMMAND).getRawText()).isEqualTo("10");
  }

  /**
   * A subninja scope sees parent variables declared before its insertion point (offset 140),
   * its own declarations shadow the parent's, and parent declarations after the insertion point
   * (xyz at 1000) are invisible.
   */
  @Test
  public void testFindVariableInParentScope() throws Exception {
    NinjaFileParseResult parentParseResult = new NinjaFileParseResult();
    parentParseResult.addVariable("abc", 12, NinjaVariableValue.createPlainText("abc"));
    parentParseResult.addVariable("edf", 120, NinjaVariableValue.createPlainText("edf"));
    parentParseResult.addVariable("xyz", 1000, NinjaVariableValue.createPlainText("xyz"));

    // This is subninja scope, not include scope.
    NinjaFileParseResult childParseResult = new NinjaFileParseResult();
    parentParseResult.addSubNinjaScope(140, scope -> childParseResult);
    // Shadows this variable from parent.
    childParseResult.addVariable("edf", 1, NinjaVariableValue.createPlainText("11111"));

    parentParseResult.sortResults();

    NinjaScope scope = new NinjaScope();
    parentParseResult.expandIntoScope(scope, Maps.newHashMap());

    assertThat(scope.getSubNinjaScopes()).hasSize(1);
    NinjaScope child = scope.getSubNinjaScopes().iterator().next();

    assertThat(child.findExpandedVariable(2, "abc")).isEqualTo("abc");
    assertThat(child.findExpandedVariable(2, "edf")).isEqualTo("11111");
    assertThat(child.findExpandedVariable(2, "xyz")).isNull();
  }

  /**
   * Included scopes behave like textual inclusion: their declarations become visible in the
   * parent from the include offset on, and later includes can re-shadow earlier ones.
   */
  @Test
  public void testfindExpandedVariableInIncludedScope() throws Exception {
    NinjaFileParseResult parentParseResult = new NinjaFileParseResult();
    parentParseResult.addVariable("abc", 12, NinjaVariableValue.createPlainText("abc"));
    parentParseResult.addVariable("edf", 120, NinjaVariableValue.createPlainText("edf"));
    parentParseResult.addVariable("xyz", 1000, NinjaVariableValue.createPlainText("xyz"));

    NinjaFileParseResult childParseResult = new NinjaFileParseResult();
    parentParseResult.addIncludeScope(140, scope -> childParseResult);
    // Shadows this variable from parent.
    childParseResult.addVariable("edf", 1, NinjaVariableValue.createPlainText("11111"));
    childParseResult.addVariable("child", 2, NinjaVariableValue.createPlainText("child"));

    NinjaFileParseResult childParseResult2 = new NinjaFileParseResult();
    parentParseResult.addIncludeScope(200, scope -> childParseResult2);
    childParseResult2.addVariable("edf", 1, NinjaVariableValue.createPlainText("22222"));

    parentParseResult.sortResults();

    NinjaScope scope = new NinjaScope();
    parentParseResult.expandIntoScope(scope, Maps.newHashMap());

    // After the first include (140) but before the second (200): first include wins.
    assertThat(scope.findExpandedVariable(160, "edf")).isEqualTo("11111");
    // After the second include: it re-shadows.
    assertThat(scope.findExpandedVariable(220, "edf")).isEqualTo("22222");
    // Before any include: parent's own value.
    assertThat(scope.findExpandedVariable(125, "edf")).isEqualTo("edf");
    assertThat(scope.findExpandedVariable(145, "child")).isEqualTo("child");
  }

  /** Variables from an include-of-an-include are visible in the top-level scope. */
  @Test
  public void testFindInRecursivelyIncluded() throws Exception {
    NinjaFileParseResult parentParseResult = new NinjaFileParseResult();
    parentParseResult.addVariable("abc", 12, NinjaVariableValue.createPlainText("abc"));
    parentParseResult.addVariable("edf", 120, NinjaVariableValue.createPlainText("edf"));
    parentParseResult.addVariable("xyz", 1000, NinjaVariableValue.createPlainText("xyz"));

    NinjaFileParseResult childParseResult1 = new NinjaFileParseResult();
    parentParseResult.addIncludeScope(140, scope -> childParseResult1);
    // Shadows this variable from parent.
    childParseResult1.addVariable("edf", 1, NinjaVariableValue.createPlainText("11111"));
    childParseResult1.addVariable("child", 2, NinjaVariableValue.createPlainText("child"));

    NinjaFileParseResult childParseResult2 = new NinjaFileParseResult();
    childParseResult1.addIncludeScope(3, scope -> childParseResult2);
    childParseResult2.addVariable("edf", 1, NinjaVariableValue.createPlainText("22222"));

    parentParseResult.sortResults();

    NinjaScope scope = new NinjaScope();
    parentParseResult.expandIntoScope(scope, Maps.newHashMap());

    assertThat(scope.findExpandedVariable(220, "edf")).isEqualTo("22222");
  }

  /**
   * $-references are expanded with the value the referenced variable had at the point of
   * declaration ("=> abc = ?"), and "$:" is an escape for a literal ':' ("now: redefined!").
   */
  @Test
  public void testVariableExpand() throws Exception {
    NinjaFileParseResult parseResult = new NinjaFileParseResult();
    parseResult.addVariable("abc", 12, NinjaVariableValue.createPlainText("abc"));
    parseResult.addVariable("edf", 120, parseValue("=> $abc = ?"));
    parseResult.addVariable("abc", 130, NinjaVariableValue.createPlainText("redefined"));
    parseResult.addVariable("edf", 180, parseValue("now$: $abc!"));
    parseResult.sortResults();

    NinjaScope scope = new NinjaScope();
    parseResult.expandIntoScope(scope, Maps.newHashMap());

    assertThat(scope.findExpandedVariable(15, "abc")).isEqualTo("abc");
    // edf@120 captured abc's value before the redefinition at 130.
    assertThat(scope.findExpandedVariable(150, "edf")).isEqualTo("=> abc = ?");
    assertThat(scope.findExpandedVariable(140, "abc")).isEqualTo("redefined");
    // edf@180 sees the redefined abc; "$:" expands to a plain ':'.
    assertThat(scope.findExpandedVariable(181, "edf")).isEqualTo("now: redefined!");
  }

  /**
   * End-to-end expansion across parent, included, and subninja scopes: both $name and
   * ${ name } reference forms expand, and included variables are visible to the parent and
   * to a later subninja scope.
   */
  @Test
  public void testExpandWithParentChild() throws Exception {
    NinjaFileParseResult parentParseResult = new NinjaFileParseResult();
    parentParseResult.addVariable("abc", 12, NinjaVariableValue.createPlainText("abc"));
    parentParseResult.addVariable("edf", 120, parseValue("$abc === ${ abc }"));

    NinjaFileParseResult includeParseResult = new NinjaFileParseResult();
    parentParseResult.addIncludeScope(140, scope -> includeParseResult);
    includeParseResult.addVariable("included", 1, parseValue("<$abc and ${ edf }>"));

    NinjaFileParseResult childParseResult = new NinjaFileParseResult();
    parentParseResult.addSubNinjaScope(150, scope -> childParseResult);
    childParseResult.addVariable("subninja", 2, parseValue("$edf = ${ included }*"));

    parentParseResult.sortResults();

    NinjaScope parentScope = new NinjaScope();
    parentParseResult.expandIntoScope(parentScope, Maps.newHashMap());

    assertThat(parentScope.getIncludedScopes()).hasSize(1);
    NinjaScope includeScope = parentScope.getIncludedScopes().iterator().next();
    assertThat(parentScope.getSubNinjaScopes()).hasSize(1);
    NinjaScope childScope = parentScope.getSubNinjaScopes().iterator().next();

    assertThat(includeScope.findExpandedVariable(2, "included")).isEqualTo("<abc and abc === abc>");
    assertThat(childScope.findExpandedVariable(3, "subninja"))
        .isEqualTo("abc === abc = <abc and abc === abc>*");
    // The included variable is also visible from the parent at/after the include offset.
    assertThat(parentScope.findExpandedVariable(150, "included"))
        .isEqualTo("<abc and abc === abc>");
  }

  /** Convenience: a rule named {@code name} with the fixed command text "command". */
  private static NinjaRule rule(String name) {
    return rule(name, "command");
  }

  /** Builds a minimal rule with just NAME and COMMAND variables. */
  private static NinjaRule rule(String name, String command) {
    return new NinjaRule(
        ImmutableSortedMap.of(
            NinjaRuleVariable.NAME, NinjaVariableValue.createPlainText(name),
            NinjaRuleVariable.COMMAND, NinjaVariableValue.createPlainText(command)));
  }

  /** Runs {@code text} through the real lexer/parser to produce a NinjaVariableValue. */
  private static NinjaVariableValue parseValue(String text) throws Exception {
    ByteBuffer bb = ByteBuffer.wrap(text.getBytes(StandardCharsets.ISO_8859_1));
    NinjaLexer lexer = new NinjaLexer(new FileFragment(bb, 0, 0, bb.limit()));
    return new NinjaParserStep(lexer).parseVariableValue();
  }
}
// Copyright 2017 The Nomulus Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package google.registry.networking.handler; import static com.google.common.truth.Truth.assertThat; import static google.registry.networking.handler.SslInitializerTestUtils.getKeyPair; import static google.registry.networking.handler.SslInitializerTestUtils.setUpSslChannel; import static google.registry.networking.handler.SslInitializerTestUtils.signKeyPair; import static google.registry.networking.handler.SslInitializerTestUtils.verifySslException; import static google.registry.networking.handler.SslServerInitializer.CLIENT_CERTIFICATE_PROMISE_KEY; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import google.registry.util.SelfSignedCaCertificate; import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelPipeline; import io.netty.channel.embedded.EmbeddedChannel; import io.netty.channel.local.LocalAddress; import io.netty.channel.local.LocalChannel; import io.netty.handler.ssl.OpenSsl; import io.netty.handler.ssl.SslContextBuilder; import io.netty.handler.ssl.SslHandler; import io.netty.handler.ssl.SslProvider; import java.nio.channels.ClosedChannelException; import java.security.KeyPair; import java.security.PrivateKey; import java.security.cert.CertificateException; import java.security.cert.CertificateExpiredException; import 
java.security.cert.CertificateNotYetValidException; import java.security.cert.X509Certificate; import java.time.Duration; import java.time.Instant; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.stream.Stream; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLException; import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.SSLParameters; import javax.net.ssl.SSLSession; import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; /** * Unit tests for {@link SslServerInitializer}. * * <p>To validate that the handler accepts & rejects connections as expected, a test server and a * test client are spun up, and both connect to the {@link LocalAddress} within the JVM. This avoids * the overhead of routing traffic through the network layer, even if it were to go through * loopback. It also alleviates the need to pick a free port to use. * * <p>The local addresses used in each test method must to be different, otherwise tests run in * parallel may interfere with each other. */ class SslServerInitializerTest { /** Fake host to test if the SSL engine gets the correct peer host. */ private static final String SSL_HOST = "www.example.tld"; /** Fake port to test if the SSL engine gets the correct peer port. */ private static final int SSL_PORT = 12345; @RegisterExtension NettyExtension nettyExtension = new NettyExtension(); @SuppressWarnings("unused") static Stream<Arguments> provideTestCombinations() { Stream.Builder<Arguments> args = Stream.builder(); // We do our best effort to test all available SSL providers. 
args.add(Arguments.of(SslProvider.JDK)); if (OpenSsl.isAvailable()) { args.add(Arguments.of(SslProvider.OPENSSL)); } return args.build(); } private ChannelHandler getServerHandler( boolean requireClientCert, boolean validateClientCert, SslProvider sslProvider, PrivateKey privateKey, X509Certificate... certificates) { return new SslServerInitializer<LocalChannel>( requireClientCert, validateClientCert, sslProvider, Suppliers.ofInstance(privateKey), Suppliers.ofInstance(ImmutableList.copyOf(certificates))); } private ChannelHandler getClientHandler( SslProvider sslProvider, X509Certificate trustedCertificate, PrivateKey privateKey, X509Certificate certificate, String protocol, List<String> cipher) { return new ChannelInitializer<LocalChannel>() { @Override protected void initChannel(LocalChannel ch) throws Exception { SslContextBuilder sslContextBuilder = SslContextBuilder.forClient() .trustManager(trustedCertificate) .sslProvider(sslProvider) .ciphers(cipher); if (protocol != null) { sslContextBuilder.protocols(protocol); } if (privateKey != null && certificate != null) { sslContextBuilder.keyManager(privateKey, certificate); } SslHandler sslHandler = sslContextBuilder.build().newHandler(ch.alloc(), SSL_HOST, SSL_PORT); // Enable hostname verification. 
SSLEngine sslEngine = sslHandler.engine(); SSLParameters sslParameters = sslEngine.getSSLParameters(); sslParameters.setEndpointIdentificationAlgorithm("HTTPS"); sslEngine.setSSLParameters(sslParameters); ch.pipeline().addLast(sslHandler); } }; } private ChannelHandler getClientHandler( SslProvider sslProvider, X509Certificate trustedCertificate, PrivateKey privateKey, X509Certificate certificate) { return getClientHandler(sslProvider, trustedCertificate, privateKey, certificate, null, null); } @ParameterizedTest @MethodSource("provideTestCombinations") void testSuccess_swappedInitializerWithSslHandler(SslProvider sslProvider) throws Exception { SelfSignedCaCertificate ssc = SelfSignedCaCertificate.create(SSL_HOST); SslServerInitializer<EmbeddedChannel> sslServerInitializer = new SslServerInitializer<>( true, false, sslProvider, Suppliers.ofInstance(ssc.key()), Suppliers.ofInstance(ImmutableList.of(ssc.cert()))); EmbeddedChannel channel = new EmbeddedChannel(); ChannelPipeline pipeline = channel.pipeline(); pipeline.addLast(sslServerInitializer); ChannelHandler firstHandler = pipeline.first(); assertThat(firstHandler.getClass()).isEqualTo(SslHandler.class); SslHandler sslHandler = (SslHandler) firstHandler; assertThat(sslHandler.engine().getNeedClientAuth()).isTrue(); assertThat(channel.isActive()).isTrue(); } @ParameterizedTest @MethodSource("provideTestCombinations") void testSuccess_trustAnyClientCert(SslProvider sslProvider) throws Exception { SelfSignedCaCertificate serverSsc = SelfSignedCaCertificate.create(SSL_HOST); LocalAddress localAddress = new LocalAddress("TRUST_ANY_CLIENT_CERT_" + sslProvider); nettyExtension.setUpServer( localAddress, getServerHandler(true, false, sslProvider, serverSsc.key(), serverSsc.cert())); SelfSignedCaCertificate clientSsc = SelfSignedCaCertificate.create(); nettyExtension.setUpClient( localAddress, getClientHandler(sslProvider, serverSsc.cert(), clientSsc.key(), clientSsc.cert())); SSLSession sslSession = 
setUpSslChannel(nettyExtension.getClientChannel(), serverSsc.cert()); nettyExtension.assertThatMessagesWork(); // Verify that the SSL session gets the client cert. Note that this SslSession is for the client // channel, therefore its local certificates are the remote certificates of the SslSession for // the server channel, and vice versa. assertThat(sslSession.getLocalCertificates()).asList().containsExactly(clientSsc.cert()); assertThat(sslSession.getPeerCertificates()).asList().containsExactly(serverSsc.cert()); } @ParameterizedTest @MethodSource("provideTestCombinations") void testFailure_cipherNotAccepted(SslProvider sslProvider) throws Exception { SelfSignedCaCertificate serverSsc = SelfSignedCaCertificate.create(SSL_HOST); LocalAddress localAddress = new LocalAddress("CIPHER_NOT_ACCEPTED_" + sslProvider); nettyExtension.setUpServer( localAddress, getServerHandler(true, true, sslProvider, serverSsc.key(), serverSsc.cert())); SelfSignedCaCertificate clientSsc = SelfSignedCaCertificate.create( "CLIENT", Date.from(Instant.now().minus(Duration.ofDays(2))), Date.from(Instant.now().plus(Duration.ofDays(1)))); nettyExtension.setUpClient( localAddress, getClientHandler( sslProvider, serverSsc.cert(), clientSsc.key(), clientSsc.cert(), "TLSv1.2", Collections.singletonList("TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA"))); verifySslException( nettyExtension.getServerChannel(), channel -> channel.attr(CLIENT_CERTIFICATE_PROMISE_KEY).get().get(), SSLHandshakeException.class); } @ParameterizedTest @MethodSource("provideTestCombinations") void testSuccess_someCiphersNotAccepted(SslProvider sslProvider) throws Exception { SelfSignedCaCertificate serverSsc = SelfSignedCaCertificate.create(SSL_HOST); LocalAddress localAddress = new LocalAddress("SOME_CIPHERS_NOT_ACCEPTED_" + sslProvider); nettyExtension.setUpServer( localAddress, new SslServerInitializer<LocalChannel>( true, true, sslProvider, Suppliers.ofInstance(serverSsc.key()), 
Suppliers.ofInstance(ImmutableList.of(serverSsc.cert())))); SelfSignedCaCertificate clientSsc = SelfSignedCaCertificate.create( "CLIENT", Date.from(Instant.now().minus(Duration.ofDays(2))), Date.from(Instant.now().plus(Duration.ofDays(1)))); nettyExtension.setUpClient( localAddress, getClientHandler( sslProvider, serverSsc.cert(), clientSsc.key(), clientSsc.cert(), "TLSv1.2", ImmutableList.of( "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", // Only accepted cipher "TLS_RSA_WITH_AES_256_CBC_SHA"))); SSLSession sslSession = setUpSslChannel(nettyExtension.getClientChannel(), serverSsc.cert()); nettyExtension.assertThatMessagesWork(); assertThat(sslSession.getLocalCertificates()).asList().containsExactly(clientSsc.cert()); assertThat(sslSession.getPeerCertificates()).asList().containsExactly(serverSsc.cert()); assertThat(sslSession.getCipherSuite()).isEqualTo("TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256"); } @ParameterizedTest @MethodSource("provideTestCombinations") void testFailure_protocolNotAccepted(SslProvider sslProvider) throws Exception { SelfSignedCaCertificate serverSsc = SelfSignedCaCertificate.create(SSL_HOST); LocalAddress localAddress = new LocalAddress("PROTOCOL_NOT_ACCEPTED_" + sslProvider); nettyExtension.setUpServer( localAddress, getServerHandler(true, true, sslProvider, serverSsc.key(), serverSsc.cert())); SelfSignedCaCertificate clientSsc = SelfSignedCaCertificate.create( "CLIENT", Date.from(Instant.now().minus(Duration.ofDays(2))), Date.from(Instant.now().plus(Duration.ofDays(1)))); nettyExtension.setUpClient( localAddress, getClientHandler( sslProvider, serverSsc.cert(), clientSsc.key(), clientSsc.cert(), "TLSv1.1", null)); ImmutableList<Integer> jdkVersion = Arrays.asList(System.getProperty("java.version").split("\\.")).stream() .map(Integer::parseInt) .collect(ImmutableList.toImmutableList()); // In JDK v11.0.11 and above TLS 1.1 is not supported any more, in which case attempting to // connect with TLS 1.1 results in a 
ClosedChannelException instead of a SSLHandShakeException. // See https://www.oracle.com/java/technologies/javase/11-0-11-relnotes.html#JDK-8202343 Class<? extends Exception> rootCause = sslProvider == SslProvider.JDK && compareSemanticVersion(jdkVersion, ImmutableList.of(11, 0, 11)) ? ClosedChannelException.class : SSLHandshakeException.class; verifySslException( nettyExtension.getServerChannel(), channel -> channel.attr(CLIENT_CERTIFICATE_PROMISE_KEY).get().get(), rootCause); } @ParameterizedTest @MethodSource("provideTestCombinations") void testFailure_clientCertExpired(SslProvider sslProvider) throws Exception { SelfSignedCaCertificate serverSsc = SelfSignedCaCertificate.create(SSL_HOST); LocalAddress localAddress = new LocalAddress("CLIENT_CERT_EXPIRED_" + sslProvider); nettyExtension.setUpServer( localAddress, getServerHandler(true, true, sslProvider, serverSsc.key(), serverSsc.cert())); SelfSignedCaCertificate clientSsc = SelfSignedCaCertificate.create( "CLIENT", Date.from(Instant.now().minus(Duration.ofDays(2))), Date.from(Instant.now().minus(Duration.ofDays(1)))); nettyExtension.setUpClient( localAddress, getClientHandler(sslProvider, serverSsc.cert(), clientSsc.key(), clientSsc.cert())); verifySslException( nettyExtension.getServerChannel(), channel -> channel.attr(CLIENT_CERTIFICATE_PROMISE_KEY).get().get(), CertificateExpiredException.class); } @ParameterizedTest @MethodSource("provideTestCombinations") void testFailure_clientCertNotYetValid(SslProvider sslProvider) throws Exception { SelfSignedCaCertificate serverSsc = SelfSignedCaCertificate.create(SSL_HOST); LocalAddress localAddress = new LocalAddress("CLIENT_CERT_EXPIRED_" + sslProvider); nettyExtension.setUpServer( localAddress, getServerHandler(true, true, sslProvider, serverSsc.key(), serverSsc.cert())); SelfSignedCaCertificate clientSsc = SelfSignedCaCertificate.create( "CLIENT", Date.from(Instant.now().plus(Duration.ofDays(1))), Date.from(Instant.now().plus(Duration.ofDays(2)))); 
nettyExtension.setUpClient( localAddress, getClientHandler(sslProvider, serverSsc.cert(), clientSsc.key(), clientSsc.cert())); verifySslException( nettyExtension.getServerChannel(), channel -> channel.attr(CLIENT_CERTIFICATE_PROMISE_KEY).get().get(), CertificateNotYetValidException.class); } @ParameterizedTest @MethodSource("provideTestCombinations") void testSuccess_doesNotRequireClientCert(SslProvider sslProvider) throws Exception { SelfSignedCaCertificate serverSsc = SelfSignedCaCertificate.create(SSL_HOST); LocalAddress localAddress = new LocalAddress("DOES_NOT_REQUIRE_CLIENT_CERT_" + sslProvider); nettyExtension.setUpServer( localAddress, getServerHandler(false, false, sslProvider, serverSsc.key(), serverSsc.cert())); nettyExtension.setUpClient( localAddress, getClientHandler(sslProvider, serverSsc.cert(), null, null)); SSLSession sslSession = setUpSslChannel(nettyExtension.getClientChannel(), serverSsc.cert()); nettyExtension.assertThatMessagesWork(); // Verify that the SSL session does not contain any client cert. Note that this SslSession is // for the client channel, therefore its local certificates are the remote certificates of the // SslSession for the server channel, and vice versa. assertThat(sslSession.getLocalCertificates()).isNull(); assertThat(sslSession.getPeerCertificates()).asList().containsExactly(serverSsc.cert()); } @ParameterizedTest @MethodSource("provideTestCombinations") void testSuccess_CertSignedByOtherCa(SslProvider sslProvider) throws Exception { // The self-signed cert of the CA. 
SelfSignedCaCertificate caSsc = SelfSignedCaCertificate.create(); KeyPair keyPair = getKeyPair(); X509Certificate serverCert = signKeyPair(caSsc, keyPair, SSL_HOST); LocalAddress localAddress = new LocalAddress("CERT_SIGNED_BY_OTHER_CA_" + sslProvider); nettyExtension.setUpServer( localAddress, getServerHandler( true, false, sslProvider, keyPair.getPrivate(), // Serving both the server cert, and the CA cert serverCert, caSsc.cert())); SelfSignedCaCertificate clientSsc = SelfSignedCaCertificate.create(); nettyExtension.setUpClient( localAddress, getClientHandler( sslProvider, // Client trusts the CA cert caSsc.cert(), clientSsc.key(), clientSsc.cert())); SSLSession sslSession = setUpSslChannel(nettyExtension.getClientChannel(), serverCert, caSsc.cert()); nettyExtension.assertThatMessagesWork(); assertThat(sslSession.getLocalCertificates()).asList().containsExactly(clientSsc.cert()); assertThat(sslSession.getPeerCertificates()) .asList() .containsExactly(serverCert, caSsc.cert()) .inOrder(); } @ParameterizedTest @MethodSource("provideTestCombinations") void testFailure_requireClientCertificate(SslProvider sslProvider) throws Exception { SelfSignedCaCertificate serverSsc = SelfSignedCaCertificate.create(SSL_HOST); LocalAddress localAddress = new LocalAddress("REQUIRE_CLIENT_CERT_" + sslProvider); nettyExtension.setUpServer( localAddress, getServerHandler(true, false, sslProvider, serverSsc.key(), serverSsc.cert())); nettyExtension.setUpClient( localAddress, getClientHandler( sslProvider, serverSsc.cert(), // No client cert/private key used. null, null)); // When the server rejects the client during handshake due to lack of client certificate, both // should throw exceptions. 
nettyExtension.assertThatServerRootCause().isInstanceOf(SSLHandshakeException.class); nettyExtension.assertThatClientRootCause().isInstanceOf(SSLException.class); assertThat(nettyExtension.getClientChannel().isActive()).isFalse(); } @ParameterizedTest @MethodSource("provideTestCombinations") void testFailure_wrongHostnameInCertificate(SslProvider sslProvider) throws Exception { SelfSignedCaCertificate serverSsc = SelfSignedCaCertificate.create("wrong.com"); LocalAddress localAddress = new LocalAddress("WRONG_HOSTNAME_" + sslProvider); nettyExtension.setUpServer( localAddress, getServerHandler(true, false, sslProvider, serverSsc.key(), serverSsc.cert())); SelfSignedCaCertificate clientSsc = SelfSignedCaCertificate.create(); nettyExtension.setUpClient( localAddress, getClientHandler(sslProvider, serverSsc.cert(), clientSsc.key(), clientSsc.cert())); // When the client rejects the server cert due to wrong hostname, both the server and the client // throw exceptions. nettyExtension.assertThatClientRootCause().isInstanceOf(CertificateException.class); nettyExtension.assertThatClientRootCause().hasMessageThat().contains(SSL_HOST); nettyExtension.assertThatServerRootCause().isInstanceOf(SSLException.class); assertThat(nettyExtension.getClientChannel().isActive()).isFalse(); } /** Returns true if v1 is larger or equals to v2. */ private static boolean compareSemanticVersion( ImmutableList<Integer> v1, ImmutableList<Integer> v2) { for (int i : ImmutableList.of(0, 1, 2)) { if (v1.get(i) > v2.get(i)) { return true; } if (v1.get(i) < v2.get(i)) { return false; } } return true; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.validator;

import java.io.Serializable;
import java.text.DateFormat;
import java.text.NumberFormat;
import java.text.ParseException;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

import com.google.gwt.core.shared.GWT;
import com.google.gwt.core.shared.GwtIncompatible;

/**
 * This class contains basic methods for performing validations that return the
 * correctly typed class based on the validation performed.
 *
 * @version $Revision: 1716212 $
 */
public class GenericTypeValidator implements Serializable {

    private static final long serialVersionUID = 5487162314134261703L;

    /**
     * Checks if the value can safely be converted to a byte primitive.
     *
     * @param value The value validation is being performed on.
     * @return the converted Byte value, or {@code null} if the value cannot
     *         be converted.
     */
    public static Byte formatByte(String value) {
        if (value == null) {
            return null;
        }

        try {
            return Byte.valueOf(value);
        } catch (NumberFormatException e) {
            // Not a valid byte — the contract is to signal failure with null.
            return null;
        }
    }

    /**
     * Checks if the value can safely be converted to a byte primitive.
     *
     * @param value  The value validation is being performed on.
     * @param locale The locale to use to parse the number (system default if
     *               null)
     * @return the converted Byte value, or {@code null} if the value cannot
     *         be converted.
     */
    @GwtIncompatible("incompatible method")
    public static Byte formatByte(String value, Locale locale) {
        Number num = parseIntegerOnly(value, locale);
        if (num != null
                && num.doubleValue() >= Byte.MIN_VALUE
                && num.doubleValue() <= Byte.MAX_VALUE) {
            return Byte.valueOf(num.byteValue());
        }
        return null;
    }

    /**
     * Checks if the value can safely be converted to a short primitive.
     *
     * @param value The value validation is being performed on.
     * @return the converted Short value, or {@code null} if the value cannot
     *         be converted.
     */
    public static Short formatShort(String value) {
        if (value == null) {
            return null;
        }

        try {
            return Short.valueOf(value);
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /**
     * Checks if the value can safely be converted to a short primitive.
     *
     * @param value  The value validation is being performed on.
     * @param locale The locale to use to parse the number (system default if
     *               null)
     * @return the converted Short value, or {@code null} if the value cannot
     *         be converted.
     */
    @GwtIncompatible("incompatible method")
    public static Short formatShort(String value, Locale locale) {
        Number num = parseIntegerOnly(value, locale);
        if (num != null
                && num.doubleValue() >= Short.MIN_VALUE
                && num.doubleValue() <= Short.MAX_VALUE) {
            return Short.valueOf(num.shortValue());
        }
        return null;
    }

    /**
     * Checks if the value can safely be converted to a int primitive.
     *
     * @param value The value validation is being performed on.
     * @return the converted Integer value, or {@code null} if the value
     *         cannot be converted.
     */
    public static Integer formatInt(String value) {
        if (value == null) {
            return null;
        }

        try {
            return Integer.valueOf(value);
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /**
     * Checks if the value can safely be converted to an int primitive.
     *
     * @param value  The value validation is being performed on.
     * @param locale The locale to use to parse the number (system default if
     *               null)
     * @return the converted Integer value, or {@code null} if the value
     *         cannot be converted.
     */
    @GwtIncompatible("incompatible method")
    public static Integer formatInt(String value, Locale locale) {
        Number num = parseIntegerOnly(value, locale);
        if (num != null
                && num.doubleValue() >= Integer.MIN_VALUE
                && num.doubleValue() <= Integer.MAX_VALUE) {
            return Integer.valueOf(num.intValue());
        }
        return null;
    }

    /**
     * Checks if the value can safely be converted to a long primitive.
     *
     * @param value The value validation is being performed on.
     * @return the converted Long value, or {@code null} if the value cannot
     *         be converted.
     */
    public static Long formatLong(String value) {
        if (value == null) {
            return null;
        }

        try {
            return Long.valueOf(value);
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /**
     * Checks if the value can safely be converted to a long primitive.
     *
     * @param value  The value validation is being performed on.
     * @param locale The locale to use to parse the number (system default if
     *               null)
     * @return the converted Long value, or {@code null} if the value cannot
     *         be converted.
     */
    @GwtIncompatible("incompatible method")
    public static Long formatLong(String value, Locale locale) {
        Number num = parseIntegerOnly(value, locale);
        if (num != null
                && num.doubleValue() >= Long.MIN_VALUE
                && num.doubleValue() <= Long.MAX_VALUE) {
            return Long.valueOf(num.longValue());
        }
        return null;
    }

    /**
     * Parses the value with a locale-aware, integer-only {@code NumberFormat}.
     * Shared by the locale-aware byte/short/int/long formatters, which differ
     * only in their range check.
     *
     * @param value  the string to parse (may be null)
     * @param locale the locale to parse with (system default if null)
     * @return the parsed number, or {@code null} if the value is null, fails
     *         to parse, or is not consumed in its entirety
     */
    @GwtIncompatible("incompatible method")
    private static Number parseIntegerOnly(String value, Locale locale) {
        if (value == null) {
            return null;
        }

        NumberFormat formatter = NumberFormat.getNumberInstance(
                locale != null ? locale : Locale.getDefault());
        formatter.setParseIntegerOnly(true);
        ParsePosition pos = new ParsePosition(0);
        Number num = formatter.parse(value, pos);

        // Only accept the result when there was no error and the whole
        // string was consumed (rejects trailing garbage like "123abc").
        if (pos.getErrorIndex() == -1 && pos.getIndex() == value.length()) {
            return num;
        }
        return null;
    }

    /**
     * Checks if the value can safely be converted to a float primitive.
     *
     * @param value The value validation is being performed on.
     * @return the converted Float value, or {@code null} if the value cannot
     *         be converted.
     */
    public static Float formatFloat(String value) {
        if (value == null) {
            return null;
        }

        try {
            // valueOf over the deprecated Float(String) constructor.
            return Float.valueOf(value);
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /**
     * Checks if the value can safely be converted to a float primitive.
     *
     * @param value  The value validation is being performed on.
     * @param locale The locale to use to parse the number (system default if
     *               null)
     * @return the converted Float value, or {@code null} if the value cannot
     *         be converted.
     */
    @GwtIncompatible("incompatible method")
    public static Float formatFloat(String value, Locale locale) {
        Float result = null;

        if (value != null) {
            NumberFormat formatter = NumberFormat.getInstance(
                    locale != null ? locale : Locale.getDefault());
            ParsePosition pos = new ParsePosition(0);
            Number num = formatter.parse(value, pos);

            // If there was no error and we used the whole string
            if (pos.getErrorIndex() == -1
                    && pos.getIndex() == value.length()
                    && num.doubleValue() >= (Float.MAX_VALUE * -1)
                    && num.doubleValue() <= Float.MAX_VALUE) {
                result = Float.valueOf(num.floatValue());
            }
        }

        return result;
    }

    /**
     * Checks if the value can safely be converted to a double primitive.
     *
     * @param value The value validation is being performed on.
     * @return the converted Double value, or {@code null} if the value cannot
     *         be converted.
     */
    public static Double formatDouble(String value) {
        if (value == null) {
            return null;
        }

        try {
            // valueOf over the deprecated Double(String) constructor.
            return Double.valueOf(value);
        } catch (NumberFormatException e) {
            return null;
        }
    }

    /**
     * Checks if the value can safely be converted to a double primitive.
     *
     * @param value  The value validation is being performed on.
     * @param locale The locale to use to parse the number (system default if
     *               null)
     * @return the converted Double value, or {@code null} if the value cannot
     *         be converted.
     */
    @GwtIncompatible("incompatible method")
    public static Double formatDouble(String value, Locale locale) {
        Double result = null;

        if (value != null) {
            NumberFormat formatter = NumberFormat.getInstance(
                    locale != null ? locale : Locale.getDefault());
            ParsePosition pos = new ParsePosition(0);
            Number num = formatter.parse(value, pos);

            // If there was no error and we used the whole string
            if (pos.getErrorIndex() == -1
                    && pos.getIndex() == value.length()
                    && num.doubleValue() >= (Double.MAX_VALUE * -1)
                    && num.doubleValue() <= Double.MAX_VALUE) {
                result = Double.valueOf(num.doubleValue());
            }
        }

        return result;
    }

    /**
     * Checks if the field is a valid date.
     *
     * <p>The {@code Locale} is used with {@code java.text.DateFormat}. The {@link java.text.DateFormat#setLenient(boolean)}
     * method is set to {@code false} for all.
     * </p>
     *
     * @param value  The value validation is being performed on.
     * @param locale The Locale to use to parse the date (system default if null)
     * @return the converted Date value, or {@code null} if the value cannot
     *         be parsed as a date.
     */
    @GwtIncompatible("incompatible method")
    public static Date formatDate(String value, Locale locale) {
        Date date = null;

        if (value == null) {
            return null;
        }

        try {
            // Get the formatters to check against
            DateFormat formatterShort = null;
            DateFormat formatterDefault = null;
            if (locale != null) {
                formatterShort =
                        DateFormat.getDateInstance(DateFormat.SHORT, locale);
                formatterDefault =
                        DateFormat.getDateInstance(DateFormat.DEFAULT, locale);
            } else {
                formatterShort =
                        DateFormat.getDateInstance(
                                DateFormat.SHORT, Locale.getDefault());
                formatterDefault =
                        DateFormat.getDateInstance(
                                DateFormat.DEFAULT, Locale.getDefault());
            }

            // Turn off lenient parsing
            formatterShort.setLenient(false);
            formatterDefault.setLenient(false);

            // Firstly, try with the short form
            try {
                date = formatterShort.parse(value);
            } catch (ParseException e) {
                // Fall back on the default one
                date = formatterDefault.parse(value);
            }
        } catch (ParseException e) {
            // Bad date, so LOG and return null
            GWT.log("Date parse failed value=[" + value + "], " +
                    "locale=[" + locale + "] ", e);
        }

        return date;
    }

    /**
     * Checks if the field is a valid date.
     *
     * <p>The pattern is used with {@code java.text.SimpleDateFormat}.
     * If strict is true, then the length will be checked so '2/12/1999' will
     * not pass validation with the format 'MM/dd/yyyy' because the month isn't
     * two digits. The {@link java.text.SimpleDateFormat#setLenient(boolean)}
     * method is set to {@code false} for all.
     * </p>
     *
     * @param value       The value validation is being performed on.
     * @param datePattern The pattern passed to {@code SimpleDateFormat}.
     * @param strict      Whether or not to have an exact match of the
     *                    datePattern.
     * @return the converted Date value, or {@code null} if the value cannot
     *         be parsed with the given pattern.
     */
    @GwtIncompatible("incompatible method")
    public static Date formatDate(String value, String datePattern, boolean strict) {
        Date date = null;

        if (value == null
                || datePattern == null
                || datePattern.length() == 0) {
            return null;
        }

        try {
            SimpleDateFormat formatter = new SimpleDateFormat(datePattern);
            formatter.setLenient(false);

            date = formatter.parse(value);

            if (strict && datePattern.length() != value.length()) {
                // Exact-length match required; e.g. '2/12/1999' must not pass
                // 'MM/dd/yyyy'.
                date = null;
            }
        } catch (ParseException e) {
            // Bad date so return null
            GWT.log("Date parse failed value=[" + value + "], " +
                    "pattern=[" + datePattern + "], " +
                    "strict=[" + strict + "] ", e);
        }

        return date;
    }

    /**
     * Checks if the field is a valid credit card number.
     *
     * <p>Reference Sean M. Burke's <a href="http://www.ling.nwu.edu/~sburke/pub/luhn_lib.pl">
     * script</a>.</p>
     *
     * @param value The value validation is being performed on.
     * @return the converted Credit Card number, or {@code null} if the value
     *         is not a valid credit card number.
     */
    public static Long formatCreditCard(String value) {
        return GenericValidator.isCreditCard(value) ? Long.valueOf(value) : null;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.test;

import org.apache.calcite.linq4j.function.Function1;

import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;

import java.io.PrintStream;
import java.net.URL;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

/**
 * Unit test of the Calcite adapter for CSV.
 */
public class CsvTest {

  /** Quietly closes a statement and connection, ignoring close failures. */
  private void close(Connection connection, Statement statement) {
    if (statement != null) {
      try {
        statement.close();
      } catch (SQLException e) {
        // ignore
      }
    }
    if (connection != null) {
      try {
        connection.close();
      } catch (SQLException e) {
        // ignore
      }
    }
  }

  /** Normalizes Windows line endings so expected strings compare equal. */
  public static String toLinux(String s) {
    return s.replaceAll("\r\n", "\n");
  }

  /**
   * Tests the vanity driver.
   */
  @Ignore
  @Test public void testVanityDriver() throws SQLException {
    Properties info = new Properties();
    Connection connection =
        DriverManager.getConnection("jdbc:csv:", info);
    connection.close();
  }

  /**
   * Tests the vanity driver with properties in the URL.
   */
  @Ignore
  @Test public void testVanityDriverArgsInUrl() throws SQLException {
    Connection connection =
        DriverManager.getConnection("jdbc:csv:"
            + "directory='foo'");
    connection.close();
  }

  /** Tests an inline schema with a non-existent directory. */
  @Test public void testBadDirectory() throws SQLException {
    Properties info = new Properties();
    info.put("model",
        "inline:"
            + "{\n"
            + "  version: '1.0',\n"
            + "   schemas: [\n"
            + "     {\n"
            + "       type: 'custom',\n"
            + "       name: 'bad',\n"
            + "       factory: 'org.apache.calcite.adapter.csv.CsvSchemaFactory',\n"
            + "       operand: {\n"
            + "         directory: '/does/not/exist'\n"
            + "       }\n"
            + "     }\n"
            + "   ]\n"
            + "}");

    Connection connection =
        DriverManager.getConnection("jdbc:calcite:", info);
    // must print "directory ... not found" to stdout, but not fail
    ResultSet tables =
        connection.getMetaData().getTables(null, null, null, null);
    tables.next();
    tables.close();
    connection.close();
  }

  /**
   * Reads from a table.
   */
  @Test public void testSelect() throws SQLException {
    checkSql("model", "select * from EMPS");
  }

  @Test public void testSelectSingleProjectGz() throws SQLException {
    checkSql("smart", "select name from EMPS");
  }

  @Test public void testSelectSingleProject() throws SQLException {
    checkSql("smart", "select name from DEPTS");
  }

  @Test public void testCustomTable() throws SQLException {
    checkSql("model-with-custom-table", "select * from CUSTOM_TABLE.EMPS");
  }

  @Test public void testPushDownProjectDumb() throws SQLException {
    // rule does not fire, because we're using 'dumb' tables in simple model
    checkSql("model", "explain plan for select * from EMPS",
        "PLAN=EnumerableTableScan(table=[[SALES, EMPS]])\n");
  }

  @Test public void testPushDownProject() throws SQLException {
    checkSql("smart", "explain plan for select * from EMPS",
        "PLAN=CsvTableScan(table=[[SALES, EMPS]], fields=[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]])\n");
  }

  @Test public void testPushDownProject2() throws SQLException {
    checkSql("smart", "explain plan for select name, empno from EMPS",
        "PLAN=CsvTableScan(table=[[SALES, EMPS]], fields=[[1, 0]])\n");
    // make sure that it works...
    checkSql("smart", "select name, empno from EMPS",
        "NAME=Fred; EMPNO=100",
        "NAME=Eric; EMPNO=110",
        "NAME=John; EMPNO=110",
        "NAME=Wilma; EMPNO=120",
        "NAME=Alice; EMPNO=130");
  }

  @Test public void testFilterableSelect() throws SQLException {
    checkSql("filterable-model", "select name from EMPS");
  }

  @Test public void testFilterableSelectStar() throws SQLException {
    checkSql("filterable-model", "select * from EMPS");
  }

  /** Filter that can be fully handled by CsvFilterableTable. */
  @Test public void testFilterableWhere() throws SQLException {
    checkSql("filterable-model",
        "select empno, gender, name from EMPS where name = 'John'",
        "EMPNO=110; GENDER=M; NAME=John");
  }

  /** Filter that can be partly handled by CsvFilterableTable. */
  @Test public void testFilterableWhere2() throws SQLException {
    checkSql("filterable-model",
        "select empno, gender, name from EMPS where gender = 'F' and empno > 125",
        "EMPNO=130; GENDER=F; NAME=Alice");
  }

  @Test public void testJson() throws SQLException {
    checkSql("bug",
        "select _MAP['id'] as id,\n"
            + " _MAP['title'] as title,\n"
            + " CHAR_LENGTH(CAST(_MAP['title'] AS VARCHAR(30))) as len\n"
            + " from \"archers\"",
        "ID=19990101; TITLE=Washday blues.; LEN=14",
        "ID=19990103; TITLE=Daniel creates a drama.; LEN=23");
  }

  /** Runs a query against the given model and prints the result to stdout. */
  private void checkSql(String model, String sql) throws SQLException {
    checkSql(sql, model, output());
  }

  /** Returns a function that prints each row of a result set to stdout. */
  private Function1<ResultSet, Void> output() {
    return new Function1<ResultSet, Void>() {
      public Void apply(ResultSet resultSet) {
        try {
          output(resultSet, System.out);
        } catch (SQLException e) {
          throw new RuntimeException(e);
        }
        return null;
      }
    };
  }

  /** Runs a query against the given model and checks the rows produced. */
  private void checkSql(String model, String sql, final String... expected)
      throws SQLException {
    checkSql(sql, model, expect(expected));
  }

  /** Returns a function that checks the contents of a result set against an
   * expected string. */
  private static Function1<ResultSet, Void> expect(final String... expected) {
    return new Function1<ResultSet, Void>() {
      public Void apply(ResultSet resultSet) {
        try {
          final List<String> lines = new ArrayList<String>();
          CsvTest.collect(lines, resultSet);
          Assert.assertEquals(Arrays.asList(expected), lines);
        } catch (SQLException e) {
          throw new RuntimeException(e);
        }
        return null;
      }
    };
  }

  /** Executes {@code sql} against the schema named by {@code model} and
   * applies {@code fn} to the result set; resources are always released. */
  private void checkSql(String sql, String model, Function1<ResultSet, Void> fn)
      throws SQLException {
    Connection connection = null;
    Statement statement = null;
    try {
      Properties info = new Properties();
      info.put("model", jsonPath(model));
      connection = DriverManager.getConnection("jdbc:calcite:", info);
      statement = connection.createStatement();
      final ResultSet resultSet =
          statement.executeQuery(
              sql);
      fn.apply(resultSet);
    } finally {
      close(connection, statement);
    }
  }

  /** Resolves a model name to the filesystem path of its JSON resource. */
  private String jsonPath(String model) {
    final URL url = CsvTest.class.getResource("/" + model + ".json");
    String s = url.toString();
    if (s.startsWith("file:")) {
      s = s.substring("file:".length());
    }
    return s;
  }

  /** Collects each row of a result set as "COL=val; COL=val" into
   * {@code result}, normalizing line endings. */
  private static void collect(List<String> result, ResultSet resultSet)
      throws SQLException {
    final StringBuilder buf = new StringBuilder();
    while (resultSet.next()) {
      buf.setLength(0);
      int n = resultSet.getMetaData().getColumnCount();
      String sep = "";
      for (int i = 1; i <= n; i++) {
        buf.append(sep)
            .append(resultSet.getMetaData().getColumnLabel(i))
            .append("=")
            .append(resultSet.getString(i));
        sep = "; ";
      }
      result.add(toLinux(buf.toString()));
    }
  }

  /** Prints each row of a result set, comma-separated, to {@code out}. */
  private void output(ResultSet resultSet, PrintStream out)
      throws SQLException {
    final ResultSetMetaData metaData = resultSet.getMetaData();
    final int columnCount = metaData.getColumnCount();
    while (resultSet.next()) {
      for (int i = 1;; i++) {
        out.print(resultSet.getString(i));
        if (i < columnCount) {
          out.print(", ");
        } else {
          out.println();
          break;
        }
      }
    }
  }

  @Test public void testJoinOnString() throws SQLException {
    checkSql("smart", "select * from emps join depts on emps.name = depts.name");
  }

  @Test public void testWackyColumns() throws SQLException {
    checkSql("select * from wacky_column_names where false", "bug", expect());
    checkSql(
        "select \"joined at\", \"naME\" from wacky_column_names where \"2gender\" = 'F'",
        "bug",
        expect(
            "joined at=2005-09-07; naME=Wilma",
            "joined at=2007-01-01; naME=Alice"));
  }

  @Test public void testBoolean() throws SQLException {
    checkSql("smart",
        "select empno, slacker from emps where slacker",
        "EMPNO=100; SLACKER=true");
  }

  @Test public void testReadme() throws SQLException {
    checkSql("SELECT d.name, COUNT(*) cnt"
            + " FROM emps AS e"
            + " JOIN depts AS d ON e.deptno = d.deptno"
            + " GROUP BY d.name",
        "smart",
        expect("NAME=Sales; CNT=1", "NAME=Marketing; CNT=2"));
  }

  @Test public void testDateType() throws SQLException {
    Properties info = new Properties();
    info.put("model", jsonPath("bug"));

    Connection connection =
        DriverManager.getConnection("jdbc:calcite:", info);
    try {
      // JUnit's assertEquals takes (expected, actual) in that order;
      // keeping them straight makes failure messages meaningful.
      ResultSet res = connection.getMetaData().getColumns(null, null,
          "DATE", "JOINEDAT");
      res.next();
      Assert.assertEquals(java.sql.Types.DATE, res.getInt("DATA_TYPE"));

      res = connection.getMetaData().getColumns(null, null,
          "DATE", "JOINTIME");
      res.next();
      Assert.assertEquals(java.sql.Types.TIME, res.getInt("DATA_TYPE"));

      res = connection.getMetaData().getColumns(null, null,
          "DATE", "JOINTIMES");
      res.next();
      Assert.assertEquals(java.sql.Types.TIMESTAMP, res.getInt("DATA_TYPE"));

      Statement statement = connection.createStatement();
      ResultSet resultSet = statement.executeQuery(
          "select \"JOINEDAT\", \"JOINTIME\", \"JOINTIMES\" from \"DATE\" where EMPNO = 100");
      resultSet.next();

      // date
      Assert.assertEquals(java.sql.Date.class, resultSet.getDate(1).getClass());
      Assert.assertEquals(java.sql.Date.valueOf("1996-08-03"),
          resultSet.getDate(1));

      // time
      Assert.assertEquals(java.sql.Time.class, resultSet.getTime(2).getClass());
      Assert.assertEquals(java.sql.Time.valueOf("00:01:02"),
          resultSet.getTime(2));

      // timestamp
      Assert.assertEquals(java.sql.Timestamp.class,
          resultSet.getTimestamp(3).getClass());
      Assert.assertEquals(java.sql.Timestamp.valueOf("1996-08-03 00:01:02"),
          resultSet.getTimestamp(3));
    } finally {
      connection.close();
    }
  }
}

// End CsvTest.java
package org.saiku.service.util.export.excel; import org.saiku.olap.dto.resultset.AbstractBaseCell; import org.saiku.olap.dto.resultset.CellDataSet; import org.saiku.olap.dto.resultset.DataCell; import org.saiku.olap.query2.ThinHierarchy; import org.saiku.olap.query2.ThinLevel; import org.saiku.olap.query2.ThinMember; import org.saiku.olap.util.SaikuProperties; import org.saiku.service.util.exception.SaikuServiceException; import org.apache.commons.lang.StringUtils; import org.apache.poi.hssf.usermodel.HSSFPalette; import org.apache.poi.hssf.usermodel.HSSFWorkbook; import org.apache.poi.ss.SpreadsheetVersion; import org.apache.poi.ss.usermodel.*; import org.apache.poi.ss.util.CellRangeAddress; import org.apache.poi.xssf.usermodel.XSSFCellStyle; import org.apache.poi.xssf.usermodel.XSSFColor; import org.apache.poi.xssf.usermodel.XSSFWorkbook; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.text.SimpleDateFormat; import java.util.*; /** * Created with IntelliJ IDEA. * User: sramazzina * Date: 21/06/12 * Time: 7.35 * To change this template use File | Settings | File Templates. 
*/ public class ExcelWorksheetBuilder { private static final String BASIC_SHEET_FONT_FAMILY = "Arial"; private static final short BASIC_SHEET_FONT_SIZE = 11; private static final String EMPTY_STRING = ""; private static final String CSS_COLORS_CODE_PROPERTIES = "css-colors-codes.properties"; private int maxRows = -1; private int maxColumns = -1; private AbstractBaseCell[][] rowsetHeader; private AbstractBaseCell[][] rowsetBody; private Workbook excelWorkbook; private Sheet workbookSheet; private String sheetName; private int topLeftCornerWidth; private int topLeftCornerHeight; private CellStyle basicCS; private CellStyle numberCS; private CellStyle lighterHeaderCellCS; private List<ThinHierarchy> queryFilters; private Map<String, Integer> colorCodesMap; private int nextAvailableColorCode = 41; private Properties cssColorCodesProperties; private HSSFPalette customColorsPalette; private ExcelBuilderOptions options; private final Map<String, CellStyle> cellStyles = new HashMap<>(); private static final Logger log = LoggerFactory.getLogger(ExcelWorksheetBuilder.class); public ExcelWorksheetBuilder(CellDataSet table, List<ThinHierarchy> filters, ExcelBuilderOptions options) { init(table, filters, options); } private void init(CellDataSet table, List<ThinHierarchy> filters, ExcelBuilderOptions options) { this.options = options; queryFilters = filters; maxRows = SpreadsheetVersion.EXCEL2007.getMaxRows(); maxColumns = SpreadsheetVersion.EXCEL2007.getMaxColumns(); if ("xls".equals(SaikuProperties.webExportExcelFormat)) { HSSFWorkbook wb = new HSSFWorkbook(); customColorsPalette = wb.getCustomPalette(); excelWorkbook = wb; maxRows = SpreadsheetVersion.EXCEL97.getMaxRows(); maxColumns = SpreadsheetVersion.EXCEL97.getMaxColumns(); } else if ("xlsx".equals(SaikuProperties.webExportExcelFormat)) { excelWorkbook = new XSSFWorkbook(); } else { excelWorkbook = new XSSFWorkbook(); } CreationHelper createHelper = excelWorkbook.getCreationHelper(); colorCodesMap = new HashMap<>(); 
this.sheetName = options.sheetName; rowsetHeader = table.getCellSetHeaders(); rowsetBody = table.getCellSetBody(); topLeftCornerWidth = findTopLeftCornerWidth(); topLeftCornerHeight = findTopLeftCornerHeight(); initCellStyles(); } private void initCellStyles() { Font font = excelWorkbook.createFont(); font.setFontHeightInPoints((short) BASIC_SHEET_FONT_SIZE); font.setFontName(BASIC_SHEET_FONT_FAMILY); basicCS = excelWorkbook.createCellStyle(); basicCS.setFont(font); basicCS.setAlignment(CellStyle.ALIGN_LEFT); setCellBordersColor(basicCS); numberCS = excelWorkbook.createCellStyle(); numberCS.setFont(font); numberCS.setAlignment(CellStyle.ALIGN_RIGHT); setCellBordersColor(numberCS); /* justasg: Let's set default format, used if measure has no format at all. More info: http://poi.apache.org/apidocs/org/apache/poi/ss/usermodel/BuiltinFormats.html#getBuiltinFormat(int) If we don't have default format, it will output values up to maximum detail, i.e. 121212.3456789 and we like them as 121,212.346 */ DataFormat fmt = excelWorkbook.createDataFormat(); short dataFormat = fmt.getFormat(SaikuProperties.webExportExcelDefaultNumberFormat); numberCS.setDataFormat(dataFormat); Font headerFont = excelWorkbook.createFont(); headerFont.setFontHeightInPoints((short) BASIC_SHEET_FONT_SIZE); headerFont.setFontName(BASIC_SHEET_FONT_FAMILY); headerFont.setBoldweight(Font.BOLDWEIGHT_BOLD); lighterHeaderCellCS = excelWorkbook.createCellStyle(); lighterHeaderCellCS.setFont(headerFont); lighterHeaderCellCS.setAlignment(CellStyle.ALIGN_CENTER); lighterHeaderCellCS.setFillForegroundColor(IndexedColors.GREY_25_PERCENT.getIndex()); lighterHeaderCellCS.setFillPattern(CellStyle.SOLID_FOREGROUND); setCellBordersColor(lighterHeaderCellCS); CellStyle darkerHeaderCellCS = excelWorkbook.createCellStyle(); darkerHeaderCellCS.setFont(headerFont); darkerHeaderCellCS.setAlignment(CellStyle.ALIGN_CENTER); darkerHeaderCellCS.setFillForegroundColor(IndexedColors.GREY_40_PERCENT.getIndex()); 
darkerHeaderCellCS.setFillPattern(CellStyle.SOLID_FOREGROUND); setCellBordersColor(darkerHeaderCellCS); } private void setCellBordersColor(CellStyle style) { style.setBorderBottom(CellStyle.BORDER_THIN); style.setBottomBorderColor(IndexedColors.GREY_80_PERCENT.getIndex()); style.setBorderTop(CellStyle.BORDER_THIN); style.setTopBorderColor(IndexedColors.GREY_80_PERCENT.getIndex()); style.setBorderLeft(CellStyle.BORDER_THIN); style.setLeftBorderColor(IndexedColors.GREY_80_PERCENT.getIndex()); style.setBorderRight(CellStyle.BORDER_THIN); style.setRightBorderColor(IndexedColors.GREY_80_PERCENT.getIndex()); } public byte[] build() throws SaikuServiceException { Long start = (new Date()).getTime(); int startRow = initExcelSheet(); Long init = (new Date()).getTime(); int lastHeaderRow = buildExcelTableHeader(startRow); Long header = (new Date()).getTime(); addExcelTableRows(lastHeaderRow); Long content = (new Date()).getTime(); finalizeExcelSheet(startRow); Long finalizing = (new Date()).getTime(); log.debug("Init: " + (init - start) + "ms header: " + (header - init) + "ms content: " + (content - header) + "ms finalizing: " + (finalizing - content) + "ms "); ByteArrayOutputStream bout = new ByteArrayOutputStream(); try { excelWorkbook.write(bout); } catch (IOException e) { throw new SaikuServiceException("Error creating excel export for query", e); } return bout.toByteArray(); } private void finalizeExcelSheet(int startRow) { boolean autoSize = (rowsetBody != null && rowsetBody.length > 0 && rowsetBody.length < 10000 && rowsetHeader != null && rowsetHeader.length > 0 && rowsetHeader[0].length < 200); if (autoSize) { log.warn("Skipping auto-sizing columns, more than 10000 rows and/or 200 columns"); } Long start = (new Date()).getTime(); if (autoSize) { // Autosize columns for (int i=0; i < maxColumns && i < rowsetBody[0].length; i++) { workbookSheet.autoSizeColumn(i); } } Long end = (new Date()).getTime(); log.debug("Autosizing: " + (end - start) + "ms"); // Freeze the 
header columns int headerWidth = rowsetHeader.length; workbookSheet.createFreezePane( 0, startRow + headerWidth, 0, startRow + headerWidth ); } private int initExcelSheet() { // Main Workbook Sheet if (StringUtils.isNotBlank(options.sheetName)) { workbookSheet = excelWorkbook.createSheet(this.sheetName); } else { workbookSheet = excelWorkbook.createSheet(); } initSummarySheet(); return 0; } private void initSummarySheet() { // Main Workbook Sheet Sheet summarySheet = excelWorkbook.createSheet("Summary page"); int row = 1; Row sheetRow = summarySheet.createRow((int) row); Cell cell = sheetRow.createCell(0); String todayDate = (new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")).format(new Date()); cell.setCellValue("Export date and time: " + todayDate); summarySheet.addMergedRegion(new CellRangeAddress(1, 1, 0, 2)); row = row+2; sheetRow = summarySheet.createRow((int) row); cell = sheetRow.createCell(0); cell.setCellValue("Dimension"); cell = sheetRow.createCell(1); cell.setCellValue("Level"); cell = sheetRow.createCell(2); cell.setCellValue("Filter Applied"); row++; if (queryFilters != null) { for (ThinHierarchy item : queryFilters) { for (ThinLevel s : item.getLevels().values()) { for (ThinMember i : s.getSelection().getMembers()) { sheetRow = summarySheet.createRow((short) row); cell = sheetRow.createCell(0); cell.setCellValue(item.getCaption()); cell = sheetRow.createCell(1); cell.setCellValue(s.getCaption()); cell = sheetRow.createCell(2); cell.setCellValue(i.getCaption()); row++; } } } } row += 2; int rowLength = (rowsetBody != null) ? rowsetBody.length : 0; int columnCount = (rowsetHeader != null && rowsetHeader.length > 0) ? rowsetHeader[0].length : 0; int headerLength = (rowsetHeader != null) ? 
rowsetHeader.length : 0; if (columnCount > maxColumns) { sheetRow = summarySheet.createRow((int) row); cell = sheetRow.createCell(0); cell.setCellValue("Excel sheet is truncated, only contains " + maxColumns + " columns of " + (columnCount)); summarySheet.addMergedRegion(new CellRangeAddress(row, row, 0, 10)); row++; } if ((headerLength + rowLength) > maxRows) { sheetRow = summarySheet.createRow((int) row); cell = sheetRow.createCell(0); cell.setCellValue("Excel sheet is truncated, only contains " + maxRows + " rows of " + ( headerLength + rowLength)); summarySheet.addMergedRegion(new CellRangeAddress(row, row, 0, 10)); row++; } row++; sheetRow = summarySheet.createRow((int) row); cell = sheetRow.createCell(0); cell.setCellValue(SaikuProperties.webExportExcelPoweredBy); summarySheet.addMergedRegion(new CellRangeAddress(row, row, 0, 10)); // Autosize columns for summary sheet for (int i=0; i<5; i++) { summarySheet.autoSizeColumn(i); } } private void addExcelTableRows(int startingRow) { Row sheetRow = null; Cell cell = null; String formatString = null; if ((startingRow + rowsetBody.length) > maxRows) { log.warn("Excel sheet is truncated, only outputting " + maxRows + " rows of " + (rowsetBody.length + startingRow)); } if (rowsetBody.length > 0 && rowsetBody[0].length > maxColumns) { log.warn("Excel sheet is truncated, only outputting " + maxColumns + " columns of " + (rowsetBody[0].length)); } for (int x = 0; (x + startingRow) < maxRows && x < rowsetBody.length; x++) { sheetRow = workbookSheet.createRow((int) x + startingRow); for (int y = 0; y < maxColumns && y < rowsetBody[x].length; y++) { cell = sheetRow.createCell(y); String value = rowsetBody[x][y].getFormattedValue(); if (value == null && options.repeatValues) { // If the row cells has a null values it means the value is repeated in the data internally // but not in the interface. 
To properly format the Excel export file we need that value so we // get it from the same position in the prev row value = workbookSheet.getRow(sheetRow.getRowNum()-1).getCell(y).getStringCellValue(); } if (rowsetBody[x][y] instanceof DataCell && ((DataCell) rowsetBody[x][y]).getRawNumber() != null) { Number numberValue = ((DataCell) rowsetBody[x][y]).getRawNumber(); cell.setCellValue(numberValue.doubleValue()); applyCellFormatting(cell, x, y); } else { cell.setCellStyle(basicCS); cell.setCellValue(value); } } } } private void applyCellFormatting(Cell cell, int x, int y) { String formatString; formatString = ((DataCell) rowsetBody[x][y]).getFormatString(); if ((formatString != null) && (formatString.trim().length() > 0)) { String formatKey = "" + formatString; if (!cellStyles.containsKey(formatKey)) { // Inherit formatting from cube schema FORMAT_STRING CellStyle numberCSClone = excelWorkbook.createCellStyle(); numberCSClone.cloneStyleFrom(numberCS); DataFormat fmt = excelWorkbook.createDataFormat(); // the format string can contain macro values such as "Standard" from mondrian.util.Format // try and look it up, otherwise use the given one formatString = FormatUtil.getFormatString(formatString); try { short dataFormat = fmt.getFormat(formatString); numberCSClone.setDataFormat(dataFormat); } catch (Exception e) { // we tried to apply the mondrian format, but probably failed, so lets use the standard one //short dataFormat = fmt.getFormat(SaikuProperties.webExportExcelDefaultNumberFormat); //numberCSClone.setDataFormat(dataFormat); } cellStyles.put(formatKey, numberCSClone); } CellStyle numberCSClone = cellStyles.get(formatKey); // Check for cell background Map<String, String> properties = ((DataCell) rowsetBody[x][y]).getProperties(); if (properties.containsKey("style")) { String colorCode = properties.get("style"); short colorCodeIndex = getColorFromCustomPalette(colorCode); if (colorCodeIndex != -1) { numberCSClone.setFillForegroundColor(colorCodeIndex); 
numberCSClone.setFillPattern(CellStyle.SOLID_FOREGROUND); } else if (customColorsPalette == null) { try { if (cssColorCodesProperties != null && cssColorCodesProperties.containsKey(colorCode)) { colorCode = cssColorCodesProperties.getProperty(colorCode); } int redCode = Integer.parseInt(colorCode.substring(1, 3), 16); int greenCode = Integer.parseInt(colorCode.substring(3, 5), 16); int blueCode = Integer.parseInt(colorCode.substring(5, 7), 16); numberCSClone.setFillPattern(CellStyle.SOLID_FOREGROUND); ((XSSFCellStyle) numberCSClone).setFillForegroundColor(new XSSFColor(new java.awt.Color(redCode, greenCode, blueCode))); ((XSSFCellStyle) numberCSClone).setFillBackgroundColor(new XSSFColor(new java.awt.Color(redCode, greenCode, blueCode))); } catch (Exception e) { // we tried to set the color, no luck, lets continue without } } } else { numberCSClone.setFillForegroundColor(numberCS.getFillForegroundColor()); numberCSClone.setFillBackgroundColor(numberCS.getFillBackgroundColor()); } cell.setCellStyle(numberCSClone); } else { cell.setCellStyle(numberCS); } } private short getColorFromCustomPalette(String style) { short returnedColorIndex = -1; InputStream is = null; if (colorCodesMap.containsKey(style)) { returnedColorIndex = colorCodesMap.get(style).shortValue(); } else { try { if (cssColorCodesProperties == null) { is = getClass().getResourceAsStream(CSS_COLORS_CODE_PROPERTIES); if (is != null) { cssColorCodesProperties = new Properties(); cssColorCodesProperties.load(is); } } String colorCode = cssColorCodesProperties.getProperty(style); if (colorCode != null) { try { int redCode = Integer.parseInt(colorCode.substring(1, 3), 16); int greenCode = Integer.parseInt(colorCode.substring(3, 5), 16); int blueCode = Integer.parseInt(colorCode.substring(5, 7), 16); if (customColorsPalette != null) { customColorsPalette.setColorAtIndex((byte) nextAvailableColorCode, (byte) redCode, (byte) greenCode, (byte) blueCode); returnedColorIndex = 
customColorsPalette.getColor(nextAvailableColorCode).getIndex(); colorCodesMap.put(style, (int) returnedColorIndex); } else { return -1; } nextAvailableColorCode++; } catch (Exception e) { // we tried to set the color, no luck, lets continue without return -1; } } } catch (IOException e) { log.error("IO Exception", e); } finally { try { if (is != null) is.close(); } catch (IOException e) { log.error("IO Exception", e); } } } return returnedColorIndex; //To change body of created methods use File | Settings | File Templates. } private int buildExcelTableHeader(int startRow) { Row sheetRow = null; int x = 0; int y = 0; int startSameFromPos = 0; int mergedCellsWidth = 0; boolean isLastHeaderRow = false; boolean isLastColumn = false; String nextHeader = EMPTY_STRING; String currentHeader = EMPTY_STRING; ArrayList<ExcelMergedRegionItemConfig> mergedItemsConfig = new ArrayList<>(); for (x = 0; x < rowsetHeader.length; x++) { sheetRow = workbookSheet.createRow((int) x + startRow); nextHeader = EMPTY_STRING; isLastColumn = false; startSameFromPos = 0; mergedCellsWidth = 0; if (x + 1 == rowsetHeader.length) isLastHeaderRow = true; for (y = 0; y < maxColumns && y < rowsetHeader[x].length; y++) { currentHeader = rowsetHeader[x][y].getFormattedValue(); if (currentHeader != null) { if (rowsetHeader[x].length == y+1) isLastColumn = true; else nextHeader = rowsetHeader[x][y+1].getFormattedValue(); manageColumnHeaderDisplay(sheetRow, x, y, currentHeader); if (!isLastHeaderRow) { if (nextHeader!=null && !nextHeader.equals(currentHeader) || isLastColumn) { manageCellsMerge(y, x + startRow, mergedCellsWidth + 1, startSameFromPos, mergedItemsConfig); startSameFromPos = y+1; mergedCellsWidth = 0; } else if (nextHeader != null && nextHeader.equals(currentHeader)) { mergedCellsWidth++; } } } else startSameFromPos++; } // Manage the merge condition on exit from columns scan if (!isLastHeaderRow) manageCellsMerge(y - 1, x, mergedCellsWidth+1, startSameFromPos, mergedItemsConfig); } if 
(topLeftCornerHeight > 0 && topLeftCornerWidth > 0) { workbookSheet.addMergedRegion(new CellRangeAddress(startRow, startRow + topLeftCornerHeight - 1, 0, topLeftCornerWidth - 1)); } if (mergedItemsConfig.size()>0) { for (ExcelMergedRegionItemConfig item : mergedItemsConfig) { int lastCol = item.getStartX() + item.getWidth() - 1; lastCol = lastCol >= maxColumns ? maxColumns - 1 : lastCol; workbookSheet.addMergedRegion(new CellRangeAddress(item.getStartY(), item.getStartY() + item.getHeight(), item.getStartX(), lastCol)); } } return x + startRow; } private void manageColumnHeaderDisplay(Row sheetRow, int x, int y, String currentHeader) { if (topLeftCornerHeight > 0 && x >= topLeftCornerHeight) { fillHeaderCell(sheetRow, currentHeader, y); } else if ((topLeftCornerHeight > 0 && x < topLeftCornerHeight) && (topLeftCornerWidth > 0 && y >= topLeftCornerWidth)) { fillHeaderCell(sheetRow, currentHeader, y); } else if (topLeftCornerHeight == 0 && topLeftCornerWidth == 0) fillHeaderCell(sheetRow, currentHeader, y); } private void manageCellsMerge(int rowPos, int colPos, int width, int startSameFromPos, ArrayList<ExcelMergedRegionItemConfig> mergedItemsConfig) { ExcelMergedRegionItemConfig foundItem = null; boolean itemGetFromList = false; if (width == 1) return; for (ExcelMergedRegionItemConfig item : mergedItemsConfig) { if (item.getStartY() == colPos && item.getStartX() == rowPos) { foundItem = item; itemGetFromList = true; } } if (foundItem == null) foundItem = new ExcelMergedRegionItemConfig(); foundItem.setHeight(0); foundItem.setWidth(width); foundItem.setStartX(startSameFromPos); foundItem.setStartY(colPos); if (mergedItemsConfig.isEmpty() || !itemGetFromList) mergedItemsConfig.add(foundItem); } private void fillHeaderCell(Row sheetRow, String formattedValue, int y) { Cell cell = sheetRow.createCell(y); cell.setCellValue(formattedValue); cell.setCellStyle(lighterHeaderCellCS); } /** * Find the width in cells of the top left corner of the table * * @return */ private 
int findTopLeftCornerWidth() { int width = 0; int x = 0; boolean exit = (rowsetHeader.length < 1 || rowsetHeader[0][0].getRawValue() != null); String cellValue = null; for (x = 0; (!exit && rowsetHeader[0].length > x); x++) { cellValue = rowsetHeader[0][x].getRawValue(); if (cellValue == null) { width = x + 1; } else { exit = true; } } return width; } /** * Find the height in cells of the top left corner of the table * * @return */ private int findTopLeftCornerHeight() { return rowsetHeader.length > 0 ? rowsetHeader.length - 1 : 0; } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.core; import com.carrotsearch.hppc.ObjectArrayList; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.Base64; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; 
import java.util.Objects;

import static org.elasticsearch.index.mapper.MapperBuilders.binaryField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;

/**
 * Mapper for the "binary" field type: stores opaque byte blobs (accepted as
 * BytesRef/BytesReference/byte[] or base64 strings) in stored fields and/or doc values.
 */
public class BinaryFieldMapper extends AbstractFieldMapper {

    public static final String CONTENT_TYPE = "binary";
    // Pre-2.0 mapping options, now deprecated; silently dropped by the TypeParser below.
    private static final ParseField COMPRESS = new ParseField("compress").withAllDeprecated("no replacement, implemented at the codec level");
    private static final ParseField COMPRESS_THRESHOLD = new ParseField("compress_threshold").withAllDeprecated("no replacement");

    public static class Defaults extends AbstractFieldMapper.Defaults {
        // Default field type: not indexed; frozen so it can be safely shared.
        public static final MappedFieldType FIELD_TYPE = new BinaryFieldType();

        static {
            FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
            FIELD_TYPE.freeze();
        }
    }

    public static class Builder extends AbstractFieldMapper.Builder<Builder, BinaryFieldMapper> {

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE);
            builder = this;
        }

        @Override
        public BinaryFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            // Indices created before 2.0 may hold compressed values; enable transparent
            // decompression for them (backcompat).
            ((BinaryFieldType)fieldType).setTryUncompressing(context.indexCreatedVersion().before(Version.V_2_0_0));
            return new BinaryFieldMapper(name, fieldType, docValues, fieldDataSettings,
                    context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
        }
    }

    public static class TypeParser implements Mapper.TypeParser {
        /**
         * Parses the mapping definition for a binary field, removing the deprecated
         * compress/compress_threshold options on pre-2.0 indices.
         */
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            BinaryFieldMapper.Builder builder = binaryField(name);
            parseField(builder, name, node, parserContext);
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = entry.getKey();
                if (parserContext.indexVersionCreated().before(Version.V_2_0_0) &&
                        (parserContext.parseFieldMatcher().match(fieldName, COMPRESS) || parserContext.parseFieldMatcher().match(fieldName, COMPRESS_THRESHOLD))) {
                    // Consume (and ignore) the deprecated option so parsing doesn't fail on it.
                    iterator.remove();
                }
            }
            return builder;
        }
    }

    static final class BinaryFieldType extends MappedFieldType {
        // When true, stored values may be compressed (pre-2.0 indices) and are
        // uncompressed on read.
        private boolean tryUncompressing = false;

        public BinaryFieldType() {}

        protected BinaryFieldType(BinaryFieldType ref) {
            super(ref);
            this.tryUncompressing = ref.tryUncompressing;
        }

        @Override
        public MappedFieldType clone() {
            return new BinaryFieldType(this);
        }

        @Override
        public boolean equals(Object o) {
            if (!super.equals(o)) return false;
            BinaryFieldType that = (BinaryFieldType) o;
            return Objects.equals(tryUncompressing, that.tryUncompressing);
        }

        @Override
        public int hashCode() {
            return Objects.hash(super.hashCode(), tryUncompressing);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        public boolean tryUncompressing() {
            return tryUncompressing;
        }

        public void setTryUncompressing(boolean tryUncompressing) {
            // Field types are immutable once frozen; mutation is only allowed before freeze().
            checkIfFrozen();
            this.tryUncompressing = tryUncompressing;
        }

        /**
         * Normalizes an input value to a BytesReference. Accepts BytesRef, BytesReference,
         * byte[], or any other object whose toString() is base64. Decompresses the result
         * when {@link #tryUncompressing} is set (pre-2.0 backcompat).
         */
        @Override
        public BytesReference value(Object value) {
            if (value == null) {
                return null;
            }
            BytesReference bytes;
            if (value instanceof BytesRef) {
                bytes = new BytesArray((BytesRef) value);
            } else if (value instanceof BytesReference) {
                bytes = (BytesReference) value;
            } else if (value instanceof byte[]) {
                bytes = new BytesArray((byte[]) value);
            } else {
                try {
                    bytes = new BytesArray(Base64.decode(value.toString()));
                } catch (IOException e) {
                    throw new ElasticsearchParseException("failed to convert bytes", e);
                }
            }
            try {
                if (tryUncompressing) { // backcompat behavior
                    return CompressorFactory.uncompressIfNeeded(bytes);
                } else {
                    return bytes;
                }
            } catch (IOException e) {
                throw new ElasticsearchParseException("failed to decompress source", e);
            }
        }

        @Override
        public Object valueForSearch(Object value) {
            return value(value);
        }
    }

    protected BinaryFieldMapper(String simpleName, MappedFieldType fieldType, Boolean docValues, @Nullable Settings fieldDataSettings,
                                Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, fieldType, docValues, fieldDataSettings, indexSettings, multiFields, copyTo);
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("binary");
    }

    /**
     * Extracts the binary value from the document (external value or parser binary value)
     * and adds it as a stored field and/or accumulates it into the per-document doc-values
     * field. Null values are skipped.
     */
    @Override
    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
        if (!fieldType().stored() && !fieldType().hasDocValues()) {
            // Nothing to persist for this field; skip parsing entirely.
            return;
        }
        byte[] value = context.parseExternalValue(byte[].class);
        if (value == null) {
            if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) {
                return;
            } else {
                value = context.parser().binaryValue();
            }
        }
        if (value == null) {
            return;
        }
        if (fieldType().stored()) {
            fields.add(new Field(fieldType().names().indexName(), value, fieldType()));
        }
        if (fieldType().hasDocValues()) {
            // Multiple values for the same doc are accumulated into one doc-values field.
            CustomBinaryDocValuesField field = (CustomBinaryDocValuesField) context.doc().getByKey(fieldType().names().indexName());
            if (field == null) {
                field = new CustomBinaryDocValuesField(fieldType().names().indexName(), value);
                context.doc().addWithKey(fieldType().names().indexName(), field);
            } else {
                field.add(value);
            }
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    /**
     * Doc-values field that collects all binary values for a document and serializes them
     * as: vInt count, then for each (sorted, deduplicated) value a vInt length followed by
     * the raw bytes.
     */
    public static class CustomBinaryDocValuesField extends NumberFieldMapper.CustomNumericDocValuesField {

        private final ObjectArrayList<byte[]> bytesList;
        // Running total of value bytes, used to presize the output buffer in binaryValue().
        private int totalSize = 0;

        public CustomBinaryDocValuesField(String name, byte[] bytes) {
            super(name);
            bytesList = new ObjectArrayList<>();
            add(bytes);
        }

        public void add(byte[] bytes) {
            bytesList.add(bytes);
            totalSize += bytes.length;
        }

        @Override
        public BytesRef binaryValue() {
            try {
                CollectionUtils.sortAndDedup(bytesList);
                int size = bytesList.size();
                // +5 bytes per value (and for the count) covers the worst-case vInt length.
                final byte[] bytes = new byte[totalSize + (size + 1) * 5];
                ByteArrayDataOutput out = new ByteArrayDataOutput(bytes);
                out.writeVInt(size);  // write total number of values
                for (int i = 0; i < size; i ++) {
                    final byte[] value = bytesList.get(i);
                    int valueLength = value.length;
                    out.writeVInt(valueLength);
                    out.writeBytes(value, 0, valueLength);
                }
                return new BytesRef(bytes, 0, out.getPosition());
            } catch (IOException e) {
                throw new ElasticsearchException("Failed to get binary value", e);
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.betwixt; import junit.framework.Test; import junit.framework.TestSuite; import junit.textui.TestRunner; import org.apache.commons.betwixt.io.BeanWriter; import org.apache.commons.betwixt.io.CyclicReferenceException; import org.apache.commons.betwixt.strategy.CapitalizeNameMapper; import org.apache.commons.betwixt.strategy.HyphenatedNameMapper; import java.io.ByteArrayOutputStream; import java.io.StringWriter; import java.util.ArrayList; import java.util.Collection; /** Test harness for the BeanWriter * * @author <a href="mailto:jstrachan@apache.org">James Strachan</a> * @author <a href="mailto:martin@mvdb.net">Martin van den Bemt</a> * @version $Revision$ */ public class TestBeanWriter extends AbstractTestCase { public static void main(String[] args) { TestRunner.run(suite()); } public static Test suite() { return new TestSuite(TestBeanWriter.class); } public TestBeanWriter(String testName) { super(testName); } public void testBeanWriter() throws Exception { Object bean = createBean(); System.out.println("Now trying pretty print"); BeanWriter writer = new BeanWriter(); writer.setWriteEmptyElements(true); writer.setEndOfLine("\n"); writer.enablePrettyPrint(); 
writer.write(bean);
    }

    /**
     * Tests writing of cyclic bean graphs: with IDs enabled a back-reference
     * becomes an idref attribute; with IDs disabled a true cycle must raise
     * CyclicReferenceException.
     */
    public void testLooping() throws Exception {
        StringWriter out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        BeanWriter writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        // logging for debugging just this method
        writer.setEndOfLine("\n");
        writer.enablePrettyPrint();
        writer.write(LoopBean.createNoLoopExampleBean());
        String xml = "<?xml version='1.0'?><LoopBean id='1'><name>Root</name><friend id='2'><name>level1</name>"
            + "<friend id='3'><name>level2</name><friend id='4'><name>level3</name><friend id='5'><name>level4</name>"
            + "<friend id='6'><name>level5</name></friend></friend></friend></friend></friend></LoopBean>";
        String xmlOut = out.getBuffer().toString();
        xmlAssertIsomorphicContent(
            "Test no loop",
            parseString(xmlOut),
            parseString(xml),
            true);

        // a graph with a genuine loop: the back edge is written as idref='1'
        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.write(LoopBean.createLoopExampleBean());
        xml = "<?xml version='1.0'?><LoopBean id='1'><name>Root</name><friend id='2'><name>level1</name>"
            + "<friend id='3'><name>level2</name><friend id='4'><name>level3</name><friend id='5'><name>level4</name>"
            + "<friend id='6'><name>level5</name><friend idref='1'/></friend></friend></friend>"
            + "</friend></friend></LoopBean>";
        xmlAssertIsomorphicContent(
            "Test loop",
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);

        // test not writing IDs
        // log.info("Writing LoopBean.createNoLoopExampleBean...");
        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.write(LoopBean.createNoLoopExampleBean());
        xml = "<?xml version='1.0'?><LoopBean><name>Root</name><friend><name>level1</name><friend>"
            + "<name>level2</name><friend><name>level3</name><friend><name>level4</name><friend>"
            + "<name>level5</name></friend></friend>"
            + "</friend></friend></friend></LoopBean>";
        xmlAssertIsomorphicContent(
            "Test no loop, no ids",
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);

        // log.info("Writing LoopBean.createIdOnlyLoopExampleBean...");
        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.write(LoopBean.createIdOnlyLoopExampleBean());
        xml = "<?xml version='1.0'?><LoopBean><name>Root</name><friend><name>level1</name>"
            + "<friend><name>level2</name><friend><name>level3</name><friend><name>level4</name>"
            + "<friend><name>level5</name><friend><name>Root</name></friend></friend>"
            + "</friend></friend></friend></friend></LoopBean>";
        xmlAssertIsomorphicContent(
            "Test id only loop",
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);

        // with IDs disabled a real cycle cannot be expressed and must fail fast
        try {
            // log.info("Writing LoopBean.createLoopExampleBean...")
            out = new StringWriter();
            out.write("<?xml version='1.0'?>");
            writer = new BeanWriter(out);
            writer.setWriteEmptyElements(true);
            writer.getBindingConfiguration().setMapIDs(false);
            writer.write(LoopBean.createLoopExampleBean());
            fail("CyclicReferenceException not thrown!");
        } catch (CyclicReferenceException e) {
            // everything's fine
        }
    }

    /**
     * Tests that reserved XML characters are escaped in both element content
     * and attribute values.
     */
    public void testEscaping() throws Exception {
        //XXX find a way to automatically verify test
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BeanWriter writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.setEndOfLine("\n");
        writer.enablePrettyPrint();
        XMLIntrospector introspector = new XMLIntrospector();
        introspector.getConfiguration().setAttributesForPrimitives(true);
        writer.setIntrospector(introspector);
        writer.write(new LoopBean("Escape<LessThan"));
        writer.write(new LoopBean("Escape>GreaterThan"));
        writer.write(new LoopBean("Escape&amphersand"));
        writer.write(new LoopBean("Escape'apostrophe"));
        writer.write(new LoopBean("Escape\"Quote"));
        CustomerBean bean = new CustomerBean();
        bean.setEmails(new String[]{
            "Escape<LessThan", "Escape>GreaterThan",
            "Escape&amphersand", "Escape'apostrophe", "Escape\"Quote"});
        // The attribute value escaping needs test too..
        bean.setName("Escape<LessThan");
        AddressBean address = new AddressBean();
        address.setCode("Escape>GreaterThan");
        address.setCountry("Escape&amphersand");
        address.setCity("Escape'apostrophe");
        address.setStreet("Escape\"Quote");
        bean.setAddress(address);
        writer.write(bean);
        out.flush();
        String result = "<?xml version='1.0'?><beans>" + out.toString() + "</beans>";
        // check for the element content..
        assertTrue(result.indexOf("<email>Escape&lt;LessThan</email>") > -1);
        assertTrue(result.indexOf("<email>Escape&gt;GreaterThan</email>") > -1);
        assertTrue(result.indexOf("<email>Escape&amp;amphersand</email>") != -1);
        assertTrue(result.indexOf("<email>Escape'apostrophe</email>") != -1);
        assertTrue(result.indexOf("<email>Escape\"Quote</email>") != -1);
        // check for the attributes..
        assertTrue(result.indexOf("name=\"Escape&lt;LessThan\"") > -1);
        assertTrue(result.indexOf("code=\"Escape&gt;GreaterThan\"") > -1);
        assertTrue(result.indexOf("country=\"Escape&amp;amphersand\"") != -1);
        assertTrue(result.indexOf("city=\"Escape&apos;apostrophe\"") != -1);
        assertTrue(result.indexOf("street=\"Escape&quot;Quote\"") != -1);
        String xml = "<?xml version='1.0'?><beans> <LoopBean name='Escape&lt;LessThan'/>"
            + "<LoopBean name='Escape&gt;GreaterThan'/><LoopBean name='Escape&amp;amphersand'/>"
            + "<LoopBean name='Escape&apos;apostrophe'/><LoopBean name='Escape&quot;Quote'/>"
            + "<CustomerBean name='Escape&lt;LessThan' >"
            + "<projectMap/><projectNames/><emails><email>Escape&lt;LessThan</email>"
            + "<email>Escape&gt;GreaterThan</email><email>Escape&amp;amphersand</email>"
            + "<email>Escape'apostrophe</email><email>Escape\"Quote</email></emails>"
            + "<locations/><projectURLs/>"
            + "<address code='Escape&gt;GreaterThan' city='Escape&apos;apostrophe' "
            + "country='Escape&amp;amphersand' street='Escape&quot;Quote'/>"
            + "<numbers/></CustomerBean></beans>";
        xmlAssertIsomorphicContent(
            "Test escaping ",
            parseString(result),
            parseString(xml),
            true);
    }

    /** Test simplest case for writing empty elements */
    public void testSimpleWriteEmptyElements() throws Exception {
        // use same bean for both tests
        AddressBean bean = new AddressBean();
        bean.setStreet("Pasture Lane");
        bean.setCity("Bradford");
        // SimpleLog log = new SimpleLog( "[SimpleEmpty:AbstractBeanWriter]" );
        // log.setLevel(SimpleLog.LOG_LEVEL_TRACE);
        // SimpleLog baseLog = new SimpleLog( "[SimpleEmpty]" );
        // baseLog.setLevel(SimpleLog.LOG_LEVEL_TRACE);

        // test output when writing empty elements
        StringWriter out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        BeanWriter writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.write(bean);
        // baseLog.debug("SIMPLE EMPTY");
        // baseLog.debug(out.getBuffer().toString());
        String xml = "<?xml version='1.0'?><AddressBean><street>Pasture Lane</street><city>Bradford</city>"
            + "<code/><country/></AddressBean>";
        // baseLog.debug(xml);
        xmlAssertIsomorphicContent(parseString(out.getBuffer().toString()), parseString(xml), true);

        // test output when not writing empty elements
        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(false);
        writer.getBindingConfiguration().setMapIDs(false);
        // writer.setAbstractBeanWriterLog(log);
        writer.write(bean);
        xml = "<?xml version='1.0'?><AddressBean><street>Pasture Lane</street><city>Bradford</city>"
            + "</AddressBean>";
        // baseLog.debug("SIMPLE NOT EMPTY");
        // baseLog.debug(out.getBuffer().toString());
        xmlAssertIsomorphicContent(parseString(out.getBuffer().toString()), parseString(xml), true);
    }

    /** Tests writing of a bean with an array property and of a bare String array. */
    public void testArrayWrite() throws Exception {
        ArrayBean bean = new ArrayBean("Rob");
        bean.addHobby("Hacking open source software");
        bean.addHobby("Playing cricket");
        bean.addHobby("Watching rugby league");
        bean.addHobby("Havin' it large");

        StringWriter out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        BeanWriter writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.write(bean);
        String xml = "<?xml version='1.0'?><ArrayBean><name>Rob</name><hobbies>"
            + "<hobby>Hacking open source software</hobby>"
            + "<hobby>Playing cricket</hobby>"
            + "<hobby>Watching rugby league</hobby>"
            + "<hobby>Havin' it large</hobby>"
            + "</hobbies></ArrayBean>";
        xmlAssertIsomorphicContent(
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);

        // a raw array written directly gets an <Array> root element
        String[] array = {"This", "That", "The Other"};
        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.write(array);
        xml = "<?xml version='1.0'?><Array>"
            + "<String>This</String>"
            + "<String>That</String>"
            + "<String>The Other</String>"
            + "</Array>";
        xmlAssertIsomorphicContent(
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);
    }

    /** Test nested case for writing empty elements */
    public void testListedWriteEmptyElements() throws Exception {
        ListOfNames names = new ListOfNames();
        names.addName(new NameBean("Tom"));
        names.addName(new NameBean("Dick"));
        names.addName(new NameBean("Harry"));
        names.addName(new NameBean(""));

        StringWriter out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        BeanWriter writer = new BeanWriter(out);
        //SimpleLog log = new SimpleLog("[testListedWriteEmptyElements:AbstractBeanWriter]");
        //log.setLevel(SimpleLog.LOG_LEVEL_TRACE);
        //writer.setAbstractBeanWriterLog(log);
        //log = new SimpleLog("[testListedWriteEmptyElements:XMLIntrospector]");
        //log.setLevel(SimpleLog.LOG_LEVEL_TRACE);
        //writer.getXMLIntrospector().setLog(log);
        //log = new SimpleLog("[testListedWriteEmptyElements:XMLIntrospectorHelper]");
        //log.setLevel(SimpleLog.LOG_LEVEL_TRACE);
        //XMLIntrospectorHelper.setLog(log);
        writer.setWriteEmptyElements(false);
        writer.getIntrospector().getConfiguration().setWrapCollectionsInElement(false);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.write("Names", names);
        // empty name suppressed: writeEmptyElements is false
        String xml = "<?xml version='1.0'?><Names>"
            + "<name><name>Tom</name></name>"
            + "<name><name>Dick</name></name>"
            + "<name><name>Harry</name></name>"
            + "</Names>";
        xmlAssertIsomorphicContent(
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);

        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getIntrospector().getConfiguration().setWrapCollectionsInElement(false);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.write("Names", names);
        xml = "<?xml version='1.0'?><Names>"
            + "<name><name>Tom</name></name>"
            + "<name><name>Dick</name></name>"
            + "<name><name>Harry</name></name>"
            + "<name><name/></name>"
            + "</Names>";
        xmlAssertIsomorphicContent(
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);

        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getIntrospector().getConfiguration().setWrapCollectionsInElement(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.write("Names", names);
        xml = "<?xml version='1.0'?><Names><names>"
            + "<name><name>Tom</name></name>"
            + "<name><name>Dick</name></name>"
            + "<name><name>Harry</name></name>"
            + "<name><name/></name></names>"
            + "</Names>";
        xmlAssertIsomorphicContent(
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);

        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(false);
        writer.getIntrospector().getConfiguration().setWrapCollectionsInElement(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.write("Names", names);
        xml = "<?xml version='1.0'?><Names><names>"
            + "<name><name>Tom</name></name>"
            + "<name><name>Dick</name></name>"
            + "<name><name>Harry</name></name>"
            + "</names>"
            + "</Names>";
        xmlAssertIsomorphicContent(
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);
    }

    /**
     * Tests the pluggable element name mappers (default, capitalize,
     * hyphenated) against both an adder-based and a setter-only collection bean.
     */
    public void testWriteNameMapperStrategy() throws Exception {
        ListOfNames names = new ListOfNames();
        names.addName(new NameBean("Sid James"));
        names.addName(new NameBean("Kenneth Williams"));
        names.addName(new NameBean("Joan Simms"));
        names.addName(new NameBean("Charles Hawtrey"));

        StringWriter out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        BeanWriter writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getIntrospector().getConfiguration().setWrapCollectionsInElement(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.write("CarryOn", names);
        String xml = "<?xml version='1.0'?><CarryOn><names>"
            + "<name><name>Sid James</name></name>"
            + "<name><name>Kenneth Williams</name></name>"
            + "<name><name>Joan Simms</name></name>"
            + "<name><name>Charles Hawtrey</name></name>"
            + "</names>"
            + "</CarryOn>";
        xmlAssertIsomorphicContent(
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);

        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getIntrospector().getConfiguration().setWrapCollectionsInElement(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.getIntrospector().getConfiguration().setElementNameMapper(new CapitalizeNameMapper());
        writer.write("CarryOn", names);
        xml = "<?xml version='1.0'?><CarryOn><Names>"
            + "<Name><Name>Sid James</Name></Name>"
            + "<Name><Name>Kenneth Williams</Name></Name>"
            + "<Name><Name>Joan Simms</Name></Name>"
            + "<Name><Name>Charles Hawtrey</Name></Name>"
            + "</Names>"
            + "</CarryOn>";
        xmlAssertIsomorphicContent(
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);

        ArrayList things = new ArrayList();
        things.add(new NameBean("Sugar"));
        things.add(new NameBean("Spice"));
        things.add(new NameBean("All Things Nice"));
        NoAdderBean bean = new NoAdderBean();
        bean.setThings(things);

        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getIntrospector().getConfiguration().setWrapCollectionsInElement(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.write(bean);
        xml = "<?xml version='1.0'?><NoAdderBean><things>"
            + "<NameBean><name>Sugar</name></NameBean>"
            + "<NameBean><name>Spice</name></NameBean>"
            + "<NameBean><name>All Things Nice</name></NameBean>"
            + "</things>"
            + "</NoAdderBean>";
        xmlAssertIsomorphicContent(
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);

        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getIntrospector().getConfiguration().setWrapCollectionsInElement(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.getIntrospector().getConfiguration().setElementNameMapper(new CapitalizeNameMapper());
        writer.write(bean);
        xml = "<?xml version='1.0'?><NoAdderBean><Things>"
            + "<NameBean><Name>Sugar</Name></NameBean>"
            + "<NameBean><Name>Spice</Name></NameBean>"
            + "<NameBean><Name>All Things Nice</Name></NameBean>"
            + "</Things>"
            + "</NoAdderBean>";
        xmlAssertIsomorphicContent(
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);

        out = new StringWriter();
        out.write("<?xml version='1.0'?>");
        writer = new BeanWriter(out);
        writer.setWriteEmptyElements(true);
        writer.getIntrospector().getConfiguration().setWrapCollectionsInElement(true);
        writer.getBindingConfiguration().setMapIDs(false);
        writer.getIntrospector().getConfiguration().setElementNameMapper(new HyphenatedNameMapper(false));
        writer.write(bean);
        xml = "<?xml version='1.0'?><no-adder-bean><things>"
            + "<name-bean><name>Sugar</name></name-bean>"
            + "<name-bean><name>Spice</name></name-bean>"
            + "<name-bean><name>All Things Nice</name></name-bean>"
            + "</things>"
            + "</no-adder-bean>";
        xmlAssertIsomorphicContent(
            parseString(out.getBuffer().toString()),
            parseString(xml),
            true);
    }

    /** A bean with both an add(Object) method and a collection property must still be writable. */
    public void testBeanWriterWorksWithAnAddMethodAndACollection() throws Exception {
        BeanWriter bw = new BeanWriter();
        try {
            bw.write(new BeanWithAddMethod());
        } catch (IllegalArgumentException e) {
            fail("BeanWriter fails when a method is just called add(<type>) and there is also a collection");
        }
    }

    // used in testBeanWriterWorksWithAnAddMethodAndACollection
    public static class BeanWithAddMethod {
        private Collection x;

        public void add(Object x) {
            // do nothing
        }

        public Collection getX() {
            return x;
        }

        public void setX(Collection x) {
            this.x = x;
        }
    }
}
package tasks;

import org.junit.Assert;
import org.junit.Test;

import static org.junit.Assert.*;

import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * unit tests for the tasks library
 */
public class TaskTests {

    /** delay(400) must not complete before ~400ms; 390ms allows scheduler slack. */
    @Test
    public void testDelay() throws Exception {
        long start = System.nanoTime();
        final Ref<Long> end = new Ref<>();
        Task<Void> delayTask = Task.delay(400);
        delayTask.then(new Function<Void, Task<Void>>() {
            @Override
            public Task<Void> call(Void arg) throws Exception {
                end.value = System.nanoTime();
                return Task.fromResult(null);
            }
        }).result();
        Assert.assertTrue(delayTask.isDone());
        // 390ms expressed in nanoseconds
        Assert.assertTrue(end.value - start >= 1000000 * 390);
    }

    @Test
    public void testThen() throws Exception {
        Task<Integer> t1 = Task.delay(30).then(new Function<Void, Task<Integer>>() {
            @Override
            public Task<Integer> call(Void arg) throws Exception {
                return Task.fromResult(66);
            }
        });
        Assert.assertEquals(66, (int) t1.result());
    }

    /** whenAny resolves to the first task to complete; slower tasks keep running. */
    @Test
    public void testWhenAny() throws Exception {
        Task<Void> t1 = Task.delay(100);
        Task<Void> t2 = Task.delay(200);
        Task<Void> t3 = Task.delay(500);
        Task<Void> t4 = Task.delay(5000);
        Task<Task<?>> first = Task.whenAny(t4, t2, t1, t3);
        Assert.assertEquals(t1, first.result());
        Assert.assertEquals(Task.State.Succeeded, first.result().getState());
        Assert.assertFalse(t4.isDone());
    }

    /** whenAnySucceeds skips tasks that fail and returns the first success. */
    @Test
    public void testWhenAnySucceeds() throws Exception {
        Task<Void> t1 = Task.fromException(new Exception("OOPS!"));
        Task<Void> t2 = Task.delay(30).thenSync(new Action<Void>() {
            @Override
            public void call(Void arg) throws Exception {
                throw new Exception("OOPS! 2.0");
            }
        });
        Task<Void> t3 = Task.delay(30);
        Task<Void> t4 = Task.delay(2000);
        Task<Void> t5 = Task.delay(5000);
        Task<Task<?>> first = Task.whenAnySucceeds(t5, t4, t2, t1, t3);
        Assert.assertEquals(t3, first.result());
        Assert.assertEquals(Task.State.Succeeded, first.result().getState());
        Assert.assertFalse(t4.isDone());
    }

    /** whenAnySucceeds fails when every task fails. */
    @Test
    public void testWhenAnySucceeds2() throws Exception {
        Task<Void> t1 = Task.fromException(new Exception("OOPS!"));
        Task<Void> t2 = Task.delay(30).thenSync(new Action<Void>() {
            @Override
            public void call(Void arg) throws Exception {
                throw new Exception("OOPS! 2.0");
            }
        });
        Task<Void> t3 = Task.delay(30).then(new Function<Void, Task<Void>>() {
            @Override
            public Task<Void> call(Void x) throws Exception {
                return Task.fromException(new Exception("OOPS! 3.0"));
            }
        });
        Task<Task<?>> first = Task.whenAnySucceeds(t2, t1, t3);
        first.waitForCompletion();
        Assert.assertEquals(Task.State.Failed, first.getState());
        Assert.assertTrue(t2.isDone());
        Assert.assertTrue(t3.isDone());
    }

    /** Combines whenAny and whenAll over tasks chained off a common parent. */
    @Test
    public void testWhenAnyAndWhenAll() throws Exception {
        Task<Void> tMother = Task.delay(10).then(new Function<Void, Task<Void>>() {
            @Override
            public Task<Void> call(Void arg) throws Exception {
                return Task.delay(5);
            }
        });
        Task<Void> t1 = tMother.then(new Function<Void, Task<Void>>() {
            @Override
            public Task<Void> call(Void arg) throws Exception {
                return Task.delay(30);
            }
        });
        Task<Void> t2 = tMother.then(new Function<Void, Task<Void>>() {
            @Override
            public Task<Void> call(Void arg) throws Exception {
                return Task.delay(100);
            }
        });
        Task<Void> t3 = tMother.then(new Function<Void, Task<Void>>() {
            @Override
            public Task<Void> call(Void arg) throws Exception {
                return Task.delay(150);
            }
        });
        Task<Void> tBad = tMother.then(new Function<Void, Task<Void>>() {
            @Override
            public Task<Void> call(Void arg) throws Exception {
                return Task.delay(160);
            }
        }).thenSync(new Action<Void>() {
            @Override
            public void call(Void arg) throws Exception {
                throw new Exception("task gone awry!");
            }
        });
        Task<Task<?>> first = Task.whenAny(tBad, t2, t1, t3);
        Assert.assertEquals(t1, first.result());
        Assert.assertEquals(Task.State.Succeeded, first.result().getState());
        Assert.assertFalse(tBad.isDone());
        // whenAll waits for every task, including the one that fails
        final Task<?>[] whenAllResult = Task.whenAll(t3, t2, t1, tBad).result();
        Assert.assertEquals(Task.State.Failed, whenAllResult[3].getState());
        for (Task<Void> t : new Task[]{t1, t2, t3, tBad}) {
            Assert.assertTrue(t.isDone());
        }
    }

    /** zip pairs two results; a failed side propagates its exception. */
    @Test
    public void testZip() throws Exception {
        Task<Integer> t1 = Task.delay(10).map(new Function<Void, Integer>() {
            @Override
            public Integer call(Void aVoid) throws Exception {
                return 42;
            }
        });
        assertEquals(new Pair<>(42, "hi"), t1.zip(Task.fromResult("hi")).result());
        Task<String> tFail = Task.fromException(new Exception("oh crap!"));
        // the huge delay never finishes first; the failure must short-circuit the zip
        Task<Pair<Void, String>> zip = Task.delay(50000000).zip(tFail);
        zip.waitForCompletion();
        assertEquals("oh crap!", zip.getException().getMessage());
        assertEquals(new Pair<>(42, "hi"), t1.zip(Task.fromResult("hi")).result());
    }

    @Test
    public void testTryCatch2() throws Exception {
        Task<String> t0 = Task.delay(10).then(new Function<Void, Task<String>>() {
            @Override
            public Task<String> call(Void arg) throws Exception {
                throw new Exception("no!");
            }
        });
        Task<String> t1 = t0.tryCatch(new Function<Exception, Task<String>>() {
            @Override
            public Task<String> call(Exception arg) throws Exception {
                return Task.fromResult("yes!");
            }
        });
        Thread.sleep(30);
        Assert.assertEquals("t0 should be in error state", Task.State.Failed, t0.getState());
        //Assert.assertEquals("t1 should be done", Task.State.Succeeded, t1.getState());
        t1.result();
    }

    @Test
    public void testTryCatch() throws Exception {
        TaskBuilder<Integer> taskBuilder = new TaskBuilder<>();
        taskBuilder.setException(new Exception(":]"));
        Task<Integer> t0 = taskBuilder.getTask().tryCatch(new Function<Exception, Task<Integer>>() {
            @Override
            public Task<Integer> call(Exception arg) throws Exception {
                return Task.fromResult(42);
            }
        });
        Assert.assertEquals(42, ((int) t0.result()));
        Task<Integer> t1 = Task.delay(30).then(new Function<Void, Task<Integer>>() {
            @Override
            public Task<Integer> call(Void arg) throws Exception {
                throw new Exception(":)");
            }
        });
        // Exception thrown= null;
        // try{
        // t1.result();
        // }catch (Exception ex){thrown = ex;}
        // Assert.assertNotNull(thrown);
        Task<Integer> t2 = t1.tryCatch(new Function<Exception, Task<Integer>>() {
            @Override
            public Task<Integer> call(Exception arg) throws Exception {
                return Task.fromResult(42);
            }
        });
        int result = t2.result();
        Assert.assertEquals(42, result);
    }

    /** Typed tryCatch only fires for a matching exception class. */
    @Test
    public void testTryCatch3() throws Exception {
        Task<Integer> t0 = Task.<Integer>fromException(new IllegalArgumentException("yup, somebody threw me"))
            .tryCatch(ArithmeticException.class, new Function<ArithmeticException, Task<Integer>>() {
                @Override
                public Task<Integer> call(ArithmeticException x) throws Exception {
                    return Task.fromResult(666);
                }
            });
        final Task<Integer> t1 = t0.tryCatch(IllegalArgumentException.class,
            new Function<IllegalArgumentException, Task<Integer>>() {
                @Override
                public Task<Integer> call(IllegalArgumentException x) throws Exception {
                    return Task.fromResult(4);
                }
            });
        Task<Integer> t2 = t0.tryCatchSync(IllegalArgumentException.class,
            new Function<IllegalArgumentException, Integer>() {
                @Override
                public Integer call(IllegalArgumentException x) throws Exception {
                    return 5;
                }
            });
        t0.waitForCompletion();
        Assert.assertEquals(Task.State.Failed, t0.getState());
        Assert.assertEquals(4, (int) t1.result());
        Assert.assertEquals(5, (int) t2.result());
    }

    /** tryFinally runs on failure and its own exception replaces a success. */
    @Test
    public void testTryFinally() throws Exception {
        final Ref<Boolean> ranFinally = new Ref<>(false);
        Task<Integer> t0 = Task.<Integer>fromException(new IllegalArgumentException("yup, somebody threw me"))
            .tryFinally(new Function<Void, Task<Void>>() {
                @Override
                public Task<Void> call(Void x) throws Exception {
                    ranFinally.value = true;
                    return Task.fromResult(null);
                }
            });
        t0.waitForCompletion();
        Assert.assertEquals(Task.State.Failed, t0.getState());
        Assert.assertTrue(ranFinally.value);
        final Task<Integer> t1 = Task.delay(5).thenSync(new Function<Void, Integer>() {
            @Override
            public Integer call(Void x) throws Exception {
                return 42;
            }
        }).tryFinally(new Function<Void, Task<Void>>() {
            @Override
            public Task<Void> call(Void x) throws Exception {
                throw new Exception("Finally blew up!");
            }
        });
        t1.waitForCompletion();
        Assert.assertEquals("Finally blew up!", t1.getException().getMessage());
    }

    @Test
    public void testTryWithResource() {
        //test with failing body
        final Ref<Integer> closeCalls = new Ref<>(0);
        Closeable res = new Closeable() {
            @Override
            public void close() {
                closeCalls.value++;
            }
        };
        Task<Integer> task = Task.tryWithResource(res, new Function<Closeable, Task<Integer>>() {
            @Override
            public Task<Integer> call(Closeable closeable) throws Exception {
                throw new Exception("HEHE");
            }
        });
        task.waitForCompletion();
        assertTrue(task.getException().getMessage().equals("HEHE"));
        assertEquals((Integer) 1, closeCalls.value);
        //test with resource that throws in close()
        Closeable badRes = new Closeable() {
            @Override
            public void close() throws IOException {
                throw new IOException("From badRes");
            }
        };
        Task<Integer> taskWithBadRes = Task.tryWithResource(badRes, new Function<Closeable, Task<Integer>>() {
            @Override
            public Task<Integer> call(Closeable closeable) throws Exception {
                return Task.delay(5).thenSync(new Function<Void, Integer>() {
                    @Override
                    public Integer call(Void aVoid) throws Exception {
                        return 42;
                    }
                });
            }
        });
        taskWithBadRes.waitForCompletion();
        assertEquals("From badRes", taskWithBadRes.getException().getMessage());
        //test with failing body and resource that throws in close(). the returned Task's exception must come from body
        Task<Integer> badTaskWithBadRes = Task.tryWithResource(badRes, new Function<Closeable, Task<Integer>>() {
            @Override
            public Task<Integer> call(Closeable closeable) throws Exception {
                return Task.fromException(new Exception("from badTask"));
            }
        });
        badTaskWithBadRes.waitForCompletion();
        assertEquals("from badTask", badTaskWithBadRes.getException().getMessage());
    }

    /** withTimeout fails with TaskTimeoutException, or passes results through in time. */
    @Test
    public void testWithTimeout() throws Exception {
        Task<Void> task = Task.delay(10000);
        Task<Void> taskWithTimeOut = task.withTimeout(10);
        taskWithTimeOut.waitForCompletion();
        assertTrue(taskWithTimeOut.getException() instanceof TaskTimeoutException
            && ((TaskTimeoutException) taskWithTimeOut.getException()).getTimedOutTask() == task);
        Task<Integer> task2 = Task.delay(10).thenSync(new Function<Void, Integer>() {
            @Override
            public Integer call(Void aVoid) throws Exception {
                return 42;
            }
        });
        Task<Integer> task2WithTimeOut = task2.withTimeout(20);
        task2WithTimeOut.waitForCompletion();
        assertEquals((Integer) 42, task2WithTimeOut.result());
    }

    /** forEachGenerate maps each input through an async function, preserving order. */
    @Test
    public void testForeachGenerate() throws Exception {
        final List<Integer> input = Arrays.asList(1, 2, 3, 4);
        final Task<List<Integer>> t = Task.forEachGenerate(input, new Function<Integer, Task<Integer>>() {
            @Override
            public Task<Integer> call(final Integer x) throws Exception {
                return Task.delay(10).then(new Function<Void, Task<Integer>>() {
                    @Override
                    public Task<Integer> call(Void ignored) throws Exception {
                        return Task.fromResult(x * x);
                    }
                });
            }
        });
        Assert.assertEquals(Task.State.NotDone, t.getState());
        final List<Integer> result = t.result();
        for (int i = 0; i < input.size(); i++) {
            Assert.assertEquals(input.get(i) * input.get(i), (int) result.get(i));
        }
    }

    /** whileLoop: sums 0..10 across async iterations. */
    @Test
    public void testWhileLoop() throws Exception {
        final Ref<Integer> i = new Ref<>(0);
        final Ref<Integer> res = new Ref<>(0);
        final Task<Void> t = Task.whileLoop(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return i.value <= 10;
            }
        }, new Callable<Task<Void>>() {
            @Override
            public Task<Void> call() throws Exception {
                return Task.delay(5).thenSync(new Action<Void>() {
                    @Override
                    public void call(Void x) throws Exception {
                        res.value += i.value;
                        i.value++;
                    }
                });
            }
        });
        t.result();
        Assert.assertEquals(10 * 11 / 2, (int) res.value);
    }

    /** continueWith runs whether the antecedent succeeded or failed. */
    @Test
    public void testContinueWith() throws Exception {
        Task<Integer> t1 = Task.delay(30).continueWithSync(new Function<Task<Void>, Integer>() {
            @Override
            public Integer call(Task<Void> x) throws Exception {
                if (x.getState() == Task.State.Failed) {
                    throw x.getException();
                }
                return 66;
            }
        });
        Assert.assertEquals(66, (int) t1.result());
        Task<Integer> t2 = Task.delay(25).continueWith(new Function<Task<Void>, Task<Integer>>() {
            @Override
            public Task<Integer> call(Task<Void> x) throws Exception {
                throw new Exception("I throw");
            }
        }).continueWith(new Function<Task<Integer>, Task<Integer>>() {
            @Override
            public Task<Integer> call(Task<Integer> x) throws Exception {
                if (x.getState() == Task.State.Failed) {
                    return Task.fromResult(42);
                }
                return Task.fromResult(9);
            }
        });
        Assert.assertEquals(42, (int) t2.result());
    }

    /** continueOn shifts subsequent continuations onto the given executor. */
    @Test
    public void testContinueOn() throws Exception {
        final Ref<Exception> whatWentWrong = new Ref<Exception>();
        final Ref<Thread> currentThread = new Ref<>(null);
        final ExecutorService executorBase = Executors.newFixedThreadPool(14);
        // wrapper records which worker thread is executing so assertions can compare
        Executor executor = new Executor() {
            @Override
            public void execute(final Runnable command) {
                executorBase.execute(new Runnable() {
                    @Override
                    public void run() {
                        currentThread.value = Thread.currentThread();
                        command.run();
                        currentThread.value = null;
                    }
                });
            }
        };
        Task.delay(5).continueOn(executor).thenSync(new Action<Void>() {
            @Override
            public void call(Void arg) throws Exception {
                try {
                    Assert.assertEquals(Thread.currentThread(), currentThread.value);
                } catch (Exception ex) {
                    whatWentWrong.value = ex;
                }
            }
        }).then(new Function<Void, Task<Void>>() {
            @Override
            public Task<Void> call(Void x) throws Exception {
                return Task.delay(50);
            }
        }).then(new Function<Void, Task<Integer>>() {
            @Override
            public Task<Integer> call(Void x) throws Exception {
                try {
                    Assert.assertEquals(Thread.currentThread(), currentThread.value);
                } catch (Exception ex) {
                    whatWentWrong.value = ex;
                }
                return Task.fromResult(42);
            }
        }).result();
        Assert.assertNull(whatWentWrong.value);
    }

    /** Switching executors mid-chain moves continuations between threads. */
    @Test
    public void testContinueOn2() throws Exception {
        final ExecutorService executor1 = Executors.newSingleThreadExecutor();
        final ExecutorService executor2 = Executors.newSingleThreadExecutor();
        final Ref<Thread> thread1 = new Ref<>();
        final Ref<Thread> thread2 = new Ref<>();
        // capture each single-thread executor's worker thread up front
        executor1.submit(new Runnable() {
            @Override
            public void run() {
                thread1.value = Thread.currentThread();
            }
        }).get();
        executor2.submit(new Runnable() {
            @Override
            public void run() {
                thread2.value = Thread.currentThread();
            }
        }).get();
        final Ref<Exception> whatWentWrong = new Ref<Exception>();
        Task.delay(5).continueOn(executor1).then(new Function<Void, Task<Integer>>() {
            @Override
            public Task<Integer> call(Void x) throws Exception {
                return Task.fromResult(42);
            }
        }).thenSync(new Action<Integer>() {
            @Override
            public void call(Integer arg) throws Exception {
                try {
                    Assert.assertEquals(Thread.currentThread(), thread1.value);
                } catch (Exception ex) {
                    whatWentWrong.value = ex;
                }
            }
        }).continueOn(executor2).then(new Function<Void, Task<Void>>() {
            @Override
            public Task<Void> call(Void x) throws Exception {
                return Task.delay(50);
            }
        }).then(new Function<Void, Task<Integer>>() {
            @Override
            public Task<Integer> call(Void x) throws Exception {
                try {
                    Assert.assertEquals(Thread.currentThread(), thread2.value);
                } catch (Exception ex) {
                    whatWentWrong.value = ex;
                }
                return Task.fromResult(42);
            }
        }).result();
        Assert.assertNull(whatWentWrong.value);
    }
}
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.cordova; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashMap; import java.util.Locale; import org.apache.cordova.Config; import org.apache.cordova.CordovaInterface; import org.apache.cordova.LOG; import org.apache.cordova.PluginManager; import org.apache.cordova.PluginResult; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.pm.ApplicationInfo; import android.content.pm.PackageManager; import android.content.pm.PackageManager.NameNotFoundException; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.util.AttributeSet; import android.util.Log; import android.view.Gravity; import android.view.KeyEvent; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.view.inputmethod.InputMethodManager; import android.webkit.WebBackForwardList; import android.webkit.WebHistoryItem; import android.webkit.WebChromeClient; import android.webkit.WebSettings; 
import android.webkit.WebView;
import android.webkit.WebSettings.LayoutAlgorithm;
import android.widget.FrameLayout;

/**
 * This class is our web view.
 *
 * @see <a href="http://developer.android.com/guide/webapps/webview.html">WebView guide</a>
 * @see <a href="http://developer.android.com/reference/android/webkit/WebView.html">WebView</a>
 */
public class CordovaWebView extends WebView {

    public static final String TAG = "CordovaWebView";
    public static final String CORDOVA_VERSION = "3.3.0-rc1";

    // Key codes registered via bindButton() that JavaScript wants delivered as
    // document events instead of getting default key handling.
    private ArrayList<Integer> keyDownCodes = new ArrayList<Integer>();
    private ArrayList<Integer> keyUpCodes = new ArrayList<Integer>();

    public PluginManager pluginManager;
    private boolean paused;

    // Registered in setup() for ACTION_CONFIGURATION_CHANGED; unregistered in handleDestroy().
    private BroadcastReceiver receiver;

    /** Activities and other important classes **/
    private CordovaInterface cordova;
    CordovaWebViewClient viewClient;
    @SuppressWarnings("unused")
    private CordovaChromeClient chromeClient;
    private String url;

    // Flag to track that a loadUrl timeout occurred
    int loadUrlTimeout = 0;

    // True when JavaScript has bound the back button (see bindButton(boolean)).
    private boolean bound;
    private boolean handleButton = false;
    private long lastMenuEventTime = 0;

    NativeToJsMessageQueue jsMessageQueue;
    ExposedJsApi exposedJsApi;

    /** custom view created by the browser (a video player for example) */
    private View mCustomView;
    private WebChromeClient.CustomViewCallback mCustomViewCallback;

    // Pending activity result stored by storeResult().
    // NOTE(review): mResult is written but never read within this class — presumably
    // consumed elsewhere (or dead); confirm against CordovaActivity.
    private ActivityResult mResult = null;

    private CordovaResourceApi resourceApi;

    /** Simple holder for an onActivityResult triple until it can be delivered. */
    class ActivityResult {

        int request;
        int result;
        Intent incoming;

        public ActivityResult(int req, int res, Intent intent) {
            request = req;
            result = res;
            incoming = intent;
        }
    }

    // Layout params used to attach a full-screen custom view (e.g. HTML5 video).
    static final FrameLayout.LayoutParams COVER_SCREEN_GRAVITY_CENTER =
            new FrameLayout.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT,
                    ViewGroup.LayoutParams.MATCH_PARENT,
                    Gravity.CENTER);

    /**
     * Constructor.
     *
     * NOTE(review): unlike the other constructors, this one sets neither a chrome
     * client nor a web view client — caller is expected to install them; confirm.
     *
     * @param context
     */
    public CordovaWebView(Context context) {
        super(context);
        if (CordovaInterface.class.isInstance(context)) {
            this.cordova = (CordovaInterface) context;
        }
        else {
            Log.d(TAG, "Your activity must implement CordovaInterface to work");
        }
        this.loadConfiguration();
        this.setup();
    }

    /**
     * Constructor.
     *
     * @param context
     * @param attrs
     */
    public CordovaWebView(Context context, AttributeSet attrs) {
        super(context, attrs);
        if (CordovaInterface.class.isInstance(context)) {
            this.cordova = (CordovaInterface) context;
        }
        else {
            Log.d(TAG, "Your activity must implement CordovaInterface to work");
        }
        this.setWebChromeClient(new CordovaChromeClient(this.cordova, this));
        this.initWebViewClient(this.cordova);
        this.loadConfiguration();
        this.setup();
    }

    /**
     * Constructor.
     *
     * NOTE(review): this overload installs a chrome client but never calls
     * initWebViewClient(), unlike the 2-arg and 4-arg constructors — looks like
     * an upstream inconsistency; confirm before relying on it.
     *
     * @param context
     * @param attrs
     * @param defStyle
     *
     */
    public CordovaWebView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        if (CordovaInterface.class.isInstance(context)) {
            this.cordova = (CordovaInterface) context;
        }
        else {
            Log.d(TAG, "Your activity must implement CordovaInterface to work");
        }
        this.setWebChromeClient(new CordovaChromeClient(this.cordova, this));
        this.loadConfiguration();
        this.setup();
    }

    /**
     * Constructor.
     *
     * @param context
     * @param attrs
     * @param defStyle
     * @param privateBrowsing
     */
    @TargetApi(11)
    public CordovaWebView(Context context, AttributeSet attrs, int defStyle, boolean privateBrowsing) {
        super(context, attrs, defStyle, privateBrowsing);
        if (CordovaInterface.class.isInstance(context)) {
            this.cordova = (CordovaInterface) context;
        }
        else {
            Log.d(TAG, "Your activity must implement CordovaInterface to work");
        }
        // NOTE(review): uses the single-arg CordovaChromeClient ctor here, unlike
        // the two-arg form used by the other constructors — confirm intended.
        this.setWebChromeClient(new CordovaChromeClient(this.cordova));
        this.initWebViewClient(this.cordova);
        this.loadConfiguration();
        this.setup();
    }

    /**
     * set the WebViewClient, but provide special case handling for IceCreamSandwich.
     */
    private void initWebViewClient(CordovaInterface cordova) {
        // IceCreamCordovaWebViewClient is only used for Honeycomb..JB-MR1;
        // everything older or newer gets the plain client.
        if(android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.HONEYCOMB ||
                android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.JELLY_BEAN_MR1)
        {
            this.setWebViewClient(new CordovaWebViewClient(this.cordova, this));
        }
        else
        {
            this.setWebViewClient(new IceCreamCordovaWebViewClient(this.cordova, this));
        }
    }

    /**
     * Initialize webview.
     *
     * Configures WebSettings (JS, database, DOM storage, geolocation, app cache),
     * applies per-OS-version workarounds, wires the JS bridge and plugin manager.
     * Statement order here is deliberate; do not reorder casually.
     */
    @SuppressWarnings("deprecation")
    @SuppressLint("NewApi")
    private void setup() {
        this.setInitialScale(0);
        this.setVerticalScrollBarEnabled(false);
        if (shouldRequestFocusOnInit()) {
            this.requestFocusFromTouch();
        }
        // Enable JavaScript
        WebSettings settings = this.getSettings();
        settings.setJavaScriptEnabled(true);
        settings.setJavaScriptCanOpenWindowsAutomatically(true);
        settings.setLayoutAlgorithm(LayoutAlgorithm.NORMAL);

        // Set the nav dump for HTC 2.x devices (disabling for ICS, deprecated entirely for Jellybean 4.2)
        // setNavDump is invoked via reflection because it does not exist on all API levels.
        try {
            Method gingerbread_getMethod = WebSettings.class.getMethod("setNavDump", new Class[] { boolean.class });

            String manufacturer = android.os.Build.MANUFACTURER;
            Log.d(TAG, "CordovaWebView is running on device made by: " + manufacturer);
            if(android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.HONEYCOMB &&
                    android.os.Build.MANUFACTURER.contains("HTC"))
            {
                gingerbread_getMethod.invoke(settings, true);
            }
        } catch (NoSuchMethodException e) {
            Log.d(TAG, "We are on a modern version of Android, we will deprecate HTC 2.3 devices in 2.8");
        } catch (IllegalArgumentException e) {
            Log.d(TAG, "Doing the NavDump failed with bad arguments");
        } catch (IllegalAccessException e) {
            Log.d(TAG, "This should never happen: IllegalAccessException means this isn't Android anymore");
        } catch (InvocationTargetException e) {
            Log.d(TAG, "This should never happen: InvocationTargetException means this isn't Android anymore.");
        }

        //We don't save any form data in the application
        settings.setSaveFormData(false);
        settings.setSavePassword(false);

        // Jellybean rightfully tried to lock this down. Too bad they didn't give us a whitelist
        // while we do this
        if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
            Level16Apis.enableUniversalAccess(settings);
        // Enable database
        // We keep this disabled because we use or shim to get around DOM_EXCEPTION_ERROR_16
        String databasePath = this.cordova.getActivity().getApplicationContext().getDir("database", Context.MODE_PRIVATE).getPath();
        settings.setDatabaseEnabled(true);
        settings.setDatabasePath(databasePath);

        //Determine whether we're in debug or release mode, and turn on Debugging!
        try {
            final String packageName = this.cordova.getActivity().getPackageName();
            final PackageManager pm = this.cordova.getActivity().getPackageManager();
            ApplicationInfo appInfo;

            appInfo = pm.getApplicationInfo(packageName, PackageManager.GET_META_DATA);

            if((appInfo.flags & ApplicationInfo.FLAG_DEBUGGABLE) != 0 &&
                android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.KITKAT)
            {
                setWebContentsDebuggingEnabled(true);
            }
        } catch (IllegalArgumentException e) {
            Log.d(TAG, "You have one job! To turn on Remote Web Debugging! YOU HAVE FAILED! ");
            e.printStackTrace();
        } catch (NameNotFoundException e) {
            Log.d(TAG, "This should never happen: Your application's package can't be found.");
            e.printStackTrace();
        }

        settings.setGeolocationDatabasePath(databasePath);

        // Enable DOM storage
        settings.setDomStorageEnabled(true);

        // Enable built-in geolocation
        settings.setGeolocationEnabled(true);

        // Enable AppCache
        // Fix for CB-2282
        settings.setAppCacheMaxSize(5 * 1048576);
        String pathToCache = this.cordova.getActivity().getApplicationContext().getDir("database", Context.MODE_PRIVATE).getPath();
        settings.setAppCachePath(pathToCache);
        settings.setAppCacheEnabled(true);

        // Fix for CB-1405
        // Google issue 4641
        this.updateUserAgentString();

        IntentFilter intentFilter = new IntentFilter();
        intentFilter.addAction(Intent.ACTION_CONFIGURATION_CHANGED);
        if (this.receiver == null) {
            this.receiver = new BroadcastReceiver() {
                @Override
                public void onReceive(Context context, Intent intent) {
                    updateUserAgentString();
                }
            };
            this.cordova.getActivity().registerReceiver(this.receiver, intentFilter);
        }
        // end CB-1405

        pluginManager = new PluginManager(this, this.cordova);
        jsMessageQueue = new NativeToJsMessageQueue(this, cordova);
        exposedJsApi = new ExposedJsApi(pluginManager, jsMessageQueue);
        resourceApi = new CordovaResourceApi(this.getContext(), pluginManager);
        exposeJsInterface();
    }

    /**
     * Override this method to decide whether or not you need to request the
     * focus when your application start
     *
     * @return true unless this method is overriden to return a different value
     */
    protected boolean shouldRequestFocusOnInit() {
        return true;
    }

    // Workaround for CB-1405 / Google issue 4641: touching getUserAgentString()
    // is sufficient here; the return value is intentionally discarded.
    private void updateUserAgentString() {
        this.getSettings().getUserAgentString();
    }

    /**
     * Expose the native JS bridge object, except on Android versions where
     * addJavascriptInterface() is broken or unsafe.
     */
    private void exposeJsInterface() {
        int SDK_INT = Build.VERSION.SDK_INT;
        boolean isHoneycomb = (SDK_INT >= Build.VERSION_CODES.HONEYCOMB && SDK_INT <= Build.VERSION_CODES.HONEYCOMB_MR2);
        if (isHoneycomb || (SDK_INT < Build.VERSION_CODES.GINGERBREAD)) {
            Log.i(TAG, "Disabled addJavascriptInterface() bridge since Android version is old.");
            // Bug being that Java Strings do not get converted to JS strings automatically.
            // This isn't hard to work-around on the JS side, but it's easier to just
            // use the prompt bridge instead.
            return;
        } else if (SDK_INT < Build.VERSION_CODES.HONEYCOMB && Build.MANUFACTURER.equals("unknown")) {
            // addJavascriptInterface crashes on the 2.3 emulator.
            Log.i(TAG, "Disabled addJavascriptInterface() bridge callback due to a bug on the 2.3 emulator");
            return;
        }
        this.addJavascriptInterface(exposedJsApi, "_cordovaNative");
    }

    /**
     * Set the WebViewClient.
     *
     * @param client
     */
    public void setWebViewClient(CordovaWebViewClient client) {
        this.viewClient = client;
        super.setWebViewClient(client);
    }

    /**
     * Set the WebChromeClient.
     *
     * @param client
     */
    public void setWebChromeClient(CordovaChromeClient client) {
        this.chromeClient = client;
        super.setWebChromeClient(client);
    }

    public CordovaChromeClient getWebChromeClient() {
        return this.chromeClient;
    }

    /**
     * Load the url into the webview.
     *
     * @param url
     */
    @Override
    public void loadUrl(String url) {
        // about:blank and javascript: URLs bypass the timeout/splash machinery.
        if (url.equals("about:blank") || url.startsWith("javascript:")) {
            this.loadUrlNow(url);
        }
        else {
            String initUrl = this.getProperty("url", null);

            // If first page of app, then set URL to load to be the one passed in
            if (initUrl == null) {
                this.loadUrlIntoView(url);
            }
            // Otherwise use the URL specified in the activity's extras bundle
            else {
                this.loadUrlIntoView(initUrl);
            }
        }
    }

    /**
     * Load the url into the webview after waiting for period of time.
     * This is used to display the splashscreen for certain amount of time.
     *
     * @param url
     * @param time              The number of ms to wait before loading webview
     */
    public void loadUrl(final String url, int time) {
        String initUrl = this.getProperty("url", null);

        // If first page of app, then set URL to load to be the one passed in
        if (initUrl == null) {
            this.loadUrlIntoView(url, time);
        }
        // Otherwise use the URL specified in the activity's extras bundle
        else {
            // NOTE(review): the delay is dropped on this path — confirm intended.
            this.loadUrlIntoView(initUrl);
        }
    }

    /**
     * Load the url into the webview, guarding with a loadUrlTimeout watchdog
     * thread that reports a connection error if the page takes too long.
     *
     * @param url
     */
    public void loadUrlIntoView(final String url) {
        LOG.d(TAG, ">>> loadUrl(" + url + ")");

        this.url = url;
        this.pluginManager.init();

        // Create a timeout timer for loadUrl
        final CordovaWebView me = this;
        final int currentLoadUrlTimeout = me.loadUrlTimeout;
        final int loadUrlTimeoutValue = Integer.parseInt(this.getProperty("LoadUrlTimeoutValue", "20000"));

        // Timeout error method
        final Runnable loadError = new Runnable() {
            public void run() {
                me.stopLoading();
                LOG.e(TAG, "CordovaWebView: TIMEOUT ERROR!");
                if (viewClient != null) {
                    viewClient.onReceivedError(me, -6, "The connection to the server was unsuccessful.", url);
                }
            }
        };

        // Timeout timer method
        final Runnable timeoutCheck = new Runnable() {
            public void run() {
                try {
                    synchronized (this) {
                        wait(loadUrlTimeoutValue);
                    }
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }

                // If timeout, then stop loading and handle error
                // (loadUrlTimeout is bumped elsewhere when a page load succeeds).
                if (me.loadUrlTimeout == currentLoadUrlTimeout) {
                    me.cordova.getActivity().runOnUiThread(loadError);
                }
            }
        };

        // Load url
        this.cordova.getActivity().runOnUiThread(new Runnable() {
            public void run() {
                Thread thread = new Thread(timeoutCheck);
                thread.start();
                me.loadUrlNow(url);
            }
        });
    }

    /**
     * Load URL in webview.
     *
     * Only file://, javascript:, and whitelisted URLs are actually loaded;
     * everything else is silently dropped.
     *
     * @param url
     */
    void loadUrlNow(String url) {
        if (LOG.isLoggable(LOG.DEBUG) && !url.startsWith("javascript:")) {
            LOG.d(TAG, ">>> loadUrlNow()");
        }
        if (url.startsWith("file://") || url.startsWith("javascript:") || Config.isUrlWhiteListed(url)) {
            super.loadUrl(url);
        }
    }

    /**
     * Load the url into the webview after waiting for period of time.
     * This is used to display the splashscreen for certain amount of time.
     *
     * @param url
     * @param time              The number of ms to wait before loading webview
     */
    public void loadUrlIntoView(final String url, final int time) {

        // If not first page of app, then load immediately
        // Add support for browser history if we use it.
        if ((url.startsWith("javascript:")) || this.canGoBack()) {
            // Intentionally empty: no splashscreen for javascript: URLs or
            // when history already exists.
        }

        // If first page, then show splashscreen
        else {

            LOG.d(TAG, "loadUrlIntoView(%s, %d)", url, time);

            // Send message to show splashscreen now if desired
            this.postMessage("splashscreen", "show");
        }

        // Load url
        this.loadUrlIntoView(url);
    }

    // Broadcasts scroll changes to plugins as an "onScrollChanged" message.
    public void onScrollChanged(int l, int t, int oldl, int oldt)
    {
        super.onScrollChanged(l, t, oldl, oldt);
        //We should post a message that the scroll changed
        ScrollEvent myEvent = new ScrollEvent(l, t, oldl, oldt, this);
        this.postMessage("onScrollChanged", myEvent);
    }

    /**
     * Send JavaScript statement back to JavaScript.
     * (This is a convenience method)
     *
     * @param statement
     */
    public void sendJavascript(String statement) {
        this.jsMessageQueue.addJavaScript(statement);
    }

    /**
     * Send a plugin result back to JavaScript.
     * (This is a convenience method)
     *
     * @param result
     * @param callbackId
     */
    public void sendPluginResult(PluginResult result, String callbackId) {
        this.jsMessageQueue.addPluginResult(result, callbackId);
    }

    /**
     * Send a message to all plugins.
     *
     * @param id            The message id
     * @param data          The message data
     */
    public void postMessage(String id, Object data) {
        if (this.pluginManager != null) {
            this.pluginManager.postMessage(id, data);
        }
    }

    /**
     * Go to previous page in history.  (We manage our own history)
     *
     * @return true if we went back, false if we are already at top
     */
    public boolean backHistory() {
        // Check webview first to see if there is a history
        // This is needed to support curPage#diffLink, since they are added to appView's history, but not our history url array (JQMobile behavior)
        if (super.canGoBack()) {
            printBackForwardList();
            super.goBack();
            return true;
        }
        return false;
    }

    /**
     * Load the specified URL in the Cordova webview or a new browser instance.
     *
     * NOTE: If openExternal is false, only URLs listed in whitelist can be loaded.
     *
     * @param url           The url to load.
     * @param openExternal  Load url in browser instead of Cordova webview.
     * @param clearHistory  Clear the history stack, so new page becomes top of history
     * @param params        Parameters for new app
     */
    public void showWebPage(String url, boolean openExternal, boolean clearHistory, HashMap<String, Object> params) {
        LOG.d(TAG, "showWebPage(%s, %b, %b, HashMap", url, openExternal, clearHistory);

        // If clearing history
        if (clearHistory) {
            this.clearHistory();
        }

        // If loading into our webview
        if (!openExternal) {

            // Make sure url is in whitelist
            if (url.startsWith("file://") || Config.isUrlWhiteListed(url)) {
                // TODO: What about params?
                // Load new URL
                this.loadUrl(url);
            }
            // Load in default viewer if not
            else {
                LOG.w(TAG, "showWebPage: Cannot load URL into webview since it is not in white list.  Loading into browser instead. (URL=" + url + ")");
                try {
                    Intent intent = new Intent(Intent.ACTION_VIEW);
                    intent.setData(Uri.parse(url));
                    cordova.getActivity().startActivity(intent);
                } catch (android.content.ActivityNotFoundException e) {
                    LOG.e(TAG, "Error loading url " + url, e);
                }
            }
        }

        // Load in default view intent
        else {
            try {
                Intent intent = new Intent(Intent.ACTION_VIEW);
                intent.setData(Uri.parse(url));
                cordova.getActivity().startActivity(intent);
            } catch (android.content.ActivityNotFoundException e) {
                LOG.e(TAG, "Error loading url " + url, e);
            }
        }
    }

    /**
     * Check configuration parameters from Config.
     * Approved list of URLs that can be loaded into Cordova
     *      <access origin="http://server regexp" subdomains="true" />
     * Log level: ERROR, WARN, INFO, DEBUG, VERBOSE (default=ERROR)
     *      <log level="DEBUG" />
     */
    private void loadConfiguration() {
        if ("true".equals(this.getProperty("Fullscreen", "false"))) {
            this.cordova.getActivity().getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
            this.cordova.getActivity().getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                    WindowManager.LayoutParams.FLAG_FULLSCREEN);
        }
    }

    /**
     * Get string property for activity.
     *
     * Looks the name up (lower-cased) in the activity intent's extras bundle.
     *
     * @param name
     * @param defaultValue
     * @return the String value for the named property
     */
    public String getProperty(String name, String defaultValue) {
        Bundle bundle = this.cordova.getActivity().getIntent().getExtras();
        if (bundle == null) {
            return defaultValue;
        }
        // NOTE(review): lookup key is lower-cased, so extras must have been
        // stored with lowercase keys — confirm against the producer.
        name = name.toLowerCase(Locale.getDefault());
        Object p = bundle.get(name);
        if (p == null) {
            return defaultValue;
        }
        return p.toString();
    }

    /*
     * onKeyDown
     *
     * Intercepts volume keys when bound, back, and menu; everything else falls
     * through to the default WebView handling.
     */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event)
    {
        if(keyDownCodes.contains(keyCode))
        {
            if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
                // only override default behavior is event bound
                LOG.d(TAG, "Down Key Hit");
                this.loadUrl("javascript:cordova.fireDocumentEvent('volumedownbutton');");
                return true;
            }
            // If volumeup key
            else if (keyCode == KeyEvent.KEYCODE_VOLUME_UP) {
                LOG.d(TAG, "Up Key Hit");
                this.loadUrl("javascript:cordova.fireDocumentEvent('volumeupbutton');");
                return true;
            }
            else
            {
                return super.onKeyDown(keyCode, event);
            }
        }
        else if(keyCode == KeyEvent.KEYCODE_BACK)
        {
            // Consume back key-down when history exists or JS bound it;
            // actual handling happens in onKeyUp.
            return !(this.startOfHistory()) || this.bound;
        }
        else if(keyCode == KeyEvent.KEYCODE_MENU)
        {
            //How did we get here? Is there a childView?
            View childView = this.getFocusedChild();
            if(childView != null)
            {
                //Make sure we close the keyboard if it's present
                InputMethodManager imm = (InputMethodManager) cordova.getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
                imm.hideSoftInputFromWindow(childView.getWindowToken(), 0);
                cordova.getActivity().openOptionsMenu();
                return true;
            } else {
                return super.onKeyDown(keyCode, event);
            }
        }

        return super.onKeyDown(keyCode, event);
    }

    // Handles back (custom view / JS binding / history / exit), menu, search,
    // and any keys registered via bindButton for key-up.
    @Override
    public boolean onKeyUp(int keyCode, KeyEvent event)
    {
        // If back key
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            // A custom view is currently displayed  (e.g. playing a video)
            if(mCustomView != null) {
                this.hideCustomView();
            } else {
                // The webview is currently displayed
                // If back key is bound, then send event to JavaScript
                if (this.bound) {
                    this.loadUrl("javascript:cordova.fireDocumentEvent('backbutton');");
                    return true;
                } else {
                    // If not bound
                    // Go to previous page in webview if it is possible to go back
                    if (this.backHistory()) {
                        return true;
                    }
                    // If not, then invoke default behaviour
                    else {
                        //this.activityState = ACTIVITY_EXITING;
                        //return false;
                        // If they hit back button when app is initializing, app should exit instead of hang until initilazation (CB2-458)
                        this.cordova.getActivity().finish();
                    }
                }
            }
        }
        // Legacy
        else if (keyCode == KeyEvent.KEYCODE_MENU) {
            if (this.lastMenuEventTime < event.getEventTime()) {
                this.loadUrl("javascript:cordova.fireDocumentEvent('menubutton');");
            }
            this.lastMenuEventTime = event.getEventTime();
            return super.onKeyUp(keyCode, event);
        }
        // If search key
        else if (keyCode == KeyEvent.KEYCODE_SEARCH) {
            this.loadUrl("javascript:cordova.fireDocumentEvent('searchbutton');");
            return true;
        }
        else if(keyUpCodes.contains(keyCode))
        {
            //What the hell should this do?
            return super.onKeyUp(keyCode, event);
        }

        //Does webkit change this behavior?
        return super.onKeyUp(keyCode, event);
    }

    // Bind/unbind the hardware back button to a JavaScript 'backbutton' event.
    public void bindButton(boolean override)
    {
        this.bound = override;
    }

    // Register a named volume button for key-down interception.
    public void bindButton(String button, boolean override) {
        // TODO Auto-generated method stub
        if (button.compareTo("volumeup")==0) {
            keyDownCodes.add(KeyEvent.KEYCODE_VOLUME_UP);
        }
        else if (button.compareTo("volumedown")==0) {
            keyDownCodes.add(KeyEvent.KEYCODE_VOLUME_DOWN);
        }
    }

    // Register an arbitrary key code for interception on key-down or key-up.
    // NOTE(review): the 'override' flag is ignored here — confirm intended.
    public void bindButton(int keyCode, boolean keyDown, boolean override) {
        if(keyDown) {
            keyDownCodes.add(keyCode);
        }
        else
        {
            keyUpCodes.add(keyCode);
        }
    }

    public boolean isBackButtonBound()
    {
        return this.bound;
    }

    // Activity onPause: fire 'pause' in JS, notify plugins, optionally freeze timers.
    public void handlePause(boolean keepRunning)
    {
        LOG.d(TAG, "Handle the pause");
        // Send pause event to JavaScript
        this.loadUrl("javascript:try{cordova.fireDocumentEvent('pause');}catch(e){console.log('exception firing pause event from native');};");

        // Forward to plugins
        if (this.pluginManager != null) {
            this.pluginManager.onPause(keepRunning);
        }

        // If app doesn't want to run in background
        if (!keepRunning) {
            // Pause JavaScript timers (including setInterval)
            this.pauseTimers();
        }
        paused = true;

    }

    // Activity onResume: fire 'resume' in JS, notify plugins, restart timers.
    public void handleResume(boolean keepRunning, boolean activityResultKeepRunning)
    {
        this.loadUrl("javascript:try{cordova.fireDocumentEvent('resume');}catch(e){console.log('exception firing resume event from native');};");

        // Forward to plugins
        if (this.pluginManager != null) {
            this.pluginManager.onResume(keepRunning);
        }

        // Resume JavaScript timers (including setInterval)
        this.resumeTimers();
        paused = false;
    }

    // Activity onDestroy: fire 'destroy' in JS, blank the page so onunload runs,
    // tear down plugins, and unregister the configuration-change receiver.
    public void handleDestroy()
    {
        // Send destroy event to JavaScript
        this.loadUrl("javascript:try{cordova.require('cordova/channel').onDestroy.fire();}catch(e){console.log('exception firing destroy event from native');};");

        // Load blank page so that JavaScript onunload is called
        this.loadUrl("about:blank");

        // Forward to plugins
        if (this.pluginManager != null) {
            this.pluginManager.onDestroy();
        }

        // unregister the receiver
        if (this.receiver != null) {
            try {
                this.cordova.getActivity().unregisterReceiver(this.receiver);
            } catch (Exception e) {
                Log.e(TAG, "Error unregistering configuration receiver: " + e.getMessage(), e);
            }
        }
    }

    public void onNewIntent(Intent intent)
    {
        //Forward to plugins
        if (this.pluginManager != null) {
            this.pluginManager.onNewIntent(intent);
        }
    }

    public boolean isPaused()
    {
        return paused;
    }

    public boolean hadKeyEvent()
    {
        return handleButton;
    }

    // Wrapping these functions in their own class prevents warnings in adb like:
    // VFY: unable to resolve virtual method 285: Landroid/webkit/WebSettings;.setAllowUniversalAccessFromFileURLs
    @TargetApi(16)
    private static class Level16Apis {
        static void enableUniversalAccess(WebSettings settings) {
            settings.setAllowUniversalAccessFromFileURLs(true);
        }
    }

    // Debug helper: logs every entry in the WebView's back/forward list.
    public void printBackForwardList() {
        WebBackForwardList currentList = this.copyBackForwardList();
        int currentSize = currentList.getSize();
        for(int i = 0; i < currentSize; ++i)
        {
            WebHistoryItem item = currentList.getItemAtIndex(i);
            String url = item.getUrl();
            LOG.d(TAG, "The URL at index: " + Integer.toString(i) + " is " + url );
        }
    }

    //Can Go Back is BROKEN!
    public boolean startOfHistory()
    {
        WebBackForwardList currentList = this.copyBackForwardList();
        WebHistoryItem item = currentList.getItemAtIndex(0);
        if( item!=null){ // Null-fence in case they haven't called loadUrl yet (CB-2458)
            String url = item.getUrl();
            String currentUrl = this.getUrl();
            LOG.d(TAG, "The current URL is: " + currentUrl);
            LOG.d(TAG, "The URL at item 0 is: " + url);
            return currentUrl.equals(url);
        }
        return false;
    }

    // Attach a full-screen custom view (e.g. HTML5 video) over the webview.
    public void showCustomView(View view, WebChromeClient.CustomViewCallback callback) {
        // This code is adapted from the original Android Browser code, licensed under the Apache License, Version 2.0
        Log.d(TAG, "showing Custom View");
        // if a view already exists then immediately terminate the new one
        if (mCustomView != null) {
            callback.onCustomViewHidden();
            return;
        }

        // Store the view and its callback for later (to kill it properly)
        mCustomView = view;
        mCustomViewCallback = callback;

        // Add the custom view to its container.
        ViewGroup parent = (ViewGroup) this.getParent();
        parent.addView(view, COVER_SCREEN_GRAVITY_CENTER);

        // Hide the content view.
        this.setVisibility(View.GONE);

        // Finally show the custom view container.
        parent.setVisibility(View.VISIBLE);
        parent.bringToFront();
    }

    // Remove the custom view and restore the webview.
    public void hideCustomView() {
        // This code is adapted from the original Android Browser code, licensed under the Apache License, Version 2.0
        Log.d(TAG, "Hidding Custom View");
        if (mCustomView == null) return;

        // Hide the custom view.
        mCustomView.setVisibility(View.GONE);

        // Remove the custom view from its container.
        ViewGroup parent = (ViewGroup) this.getParent();
        parent.removeView(mCustomView);
        mCustomView = null;
        mCustomViewCallback.onCustomViewHidden();

        // Show the content view.
        this.setVisibility(View.VISIBLE);
    }

    /**
     * if the video overlay is showing then we need to know
     * as it effects back button handling
     *
     * @return true if custom view is showing
     */
    public boolean isCustomViewShowing() {
        return mCustomView != null;
    }

    // Restores WebView state and re-initializes the plugin manager.
    public WebBackForwardList restoreState(Bundle savedInstanceState)
    {
        WebBackForwardList myList = super.restoreState(savedInstanceState);
        Log.d(TAG, "WebView restoration crew now restoring!");
        //Initialize the plugin manager once more
        this.pluginManager.init();
        return myList;
    }

    // Park an activity result for later delivery.
    public void storeResult(int requestCode, int resultCode, Intent intent) {
        mResult = new ActivityResult(requestCode, resultCode, intent);
    }

    public CordovaResourceApi getResourceApi() {
        return resourceApi;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pig.LoadFunc;
import org.apache.pig.PigException;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileSpec;
import org.apache.pig.impl.io.ReadToEndLoader;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhyPlanVisitor;
import org.apache.pig.impl.plan.VisitorException;
import org.apache.pig.pen.util.ExampleTuple;

/**
 * The load operator which is used in two ways:
 * 1) As a local operator it can be used to load files
 * 2) In the Map Reduce setting, it is used to create jobs
 *    from MapReduce operators which keep the loads and
 *    stores in the Map and Reduce Plans till the job is created
 */
public class POLoad extends PhysicalOperator {

    /**
     *
     */
    private static final long serialVersionUID = 1L;

    // The user defined load function or a default load function
    private transient LoadFunc loader = null;

    // The filespec on which the operator is based
    FileSpec lFile;

    // PigContext passed to us by the operator creator
    PigContext pc;

    // Indicates whether the loader setup is done or not;
    // lazily flipped to true on the first getNext() call.
    boolean setUpDone = false;

    // Alias for the POLoad
    private String signature;

    transient private final Log log = LogFactory.getLog(getClass());

    public POLoad(OperatorKey k) {
        this(k, -1, null);
    }

    public POLoad(OperatorKey k, FileSpec lFile) {
        this(k, -1, lFile);
    }

    public POLoad(OperatorKey k, int rp, FileSpec lFile) {
        super(k, rp);
        this.lFile = lFile;
    }

    public POLoad(OperatorKey k, LoadFunc lf) {
        this(k);
        this.loader = lf;
    }

    /**
     * Set up the loader by
     * 1) Instantiating the load func
     * 2) Opening an input stream to the specified file and
     * 3) Binding to the input stream at the specified offset.
     * @throws IOException
     */
    public void setUp() throws IOException {
        // Instantiate the user's load func, then wrap it so the whole
        // file can be read from offset 0 outside of a MR task.
        LoadFunc wrappedLoadFunc =
            (LoadFunc) PigContext.instantiateFuncFromSpec(lFile.getFuncSpec());
        loader = new ReadToEndLoader(wrappedLoadFunc,
                ConfigurationUtil.toConfiguration(pc.getProperties()),
                lFile.getFileName(), 0);
    }

    /**
     * At the end of processing, the inputstream is closed
     * using this method
     * @throws IOException
     */
    public void tearDown() throws IOException {
        // Allow setUp() to run again on a subsequent getNext().
        setUpDone = false;
    }

    /**
     * The main method used by this operator's successor
     * to read tuples from the specified file using the
     * specified load function.
     *
     * @return Whatever the loader returns
     *         A null from the loader is indicative
     *         of EOP and hence the tearDown of connection
     */
    @Override
    public Result getNext(Tuple t) throws ExecException {
        // Lazily initialize the loader on first use.
        if (!setUpDone && lFile != null) {
            try {
                setUp();
            } catch (IOException ioe) {
                int errCode = 2081;
                String msg = "Unable to setup the load function.";
                throw new ExecException(msg, errCode, PigException.BUG, ioe);
            }
            setUpDone = true;
        }

        Result outcome = new Result();
        try {
            outcome.result = loader.getNext();
            if (outcome.result == null) {
                // A null tuple signals end-of-processing; release the stream.
                outcome.returnStatus = POStatus.STATUS_EOP;
                tearDown();
            } else {
                outcome.returnStatus = POStatus.STATUS_OK;
            }
            if (lineageTracer != null) {
                // Wrap the tuple for illustrative-example (PEN) lineage tracking.
                outcome.result = new ExampleTuple((Tuple) outcome.result);
            }
        } catch (IOException e) {
            // Best-effort: log and hand back the Result as-is.
            log.error("Received error from loader function: " + e);
            return outcome;
        }
        return outcome;
    }

    @Override
    public String name() {
        String spec = (lFile != null) ? lFile.toString() : "DummyFil:DummyLdr";
        return getAliasString() + "Load" + "(" + spec + ")" + " - " + mKey.toString();
    }

    @Override
    public boolean supportsMultipleInputs() {
        return false;
    }

    @Override
    public boolean supportsMultipleOutputs() {
        return false;
    }

    @Override
    public void visit(PhyPlanVisitor v) throws VisitorException {
        v.visitLoad(this);
    }

    public FileSpec getLFile() {
        return lFile;
    }

    public void setLFile(FileSpec file) {
        lFile = file;
    }

    public PigContext getPc() {
        return pc;
    }

    public void setPc(PigContext pc) {
        this.pc = pc;
    }

    public String getSignature() {
        return signature;
    }

    public void setSignature(String signature) {
        this.signature = signature;
    }

    public LoadFunc getLoadFunc() {
        return this.loader;
    }
}
/* Copyright 2010 Semantic Discovery, Inc. (www.semanticdiscovery.com) This file is part of the Semantic Discovery Toolkit. The Semantic Discovery Toolkit is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. The Semantic Discovery Toolkit is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with The Semantic Discovery Toolkit. If not, see <http://www.gnu.org/licenses/>. */ package org.sd.atn; import java.io.Writer; import java.io.File; import java.io.IOException; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import org.sd.io.FileUtil; import org.sd.util.InputContext; import org.sd.util.MathUtil; import org.sd.atn.extract.Extraction; import org.sd.util.MathUtil; import org.sd.util.tree.Tree; import org.sd.util.tree.Tree2Dot; import org.sd.xml.DomContext; /** * Utility class for building html parse output. 
 * <p>
 * @author Spence Koehler
 */
public class HtmlParseOutput {

  // Directory for temporary parse-graph dot files; null disables graph output.
  private File tempDir;
  // Extraction types to include; null means include all types.
  private Set<String> extractionTypes;
  // When true, only the first interpretation (interpNum == 0) of each extraction is emitted.
  private boolean filterInterps;
  // Accumulates the generated html fragments.
  private StringBuilder output;

  public HtmlParseOutput() {
    this(null, null, false);
  }

  /**
   * Construct an html parse output builder.
   *
   * @param tempDir          directory for temp dot files (null to disable)
   * @param extractionTypes  comma-delimited types to include (null/empty for all)
   * @param filterInterps    true to keep only the first interpretation per extraction
   */
  public HtmlParseOutput(File tempDir, String extractionTypes, boolean filterInterps) {
    this.tempDir = tempDir;
    this.extractionTypes = null;

    // Parse the comma-delimited type list into a set, if provided.
    if (extractionTypes != null && !"".equals(extractionTypes.trim())) {
      this.extractionTypes = new HashSet<String>();
      final String[] types = extractionTypes.split(",");
      for (String type : types) this.extractionTypes.add(type);
    }

    this.filterInterps = filterInterps;
    this.output = new StringBuilder();
  }

  /**
   * Append html for the given extraction groups to this instance's output.
   *
   * @param extractionGroups  the groups to render (no-op when null)
   * @param briefResults      true for a single compact table; false for one
   *                          table per group via addExtractionContainer
   */
  public void addExtractionGroups(ExtractionGroups extractionGroups, boolean briefResults) {
    if (extractionGroups == null) return;

    if (briefResults) {
      output.append("<table border=\"1\" cellpadding=\"1\" cellspacing=\"1\" style=\"font-size: 80%;\">\n");
      output.append(" <tr><th>group</th><th>text</th><th>interp</th><th>label</th><th>conf</th><th>path</th></tr>\n");

      extractionGroups.visitExtractions(
        null, true,
        new ExtractionGroups.ExtractionVisitor() {
          // Last extraction actually emitted; used to drop consecutive duplicates.
          ExtractionContainer.ExtractionData priorExtraction = null;

          public boolean visitExtractionGroup(String source, int groupNum, String groupKey, ExtractionGroup group) {
            return true;
          }

          public void visitInterpretation(String source, int groupNum, String groupKey,
                                          ExtractionContainer.ExtractionData theExtraction,
                                          int interpNum, ParseInterpretation theInterpretation,
                                          String extractionKey) {
            // Only advance the duplicate-detection cursor when a row was emitted.
            if (addBriefExtraction(groupNum, groupKey, theExtraction, interpNum, theInterpretation, extractionKey, priorExtraction)) {
              priorExtraction = theExtraction;
            }
          }
        });

      output.append("</table>\n");
    }
    else {
      for (ExtractionGroup extractionGroup : extractionGroups.getExtractionGroups()) {
        openExtractionGroup(extractionGroup);
        for (ExtractionContainer extractionContainer : extractionGroup.getExtractions()) {
          addExtractionContainer(extractionContainer);
        }
        closeExtractionGroup();
      }
    }
  }
public String getOutput() { final StringBuilder result = new StringBuilder(); result. append("<div>\n"). append(output). append("</div>\n"); return result.toString(); } private final void openExtractionGroup(ExtractionGroup extractionGroup) { output. append("<div>\n"). append(" <div>").append(extractionGroup.getKey()).append(" </div>\n"). append(" <table border=\"1\">\n"); } private final void closeExtractionGroup() { output. append(" </table>\n"). append("</div>\n"); } private final boolean addBriefExtraction(int groupNum, String groupKey, ExtractionContainer.ExtractionData theExtraction, int interpNum, ParseInterpretation theInterpretation, String extractionKey, ExtractionContainer.ExtractionData priorExtraction) { if (filterInterps && interpNum > 0) return false; if (extractionTypes != null) { final String extractionType = theExtraction.getExtraction().getType(); if (!extractionTypes.contains(extractionType)) return false; } // ignore consecutive duplicates if (priorExtraction != null) { final Tree<String> priorParseTree = priorExtraction.getParseTree(); final Tree<String> curParseTree = theExtraction.getParseTree(); if (curParseTree.equals(priorParseTree)) return false; } final String parsedText = theExtraction.getParsedText(); output. append("<tr>\n"). append("<td>").append(groupKey).append("</td>\n"). append("<td>").append(getParsedTextHtml(theExtraction)).append("</td>\n"). append("<td>").append(interpNum).append("</td>\n"). append("<td>").append(theInterpretation.getClassification()).append("</td>\n"). append("<td>").append(MathUtil.doubleString(theInterpretation.getConfidence(), 6)).append("</td>\n"). append("<td>").append(extractionKey).append("</td>\n"). 
append("</tr>\n"); return true; } private String getParsedTextHtml(ExtractionContainer.ExtractionData theExtraction) { if (theExtraction == null) return "???"; final String parsedText = theExtraction.getParsedText(); final StringBuilder result = new StringBuilder(); if (tempDir != null) { final Tree2Dot<String> tree2dot = new Tree2Dot<String>(theExtraction.getParseTree()); tree2dot.setNodeAttribute("fontsize", "8"); File dotFile = null; File dotPng = null; try { dotFile = File.createTempFile("parse.", ".dot", tempDir); final Writer writer = FileUtil.getWriter(dotFile); tree2dot.writeDot(writer); writer.close(); } catch (IOException e) { System.err.println(new Date() + ": WARNING: Unable to convert parseTree to dotFile '" + dotFile + "'!"); e.printStackTrace(System.err); dotFile = null; } if (dotFile != null) { // <a href='webex/genParseGraph.jsp?dotFile=tmp/....dot' target='parseTree'>parsedText</a> result. append("<a href='genParseGraph.jsp?dotFile="). append(dotFile.getName()). append("' target='parseTree'>"). append(parsedText). 
append("</a>"); dotFile.deleteOnExit(); } } if (result.length() == 0) { result.append(parsedText); } return result.toString(); } private final void addExtractionContainer(ExtractionContainer extractionContainer) { final int curOutputLength = output.length(); openParseResult(extractionContainer.getGlobalStartPosition()); boolean addedOne = false; final ParseInterpretation theInterpretation = extractionContainer.getTheInterpretation(); if (theInterpretation != null) { addedOne = addExtractionData(extractionContainer.getTheExtraction(), extractionContainer.getTheInterpretation()); } else { for (ExtractionContainer.ExtractionData extractionData : extractionContainer.getExtractions()) { addedOne |= addExtractionData(extractionData, null); } } if (!addedOne) { output.setLength(curOutputLength); } else { closeParseResult(); } } private final void openParseResult(int globalStartPos) { output.append("<tr>\n"); } private final void closeParseResult() { output.append("</tr>\n"); } private final boolean addExtractionData(ExtractionContainer.ExtractionData extractionData, ParseInterpretation theInterpretation) { if (extractionData == null) return false; openParse(true, extractionData.getParseNum()); addParseContext(extractionData.getParsedText(), extractionData.getStartPos(), extractionData.getEndPos(), extractionData.getLength(), extractionData.getContext().getInputContext()); addParseExtraction(extractionData.getExtraction()); output.append("<td><table border=\"1\">\n"); // open parseInterpretations if (theInterpretation != null) { output.append('\n'); addParseInterpretation(theInterpretation, 1); } else if (extractionData.getInterpretations() != null && extractionData.getInterpretations().size() > 0) { output.append('\n'); int interpNum = 1; for (ParseInterpretation interpretation : extractionData.getInterpretations()) { addParseInterpretation(interpretation, interpNum++); } } output.append("</table></td>\n"); // close parseInterpretations closeParse(); return true; } 
private final void addParseContext(String parsedText, int startIndex, int endIndex, int length, InputContext inputContext) { output.append("<td>").append(parsedText).append("</td>\n"); output.append("<td>").append(startIndex).append("</td>\n"); output.append("<td>").append(endIndex).append("</td>\n"); output.append("<td>").append(length).append("</td>\n"); String key = ""; if (inputContext != null && inputContext instanceof DomContext) { final DomContext domContext = (DomContext)inputContext; key = domContext.getIndexedPathString(); } output.append("<td>").append(key).append("</td>\n"); } private final void addParseExtraction(Extraction extraction) { if (extraction == null) return; output.append("<td><table>\n"); doAddParseExtraction(extraction); output.append("</table></td>\n"); } private final void doAddParseExtraction(Extraction extraction) { if (extraction == null) return; output. append("<tr><td align=\"top\">").append(extraction.getType()).append(':').append("</td>\n"). append("<td>\n"); if (extraction.hasFields()) { output.append("<table>\n"); for (List<Extraction> fieldExtractions : extraction.getFields().values()) { if (fieldExtractions != null) { for (Extraction fieldExtraction : fieldExtractions) { doAddParseExtraction(fieldExtraction); } } } output.append("</table>\n"); } else { output.append(extraction.getText()); } output.append("</td>\n"); } private final void addParseInterpretation(ParseInterpretation interpretation, int interpNum) { output.append("<tr><td>").append(interpretation.getClassification()).append("</td>\n"); final double confidence = interpretation.getConfidence(); final String cString = MathUtil.doubleString(confidence, 6); output.append("<td>").append(cString).append("</td>\n"); output.append("<td>").append(interpretation.getToStringOverride()).append("</td>\n"); output.append("</tr>\n"); } private final void openParse(boolean isSelected, int parseNum) { output.append("<td>"); if (isSelected) output.append("*"); 
output.append("</td><td>").append(parseNum).append("</td>\n"); } private final void closeParse() { } }
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.handler.codec.http.multipart;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelException;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.util.internal.ObjectUtil;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;

/**
 * Disk FileUpload implementation that stores file into real files
 */
public class DiskFileUpload extends AbstractDiskHttpData implements FileUpload {
    // Global default base directory for temporary upload files; used when a
    // per-instance baseDir is not supplied.
    public static String baseDirectory;

    // Global default for whether temporary files are deleted on JVM exit.
    public static boolean deleteOnExitTemporaryFile = true;

    // Prefix/suffix used when creating the backing temporary file.
    public static final String prefix = "FUp_";
    public static final String postfix = ".tmp";

    // Per-instance settings captured at construction (fall back to the statics above).
    private final String baseDir;
    private final boolean deleteOnExit;

    private String filename;
    private String contentType;
    private String contentTransferEncoding;

    /**
     * Creates a disk-backed file upload.
     *
     * @param name                    the form-field name of this upload
     * @param filename                the client-supplied file name (must not be null)
     * @param contentType             the MIME content type (must not be null)
     * @param contentTransferEncoding the transfer encoding, may be null
     * @param charset                 the charset of the data
     * @param size                    the expected size in bytes
     * @param baseDir                 directory for the backing file; null falls back to {@link #baseDirectory}
     * @param deleteOnExit            whether to delete the backing file on JVM exit
     */
    public DiskFileUpload(String name, String filename, String contentType,
            String contentTransferEncoding, Charset charset, long size, String baseDir, boolean deleteOnExit) {
        super(name, charset, size);
        setFilename(filename);
        setContentType(contentType);
        setContentTransferEncoding(contentTransferEncoding);
        this.baseDir = baseDir == null ? baseDirectory : baseDir;
        this.deleteOnExit = deleteOnExit;
    }

    /**
     * Creates a disk-backed file upload using the static defaults
     * {@link #baseDirectory} and {@link #deleteOnExitTemporaryFile}.
     */
    public DiskFileUpload(String name, String filename, String contentType,
            String contentTransferEncoding, Charset charset, long size) {
        this(name, filename, contentType, contentTransferEncoding,
                charset, size, baseDirectory, deleteOnExitTemporaryFile);
    }

    @Override
    public HttpDataType getHttpDataType() {
        return HttpDataType.FileUpload;
    }

    @Override
    public String getFilename() {
        return filename;
    }

    @Override
    public void setFilename(String filename) {
        // filename is mandatory for a file upload
        this.filename = ObjectUtil.checkNotNull(filename, "filename");
    }

    @Override
    public int hashCode() {
        // hashCode/equals are delegated so all FileUpload implementations compare consistently
        return FileUploadUtil.hashCode(this);
    }

    @Override
    public boolean equals(Object o) {
        return o instanceof FileUpload && FileUploadUtil.equals(this, (FileUpload) o);
    }

    @Override
    public int compareTo(InterfaceHttpData o) {
        if (!(o instanceof FileUpload)) {
            throw new ClassCastException("Cannot compare " + getHttpDataType() +
                    " with " + o.getHttpDataType());
        }
        return compareTo((FileUpload) o);
    }

    /** Compares by the shared FileUpload ordering (delegated to FileUploadUtil). */
    public int compareTo(FileUpload o) {
        return FileUploadUtil.compareTo(this, o);
    }

    @Override
    public void setContentType(String contentType) {
        // contentType is mandatory for a file upload
        this.contentType = ObjectUtil.checkNotNull(contentType, "contentType");
    }

    @Override
    public String getContentType() {
        return contentType;
    }

    @Override
    public String getContentTransferEncoding() {
        return contentTransferEncoding;
    }

    @Override
    public void setContentTransferEncoding(String contentTransferEncoding) {
        // may be null: the transfer encoding header is optional
        this.contentTransferEncoding = contentTransferEncoding;
    }

    /**
     * Renders this upload as multipart-style headers plus debug state
     * (completed flag, in-memory flag, backing file path).
     */
    @Override
    public String toString() {
        File file = null;
        try {
            file = getFile();
        } catch (IOException e) {
            // Should not occur: the backing file is managed by this class.
            // Deliberately ignored; toString must not throw.
        }
        return HttpHeaderNames.CONTENT_DISPOSITION + ": " +
               HttpHeaderValues.FORM_DATA + "; " + HttpHeaderValues.NAME + "=\"" + getName() +
                "\"; " + HttpHeaderValues.FILENAME + "=\"" + filename + "\"\r\n" +
                HttpHeaderNames.CONTENT_TYPE + ": " + contentType +
                (getCharset() != null?
                        "; " + HttpHeaderValues.CHARSET + '=' + getCharset().name() + "\r\n" :
                        "\r\n") +
                HttpHeaderNames.CONTENT_LENGTH + ": " + length() + "\r\n" +
                "Completed: " + isCompleted() +
                "\r\nIsInMemory: " + isInMemory() + "\r\nRealFile: " +
                (file != null ? file.getAbsolutePath() : "null") + " DefaultDeleteAfter: " +
                deleteOnExitTemporaryFile;
    }

    @Override
    protected boolean deleteOnExit() {
        return deleteOnExit;
    }

    @Override
    protected String getBaseDirectory() {
        return baseDir;
    }

    @Override
    protected String getDiskFilename() {
        return "upload";
    }

    @Override
    protected String getPostfix() {
        return postfix;
    }

    @Override
    protected String getPrefix() {
        return prefix;
    }

    /** Returns a copy backed by a copy of this upload's content (null content allowed). */
    @Override
    public FileUpload copy() {
        final ByteBuf content = content();
        return replace(content != null ? content.copy() : null);
    }

    /** Returns a duplicate sharing this upload's content (null content allowed). */
    @Override
    public FileUpload duplicate() {
        final ByteBuf content = content();
        return replace(content != null ? content.duplicate() : null);
    }

    /**
     * Returns a retained duplicate; the retained content is released again if
     * constructing the replacement fails, so no reference is leaked.
     */
    @Override
    public FileUpload retainedDuplicate() {
        ByteBuf content = content();
        if (content != null) {
            content = content.retainedDuplicate();
            boolean success = false;
            try {
                FileUpload duplicate = replace(content);
                success = true;
                return duplicate;
            } finally {
                if (!success) {
                    content.release();
                }
            }
        } else {
            return replace(null);
        }
    }

    /**
     * Creates a new DiskFileUpload with the same metadata and the given content.
     * IOException while storing the content is rethrown as ChannelException.
     */
    @Override
    public FileUpload replace(ByteBuf content) {
        DiskFileUpload upload = new DiskFileUpload(
                getName(), getFilename(), getContentType(), getContentTransferEncoding(), getCharset(), size,
                baseDir, deleteOnExit);
        if (content != null) {
            try {
                upload.setContent(content);
            } catch (IOException e) {
                throw new ChannelException(e);
            }
        }
        return upload;
    }

    // Fluent overrides below narrow the return type to FileUpload.

    @Override
    public FileUpload retain(int increment) {
        super.retain(increment);
        return this;
    }

    @Override
    public FileUpload retain() {
        super.retain();
        return this;
    }

    @Override
    public FileUpload touch() {
        super.touch();
        return this;
    }

    @Override
    public FileUpload touch(Object hint) {
        super.touch(hint);
        return this;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysds.test.functions.privacy; import java.util.Arrays; import org.apache.sysds.api.DMLScript; import org.apache.sysds.common.Types; import org.apache.sysds.runtime.DMLRuntimeException; import org.apache.sysds.runtime.meta.MatrixCharacteristics; import org.apache.sysds.runtime.privacy.PrivacyConstraint; import org.apache.sysds.runtime.privacy.PrivacyConstraint.PrivacyLevel; import org.apache.sysds.test.AutomatedTestBase; import org.apache.sysds.test.TestConfiguration; import org.apache.sysds.test.TestUtils; import org.junit.Ignore; import org.junit.Test; import static java.lang.Thread.sleep; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @net.jcip.annotations.NotThreadSafe public class FederatedWorkerHandlerTest extends AutomatedTestBase { private static final String TEST_DIR = "functions/privacy/"; private static final String TEST_DIR_fed = "functions/federated/"; private static final String TEST_DIR_SCALAR = TEST_DIR_fed + "matrix_scalar/"; private final static String TEST_CLASS_DIR = TEST_DIR + FederatedWorkerHandlerTest.class.getSimpleName() + "/"; private final static String TEST_CLASS_DIR_SCALAR = TEST_DIR + 
FederatedWorkerHandlerTest.class.getSimpleName() + "/"; private static final String TEST_PROG_SCALAR_ADDITION_MATRIX = "FederatedScalarAdditionMatrix"; private final static String AGGREGATION_TEST_NAME = "FederatedSumTest"; private final static String TRANSFER_TEST_NAME = "FederatedRCBindTest"; private final static String MATVECMULT_TEST_NAME = "FederatedMultiplyTest"; private static final String FEDERATED_WORKER_HOST = "localhost"; private static final int FEDERATED_WORKER_PORT = 1222; private final static int blocksize = 1024; private final int rows = 10; private final int cols = 10; @Override public void setUp() { TestUtils.clearAssertionInformation(); addTestConfiguration("scalar", new TestConfiguration(TEST_CLASS_DIR_SCALAR, TEST_PROG_SCALAR_ADDITION_MATRIX, new String [] {"R"})); addTestConfiguration("aggregation", new TestConfiguration(TEST_CLASS_DIR, AGGREGATION_TEST_NAME, new String[] {"S.scalar", "R", "C"})); addTestConfiguration("transfer", new TestConfiguration(TEST_CLASS_DIR, TRANSFER_TEST_NAME, new String[] {"R", "C"})); addTestConfiguration("matvecmult", new TestConfiguration(TEST_CLASS_DIR, MATVECMULT_TEST_NAME, new String[] {"Z"})); } @Test public void scalarPrivateTest(){ scalarTest(PrivacyLevel.Private, DMLRuntimeException.class); } @Test public void scalarPrivateAggregationTest(){ scalarTest(PrivacyLevel.PrivateAggregation, DMLRuntimeException.class); } @Test public void scalarNonePrivateTest(){ scalarTest(PrivacyLevel.None, null); } private void scalarTest(PrivacyLevel privacyLevel, Class<?> expectedException){ getAndLoadTestConfiguration("scalar"); double[][] m = getRandomMatrix(this.rows, this.cols, -1, 1, 1.0, 1); PrivacyConstraint pc = new PrivacyConstraint(privacyLevel); writeInputMatrixWithMTD("M", m, false, new MatrixCharacteristics(rows, cols, blocksize, rows * cols), pc); int s = TestUtils.getRandomInt(); double[][] r = new double[rows][cols]; for(int i = 0; i < rows; i++) { for(int j = 0; j < cols; j++) { r[i][j] = m[i][j] + s; } } if 
(expectedException == null) writeExpectedMatrix("R", r); runGenericScalarTest(TEST_PROG_SCALAR_ADDITION_MATRIX, s, expectedException, privacyLevel); } private void runGenericScalarTest(String dmlFile, int s, Class<?> expectedException, PrivacyLevel privacyLevel) { boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG; Types.ExecMode platformOld = rtplatform; Thread t = null; try { // we need the reference file to not be written to hdfs, so we get the correct format rtplatform = Types.ExecMode.SINGLE_NODE; programArgs = new String[] {"-w", Integer.toString(FEDERATED_WORKER_PORT)}; t = new Thread(() -> runTest(true, false, null, -1)); t.start(); sleep(FED_WORKER_WAIT); fullDMLScriptName = SCRIPT_DIR + TEST_DIR_SCALAR + dmlFile + ".dml"; programArgs = new String[]{"-checkPrivacy", "-nvargs", "in=" + TestUtils.federatedAddress(FEDERATED_WORKER_HOST, FEDERATED_WORKER_PORT, input("M")), "rows=" + Integer.toString(rows), "cols=" + Integer.toString(cols), "scalar=" + Integer.toString(s), "out=" + output("R")}; boolean exceptionExpected = (expectedException != null); runTest(true, exceptionExpected, expectedException, -1); if ( !exceptionExpected ) compareResults(); } catch (InterruptedException e) { fail("InterruptedException thrown" + e.getMessage() + " " + Arrays.toString(e.getStackTrace())); } finally { assertTrue("The privacy level " + privacyLevel.toString() + " should have been checked during execution", checkedPrivacyConstraintsContains(privacyLevel)); rtplatform = platformOld; TestUtils.shutdownThread(t); rtplatform = platformOld; DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld; } } @Test public void aggregatePrivateTest() { federatedSum(Types.ExecMode.SINGLE_NODE, PrivacyLevel.Private, DMLRuntimeException.class); } @Test public void aggregatePrivateAggregationTest() { federatedSum(Types.ExecMode.SINGLE_NODE, PrivacyLevel.PrivateAggregation, null); } @Test public void aggregateNonePrivateTest() { federatedSum(Types.ExecMode.SINGLE_NODE, PrivacyLevel.None, 
null); } public void federatedSum(Types.ExecMode execMode, PrivacyLevel privacyLevel, Class<?> expectedException) { boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG; Types.ExecMode platformOld = rtplatform; getAndLoadTestConfiguration("aggregation"); String HOME = SCRIPT_DIR + TEST_DIR_fed; double[][] A = getRandomMatrix(rows, cols, -10, 10, 1, 1); writeInputMatrixWithMTD("A", A, false, new MatrixCharacteristics(rows, cols, blocksize, rows * cols), new PrivacyConstraint(privacyLevel)); int port = getRandomAvailablePort(); Thread t = startLocalFedWorkerThread(port); // we need the reference file to not be written to hdfs, so we get the correct format rtplatform = Types.ExecMode.SINGLE_NODE; // Run reference dml script with normal matrix for Row/Col sum fullDMLScriptName = HOME + AGGREGATION_TEST_NAME + "Reference.dml"; programArgs = new String[] {"-args", input("A"), input("A"), expected("R"), expected("C")}; runTest(true, false, null, -1); // write expected sum double sum = 0; for(double[] doubles : A) { sum += Arrays.stream(doubles).sum(); } sum *= 2; if ( expectedException == null ) writeExpectedScalar("S", sum); // reference file should not be written to hdfs, so we set platform here rtplatform = execMode; if(rtplatform == Types.ExecMode.SPARK) { DMLScript.USE_LOCAL_SPARK_CONFIG = true; } TestConfiguration config = availableTestConfigurations.get("aggregation"); loadTestConfiguration(config); fullDMLScriptName = HOME + AGGREGATION_TEST_NAME + ".dml"; programArgs = new String[] {"-checkPrivacy", "-nvargs", "in=" + TestUtils.federatedAddress(port, input("A")), "rows=" + rows, "cols=" + cols, "out_S=" + output("S"), "out_R=" + output("R"), "out_C=" + output("C")}; runTest(true, (expectedException != null), expectedException, -1); // compare all sums via files if ( expectedException == null ) compareResults(1e-11); assertTrue("The privacy level " + privacyLevel.toString() + " should have been checked during execution", 
checkedPrivacyConstraintsContains(privacyLevel)); TestUtils.shutdownThread(t); rtplatform = platformOld; DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld; } @Test @Ignore public void transferPrivateTest() { federatedRCBind(Types.ExecMode.SINGLE_NODE, PrivacyLevel.Private, DMLRuntimeException.class); } @Test @Ignore public void transferPrivateAggregationTest() { federatedRCBind(Types.ExecMode.SINGLE_NODE, PrivacyLevel.PrivateAggregation, DMLRuntimeException.class); } @Test @Ignore public void transferNonePrivateTest() { federatedRCBind(Types.ExecMode.SINGLE_NODE, PrivacyLevel.None, null); } public void federatedRCBind(Types.ExecMode execMode, PrivacyLevel privacyLevel, Class<?> expectedException) { boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG; Types.ExecMode platformOld = rtplatform; getAndLoadTestConfiguration("transfer"); String HOME = SCRIPT_DIR + TEST_DIR_fed; double[][] A = getRandomMatrix(rows, cols, -10, 10, 1, 1); writeInputMatrixWithMTD("A", A, false, new MatrixCharacteristics(rows, cols, blocksize, rows * cols), new PrivacyConstraint(privacyLevel)); int port = getRandomAvailablePort(); Thread t = startLocalFedWorkerThread(port); // we need the reference file to not be written to hdfs, so we get the correct format rtplatform = Types.ExecMode.SINGLE_NODE; // Run reference dml script with normal matrix for Row/Col sum fullDMLScriptName = HOME + TRANSFER_TEST_NAME + "Reference.dml"; programArgs = new String[] {"-checkPrivacy", "-args", input("A"), expected("R"), expected("C")}; runTest(true, false, null, -1); // reference file should not be written to hdfs, so we set platform here rtplatform = execMode; if(rtplatform == Types.ExecMode.SPARK) { DMLScript.USE_LOCAL_SPARK_CONFIG = true; } TestConfiguration config = availableTestConfigurations.get("transfer"); loadTestConfiguration(config); fullDMLScriptName = HOME + TRANSFER_TEST_NAME + ".dml"; programArgs = new String[] {"-checkPrivacy", "-nvargs", "in=" + TestUtils.federatedAddress(port, 
input("A")), "rows=" + rows, "cols=" + cols, "out_R=" + output("R"), "out_C=" + output("C")}; runTest(true, (expectedException != null), expectedException, -1); // compare all sums via files if ( expectedException == null ) compareResults(1e-11); assertTrue("Privacy constraint with level " + privacyLevel + " should have been checked during execution", checkedPrivacyConstraintsContains(privacyLevel)); TestUtils.shutdownThread(t); rtplatform = platformOld; DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld; } @Test public void matVecMultPrivateTest() { federatedMultiply(Types.ExecMode.SINGLE_NODE, PrivacyLevel.Private, DMLRuntimeException.class); } @Test public void matVecMultPrivateAggregationTest() { federatedMultiply(Types.ExecMode.SINGLE_NODE, PrivacyLevel.PrivateAggregation, null); } @Test public void matVecMultNonePrivateTest() { federatedMultiply(Types.ExecMode.SINGLE_NODE, PrivacyLevel.None, null); } public void federatedMultiply(Types.ExecMode execMode, PrivacyLevel privacyLevel, Class<?> expectedException) { boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG; Types.ExecMode platformOld = rtplatform; rtplatform = execMode; if(rtplatform == Types.ExecMode.SPARK) { DMLScript.USE_LOCAL_SPARK_CONFIG = true; } Thread t1, t2; getAndLoadTestConfiguration("matvecmult"); String HOME = SCRIPT_DIR + TEST_DIR_fed; // write input matrices int halfRows = rows / 2; // We have two matrices handled by a single federated worker double[][] X1 = getRandomMatrix(halfRows, cols, 0, 1, 1, 42); double[][] X2 = getRandomMatrix(halfRows, cols, 0, 1, 1, 1340); // And another two matrices handled by a single federated worker double[][] Y1 = getRandomMatrix(cols, halfRows, 0, 1, 1, 44); double[][] Y2 = getRandomMatrix(cols, halfRows, 0, 1, 1, 21); writeInputMatrixWithMTD("X1", X1, false, new MatrixCharacteristics(halfRows, cols, blocksize, halfRows * cols), new PrivacyConstraint(privacyLevel)); writeInputMatrixWithMTD("X2", X2, false, new MatrixCharacteristics(halfRows, cols, 
blocksize, halfRows * cols)); writeInputMatrixWithMTD("Y1", Y1, false, new MatrixCharacteristics(cols, halfRows, blocksize, halfRows * cols)); writeInputMatrixWithMTD("Y2", Y2, false, new MatrixCharacteristics(cols, halfRows, blocksize, halfRows * cols)); int port1 = getRandomAvailablePort(); int port2 = getRandomAvailablePort(); t1 = startLocalFedWorkerThread(port1); t2 = startLocalFedWorkerThread(port2); TestConfiguration config = availableTestConfigurations.get("matvecmult"); loadTestConfiguration(config); // Run reference dml script with normal matrix fullDMLScriptName = HOME + MATVECMULT_TEST_NAME + "Reference.dml"; programArgs = new String[] {"-nvargs", "X1=" + input("X1"), "X2=" + input("X2"), "Y1=" + input("Y1"), "Y2=" + input("Y2"), "Z=" + expected("Z")}; runTest(true, false, null, -1); // Run actual dml script with federated matrix fullDMLScriptName = HOME + MATVECMULT_TEST_NAME + ".dml"; programArgs = new String[] {"-checkPrivacy", "-nvargs", "X1=" + TestUtils.federatedAddress(port1, input("X1")), "X2=" + TestUtils.federatedAddress(port2, input("X2")), "Y1=" + TestUtils.federatedAddress(port1, input("Y1")), "Y2=" + TestUtils.federatedAddress(port2, input("Y2")), "r=" + rows, "c=" + cols, "hr=" + halfRows, "Z=" + output("Z")}; runTest(true, (expectedException != null), expectedException, -1); // compare via files if (expectedException == null) compareResults(1e-9); assertTrue("Privacy constraint with level " + privacyLevel + " should have been checked during execution", checkedPrivacyConstraintsContains(privacyLevel)); TestUtils.shutdownThreads(t1, t2); rtplatform = platformOld; DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld; } }
/* * Copyright (c) 2002-2018 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.collection.primitive.hopscotch; import java.util.HashSet; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import org.junit.Test; import org.neo4j.collection.primitive.Primitive; import org.neo4j.collection.primitive.PrimitiveIntSet; import org.neo4j.collection.primitive.PrimitiveIntVisitor; import org.neo4j.collection.primitive.PrimitiveLongSet; import org.neo4j.collection.primitive.PrimitiveLongVisitor; import org.neo4j.collection.primitive.hopscotch.HopScotchHashingAlgorithm.Monitor; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.neo4j.collection.primitive.Primitive.VALUE_MARKER; import static org.neo4j.collection.primitive.hopscotch.HopScotchHashingAlgorithm.NO_MONITOR; public class PrimitiveLongSetTest { private PrimitiveLongHashSet newSet( int h ) { return newSet( h, NO_MONITOR ); } private PrimitiveLongHashSet newSet( int h, Monitor 
monitor ) { return new PrimitiveLongHashSet( new LongKeyTable<>( h, VALUE_MARKER ), VALUE_MARKER, monitor ); } @Test public void shouldContainAddedValues_generated_1() throws Exception { // GIVEN PrimitiveLongSet set = newSet( 15 ); Set<Long> expectedValues = new HashSet<>(); long[] valuesToAdd = new long[] { 1207043189, 380713862, 1902858197, 1996873101, 1357024628, 1044248801, 1558157493, 2040311008, 2017660098, 1332670047, 663662790, 2063747422, 1554358949, 1761477445, 1141526838, 1698679618, 1279767067, 508574, 2071755904 }; for ( long key : valuesToAdd ) { set.add( key ); expectedValues.add( key ); } // WHEN/THEN boolean existedBefore = set.contains( 679990875 ); boolean added = set.add( 679990875 ); boolean existsAfter = set.contains( 679990875 ); assertFalse( "679990875 should not exist before adding here", existedBefore ); assertTrue( "679990875 should be reported as added here", added ); assertTrue( "679990875 should exist", existsAfter ); expectedValues.add( 679990875L ); final Set<Long> visitedKeys = new HashSet<>(); set.visitKeys( new PrimitiveLongVisitor() { @Override public boolean visited( long value ) { assertTrue( visitedKeys.add( value ) ); return false; } } ); assertEquals( expectedValues, visitedKeys ); } @Test public void shouldContainAddedValues_generated_6() throws Exception { // GIVEN PrimitiveLongSet set = newSet( 11 ); set.add( 492321488 ); set.add( 877087251 ); set.add( 1809668113 ); set.add( 1766034186 ); set.add( 1879253467 ); set.add( 669295491 ); set.add( 176011165 ); set.add( 1638959981 ); set.add( 1093132636 ); set.add( 6133241 ); set.add( 486112773 ); set.add( 205218385 ); set.add( 1756491867 ); set.add( 90390732 ); set.add( 937266036 ); set.add( 1269020584 ); set.add( 521469166 ); set.add( 1314928747 ); // WHEN/THEN boolean existedBefore = set.contains( 2095121629 ); boolean added = set.add( 2095121629 ); boolean existsAfter = set.contains( 2095121629 ); assertFalse( "2095121629 should not exist before adding here", existedBefore 
); assertTrue( "2095121629 should be reported as added here", added ); assertTrue( "2095121629 should exist", existsAfter ); } @Test public void shouldContainAddedValues_generated_4() throws Exception { // GIVEN PrimitiveLongSet set = newSet( 9 ); set.add( 1934106304 ); set.add( 783754072 ); set.remove( 1934106304 ); // WHEN/THEN boolean existedBefore = set.contains( 783754072 ); boolean added = set.add( 783754072 ); boolean existsAfter = set.contains( 783754072 ); assertTrue( "783754072 should exist before adding here", existedBefore ); assertFalse( "783754072 should not be reported as added here", added ); assertTrue( "783754072 should exist", existsAfter ); } @Test public void shouldOnlyContainAddedValues_generated_8() throws Exception { // GIVEN PrimitiveLongSet set = newSet( 7 ); set.add( 375712513 ); set.remove( 1507941820 ); set.add( 671750317 ); set.remove( 1054641019 ); set.add( 671750317 ); set.add( 1768202223 ); set.add( 1768202223 ); set.add( 1927780165 ); set.add( 2139399764 ); set.remove( 1243370828 ); set.add( 1768202223 ); set.add( 1335041891 ); set.remove( 1578984313 ); set.add( 1227954408 ); set.remove( 946917826 ); set.add( 1768202223 ); set.add( 375712513 ); set.add( 1668515054 ); set.add( 401047579 ); set.add( 33226244 ); set.add( 126791689 ); set.add( 401047579 ); set.add( 1963437853 ); set.add( 1739617766 ); set.add( 671750317 ); set.add( 401047579 ); set.add( 789094467 ); set.add( 1291421506 ); set.add( 1694968582 ); set.add( 1508353936 ); // WHEN/THEN boolean existedBefore = set.contains( 1739617766 ); boolean added = set.add( 1739617766 ); boolean existsAfter = set.contains( 1739617766 ); assertTrue( "1739617766 should exist before adding here", existedBefore ); assertFalse( "1739617766 should not be reported as added here", added ); assertTrue( "1739617766 should exist", existsAfter ); } @Test public void shouldContainReallyBigLongValue() throws Exception { // GIVEN PrimitiveLongSet set = newSet( 10 ); set.add( 7416509207113022571L ); // 
WHEN/THEN boolean existedBefore = set.contains( 7620037383187366331L ); boolean added = set.add( 7620037383187366331L ); boolean existsAfter = set.contains( 7620037383187366331L ); assertFalse( "7620037383187366331 should not exist before adding here", existedBefore ); assertTrue( "7620037383187366331 should be reported as added here", added ); assertTrue( "7620037383187366331 should exist", existsAfter ); } @Test public void shouldOnlyContainAddedValues() throws Exception { // GIVEN PrimitiveLongSet set = newSet( 13 ); set.add( 52450040186687566L ); set.add( 52450040186687566L ); set.add( 5165002753277288833L ); set.add( 4276883133717080762L ); set.add( 5547940863757133161L ); set.add( 8933830774911919116L ); set.add( 3298254474623565974L ); set.add( 3366017425691021883L ); set.add( 8933830774911919116L ); set.add( 2962608069916354604L ); set.add( 3366017425691021883L ); set.remove( 4008464697042048519L ); set.add( 5547940863757133161L ); set.add( 52450040186687566L ); set.add( 4276883133717080762L ); set.remove( 3298254474623565974L ); set.remove( 180852386934131061L ); set.add( 4835176885665539239L ); set.add( 52450040186687566L ); set.add( 4591251124405056753L ); set.add( 5165002753277288833L ); set.add( 8933830774911919116L ); set.remove( 3458250832356869483L ); set.add( 3038543946711308923L ); set.add( 8743060827282266460L ); set.add( 5771902951077476377L ); set.add( 4591251124405056753L ); set.add( 4835176885665539239L ); set.remove( 4827343064671369647L ); set.add( 1533535091190658734L ); set.remove( 7125666881901305989L ); set.add( 1533535091190658734L ); set.add( 52450040186687566L ); set.remove( 1333521853804287175L ); set.add( 2962608069916354604L ); set.add( 5914630622072544054L ); set.add( 52450040186687566L ); set.add( 8933830774911919116L ); set.add( 6198968672674664718L ); set.add( 6239021001199390909L ); set.add( 6563452500080365738L ); set.add( 6128819131542184648L ); set.add( 5914630622072544054L ); set.add( 7024933384543504364L ); set.remove( 
3949644814017615281L ); set.add( 3459376060749741528L ); set.add( 3201250389951283395L ); set.add( 4463681497523421181L ); set.add( 4304197328678536531L ); set.remove( 4559066538220393098L ); set.add( 2870119173652414003L ); set.add( 4048902329274369372L ); set.add( 3366017425691021883L ); set.remove( 1092409052848583664L ); set.add( 7024933384543504364L ); set.add( 4276883133717080762L ); set.add( 5914630622072544054L ); set.add( 4048902329274369372L ); set.add( 4304197328678536531L ); set.add( 4151178923662618318L ); set.remove( 51389524801735953L ); set.add( 5371788772386487501L ); set.remove( 8933830774911919116L ); set.add( 4928410670964886834L ); set.add( 8306393274966855450L ); set.add( 2870119173652414003L ); set.add( 8281622709908651825L ); set.remove( 9194058056102544672L ); set.remove( 5547940863757133161L ); set.add( 9184590238993521817L ); set.add( 5085293141623130492L ); set.add( 5633993155928642090L ); set.remove( 8794875254017117580L ); set.add( 5894404415376700909L ); set.add( 4835176885665539239L ); set.remove( 8743060827282266460L ); set.remove( 3460096065015553722L ); set.remove( 3296380689310185627L ); set.add( 337242488691685550L ); set.add( 6239021001199390909L ); set.add( 9104240733803011297L ); set.add( 807326424150812437L ); set.add( 3336115330297894183L ); set.add( 1788796898879121715L ); set.add( 5756965080438171769L ); set.remove( 4366313798399763194L ); set.add( 6198968672674664718L ); set.add( 486897301084183614L ); set.add( 2870119173652414003L ); set.add( 5085293141623130492L ); set.add( 5771902951077476377L ); set.remove( 6563452500080365738L ); set.add( 5347453991851285676L ); set.add( 7437999035528158926L ); set.add( 3223908005448803428L ); set.add( 4300856565210203390L ); set.remove( 4732570527126410147L ); set.add( 2180591071166584277L ); set.add( 5160374384234262648L ); set.remove( 5165002753277288833L ); set.add( 4463681497523421181L ); set.add( 7360196143740041480L ); set.add( 4928410670964886834L ); set.add( 
807326424150812437L ); set.remove( 4069279832998820447L ); set.remove( 337242488691685550L ); set.add( 3201250389951283395L ); set.add( 4012293068834101219L ); set.add( 2333643358471038273L ); set.add( 1158824602601458449L ); set.remove( 3906518453155830597L ); set.add( 7402912598585277900L ); set.add( 6556025329057634951L ); set.add( 6684709657047103197L ); set.remove( 3448774195820272496L ); set.add( 715736913341007544L ); set.add( 9104240733803011297L ); // WHEN/THEN boolean existedBefore = set.contains( 1103190229303827372L ); boolean added = set.add( 1103190229303827372L ); boolean existsAfter = set.contains( 1103190229303827372L ); assertFalse( "1103190229303827372 should not exist before adding here", existedBefore ); assertTrue( "1103190229303827372 should be reported as added here", added ); assertTrue( "1103190229303827372 should exist", existsAfter ); } @SuppressWarnings( "unchecked" ) @Test public void longVisitorShouldSeeAllEntriesIfItDoesNotBreakOut() { // GIVEN PrimitiveLongSet set = Primitive.longSet(); set.add( 1 ); set.add( 2 ); set.add( 3 ); PrimitiveLongVisitor<RuntimeException> visitor = mock( PrimitiveLongVisitor.class ); // WHEN set.visitKeys( visitor ); // THEN verify( visitor ).visited( 1 ); verify( visitor ).visited( 2 ); verify( visitor ).visited( 3 ); verifyNoMoreInteractions( visitor ); } @Test public void longVisitorShouldNotSeeEntriesAfterRequestingBreakOut() { // GIVEN PrimitiveIntSet map = Primitive.intSet(); map.add( 1 ); map.add( 2 ); map.add( 3 ); map.add( 4 ); final AtomicInteger counter = new AtomicInteger(); // WHEN map.visitKeys( new PrimitiveIntVisitor<RuntimeException>() { @Override public boolean visited( int value ) { return counter.incrementAndGet() > 2; } } ); // THEN assertThat( counter.get(), is( 3 ) ); } @SuppressWarnings( "unchecked" ) @Test public void intVisitorShouldSeeAllEntriesIfItDoesNotBreakOut() { // GIVEN PrimitiveIntSet set = Primitive.intSet(); set.add( 1 ); set.add( 2 ); set.add( 3 ); 
PrimitiveIntVisitor<RuntimeException> visitor = mock( PrimitiveIntVisitor.class ); // WHEN set.visitKeys( visitor ); // THEN verify( visitor ).visited( 1 ); verify( visitor ).visited( 2 ); verify( visitor ).visited( 3 ); verifyNoMoreInteractions( visitor ); } @Test public void intVisitorShouldNotSeeEntriesAfterRequestingBreakOut() { // GIVEN PrimitiveIntSet map = Primitive.intSet(); map.add( 1 ); map.add( 2 ); map.add( 3 ); map.add( 4 ); final AtomicInteger counter = new AtomicInteger(); // WHEN map.visitKeys( new PrimitiveIntVisitor<RuntimeException>() { @Override public boolean visited( int value ) { return counter.incrementAndGet() > 2; } } ); // THEN assertThat( counter.get(), is( 3 ) ); } @Test public void shouldHandleEmptySet() throws Exception { // GIVEN PrimitiveLongSet set = Primitive.longSet( 0 ); // THEN assertFalse( set.contains( 564 ) ); } }
/*
 * Javassist, a Java-bytecode translator toolkit.
 * Copyright (C) 1999- Shigeru Chiba. All Rights Reserved.
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License.  Alternatively, the contents of this file may be used under
 * the terms of the GNU Lesser General Public License Version 2.1 or later,
 * or the Apache License Version 2.0.
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 */

package javassist.bytecode;

import java.io.DataInputStream;
import java.io.IOException;
import java.util.Map;
import java.util.ArrayList;
import javassist.CtClass;

/**
 * <code>Signature_attribute</code>.
 *
 * <p>Stores a generic type signature (JVMS 4.7.9) for a class, method, or
 * field, and provides parsers that turn the encoded signature string into a
 * tree of {@link Type} objects.
 */
public class SignatureAttribute extends AttributeInfo {
    /**
     * The name of this attribute <code>"Signature"</code>.
     */
    public static final String tag = "Signature";

    SignatureAttribute(ConstPool cp, int n, DataInputStream in)
        throws IOException
    {
        super(cp, n, in);
    }

    /**
     * Constructs a <code>Signature</code> attribute.
     *
     * @param cp        a constant pool table.
     * @param signature the signature represented by this attribute.
     */
    public SignatureAttribute(ConstPool cp, String signature) {
        super(cp, tag);
        int index = cp.addUtf8Info(signature);
        // attribute payload is a single u2 constant-pool index, big-endian
        byte[] bvalue = new byte[2];
        bvalue[0] = (byte)(index >>> 8);
        bvalue[1] = (byte)index;
        set(bvalue);
    }

    /**
     * Returns the generic signature indicated by <code>signature_index</code>.
     *
     * @see #toClassSignature(String)
     * @see #toMethodSignature(String)
     * @see #toFieldSignature(String)
     */
    public String getSignature() {
        return getConstPool().getUtf8Info(ByteArray.readU16bit(get(), 0));
    }

    /**
     * Sets <code>signature_index</code> to the index of the given generic signature,
     * which is added to a constant pool.
     *
     * @param sig   new signature.
     * @since 3.11
     */
    public void setSignature(String sig) {
        int index = getConstPool().addUtf8Info(sig);
        ByteArray.write16bit(index, info, 0);
    }

    /**
     * Makes a copy.  Class names are replaced according to the
     * given <code>Map</code> object.
     *
     * @param newCp     the constant pool table used by the new copy.
     * @param classnames        pairs of replaced and substituted
     *                          class names.
     */
    public AttributeInfo copy(ConstPool newCp, Map classnames) {
        // the signature string itself is copied verbatim; renaming is done
        // separately through renameClass().
        return new SignatureAttribute(newCp, getSignature());
    }

    void renameClass(String oldname, String newname) {
        String sig = renameClass(getSignature(), oldname, newname);
        setSignature(sig);
    }

    void renameClass(Map classnames) {
        String sig = renameClass(getSignature(), classnames);
        setSignature(sig);
    }

    static String renameClass(String desc, String oldname, String newname) {
        Map map = new java.util.HashMap();
        map.put(oldname, newname);
        return renameClass(desc, map);
    }

    /**
     * Rewrites every <code>L...;</code> class reference in the signature
     * string according to the old-name to new-name map.  Returns the input
     * unchanged when the map is null or nothing matches.
     */
    static String renameClass(String desc, Map map) {
        if (map == null)
            return desc;

        StringBuilder newdesc = new StringBuilder();
        int head = 0;       // start of the not-yet-copied portion of desc
        int i = 0;
        for (;;) {
            int j = desc.indexOf('L', i);
            if (j < 0)
                break;

            // collect the class name up to ';', keeping any <...> type
            // arguments embedded in the name so the map key matches.
            StringBuilder nameBuf = new StringBuilder();
            int k = j;
            char c;
            try {
                while ((c = desc.charAt(++k)) != ';') {
                    nameBuf.append(c);
                    if (c == '<') {
                        while ((c = desc.charAt(++k)) != '>')
                            nameBuf.append(c);

                        nameBuf.append(c);
                    }
                }
            }
            catch (IndexOutOfBoundsException e) { break; }
            i = k + 1;
            String name = nameBuf.toString();
            String name2 = (String)map.get(name);
            if (name2 != null) {
                newdesc.append(desc.substring(head, j));
                newdesc.append('L');
                newdesc.append(name2);
                newdesc.append(c);
                head = i;
            }
        }

        if (head == 0)
            return desc;
        else {
            int len = desc.length();
            if (head < len)
                newdesc.append(desc.substring(head, len));

            return newdesc.toString();
        }
    }

    /**
     * Parser cursor: a mutable index into a signature string with a
     * checked <code>indexOf</code> that fails with {@link BadBytecode}.
     */
    static private class Cursor {
        int position = 0;

        int indexOf(String s, int ch) throws BadBytecode {
            int i = s.indexOf(ch, position);
            if (i < 0)
                throw error(s);
            else {
                position = i + 1;
                return i;
            }
        }
    }

    /**
     * Class signature.
     */
    public static class ClassSignature {
        TypeParameter[] params;
        ClassType superClass;
        ClassType[] interfaces;

        /**
         * Constructs a class signature.
         *
         * @param params             type parameters.
         * @param superClass         the super class.
         * @param interfaces         the interface types.
         */
        public ClassSignature(TypeParameter[] params, ClassType superClass,
                              ClassType[] interfaces) {
            this.params = params == null ? new TypeParameter[0] : params;
            this.superClass = superClass == null ? ClassType.OBJECT : superClass;
            this.interfaces = interfaces == null ? new ClassType[0] : interfaces;
        }

        /**
         * Constructs a class signature.
         *
         * @param p         type parameters.
         */
        public ClassSignature(TypeParameter[] p) {
            this(p, null, null);
        }

        /**
         * Returns the type parameters.
         *
         * @return a zero-length array if the type parameters are not specified.
         */
        public TypeParameter[] getParameters() {
            return params;
        }

        /**
         * Returns the super class.
         */
        public ClassType getSuperClass() { return superClass; }

        /**
         * Returns the super interfaces.
         *
         * @return a zero-length array if the super interfaces are not specified.
         */
        public ClassType[] getInterfaces() { return interfaces; }

        /**
         * Returns the string representation.
         */
        public String toString() {
            StringBuffer sbuf = new StringBuffer();

            TypeParameter.toString(sbuf, params);
            sbuf.append(" extends ").append(superClass);
            if (interfaces.length > 0) {
                sbuf.append(" implements ");
                Type.toString(sbuf, interfaces);
            }

            return sbuf.toString();
        }

        /**
         * Returns the encoded string representing the method type signature.
         */
        public String encode() {
            StringBuffer sbuf = new StringBuffer();
            if (params.length > 0) {
                sbuf.append('<');
                for (int i = 0; i < params.length; i++)
                    params[i].encode(sbuf);

                sbuf.append('>');
            }

            superClass.encode(sbuf);
            for (int i = 0; i < interfaces.length; i++)
                interfaces[i].encode(sbuf);

            return sbuf.toString();
        }
    }

    /**
     * Method type signature.
     */
    public static class MethodSignature {
        TypeParameter[] typeParams;
        Type[] params;
        Type retType;
        ObjectType[] exceptions;

        /**
         * Constructs a method type signature.  Any parameter can be null
         * to represent <code>void</code> or nothing.
         *
         * @param tp        type parameters.
         * @param params    parameter types.
         * @param ret       a return type, or null if the return type is <code>void</code>.
         * @param ex        exception types.
         */
        public MethodSignature(TypeParameter[] tp, Type[] params, Type ret, ObjectType[] ex) {
            typeParams = tp == null ? new TypeParameter[0] : tp;
            this.params = params == null ? new Type[0] : params;
            retType = ret == null ? new BaseType("void") : ret;
            exceptions = ex == null ? new ObjectType[0] : ex;
        }

        /**
         * Returns the formal type parameters.
         *
         * @return a zero-length array if the type parameters are not specified.
         */
        public TypeParameter[] getTypeParameters() { return typeParams; }

        /**
         * Returns the types of the formal parameters.
         *
         * @return a zero-length array if no formal parameter is taken.
         */
        public Type[] getParameterTypes() { return params; }

        /**
         * Returns the type of the returned value.
         */
        public Type getReturnType() { return retType; }

        /**
         * Returns the types of the exceptions that may be thrown.
         *
         * @return a zero-length array if exceptions are never thrown or
         * the exception types are not parameterized types or type variables.
         */
        public ObjectType[] getExceptionTypes() { return exceptions; }

        /**
         * Returns the string representation.
         */
        public String toString() {
            StringBuffer sbuf = new StringBuffer();

            TypeParameter.toString(sbuf, typeParams);
            sbuf.append(" (");
            Type.toString(sbuf, params);
            sbuf.append(") ");
            sbuf.append(retType);
            if (exceptions.length > 0) {
                sbuf.append(" throws ");
                Type.toString(sbuf, exceptions);
            }

            return sbuf.toString();
        }

        /**
         * Returns the encoded string representing the method type signature.
         */
        public String encode() {
            StringBuffer sbuf = new StringBuffer();
            if (typeParams.length > 0) {
                sbuf.append('<');
                for (int i = 0; i < typeParams.length; i++)
                    typeParams[i].encode(sbuf);

                sbuf.append('>');
            }

            sbuf.append('(');
            for (int i = 0; i < params.length; i++)
                params[i].encode(sbuf);

            sbuf.append(')');
            retType.encode(sbuf);
            if (exceptions.length > 0)
                for (int i = 0; i < exceptions.length; i++) {
                    sbuf.append('^');
                    exceptions[i].encode(sbuf);
                }

            return sbuf.toString();
        }
    }

    /**
     * Formal type parameters.
     *
     * @see TypeArgument
     */
    public static class TypeParameter {
        String name;
        ObjectType superClass;
        ObjectType[] superInterfaces;

        TypeParameter(String sig, int nb, int ne, ObjectType sc, ObjectType[] si) {
            name = sig.substring(nb, ne);
            superClass = sc;
            superInterfaces = si;
        }

        /**
         * Constructs a <code>TypeParameter</code> representing a type parameter
         * like <code>&lt;T extends ... &gt;</code>.
         *
         * @param name      parameter name.
         * @param superClass    an upper bound class-type (or null).
         * @param superInterfaces   an upper bound interface-type (or null).
         */
        public TypeParameter(String name, ObjectType superClass, ObjectType[] superInterfaces) {
            this.name = name;
            this.superClass = superClass;
            if (superInterfaces == null)
                this.superInterfaces = new ObjectType[0];
            else
                this.superInterfaces = superInterfaces;
        }

        /**
         * Constructs a <code>TypeParameter</code> representing a type parameter
         * like <code>&lt;T&gt;</code>.
         *
         * @param name      parameter name.
         */
        public TypeParameter(String name) {
            this(name, null, null);
        }

        /**
         * Returns the name of the type parameter.
         */
        public String getName() {
            return name;
        }

        /**
         * Returns the class bound of this parameter.
         */
        public ObjectType getClassBound() { return superClass; }

        /**
         * Returns the interface bound of this parameter.
         *
         * @return a zero-length array if the interface bound is not specified.
         */
        public ObjectType[] getInterfaceBound() { return superInterfaces; }

        /**
         * Returns the string representation.
         */
        public String toString() {
            StringBuffer sbuf = new StringBuffer(getName());
            if (superClass != null)
                sbuf.append(" extends ").append(superClass.toString());

            int len = superInterfaces.length;
            if (len > 0) {
                for (int i = 0; i < len; i++) {
                    // only the first bound printed gets "extends"; the rest
                    // are joined with "&".
                    if (i > 0 || superClass != null)
                        sbuf.append(" & ");
                    else
                        sbuf.append(" extends ");

                    sbuf.append(superInterfaces[i].toString());
                }
            }

            return sbuf.toString();
        }

        static void toString(StringBuffer sbuf, TypeParameter[] tp) {
            sbuf.append('<');
            for (int i = 0; i < tp.length; i++) {
                if (i > 0)
                    sbuf.append(", ");

                sbuf.append(tp[i]);
            }

            sbuf.append('>');
        }

        void encode(StringBuffer sb) {
            sb.append(name);
            if (superClass == null)
                sb.append(":Ljava/lang/Object;");
            else {
                sb.append(':');
                superClass.encode(sb);
            }

            for (int i = 0; i < superInterfaces.length; i++) {
                sb.append(':');
                superInterfaces[i].encode(sb);
            }
        }
    }

    /**
     * Type argument.
     *
     * @see TypeParameter
     */
    public static class TypeArgument {
        ObjectType arg;
        char wildcard;

        TypeArgument(ObjectType a, char w) {
            arg = a;
            wildcard = w;
        }

        /**
         * Constructs a <code>TypeArgument</code>.
         * A type argument is <code>&lt;String&gt;</code>, <code>&lt;int[]&gt;</code>,
         * or a type variable <code>&lt;T&gt;</code>, etc.
         *
         * @param t         a class type, an array type, or a type variable.
         */
        public TypeArgument(ObjectType t) {
            this(t, ' ');
        }

        /**
         * Constructs a <code>TypeArgument</code> representing <code>&lt;?&gt;</code>.
         */
        public TypeArgument() {
            this(null, '*');
        }

        /**
         * A factory method constructing a <code>TypeArgument</code> with an upper bound.
         * It represents <code>&lt;? extends ... &gt;</code>
         *
         * @param t     an upper bound type.
         */
        public static TypeArgument subclassOf(ObjectType t) {
            return new TypeArgument(t, '+');
        }

        /**
         * A factory method constructing a <code>TypeArgument</code> with a lower bound.
         * It represents <code>&lt;? super ... &gt;</code>
         *
         * @param t     a lower bound type.
         */
        public static TypeArgument superOf(ObjectType t) {
            return new TypeArgument(t, '-');
        }

        /**
         * Returns the kind of this type argument.
         *
         * @return <code>' '</code> (not-wildcard), <code>'*'</code> (wildcard), <code>'+'</code> (wildcard with
         * upper bound), or <code>'-'</code> (wildcard with lower bound).
         */
        public char getKind() { return wildcard; }

        /**
         * Returns true if this type argument is a wildcard type
         * such as <code>?</code>, <code>? extends String</code>, or <code>? super Integer</code>.
         */
        public boolean isWildcard() { return wildcard != ' '; }

        /**
         * Returns the type represented by this argument
         * if the argument is not a wildcard type.  Otherwise, this method
         * returns the upper bound (if the kind is '+'),
         * the lower bound (if the kind is '-'), or null (if the upper or lower
         * bound is not specified).
         */
        public ObjectType getType() { return arg; }

        /**
         * Returns the string representation.
         */
        public String toString() {
            if (wildcard == '*')
                return "?";

            String type = arg.toString();
            if (wildcard == ' ')
                return type;
            else if (wildcard == '+')
                return "? extends " + type;
            else
                return "? super " + type;
        }

        static void encode(StringBuffer sb, TypeArgument[] args) {
            sb.append('<');
            for (int i = 0; i < args.length; i++) {
                TypeArgument ta = args[i];
                if (ta.isWildcard())
                    sb.append(ta.wildcard);

                if (ta.getType() != null)
                    ta.getType().encode(sb);
            }

            sb.append('>');
        }
    }

    /**
     * Primitive types and object types.
     */
    public static abstract class Type {
        abstract void encode(StringBuffer sb);
        static void toString(StringBuffer sbuf, Type[] ts) {
            for (int i = 0; i < ts.length; i++) {
                if (i > 0)
                    sbuf.append(", ");

                sbuf.append(ts[i]);
            }
        }
    }

    /**
     * Primitive types.
     */
    public static class BaseType extends Type {
        char descriptor;
        BaseType(char c) { descriptor = c; }

        /**
         * Constructs a <code>BaseType</code>.
         *
         * @param typeName  <code>void</code>, <code>int</code>, ...
         */
        public BaseType(String typeName) {
            this(Descriptor.of(typeName).charAt(0));
        }

        /**
         * Returns the descriptor representing this primitive type.
         *
         * @see javassist.bytecode.Descriptor
         */
        public char getDescriptor() { return descriptor; }

        /**
         * Returns the <code>CtClass</code> representing this
         * primitive type.
         */
        public CtClass getCtlass() {
            return Descriptor.toPrimitiveClass(descriptor);
        }

        /**
         * Returns the string representation.
         */
        public String toString() {
            return Descriptor.toClassName(Character.toString(descriptor));
        }

        void encode(StringBuffer sb) {
            sb.append(descriptor);
        }
    }

    /**
     * Class types, array types, and type variables.
     * This class is also used for representing a field type.
     */
    public static abstract class ObjectType extends Type {
        /**
         * Returns the encoded string representing the object type signature.
         */
        public String encode() {
            StringBuffer sb = new StringBuffer();
            encode(sb);
            return sb.toString();
        }
    }

    /**
     * Class types.
     */
    public static class ClassType extends ObjectType {
        String name;
        TypeArgument[] arguments;

        static ClassType make(String s, int b, int e,
                              TypeArgument[] targs, ClassType parent) {
            if (parent == null)
                return new ClassType(s, b, e, targs);
            else
                return new NestedClassType(s, b, e, targs, parent);
        }

        ClassType(String signature, int begin, int end, TypeArgument[] targs) {
            name = signature.substring(begin, end).replace('/', '.');
            arguments = targs;
        }

        /**
         * A class type representing <code>java.lang.Object</code>.
         */
        public static ClassType OBJECT = new ClassType("java.lang.Object", null);

        /**
         * Constructs a <code>ClassType</code>.  It represents
         * the name of a non-nested class.
         *
         * @param className     a fully qualified class name.
         * @param args          type arguments or null.
         */
        public ClassType(String className, TypeArgument[] args) {
            name = className;
            arguments = args;
        }

        /**
         * Constructs a <code>ClassType</code>.  It represents
         * the name of a non-nested class.
         *
         * @param className     a fully qualified class name.
         */
        public ClassType(String className) {
            this(className, null);
        }

        /**
         * Returns the class name.
         */
        public String getName() {
            return name;
        }

        /**
         * Returns the type arguments.
         *
         * @return null if no type arguments are given to this class.
         */
        public TypeArgument[] getTypeArguments() { return arguments; }

        /**
         * If this class is a member of another class, returns the
         * class in which this class is declared.
         *
         * @return null if this class is not a member of another class.
         */
        public ClassType getDeclaringClass() { return null; }

        /**
         * Returns the string representation.
         */
        public String toString() {
            StringBuffer sbuf = new StringBuffer();
            ClassType parent = getDeclaringClass();
            if (parent != null)
                sbuf.append(parent.toString()).append('.');

            sbuf.append(name);
            if (arguments != null) {
                sbuf.append('<');
                int n = arguments.length;
                for (int i = 0; i < n; i++) {
                    if (i > 0)
                        sbuf.append(", ");

                    sbuf.append(arguments[i].toString());
                }

                sbuf.append('>');
            }

            return sbuf.toString();
        }

        void encode(StringBuffer sb) {
            sb.append('L');
            encode2(sb);
            sb.append(';');
        }

        void encode2(StringBuffer sb) {
            ClassType parent = getDeclaringClass();
            if (parent != null) {
                parent.encode2(sb);
                sb.append('$');
            }

            sb.append(name.replace('.', '/'));
            if (arguments != null)
                TypeArgument.encode(sb, arguments);
        }
    }

    /**
     * Nested class types.
     */
    public static class NestedClassType extends ClassType {
        ClassType parent;
        NestedClassType(String s, int b, int e,
                        TypeArgument[] targs, ClassType p) {
            super(s, b, e, targs);
            parent = p;
        }

        /**
         * Constructs a <code>NestedClassType</code>.
         *
         * @param parent        the class surrounding this class type.
         * @param className     a simple class name.  It does not include
         *                      a package name or a parent's class name.
         * @param args          type parameters or null.
         */
        public NestedClassType(ClassType parent, String className, TypeArgument[] args) {
            super(className, args);
            this.parent = parent;
        }

        /**
         * Returns the class that declares this nested class.
         * This nested class is a member of that declaring class.
         */
        public ClassType getDeclaringClass() { return parent; }
    }

    /**
     * Array types.
     */
    public static class ArrayType extends ObjectType {
        int dim;
        Type componentType;

        /**
         * Constructs an <code>ArrayType</code>.
         *
         * @param d         dimension.
         * @param comp      the component type.
         */
        public ArrayType(int d, Type comp) {
            dim = d;
            componentType = comp;
        }

        /**
         * Returns the dimension of the array.
         */
        public int getDimension() { return dim; }

        /**
         * Returns the component type.
         */
        public Type getComponentType() {
            return componentType;
        }

        /**
         * Returns the string representation.
         */
        public String toString() {
            StringBuffer sbuf = new StringBuffer(componentType.toString());
            for (int i = 0; i < dim; i++)
                sbuf.append("[]");

            return sbuf.toString();
        }

        void encode(StringBuffer sb) {
            for (int i = 0; i < dim; i++)
                sb.append('[');

            componentType.encode(sb);
        }
    }

    /**
     * Type variables.
     */
    public static class TypeVariable extends ObjectType {
        String name;

        TypeVariable(String sig, int begin, int end) {
            name = sig.substring(begin, end);
        }

        /**
         * Constructs a <code>TypeVariable</code>.
         *
         * @param name      the name of a type variable.
         */
        public TypeVariable(String name) {
            this.name = name;
        }

        /**
         * Returns the variable name.
         */
        public String getName() {
            return name;
        }

        /**
         * Returns the string representation.
         */
        public String toString() {
            return name;
        }

        void encode(StringBuffer sb) {
            sb.append('T').append(name).append(';');
        }
    }

    /**
     * Parses the given signature string as a class signature.
     *
     * @param  sig                  the signature obtained from the <code>SignatureAttribute</code>
     *                              of a <code>ClassFile</code>.
     * @return a tree-like data structure representing a class signature.  It provides
     *         convenient accessor methods.
     * @throws BadBytecode          thrown when a syntactical error is found.
     * @see #getSignature()
     * @since 3.5
     */
    public static ClassSignature toClassSignature(String sig) throws BadBytecode {
        try {
            return parseSig(sig);
        }
        catch (IndexOutOfBoundsException e) {
            throw error(sig);
        }
    }

    /**
     * Parses the given signature string as a method type signature.
     *
     * @param  sig                  the signature obtained from the <code>SignatureAttribute</code>
     *                              of a <code>MethodInfo</code>.
     * @return a tree-like data structure representing a method signature.  It provides
     *         convenient accessor methods.
     * @throws BadBytecode          thrown when a syntactical error is found.
     * @see #getSignature()
     * @since 3.5
     */
    public static MethodSignature toMethodSignature(String sig) throws BadBytecode {
        try {
            return parseMethodSig(sig);
        }
        catch (IndexOutOfBoundsException e) {
            throw error(sig);
        }
    }

    /**
     * Parses the given signature string as a field type signature.
     *
     * @param  sig                  the signature string obtained from the
     *                              <code>SignatureAttribute</code> of a <code>FieldInfo</code>.
     * @return the field type signature.
     * @throws BadBytecode          thrown when a syntactical error is found.
     * @see #getSignature()
     * @since 3.5
     */
    public static ObjectType toFieldSignature(String sig) throws BadBytecode {
        try {
            return parseObjectType(sig, new Cursor(), false);
        }
        catch (IndexOutOfBoundsException e) {
            throw error(sig);
        }
    }

    /**
     * Parses the given signature string as a type signature.
     * The type signature is either the field type signature or a base type
     * descriptor including <code>void</code> type.
     *
     * @throws BadBytecode          thrown when a syntactical error is found.
     * @since 3.18
     */
    public static Type toTypeSignature(String sig) throws BadBytecode {
        try {
            return parseType(sig, new Cursor());
        }
        catch (IndexOutOfBoundsException e) {
            throw error(sig);
        }
    }

    private static ClassSignature parseSig(String sig)
        throws BadBytecode, IndexOutOfBoundsException
    {
        Cursor cur = new Cursor();
        TypeParameter[] tp = parseTypeParams(sig, cur);
        ClassType superClass = parseClassType(sig, cur);
        int sigLen = sig.length();
        ArrayList ifArray = new ArrayList();
        // each remaining L...; entry is a super interface
        while (cur.position < sigLen && sig.charAt(cur.position) == 'L')
            ifArray.add(parseClassType(sig, cur));

        ClassType[] ifs
            = (ClassType[])ifArray.toArray(new ClassType[ifArray.size()]);
        return new ClassSignature(tp, superClass, ifs);
    }

    private static MethodSignature parseMethodSig(String sig)
        throws BadBytecode
    {
        Cursor cur = new Cursor();
        TypeParameter[] tp = parseTypeParams(sig, cur);
        if (sig.charAt(cur.position++) != '(')
            throw error(sig);

        ArrayList params = new ArrayList();
        while (sig.charAt(cur.position) != ')') {
            Type t = parseType(sig, cur);
            params.add(t);
        }

        cur.position++;
        Type ret = parseType(sig, cur);
        int sigLen = sig.length();
        ArrayList exceptions = new ArrayList();
        // optional throws clause: each entry is prefixed with '^'
        while (cur.position < sigLen && sig.charAt(cur.position) == '^') {
            cur.position++;
            ObjectType t = parseObjectType(sig, cur, false);
            if (t instanceof ArrayType)
                throw error(sig);

            exceptions.add(t);
        }

        Type[] p = (Type[])params.toArray(new Type[params.size()]);
        ObjectType[] ex = (ObjectType[])exceptions.toArray(new ObjectType[exceptions.size()]);
        return new MethodSignature(tp, p, ret, ex);
    }

    private static TypeParameter[] parseTypeParams(String sig, Cursor cur)
        throws BadBytecode
    {
        ArrayList typeParam = new ArrayList();
        if (sig.charAt(cur.position) == '<') {
            cur.position++;
            while (sig.charAt(cur.position) != '>') {
                int nameBegin = cur.position;
                int nameEnd = cur.indexOf(sig, ':');
                // the class bound may be empty (e.g. "T::Ljava/lang/Runnable;"),
                // in which case parseObjectType returns null.
                ObjectType classBound = parseObjectType(sig, cur, true);
                ArrayList ifBound = new ArrayList();
                while (sig.charAt(cur.position) == ':') {
                    cur.position++;
                    ObjectType t = parseObjectType(sig, cur, false);
                    ifBound.add(t);
                }

                TypeParameter p = new TypeParameter(sig, nameBegin, nameEnd,
                        classBound, (ObjectType[])ifBound.toArray(new ObjectType[ifBound.size()]));
                typeParam.add(p);
            }

            cur.position++;
        }

        return (TypeParameter[])typeParam.toArray(new TypeParameter[typeParam.size()]);
    }

    private static ObjectType parseObjectType(String sig, Cursor c, boolean dontThrow)
        throws BadBytecode
    {
        int i;
        int begin = c.position;
        switch (sig.charAt(begin)) {
        case 'L' :
            return parseClassType2(sig, c, null);
        case 'T' :
            i = c.indexOf(sig, ';');
            return new TypeVariable(sig, begin + 1, i);
        case '[' :
            return parseArray(sig, c);
        default :
            if (dontThrow)
                return null;
            else
                throw error(sig);
        }
    }

    private static ClassType parseClassType(String sig, Cursor c)
        throws BadBytecode
    {
        if (sig.charAt(c.position) == 'L')
            return parseClassType2(sig, c, null);
        else
            throw error(sig);
    }

    private static ClassType parseClassType2(String sig, Cursor c, ClassType parent)
        throws BadBytecode
    {
        int start = ++c.position;
        char t;
        do {
            t = sig.charAt(c.position++);
        } while (t != '$' && t != '<' && t != ';');
        int end = c.position - 1;
        TypeArgument[] targs;
        if (t == '<') {
            targs = parseTypeArgs(sig, c);
            t = sig.charAt(c.position++);
        }
        else
            targs = null;

        ClassType thisClass = ClassType.make(sig, start, end, targs, parent);
        if (t == '$' || t == '.') {
            // an inner-class suffix follows; parse it with this class as parent
            c.position--;
            return parseClassType2(sig, c, thisClass);
        }
        else
            return thisClass;
    }

    private static TypeArgument[] parseTypeArgs(String sig, Cursor c) throws BadBytecode {
        ArrayList args = new ArrayList();
        char t;
        while ((t = sig.charAt(c.position++)) != '>') {
            TypeArgument ta;
            if (t == '*' )
                ta = new TypeArgument(null, '*');
            else {
                // '+'/'-' are explicit wildcard bounds; anything else starts
                // a plain type argument, so put the character back.
                if (t != '+' && t != '-') {
                    t = ' ';
                    c.position--;
                }

                ta = new TypeArgument(parseObjectType(sig, c, false), t);
            }

            args.add(ta);
        }

        return (TypeArgument[])args.toArray(new TypeArgument[args.size()]);
    }

    private static ObjectType parseArray(String sig, Cursor c) throws BadBytecode {
        int dim = 1;
        while (sig.charAt(++c.position) == '[')
            dim++;

        return new ArrayType(dim, parseType(sig, c));
    }

    private static Type parseType(String sig, Cursor c) throws BadBytecode {
        Type t = parseObjectType(sig, c, true);
        if (t == null)
            t = new BaseType(sig.charAt(c.position++));

        return t;
    }

    private static BadBytecode error(String sig) {
        return new BadBytecode("bad signature: " + sig);
    }
}
/**
 * Copyright 2010 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jbpm.process.workitem.email;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Date;
import java.util.List;
import java.util.Properties;

import javax.activation.DataHandler;
import javax.activation.MimetypesFileTypeMap;
import javax.mail.Message;
import javax.mail.Message.RecipientType;
import javax.mail.MessagingException;
import javax.mail.Multipart;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import javax.mail.util.ByteArrayDataSource;
import javax.naming.InitialContext;
import javax.naming.NamingException;

/**
 * Sends an HTML email (optionally with attachments) over SMTP using JavaMail.
 * The mail {@link Session} is looked up in JNDI first (key configurable via the
 * {@code org.kie.mail.session} system property) and falls back to a session
 * built from the supplied {@link Connection} settings.
 */
public class SendHtml {

    /** JNDI name of a container-managed mail session; overridable via system property. */
    private static final String MAIL_JNDI_KEY = System.getProperty("org.kie.mail.session", "mail/jbpmMailSession");

    /** Default JavaMail debug flag, read once from the org.kie.mail.debug system property. */
    private static boolean debug = Boolean.parseBoolean(System.getProperty("org.kie.mail.debug", "false"));

    /** Sends the email using the connection embedded in the {@link Email} itself. */
    public static void sendHtml(Email email) {
        sendHtml(email, email.getConnection());
    }

    /** Sends the email using its embedded connection, with an explicit debug flag. */
    public static void sendHtml(Email email, boolean debug) {
        sendHtml(email, email.getConnection(), debug);
    }

    /** Sends the email over the given connection with the class-default debug flag. */
    public static void sendHtml(Email email, Connection connection) {
        sendHtml(email, connection, debug);
    }

    /**
     * Builds a MIME message from {@code email} and transmits it over SMTP.
     *
     * @param email      the email content (message, recipients, attachments)
     * @param connection SMTP host/port/credentials
     * @param debug      whether to enable JavaMail session debugging
     * @throws RuntimeException on connection failure or any other send error
     */
    public static void sendHtml(Email email, Connection connection, boolean debug) {
        int port = Integer.parseInt(connection.getPort());
        String mailhost = connection.getHost();
        String username = connection.getUserName();
        String password = connection.getPassword();

        Session session = getSession(connection);
        session.setDebug( debug );
        try {
            Message msg = fillMessage(email, session);

            // send the thing off
            // Session.getTransport already returns a Transport; no cast needed.
            Transport t = session.getTransport("smtp");
            try {
                t.connect(mailhost, port, username, password);
                t.sendMessage(msg, msg.getAllRecipients());
            } catch (Exception e) {
                throw new RuntimeException( "Connection failure", e );
            } finally {
                t.close();
            }
        } catch ( Exception e ) {
            throw new RuntimeException( "Unable to send email", e );
        }
    }

    /**
     * Constructs the JavaMail {@link Message}: from/reply-to, recipients,
     * subject, headers, and either a multipart body (when attachments are
     * present) or a plain {@code text/html} body.
     *
     * @throws RuntimeException if 'from' is missing, a recipient type is
     *                          unknown, or message assembly fails
     */
    private static Message fillMessage(Email email, Session session) {
        org.jbpm.process.workitem.email.Message message = email.getMessage();

        String subject = message.getSubject();
        String from = message.getFrom();
        String replyTo = message.getReplyTo();
        String mailer = "sendhtml";

        if ( from == null ) {
            throw new RuntimeException("Email must have 'from' address" );
        }
        if ( replyTo == null ) {
            // Default the reply-to address to the sender.
            replyTo = from;
        }

        // Construct and fill the Message
        Message msg = null;
        try {
            msg = new MimeMessage( session );
            msg.setFrom( new InternetAddress( from ) );
            msg.setReplyTo( new InternetAddress[] { new InternetAddress( replyTo ) } );

            // Map the workitem recipient type strings onto JavaMail recipient types.
            for ( Recipient recipient : message.getRecipients().getRecipients() ) {
                RecipientType type = null;
                if ( "To".equals( recipient.getType() ) ) {
                    type = Message.RecipientType.TO;
                } else if ( "Cc".equals( recipient.getType() ) ) {
                    type = Message.RecipientType.CC;
                } else if ( "Bcc".equals( recipient.getType() ) ) {
                    type = Message.RecipientType.BCC;
                } else {
                    throw new RuntimeException( "Unable to determine recipient type" );
                }
                msg.addRecipients( type, InternetAddress.parse( recipient.getEmail(), false ) );
            }

            if (message.hasAttachment()) {
                Multipart multipart = new MimeMultipart();

                // prepare body as first mime body part
                MimeBodyPart messageBodyPart = new MimeBodyPart();
                messageBodyPart.setDataHandler( new DataHandler( new ByteArrayDataSource( message.getBody(), "text/html" ) ) );
                multipart.addBodyPart(messageBodyPart);

                // One body part per attachment, content-type guessed from the file name.
                List<String> attachments = message.getAttachments();
                for (String attachment : attachments) {
                    MimeBodyPart attachementBodyPart = new MimeBodyPart();
                    URL attachmentUrl = getAttachemntURL(attachment);

                    String contentType = MimetypesFileTypeMap.getDefaultFileTypeMap().getContentType(attachmentUrl.getFile());
                    attachementBodyPart.setDataHandler(new DataHandler(new ByteArrayDataSource( attachmentUrl.openStream(), contentType ) ));
                    String fileName = new File(attachmentUrl.getFile()).getName();
                    attachementBodyPart.setFileName(fileName);
                    // Content-ID allows the HTML body to reference the attachment inline.
                    attachementBodyPart.setContentID("<"+fileName+">");

                    multipart.addBodyPart(attachementBodyPart);
                }

                // Put parts in message
                msg.setContent(multipart);
            } else {
                msg.setDataHandler( new DataHandler( new ByteArrayDataSource( message.getBody(), "text/html" ) ) );
            }

            msg.setSubject( subject );
            msg.setHeader( "X-Mailer", mailer );
            msg.setSentDate( new Date() );
        } catch ( Exception e ) {
            throw new RuntimeException( "Unable to send email", e );
        }

        return msg;
    }

    /**
     * Historical helper; currently a no-op. It builds an HTML wrapper for
     * {@code body} into a local buffer which is never read (most of the
     * original wrapping is commented out). Kept only for API compatibility.
     */
    public static void collect(String body, Message msg) throws MessagingException, IOException {
        // String subject = msg.getSubject();
        StringBuffer sb = new StringBuffer();
        // sb.append( "<HTML>\n" );
        // sb.append( "<HEAD>\n" );
        // sb.append( "<TITLE>\n" );
        // sb.append( subject + "\n" );
        // sb.append( "</TITLE>\n" );
        // sb.append( "</HEAD>\n" );
        // sb.append( "<BODY>\n" );
        // sb.append( "<H1>" + subject + "</H1>" + "\n" );
        sb.append( body );
        // sb.append( "</BODY>\n" );
        // sb.append( "</HTML>\n" );
    }

    /**
     * Resolves the mail session: JNDI lookup first, then a session built from
     * the connection's SMTP settings. When a username and password are both
     * present, SMTP authentication is enabled.
     */
    private static Session getSession(Connection connection) {
        try {
            return InitialContext.doLookup(MAIL_JNDI_KEY);
        } catch (NamingException e1) {
            // No container session available — build one from the connection.
            String username = connection.getUserName();
            String password = connection.getPassword();

            Properties properties = new Properties();
            properties.setProperty("mail.smtp.host", connection.getHost());
            properties.setProperty("mail.smtp.port", connection.getPort());
            if( connection.getStartTls() != null && connection.getStartTls() ) {
                properties.put("mail.smtp.starttls.enable","true");
            }

            if( username != null ) {
                properties.setProperty("mail.smtp.submitter", username);
                if( password != null) {
                    properties.setProperty("mail.smtp.auth", "true");
                    return Session.getInstance(properties, new Authenticator(username, password));
                }
            }
            // No credentials (or no password): unauthenticated session.
            return Session.getInstance(properties);
        }
    }

    /**
     * Resolves an attachment reference to a URL. References prefixed with
     * {@code classpath:} are resolved against this class's classpath; anything
     * else is parsed as a regular URL.
     */
    protected static URL getAttachemntURL(String attachment) throws MalformedURLException {
        if (attachment.startsWith("classpath:")) {
            String location = attachment.replaceFirst("classpath:", "");
            return SendHtml.class.getResource(location);
        } else {
            URL attachmentUrl = new URL(attachment);
            return attachmentUrl;
        }
    }

    /** Supplies the username/password pair to JavaMail when SMTP auth is enabled. */
    private static class Authenticator extends javax.mail.Authenticator {
        private PasswordAuthentication authentication;

        public Authenticator(String username, String password) {
            authentication = new PasswordAuthentication(username, password);
        }

        protected PasswordAuthentication getPasswordAuthentication() {
            return authentication;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.jackrabbit.oak.plugins.index.search;

import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;

import javax.jcr.PropertyType;
import javax.jcr.Repository;

import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.api.JackrabbitRepository;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.index.IndexConstants;
import org.apache.jackrabbit.oak.plugins.index.search.FulltextIndexConstants.IndexingMode;
import org.apache.jackrabbit.oak.plugins.index.search.util.IndexHelper;
import org.apache.jackrabbit.oak.plugins.memory.MemoryNodeStore;
import org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState;
import org.apache.jackrabbit.oak.plugins.name.NamespaceEditorProvider;
import org.apache.jackrabbit.oak.plugins.nodetype.TypeEditorProvider;
import org.apache.jackrabbit.oak.plugins.nodetype.write.NodeTypeRegistry;
import org.apache.jackrabbit.oak.plugins.tree.factories.RootFactory;
import org.apache.jackrabbit.oak.spi.commit.CommitInfo;
import org.apache.jackrabbit.oak.spi.commit.CompositeEditorProvider;
import org.apache.jackrabbit.oak.spi.commit.Editor;
import org.apache.jackrabbit.oak.spi.commit.EditorHook;
import org.apache.jackrabbit.oak.spi.commit.EditorProvider;
import org.apache.jackrabbit.oak.spi.state.ApplyDiff;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.ImmutableSet.of;
import static org.apache.jackrabbit.JcrConstants.JCR_CONTENT;
import static org.apache.jackrabbit.oak.api.Type.STRINGS;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NAME;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.INDEX_DEFINITIONS_NODE_TYPE;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.REINDEX_PROPERTY_NAME;
import static org.apache.jackrabbit.oak.plugins.index.IndexConstants.TYPE_PROPERTY_NAME;
import static org.apache.jackrabbit.oak.plugins.memory.PropertyStates.createProperty;

/**
 * Test helpers for building Oak full-text index definitions, test node types,
 * and content trees. All factory methods mutate the supplied Tree/NodeBuilder
 * in place and return the node they created (or the builder) for chaining.
 */
public class TestUtil {
    // Monotonic counter backing unique(): guarantees distinct child-node names
    // within a single JVM run.
    private static final AtomicInteger COUNTER = new AtomicInteger();

    public static final String NT_TEST = "oak:TestNode";

    // CND definition for the test node type: any single/multi-valued property,
    // any child node, versionable.
    public static final String TEST_NODE_TYPE = "[oak:TestNode]\n" +
            " - * (UNDEFINED) multiple\n" +
            " - * (UNDEFINED)\n" +
            " + * (nt:base) = oak:TestNode VERSION";

    /**
     * Forces compatVersion=2 on the index definition builder when the default
     * index format is older than V2.
     */
    static void useV2(NodeBuilder idxNb) {
        if (!IndexFormatVersion.getDefault().isAtLeast(IndexFormatVersion.V2)) {
            idxNb.setProperty(FulltextIndexConstants.COMPAT_MODE, IndexFormatVersion.V2.getVersion());
        }
    }

    /** Tree variant of {@link #useV2(NodeBuilder)}. */
    static void useV2(Tree idxTree) {
        if (!IndexFormatVersion.getDefault().isAtLeast(IndexFormatVersion.V2)) {
            idxTree.setProperty(FulltextIndexConstants.COMPAT_MODE, IndexFormatVersion.V2.getVersion());
        }
    }

    /**
     * Creates a new full-text index definition via IndexHelper and pins it to
     * format V2.
     */
    public static NodeBuilder newFTIndexDefinitionV2(
            @NotNull NodeBuilder index, @NotNull String name, String type,
            @Nullable Set<String> propertyTypes) {
        NodeBuilder nb = IndexHelper.newFTIndexDefinition(index, name, type, propertyTypes, null, null, null);
        useV2(nb);
        return nb;
    }

    /** Adds a full-text-enabled property rule (non-regex) under {@code props}. */
    public static Tree enableForFullText(Tree props, String propName) {
        return enableForFullText(props, propName, false);
    }

    /**
     * Adds a property rule enabled for full-text search: indexed, analyzed,
     * node-scoped, usable in excerpt and spellcheck.
     */
    public static Tree enableForFullText(Tree props, String propName, boolean regex) {
        Tree prop = props.addChild(unique("prop"));
        prop.setProperty(FulltextIndexConstants.PROP_NAME, propName);
        prop.setProperty(FulltextIndexConstants.PROP_PROPERTY_INDEX, true);
        prop.setProperty(FulltextIndexConstants.PROP_IS_REGEX, regex);
        prop.setProperty(FulltextIndexConstants.PROP_NODE_SCOPE_INDEX, true);
        prop.setProperty(FulltextIndexConstants.PROP_ANALYZED, true);
        prop.setProperty(FulltextIndexConstants.PROP_USE_IN_EXCERPT, true);
        prop.setProperty(FulltextIndexConstants.PROP_USE_IN_SPELLCHECK, true);
        return prop;
    }

    /** Adds a plain property-index rule additionally flagged as ordered. */
    public static Tree enableForOrdered(Tree props, String propName) {
        Tree prop = enablePropertyIndex(props, propName, false);
        prop.setProperty("ordered", true);
        return prop;
    }

    /**
     * Adds a property-index-only rule (no full-text analysis, not node-scoped).
     */
    public static Tree enablePropertyIndex(Tree props, String propName, boolean regex) {
        Tree prop = props.addChild(unique("prop"));
        prop.setProperty(FulltextIndexConstants.PROP_NAME, propName);
        prop.setProperty(FulltextIndexConstants.PROP_PROPERTY_INDEX, true);
        prop.setProperty(FulltextIndexConstants.PROP_IS_REGEX, regex);
        prop.setProperty(FulltextIndexConstants.PROP_NODE_SCOPE_INDEX, false);
        prop.setProperty(FulltextIndexConstants.PROP_ANALYZED, false);
        return prop;
    }

    /** Adds a function-based index rule evaluating {@code function}. */
    public static Tree enableFunctionIndex(Tree props, String function) {
        Tree prop = props.addChild(unique("prop"));
        prop.setProperty(FulltextIndexConstants.PROP_FUNCTION, function);
        return prop;
    }

    /** Starts a fluent builder for aggregate rules on the given index definition. */
    public static AggregatorBuilder newNodeAggregator(Tree indexDefn){
        return new AggregatorBuilder(indexDefn);
    }

    /**
     * Creates indexRules/&lt;typeName&gt;/properties under the index definition
     * (orderable, since rule order matters) and returns the properties node.
     */
    public static Tree newRulePropTree(Tree indexDefn, String typeName){
        Tree rules = indexDefn.addChild(FulltextIndexConstants.INDEX_RULES);
        rules.setOrderableChildren(true);
        Tree rule = rules.addChild(typeName);
        Tree props = rule.addChild(FulltextIndexConstants.PROP_NODE);
        props.setOrderableChildren(true);
        return props;
    }

    /** Descends (creating as needed) each path element and returns the leaf builder. */
    public static NodeBuilder child(NodeBuilder nb, String path) {
        for (String name : PathUtils.elements(checkNotNull(path))) {
            nb = nb.child(name);
        }
        return nb;
    }

    /** Fluent builder that appends aggregate include rules per primary type. */
    static class AggregatorBuilder {
        private final Tree aggs;

        private AggregatorBuilder(Tree indexDefn) {
            this.aggs = indexDefn.addChild(FulltextIndexConstants.AGGREGATES);
        }

        AggregatorBuilder newRuleWithName(String primaryType,
                                          List<String> includes){
            Tree agg = aggs.addChild(primaryType);
            for (String include : includes){
                agg.addChild(unique("include")).setProperty(FulltextIndexConstants.AGG_PATH, include);
            }
            return this;
        }
    }

    /** Returns {@code name} suffixed with a process-unique counter value. */
    static String unique(String name){
        return name + COUNTER.getAndIncrement();
    }

    /** Registers {@link #TEST_NODE_TYPE} into the builder and returns it. */
    public static NodeBuilder registerTestNodeType(NodeBuilder builder){
        registerNodeType(builder, TEST_NODE_TYPE);
        return builder;
    }

    /**
     * Registers a CND node-type definition against a throwaway in-memory store
     * seeded from {@code builder}'s state, then diffs the result back into
     * {@code builder}.
     */
    public static void registerNodeType(NodeBuilder builder, String nodeTypeDefn){
        //Taken from org.apache.jackrabbit.oak.plugins.nodetype.write.InitialContent
        NodeState base = ModifiedNodeState.squeeze(builder.getNodeState());
        NodeStore store = new MemoryNodeStore(base);
        Root root = RootFactory.createSystemRoot(
                store, new EditorHook(new CompositeEditorProvider(
                        new NamespaceEditorProvider(),
                        new TypeEditorProvider())), null, null, null);
        NodeTypeRegistry.register(root, IOUtils.toInputStream(nodeTypeDefn), "test node types");
        NodeState target = store.getRoot();
        // Replay the registration changes onto the caller's builder.
        target.compareAgainstBaseState(base, new ApplyDiff(builder));
    }

    /** Adds child {@code nodeName} with the given jcr:primaryType and returns it. */
    public static Tree createNodeWithType(Tree t, String nodeName, String typeName){
        t = t.addChild(nodeName);
        t.setProperty(JcrConstants.JCR_PRIMARYTYPE, typeName, Type.NAME);
        return t;
    }

    /** NodeBuilder variant of {@link #createNodeWithType(Tree, String, String)}. */
    public static NodeBuilder createNodeWithType(NodeBuilder builder, String nodeName, String typeName){
        builder = builder.child(nodeName);
        builder.setProperty(JcrConstants.JCR_PRIMARYTYPE, typeName, Type.NAME);
        return builder;
    }

    /**
     * Creates an nt:file node with a jcr:content child carrying the blob and
     * mime type; returns the jcr:content node (not the file node).
     */
    public static Tree createFileNode(Tree tree, String name, Blob content, String mimeType){
        Tree fileNode = tree.addChild(name);
        fileNode.setProperty(JcrConstants.JCR_PRIMARYTYPE, JcrConstants.NT_FILE, Type.NAME);
        Tree jcrContent = fileNode.addChild(JCR_CONTENT);
        jcrContent.setProperty(JcrConstants.JCR_DATA, content);
        jcrContent.setProperty(JcrConstants.JCR_MIMETYPE, mimeType);
        return jcrContent;
    }

    /**
     * Creates a reindex-flagged full-text index definition under oak:index
     * covering STRING and BINARY properties.
     */
    public static Tree createFulltextIndex(Tree index, String name, String type) throws CommitFailedException {
        Tree def = index.addChild(INDEX_DEFINITIONS_NAME).addChild(name);
        def.setProperty(JcrConstants.JCR_PRIMARYTYPE,
                INDEX_DEFINITIONS_NODE_TYPE, Type.NAME);
        def.setProperty(TYPE_PROPERTY_NAME, type);
        def.setProperty(REINDEX_PROPERTY_NAME, true);
        def.setProperty(createProperty(FulltextIndexConstants.INCLUDE_PROPERTY_TYPES,
                of(PropertyType.TYPENAME_STRING, PropertyType.TYPENAME_BINARY), STRINGS));
        return index.getChild(INDEX_DEFINITIONS_NAME).getChild(name);
    }

    /** Shuts the repository down if it is a JackrabbitRepository; no-op otherwise. */
    public static void shutdown(Repository repository) {
        if (repository instanceof JackrabbitRepository) {
            ((JackrabbitRepository) repository).shutdown();
        }
    }

    /** Sets the async property for the given indexing mode on a builder. */
    public static NodeBuilder enableIndexingMode(NodeBuilder builder, IndexingMode indexingMode){
        builder.setProperty(createAsyncProperty(indexingMode));
        return builder;
    }

    /** Tree variant of {@link #enableIndexingMode(NodeBuilder, IndexingMode)}. */
    public static Tree enableIndexingMode(Tree tree, IndexingMode indexingMode){
        tree.setProperty(createAsyncProperty(indexingMode));
        return tree;
    }

    /** Builds the multi-valued async property {indexingMode, "async"}. */
    private static PropertyState createAsyncProperty(String indexingMode) {
        return createProperty(IndexConstants.ASYNC_PROPERTY_NAME, of(indexingMode, "async"), STRINGS);
    }

    /**
     * Maps the IndexingMode enum onto the async property value: SYNC keeps
     * both its own value and "async"; ASYNC uses "async" alone.
     */
    private static PropertyState createAsyncProperty(IndexingMode indexingMode) {
        switch(indexingMode) {
            case SYNC :
                return createAsyncProperty(indexingMode.asyncValueName());
            case ASYNC:
                return createProperty(IndexConstants.ASYNC_PROPERTY_NAME, of("async"), STRINGS);
            default:
                throw new IllegalArgumentException("Unknown mode " + indexingMode);
        }
    }

    /**
     * EditorProvider whose delegate can be swapped at runtime; returns no
     * editor (null) while the delegate is unset.
     */
    public static class OptionalEditorProvider implements EditorProvider {
        public EditorProvider delegate;

        @Override
        public Editor getRootEditor(NodeState before, NodeState after, NodeBuilder builder, CommitInfo info) throws CommitFailedException {
            if (delegate != null){
                return delegate.getRootEditor(before, after, builder, info);
            }
            return null;
        }
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver14;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

/**
 * Generated OpenFlow 1.4 wire codec for the BSN "UDP source port" TLV
 * (type 0x24, fixed length 6: 2-byte type + 2-byte length + 2-byte value).
 * Immutable; instances are produced by the Reader or the Builders.
 */
class OFBsnTlvUdpSrcVer14 implements OFBsnTlvUdpSrc {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnTlvUdpSrcVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    final static int LENGTH = 6;

    private final static int DEFAULT_VALUE = 0x0;

    // OF message fields
    private final int value;
//
    // Immutable default instance
    final static OFBsnTlvUdpSrcVer14 DEFAULT = new OFBsnTlvUdpSrcVer14(
        DEFAULT_VALUE
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnTlvUdpSrcVer14(int value) {
        // Clamp to the unsigned 16-bit range of a UDP port.
        this.value = U16.normalize(value);
    }

    // Accessors for OF message fields
    @Override
    public int getType() {
        return 0x24;
    }

    @Override
    public int getValue() {
        return value;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    /** Builder pre-populated with this instance's field values. */
    public OFBsnTlvUdpSrc.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    /** Builder that falls back to the parent message's value when unset. */
    static class BuilderWithParent implements OFBsnTlvUdpSrc.Builder {
        final OFBsnTlvUdpSrcVer14 parentMessage;

        // OF message fields
        private boolean valueSet;
        private int value;

        BuilderWithParent(OFBsnTlvUdpSrcVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

    @Override
    public int getType() {
        return 0x24;
    }

    @Override
    public int getValue() {
        return value;
    }

    @Override
    public OFBsnTlvUdpSrc.Builder setValue(int value) {
        this.value = value;
        this.valueSet = true;
        return this;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

        @Override
        public OFBsnTlvUdpSrc build() {
                int value = this.valueSet ? this.value : parentMessage.value;
//
                return new OFBsnTlvUdpSrcVer14(
                    value
                );
        }
    }

    /** Stand-alone builder that falls back to DEFAULT_VALUE when unset. */
    static class Builder implements OFBsnTlvUdpSrc.Builder {
        // OF message fields
        private boolean valueSet;
        private int value;

    @Override
    public int getType() {
        return 0x24;
    }

    @Override
    public int getValue() {
        return value;
    }

    @Override
    public OFBsnTlvUdpSrc.Builder setValue(int value) {
        this.value = value;
        this.valueSet = true;
        return this;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }
//
        @Override
        public OFBsnTlvUdpSrc build() {
            int value = this.valueSet ? this.value : DEFAULT_VALUE;
            return new OFBsnTlvUdpSrcVer14(
                    value
                );
        }
    }

    final static Reader READER = new Reader();

    /** Decodes one TLV from the buffer; returns null if bytes are incomplete. */
    static class Reader implements OFMessageReader<OFBsnTlvUdpSrc> {
        @Override
        public OFBsnTlvUdpSrc readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 0x24
            short type = bb.readShort();
            if(type != (short) 0x24)
                throw new OFParseError("Wrong type: Expected=0x24(0x24), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 6)
                throw new OFParseError("Wrong length: Expected=6(6), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            int value = U16.f(bb.readShort());

            OFBsnTlvUdpSrcVer14 bsnTlvUdpSrcVer14 = new OFBsnTlvUdpSrcVer14(
                    value
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnTlvUdpSrcVer14);
            return bsnTlvUdpSrcVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnTlvUdpSrcVer14Funnel FUNNEL = new OFBsnTlvUdpSrcVer14Funnel();

    /** Feeds the wire representation into a Guava PrimitiveSink for hashing. */
    static class OFBsnTlvUdpSrcVer14Funnel implements Funnel<OFBsnTlvUdpSrcVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnTlvUdpSrcVer14 message, PrimitiveSink sink) {
            // fixed value property type = 0x24
            sink.putShort((short) 0x24);
            // fixed value property length = 6
            sink.putShort((short) 0x6);
            sink.putInt(message.value);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    /** Serializes the TLV: type(2) + length(2) + value(2). */
    static class Writer implements OFMessageWriter<OFBsnTlvUdpSrcVer14> {
        @Override
        public void write(ByteBuf bb, OFBsnTlvUdpSrcVer14 message) {
            // fixed value property type = 0x24
            bb.writeShort((short) 0x24);
            // fixed value property length = 6
            bb.writeShort((short) 0x6);
            bb.writeShort(U16.t(message.value));
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnTlvUdpSrcVer14(");
        b.append("value=").append(value);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnTlvUdpSrcVer14 other = (OFBsnTlvUdpSrcVer14) obj;

        if( value != other.value)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        result = prime * result + value;
        return result;
    }

}
/*************************GO-LICENSE-START*********************************
 * Copyright 2014 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *************************GO-LICENSE-END***********************************/

package com.thoughtworks.go.remote.work;

import java.io.File;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.util.Date;
import java.util.List;

import com.thoughtworks.go.config.ArtifactPropertiesGenerator;
import com.thoughtworks.go.domain.*;
import com.thoughtworks.go.domain.GoControlLog;
import com.thoughtworks.go.domain.materials.MaterialAgentFactory;
import com.thoughtworks.go.publishers.GoArtifactsManipulator;
import com.thoughtworks.go.remote.AgentIdentifier;
import com.thoughtworks.go.remote.BuildRepositoryRemote;
import com.thoughtworks.go.server.service.AgentBuildingInfo;
import com.thoughtworks.go.server.service.AgentRuntimeInfo;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.TimeProvider;
import com.thoughtworks.go.util.command.EnvironmentVariableContext;
import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer;
import com.thoughtworks.go.work.DefaultGoPublisher;
import com.thoughtworks.go.work.GoPublisher;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jdom.Element;

import static com.thoughtworks.go.util.ExceptionUtils.bomb;
import static com.thoughtworks.go.util.ExceptionUtils.messageOf;

/**
 * Agent-side unit of work that executes a build assignment: prepares the
 * working directory and materials, runs the builders, harvests artifact
 * properties, uploads artifacts, and reports status back to the server.
 */
public class BuildWork implements Work {
    private static final Log LOGGER = LogFactory.getLog(BuildWork.class);

    private final BuildAssignment assignment;

    // Per-run collaborators, initialized lazily in initialize().
    private DefaultGoPublisher goPublisher;
    private TimeProvider timeProvider = new TimeProvider();
    private JobPlan plan;
    private File workingDirectory;
    private MaterialRevisions materialRevisions;
    private GoControlLog buildLog;
    private Builders builders;

    public BuildWork(BuildAssignment assignment) {
        this.assignment = assignment;
    }

    /** Wires up the publisher, build log, and builders for this assignment. */
    private void initialize(BuildRepositoryRemote remoteBuildRepository,
                            GoArtifactsManipulator goArtifactsManipulator, AgentRuntimeInfo agentRuntimeInfo) {
        plan = assignment.getPlan();
        agentRuntimeInfo.busy(new AgentBuildingInfo(plan.getIdentifier().buildLocatorForDisplay(),
                plan.getIdentifier().buildLocator()));
        workingDirectory = assignment.getWorkingDirectory();
        materialRevisions = assignment.materialRevisions();
        buildLog = new GoControlLog(this.workingDirectory + "/cruise-output");
        goPublisher = new DefaultGoPublisher(goArtifactsManipulator, plan.getIdentifier(), remoteBuildRepository,
                agentRuntimeInfo);
        builders = new Builders(assignment.getBuilders(), goPublisher, buildLog);
    }

    /**
     * Runs the full build lifecycle and reports completion or failure.
     * An {@link InvalidAgentException} (agent UUID changed mid-build) is logged
     * but not reported; any other exception is reported as a failed job.
     */
    public void doWork(AgentIdentifier agentIdentifier, BuildRepositoryRemote remoteBuildRepository, GoArtifactsManipulator goArtifactsManipulator,
                       EnvironmentVariableContext environmentVariableContext, AgentRuntimeInfo agentRuntimeInfo) {
        initialize(remoteBuildRepository, goArtifactsManipulator, agentRuntimeInfo);
        environmentVariableContext.addAll(assignment.initialEnvironmentVariableContext());
        try {
            JobResult result = build(environmentVariableContext, agentIdentifier);
            reportCompletion(result);
        } catch (InvalidAgentException e) {
            LOGGER.error("Agent UUID changed in the middle of the build.", e);
        } catch (Exception e) {
            reportFailure(e);
        } finally {
            goPublisher.stop();
        }
    }

    /** Best-effort error report, then marks the job failed. */
    private void reportFailure(Exception e) {
        try{
            goPublisher.reportErrorMessage(messageOf(e), e);
        } catch (Exception reportException) {
            LOGGER.error(String.format("Unable to report error message - %s.", messageOf(e)), reportException);
        }
        reportCompletion(JobResult.Failed);
    }

    /**
     * Waits for cancel tasks, then reports either cancellation (null result)
     * or the final result to the server.
     */
    private void reportCompletion(JobResult result) {
        try {
            builders.waitForCancelTasks();
            if (result == null) {
                // A null result means the job was cancelled mid-flight.
                goPublisher.reportCurrentStatus(JobState.Completed);
                goPublisher.reportCompletedAction();
            } else {
                goPublisher.reportCompleted(result);
            }
        } catch (Exception ex) {
            LOGGER.error("New error occurred during error handling:\n"
                    + "build will be rescheduled when agent starts asking for work again", ex);
        }
    }

    /**
     * Prepares, builds and completes the job. Returns null if the job was
     * cancelled before or during preparation.
     */
    private JobResult build(EnvironmentVariableContext environmentVariableContext, AgentIdentifier agentIdentifier) throws Exception {
        if (this.goPublisher.isIgnored()) {
            this.goPublisher.reportAction("Job is cancelled");
            return null;
        }

        prepareJob(agentIdentifier);
        setupEnvironmentContext(environmentVariableContext);
        plan.applyTo(environmentVariableContext);

        if (this.goPublisher.isIgnored()) {
            this.goPublisher.reportAction("Job is cancelled");
            return null;
        }

        JobResult result = buildJob(environmentVariableContext);
        completeJob();
        return result;
    }

    /** Cleans/creates the working directory and updates materials (unless skipped). */
    private void prepareJob(AgentIdentifier agentIdentifier) {
        goPublisher.reportAction("Start to prepare");
        goPublisher.reportCurrentStatus(JobState.Preparing);

        createWorkingDirectoryIfNotExist(workingDirectory);
        if (!plan.shouldFetchMaterials()) {
            goPublisher.consumeLineWithPrefix("Skipping material update since stage is configured not to fetch materials");
            return;
        }

        ProcessOutputStreamConsumer<GoPublisher, GoPublisher> consumer =
                new ProcessOutputStreamConsumer<GoPublisher, GoPublisher>(goPublisher, goPublisher);
        MaterialAgentFactory materialAgentFactory =
                new MaterialAgentFactory(consumer, workingDirectory, agentIdentifier);

        materialRevisions.getMaterials().cleanUp(workingDirectory, consumer);

        for (MaterialRevision revision : materialRevisions.getRevisions()) {
            materialAgentFactory.createAgent(revision).prepare();
        }
    }

    /**
     * Populates GO_* environment variables from server config, the job
     * identifier, and the material revisions. (Renamed from the original
     * misspelled {@code setupEnvrionmentContext}; private, so no callers break.)
     */
    private EnvironmentVariableContext setupEnvironmentContext(EnvironmentVariableContext context) {
        context.setProperty("GO_SERVER_URL", new SystemEnvironment().getPropertyImpl("serviceUrl"), false);
        context.setProperty("GO_TRIGGER_USER", assignment.getBuildApprover(), false);
        plan.getIdentifier().populateEnvironmentVariables(context);
        materialRevisions.populateEnvironmentVariables(context, workingDirectory);
        return context;
    }

    /** Transitions to Building and executes the builders. */
    private JobResult buildJob(EnvironmentVariableContext environmentVariableContext) {
        goPublisher.reportCurrentStatus(JobState.Building);
        goPublisher.reportAction("Start to build");
        return execute(environmentVariableContext);
    }

    /** Harvests properties and uploads artifacts unless the job was cancelled. */
    private void completeJob() throws SocketTimeoutException {
        if (goPublisher.isIgnored()) {
            return;
        }

        goPublisher.reportCurrentStatus(JobState.Completing);

        goPublisher.reportAction("Start to create properties");
        harvestProperties(goPublisher);

        goPublisher.reportAction("Start to upload");
        plan.publishArtifacts(goPublisher, workingDirectory);
    }

    /** Runs the builders, reports "completing", and flushes the build log. */
    private JobResult execute(EnvironmentVariableContext environmentVariableContext) {
        Date now = new Date();

        // collect project information
        // TODO - #2409
        buildLog.addContent(new Element("info"));

        JobResult result = builders.build(environmentVariableContext);

        goPublisher.reportCompleting(result);

        try {
            buildLog.writeLogFile(now);
        } catch (IOException e) {
            throw bomb("Failed to write log file", e);
        }

        buildLog.reset();
        return result;
    }

    private List<ArtifactPropertiesGenerator> getArtifactPropertiesGenerators() {
        return plan.getPropertyGenerators();
    }

    /** Runs each configured artifact-property generator against the publisher. */
    private void harvestProperties(DefaultGoPublisher publisher) {
        List<ArtifactPropertiesGenerator> generators = getArtifactPropertiesGenerators();
        for (ArtifactPropertiesGenerator generator : generators) {
            generator.generate(publisher, workingDirectory);
        }
    }

    public String description() {
        return "Running build ...";
    }

    public void cancel(EnvironmentVariableContext environmentVariableContext, AgentRuntimeInfo agentruntimeInfo) {
        agentruntimeInfo.cancel();
        builders.cancel(environmentVariableContext);
    }

    // only for test
    public BuildAssignment getAssignment() {
        return assignment;
    }

    /** Identifier for logging; a placeholder when the assignment is incomplete. */
    public JobIdentifier identifierForLogging() {
        if (assignment == null || assignment.getPlan() == null || assignment.getPlan().getIdentifier() == null) {
            return JobIdentifier.invalidIdentifier("Unknown", "Unknown", "Unknown", "Unknown", "Unknown");
        }
        return assignment.getPlan().getIdentifier();
    }

    public String toString() {
        return "BuildWork["
                + assignment.toString()
                + "]";
    }

    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        BuildWork work = (BuildWork) o;

        if (assignment != null ? !assignment.equals(work.assignment) : work.assignment != null) {
            return false;
        }

        return true;
    }

    public int hashCode() {
        // Must be consistent with equals(), which compares only 'assignment'.
        // The original also mixed in goPublisher and timeProvider (mutable,
        // identity-hashed fields), so two equal BuildWork instances could have
        // different hash codes — a violation of the Object.hashCode contract.
        return assignment != null ? assignment.hashCode() : 0;
    }

    /** Cleans the working directory if configured to, then ensures it exists. */
    private void createWorkingDirectoryIfNotExist(File buildWorkingDirectory) {
        if (plan.shouldCleanWorkingDir() && buildWorkingDirectory.exists()) {
            try {
                FileUtils.cleanDirectory(buildWorkingDirectory);
                goPublisher.consumeLineWithPrefix("Cleaning working directory \"" + buildWorkingDirectory.getAbsolutePath()
                        + "\" since stage is configured to clean working directory");
            } catch (IOException e) {
                bomb("Clean working directory is set to true. Unable to clean working directory for agent: "
                        + buildWorkingDirectory.getAbsolutePath());
            }
        }
        if (!buildWorkingDirectory.exists()) {
            if (!buildWorkingDirectory.mkdirs()) {
                bomb("Unable to create working directory for agent: " + buildWorkingDirectory.getAbsolutePath());
            }
        }
    }
}
package com.puzzletimer.gui;

import static com.puzzletimer.Internationalization.t;

import java.awt.Dimension;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.text.DateFormat;
import java.util.Date;

import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JScrollPane;
import javax.swing.JTextArea;
import javax.swing.KeyStroke;

import net.miginfocom.swing.MigLayout;

import com.puzzletimer.models.Category;
import com.puzzletimer.models.Solution;
import com.puzzletimer.state.CategoryManager;
import com.puzzletimer.state.SessionManager;
import com.puzzletimer.statistics.Average;
import com.puzzletimer.statistics.Best;
import com.puzzletimer.statistics.BestAverage;
import com.puzzletimer.statistics.Mean;
import com.puzzletimer.statistics.Percentile;
import com.puzzletimer.statistics.StandardDeviation;
import com.puzzletimer.statistics.StatisticalMeasure;
import com.puzzletimer.statistics.Worst;
import com.puzzletimer.util.SolutionUtils;

/**
 * Frame showing a plain-text summary of the current session: category, time
 * span, aggregate statistics, best averages of 5/12, and every solution with
 * its scramble. The summary is regenerated whenever the session's solutions
 * change and can be copied to the system clipboard.
 */
@SuppressWarnings("serial")
public class SessionSummaryFrame extends JFrame {
    private JTextArea textAreaSummary;
    private JButton buttonCopyToClipboard;
    private JButton buttonOk;

    public SessionSummaryFrame(final CategoryManager categoryManager, SessionManager sessionManager) {
        super();

        setMinimumSize(new Dimension(640, 480));
        setPreferredSize(getMinimumSize());

        createComponents();

        // title — kept in sync with the currently selected category
        categoryManager.addListener(new CategoryManager.Listener() {
            @Override
            public void categoriesUpdated(Category[] categories, Category currentCategory) {
                setTitle(
                    String.format(
                        t("session_summary.session_sumary_category"),
                        currentCategory.getDescription()));
            }
        });
        categoryManager.notifyListeners();

        // summary — rebuilt on every solutions update
        sessionManager.addListener(new SessionManager.Listener() {
            @Override
            public void solutionsUpdated(Solution[] solutions) {
                updateSummary(categoryManager.getCurrentCategory(), solutions);
            }
        });

        // copy to clipboard
        this.buttonCopyToClipboard.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                StringSelection contents = new StringSelection(SessionSummaryFrame.this.textAreaSummary.getText());
                Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
                clipboard.setContents(contents, contents);
            }
        });

        // ok button — frame is hidden, not disposed, so it can be reopened
        this.setDefaultCloseOperation(HIDE_ON_CLOSE);
        this.buttonOk.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                SessionSummaryFrame.this.setVisible(false);
            }
        });

        // esc key closes window
        this.getRootPane().registerKeyboardAction(
            new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent arg0) {
                    SessionSummaryFrame.this.setVisible(false);
                }
            },
            KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0),
            JComponent.WHEN_IN_FOCUSED_WINDOW);
    }

    // Builds the static widget tree (MigLayout): label, scrollable text area,
    // copy-to-clipboard button, OK button.
    private void createComponents() {
        setLayout(
            new MigLayout(
                "fill",
                "",
                "[pref!][][pref!]16[pref!]"));

        // labelSessionSummary
        add(new JLabel(t("session_summary.summary")), "wrap");

        // textAreaContents
        this.textAreaSummary = new JTextArea();
        JScrollPane scrollPane = new JScrollPane(this.textAreaSummary);
        add(scrollPane, "grow, wrap");

        // button copy to clipboard
        this.buttonCopyToClipboard = new JButton(t("session_summary.copy_to_clipboard"));
        add(this.buttonCopyToClipboard, "width 150, right, wrap");

        // buttonOk
        this.buttonOk = new JButton(t("session_summary.ok"));
        add(this.buttonOk, "tag ok");
    }

    // Regenerates the full text summary for the given category and solutions
    // and loads it into the text area. solutions[0] is the most recent solve;
    // solutions[length-1] is the oldest (see start/end extraction below).
    private void updateSummary(Category currentCategory, Solution[] solutions) {
        StringBuilder summary = new StringBuilder();

        if (solutions.length >= 1) {
            // categoryName
            summary.append(currentCategory.getDescription());
            summary.append("\n");

            // session interval
            Date start = solutions[solutions.length - 1].getTiming().getStart();
            Date end = solutions[0].getTiming().getEnd();

            DateFormat dateTimeFormat = DateFormat.getDateTimeInstance(DateFormat.MEDIUM, DateFormat.MEDIUM);
            DateFormat timeFormat = DateFormat.getTimeInstance(DateFormat.MEDIUM);

            summary.append(dateTimeFormat.format(start) + " - " + timeFormat.format(end));
            summary.append("\n");
            summary.append("\n");

            // statistics — labels[i] pairs with statistics[i]; measures whose
            // minimum window exceeds the number of solutions are skipped
            String[] labels = {
                t("session_summary.mean"),
                t("session_summary.average"),
                t("session_summary.best_time"),
                t("session_summary.median"),
                t("session_summary.worst_time"),
                t("session_summary.standard_deviation"),
            };

            StatisticalMeasure[] statistics = {
                new Mean(1, Integer.MAX_VALUE),
                new Average(3, Integer.MAX_VALUE),
                new Best(1, Integer.MAX_VALUE),
                new Percentile(1, Integer.MAX_VALUE, 0.5),
                new Worst(1, Integer.MAX_VALUE),
                new StandardDeviation(1, Integer.MAX_VALUE),
            };

            // column widths for aligned output
            int maxLabelLength = 0;
            for (int i = 0; i < labels.length; i++) {
                if (labels[i].length() > maxLabelLength) {
                    maxLabelLength = labels[i].length();
                }
            }

            int maxStringLength = 0;
            for (int i = 0; i < statistics.length; i++) {
                if (solutions.length < statistics[i].getMinimumWindowSize()) {
                    continue;
                }
                statistics[i].setSolutions(solutions);
                String s = SolutionUtils.format(statistics[i].getValue());
                if (s.length() > maxStringLength) {
                    maxStringLength = s.length();
                }
            }

            for (int i = 0; i < labels.length; i++) {
                if (solutions.length < statistics[i].getMinimumWindowSize()) {
                    continue;
                }
                summary.append(String.format(
                    "%-" + maxLabelLength + "s %" + maxStringLength + "s",
                    labels[i],
                    SolutionUtils.format(statistics[i].getValue())));
                summary.append("\n");
            }

            summary.append("\n");
        }

        // best average of X
        String[] labels = {
            t("session_summary.best_average_of_5"),
            t("session_summary.best_average_of_12"),
        };

        StatisticalMeasure[] statistics = {
            new BestAverage(5, Integer.MAX_VALUE),
            new BestAverage(12, Integer.MAX_VALUE),
        };

        for (int i = 0; i < statistics.length; i++) {
            int windowSize = statistics[i].getMinimumWindowSize();
            if (solutions.length >= windowSize) {
                statistics[i].setSolutions(solutions);
                int windowPosition = statistics[i].getWindowPosition();

                // value
                summary.append(labels[i] + " " + SolutionUtils.format(statistics[i].getValue()));
                summary.append("\n");

                // index range (1-based, counting from the oldest solution)
                summary.append(String.format(
                    " %d-%d - ",
                    solutions.length - windowPosition - windowSize + 1,
                    solutions.length - windowPosition));

                // find indices of best and worst times
                int indexBest = 0;
                int indexWorst = 0;
                long[] times = new long[windowSize];
                for (int j = 0; j < windowSize; j++) {
                    times[j] = SolutionUtils.realTime(solutions[windowPosition + j]);
                    if (times[j] < times[indexBest]) {
                        indexBest = j;
                    }
                    if (times[j] > times[indexWorst]) {
                        indexWorst = j;
                    }
                }

                // times — best and worst shown in parentheses, oldest first
                String sTimes = "";
                for (int j = windowSize - 1; j >= 0; j--) {
                    if (j == indexBest || j == indexWorst) {
                        sTimes += "(" + SolutionUtils.format(times[j]) + ") ";
                    } else {
                        sTimes += SolutionUtils.format(times[j]) + " ";
                    }
                }
                summary.append(sTimes.trim());
                summary.append("\n");
                summary.append("\n");
            }
        }

        // solutions — one line each, newest last, with aligned index and time
        String[] sSolutions = new String[solutions.length];
        long[] realTimes = SolutionUtils.realTimes(solutions, false);
        int maxStringLength = 0;
        for (int i = 0; i < realTimes.length; i++) {
            sSolutions[i] = SolutionUtils.format(realTimes[i]);
            if (sSolutions[i].length() > maxStringLength) {
                maxStringLength = sSolutions[i].length();
            }
        }

        for (int i = solutions.length - 1; i >= 0; i--) {
            // index — width derived from the number of digits in solutions.length
            String indexFormat = "%" + ((int) Math.log10(solutions.length) + 1) + "d. ";
            summary.append(String.format(indexFormat, solutions.length - i));

            // time
            String timeFormat = "%" + maxStringLength + "s ";
            summary.append(String.format(timeFormat, sSolutions[i]));

            // scramble
            summary.append(solutions[i].getScramble().getRawSequence());
            summary.append("\n");
        }

        this.textAreaSummary.setText(summary.toString());
        this.textAreaSummary.setCaretPosition(0);
    }
}
/*
 * Copyright 2015 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.screens.datamodeller.client.widgets.droolsdomain;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.annotation.PostConstruct;
import javax.enterprise.event.Observes;
import javax.inject.Inject;

import com.github.gwtbootstrap.client.ui.CheckBox;
import com.github.gwtbootstrap.client.ui.Icon;
import com.github.gwtbootstrap.client.ui.ListBox;
import com.github.gwtbootstrap.client.ui.TextBox;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Style;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.uibinder.client.UiHandler;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Widget;
import org.jboss.errai.common.client.api.Caller;
import org.kie.workbench.common.screens.datamodeller.model.droolsdomain.DroolsDomainAnnotations;
import org.kie.workbench.common.screens.datamodeller.client.resources.i18n.Constants;
import org.kie.workbench.common.screens.datamodeller.client.util.AnnotationValueHandler;
import org.kie.workbench.common.screens.datamodeller.client.util.DataModelerUtils;
import org.kie.workbench.common.screens.datamodeller.client.validation.ValidatorService;
import org.kie.workbench.common.screens.datamodeller.client.widgets.common.domain.ObjectEditor;
import org.kie.workbench.common.screens.datamodeller.events.DataModelerEvent;
import org.kie.workbench.common.screens.datamodeller.events.DataObjectFieldChangeEvent;
import org.kie.workbench.common.screens.datamodeller.events.DataObjectFieldCreatedEvent;
import org.kie.workbench.common.screens.datamodeller.events.DataObjectFieldDeletedEvent;
import org.kie.workbench.common.screens.datamodeller.service.DataModelerService;
import org.kie.workbench.common.services.datamodeller.core.Annotation;
import org.kie.workbench.common.services.datamodeller.core.DataObject;
import org.kie.workbench.common.services.datamodeller.core.ObjectProperty;
import org.uberfire.ext.editor.commons.client.validation.ValidatorCallback;
import org.uberfire.ext.widgets.common.client.common.popups.errors.ErrorPopup;

/**
 * Editor for the Drools-domain annotations of a data object (@Role,
 * @PropertyReactive, @ClassReactive, @TypeSafe, @Timestamp, @Duration,
 * @Expires, @Remotable). Widget fields are bound via UiBinder; every user
 * change is turned into a command built by {@code commandBuilder} (inherited
 * from ObjectEditor) and executed immediately.
 */
public class DroolsDataObjectEditor extends ObjectEditor {

    interface DroolsDataObjectEditorUIBinder
            extends UiBinder<Widget, DroolsDataObjectEditor> {

    }

    // Sentinel list-box value meaning "no annotation value selected".
    public static final String NOT_SELECTED = "NOT_SELECTED";

    private static final String DEFAULT_LABEL_CLASS = "gwt-Label";

    private static final String TEXT_ERROR_CLASS = "text-error";

    @UiField
    ListBox roleSelector;

    @UiField
    CheckBox classReactiveSelector;

    @UiField
    CheckBox propertyReactiveSelector;

    @UiField
    Icon roleHelpIcon;

    @UiField
    Icon classReactiveHelpIcon;

    @UiField
    Icon propertyReactiveHelpIcon;

    @UiField
    Label typeSafeLabel;

    @UiField
    Icon typeSafeHelpIcon;

    @UiField
    ListBox typeSafeSelector;

    @UiField
    Label timestampLabel;

    @UiField
    Icon timestampHelpIcon;

    @UiField
    ListBox timestampFieldSelector;

    @UiField
    Label durationLabel;

    @UiField
    ListBox durationFieldSelector;

    @UiField
    Icon durationHelpIcon;

    @UiField
    Label expiresLabel;

    @UiField
    Icon expiresHelpIcon;

    @UiField
    TextBox expires;

    @UiField
    Label remotableLabel;

    @UiField
    Icon remotableHelpIcon;

    @UiField
    CheckBox remotableSelector;

    @Inject
    private Caller<DataModelerService> modelerService;

    @Inject
    private ValidatorService validatorService;

    private static DroolsDataObjectEditorUIBinder uiBinder = GWT.create( DroolsDataObjectEditorUIBinder.class );

    public DroolsDataObjectEditor() {
        initWidget( uiBinder.createAndBindUi( this ) );

        // pointer cursor on all help icons
        roleHelpIcon.getElement().getStyle().setCursor( Style.Cursor.POINTER );
        classReactiveHelpIcon.getElement().getStyle().setCursor( Style.Cursor.POINTER );
        propertyReactiveHelpIcon.getElement().getStyle().setCursor( Style.Cursor.POINTER );
        typeSafeHelpIcon.getElement().getStyle().setCursor( Style.Cursor.POINTER );
        timestampHelpIcon.getElement().getStyle().setCursor( Style.Cursor.POINTER );
        durationHelpIcon.getElement().getStyle().setCursor( Style.Cursor.POINTER );
        expiresHelpIcon.getElement().getStyle().setCursor( Style.Cursor.POINTER );
        remotableHelpIcon.getElement().getStyle().setCursor( Style.Cursor.POINTER );
    }

    // Wires change handlers and loads the static list-box entries.
    // Starts read-only until a data object is loaded.
    @PostConstruct
    void init() {

        roleSelector.addChangeHandler( new ChangeHandler() {
            @Override
            public void onChange( ChangeEvent event ) {
                roleChanged( event );
            }
        } );
        typeSafeSelector.addChangeHandler( new ChangeHandler() {
            @Override
            public void onChange( ChangeEvent event ) {
                typeSafeChanged( event );
            }
        } );
        timestampFieldSelector.addChangeHandler( new ChangeHandler() {
            @Override
            public void onChange( ChangeEvent event ) {
                timestampChanged( event );
            }
        } );
        durationFieldSelector.addChangeHandler( new ChangeHandler() {
            @Override
            public void onChange( ChangeEvent event ) {
                durationChanged( event );
            }
        } );

        // TODO Change this when necessary (for now hardcoded here)
        roleSelector.addItem( "", NOT_SELECTED );
        roleSelector.addItem( "EVENT", "EVENT" );
        roleSelector.setSelectedValue( NOT_SELECTED );

        typeSafeSelector.addItem( "", NOT_SELECTED );
        typeSafeSelector.addItem( "false", "false" );
        typeSafeSelector.addItem( "true", "true" );

        timestampFieldSelector.addItem( "", NOT_SELECTED );

        durationFieldSelector.addItem( "", NOT_SELECTED );

        setReadonly( true );
    }

    @Override
    public String getName() {
        return "DROOLS_OBJECT_EDITOR";
    }

    @Override
    public String getDomainName() {
        return DroolsDomainEditor.DROOLS_DOMAIN;
    }

    // Enables/disables every input widget according to the read-only flag.
    public void setReadonly( boolean readonly ) {
        super.setReadonly( readonly );
        boolean value = !readonly;

        roleSelector.setEnabled( value );
        propertyReactiveSelector.setEnabled( value );
        classReactiveSelector.setEnabled( value );
        typeSafeSelector.setEnabled( value );
        expires.setEnabled( value );
        durationFieldSelector.setEnabled( value );
        timestampFieldSelector.setEnabled( value );
        remotableSelector.setEnabled( value );
    }

    // Populates all widgets from the annotations present on the data object.
    // Clears first, then re-enables editing unless the context is read-only.
    protected void loadDataObject( DataObject dataObject ) {
        clean();
        setReadonly( true );
        if ( dataObject != null ) {
            this.dataObject = dataObject;

            Annotation annotation = dataObject.getAnnotation( DroolsDomainAnnotations.ROLE_ANNOTATION );
            if ( annotation != null ) {
                String value = annotation.getValue( DroolsDomainAnnotations.VALUE_PARAM ) != null ? annotation.getValue( DroolsDomainAnnotations.VALUE_PARAM ).toString() : NOT_SELECTED;
                roleSelector.setSelectedValue( value );
            }

            // marker annotations: presence alone means "checked"
            annotation = dataObject.getAnnotation( DroolsDomainAnnotations.PROPERTY_REACTIVE_ANNOTATION );
            if ( annotation != null ) {
                propertyReactiveSelector.setValue( Boolean.TRUE );
            }

            annotation = dataObject.getAnnotation( DroolsDomainAnnotations.CLASS_REACTIVE_ANNOTATION );
            if ( annotation != null ) {
                classReactiveSelector.setValue( Boolean.TRUE );
            }

            annotation = dataObject.getAnnotation( DroolsDomainAnnotations.TYPE_SAFE_ANNOTATION );
            if ( annotation != null ) {
                String value = annotation.getValue( DroolsDomainAnnotations.VALUE_PARAM ) != null ? annotation.getValue( DroolsDomainAnnotations.VALUE_PARAM ).toString() : NOT_SELECTED;
                typeSafeSelector.setSelectedValue( value );
            }

            annotation = dataObject.getAnnotation( DroolsDomainAnnotations.EXPIRES_ANNOTATION );
            if ( annotation != null ) {
                // NOTE(review): unlike the other value reads, this does not
                // null-check the VALUE_PARAM before toString() — an @Expires
                // with no value would NPE here; verify whether that can occur.
                expires.setText( annotation.getValue( DroolsDomainAnnotations.VALUE_PARAM ).toString() );
            }

            annotation = dataObject.getAnnotation( DroolsDomainAnnotations.REMOTABLE_ANNOTATION );
            if ( annotation != null ) {
                remotableSelector.setValue( Boolean.TRUE );
            }

            loadDuration( dataObject );

            loadTimestamp( dataObject );

            setReadonly( getContext() == null || getContext().isReadonly() );
        }
    }

    // Rebuilds the duration field selector and selects the @Duration value, if any.
    private void loadDuration( DataObject dataObject ) {
        Annotation annotation;
        loadDurationSelector( dataObject );
        annotation = dataObject.getAnnotation( DroolsDomainAnnotations.DURATION_ANNOTATION );
        if ( annotation != null ) {
            String value = annotation.getValue( DroolsDomainAnnotations.VALUE_PARAM ) != null ? annotation.getValue( DroolsDomainAnnotations.VALUE_PARAM ).toString() : NOT_SELECTED;
            durationFieldSelector.setSelectedValue( value );
        }
    }

    // Rebuilds the timestamp field selector and selects the @Timestamp value, if any.
    private void loadTimestamp( DataObject dataObject ) {
        Annotation annotation;
        loadTimestampSelector( dataObject );
        annotation = dataObject.getAnnotation( DroolsDomainAnnotations.TIMESTAMP_ANNOTATION );
        if ( annotation != null ) {
            String value = annotation.getValue( DroolsDomainAnnotations.VALUE_PARAM ) != null ? annotation.getValue( DroolsDomainAnnotations.VALUE_PARAM ).toString() : NOT_SELECTED;
            timestampFieldSelector.setSelectedValue( value );
        }
    }

    // Event observers — keep the field-dependent selectors (duration/timestamp)
    // in sync when fields of the current data object change.

    private void onDataObjectFieldCreated( @Observes DataObjectFieldCreatedEvent event ) {
        updateFieldDependentSelectors( event, event.getCurrentDataObject(), event.getCurrentField() );
    }

    private void onDataObjectFieldChange( @Observes DataObjectFieldChangeEvent event ) {
        updateFieldDependentSelectors( event, event.getCurrentDataObject(), event.getCurrentField() );
    }

    private void onDataObjectFieldDeleted( @Observes DataObjectFieldDeletedEvent event ) {
        updateFieldDependentSelectors( event, event.getCurrentDataObject(), event.getCurrentField() );
    }

    // Only reacts to events from this editor's own context and data object.
    private void updateFieldDependentSelectors( DataModelerEvent event,
                                                DataObject currentDataObject,
                                                ObjectProperty currentField ) {
        if ( event.isFromContext( context != null ? context.getContextId() : null ) &&
                getDataObject() == currentDataObject ) {
            loadDuration( getDataObject() );
            loadTimestamp( getDataObject() );
        }
    }

    // Event handlers

    private void roleChanged( final ChangeEvent event ) {
        if ( getDataObject() != null ) {

            final String newRole = NOT_SELECTED.equals( roleSelector.getValue() ) ? null : roleSelector.getValue();

            commandBuilder.buildDataObjectAnnotationValueChangeCommand( getContext(), getName(), getDataObject(),
                    DroolsDomainAnnotations.ROLE_ANNOTATION, DroolsDomainAnnotations.VALUE_PARAM, newRole, true ).execute();
        }
    }

    private void typeSafeChanged( final ChangeEvent event ) {
        if ( getDataObject() != null ) {

            final String newTypeSafeValue = NOT_SELECTED.equals( typeSafeSelector.getValue() ) ? null : typeSafeSelector.getValue();

            commandBuilder.buildDataObjectAnnotationValueChangeCommand( getContext(), getName(), getDataObject(),
                    DroolsDomainAnnotations.TYPE_SAFE_ANNOTATION, DroolsDomainAnnotations.VALUE_PARAM, newTypeSafeValue, true ).execute();
        }
    }

    private void timestampChanged( final ChangeEvent event ) {
        if ( getDataObject() != null ) {

            final String newTimestampValue = NOT_SELECTED.equals( timestampFieldSelector.getValue() ) ? null : timestampFieldSelector.getValue();

            commandBuilder.buildDataObjectAnnotationValueChangeCommand( getContext(), getName(), getDataObject(),
                    DroolsDomainAnnotations.TIMESTAMP_ANNOTATION, DroolsDomainAnnotations.VALUE_PARAM, newTimestampValue, true ).execute();
        }
    }

    private void durationChanged( final ChangeEvent event ) {
        if ( getDataObject() != null ) {

            final String newDurationValue = NOT_SELECTED.equals( durationFieldSelector.getValue() ) ? null : durationFieldSelector.getValue();

            commandBuilder.buildDataObjectAnnotationValueChangeCommand( getContext(), getName(), getDataObject(),
                    DroolsDomainAnnotations.DURATION_ANNOTATION, DroolsDomainAnnotations.VALUE_PARAM, newDurationValue, true ).execute();
        }
    }

    // @PropertyReactive and @ClassReactive are mutually exclusive: checking
    // one removes the other annotation and unchecks its box.
    @UiHandler( "propertyReactiveSelector" )
    void propertyReactiveChanged( final ClickEvent event ) {
        if ( getDataObject() != null ) {

            final Boolean isChecked = propertyReactiveSelector.getValue();

            commandBuilder.buildDataObjectAddOrRemoveAnnotationCommand( getContext(), getName(), getDataObject(),
                    DroolsDomainAnnotations.PROPERTY_REACTIVE_ANNOTATION, isChecked ).execute();

            if ( isChecked ) {
                commandBuilder.buildDataObjectRemoveAnnotationCommand( getContext(), getName(), getDataObject(),
                        DroolsDomainAnnotations.CLASS_REACTIVE_ANNOTATION ).execute();
                classReactiveSelector.setValue( false );
            }
        }
    }

    @UiHandler( "classReactiveSelector" )
    void classReactiveChanged( final ClickEvent event ) {
        if ( getDataObject() != null ) {

            final Boolean isChecked = classReactiveSelector.getValue();

            commandBuilder.buildDataObjectAddOrRemoveAnnotationCommand( getContext(), getName(), getDataObject(),
                    DroolsDomainAnnotations.CLASS_REACTIVE_ANNOTATION, isChecked ).execute();

            if ( isChecked ) {
                commandBuilder.buildDataObjectRemoveAnnotationCommand( getContext(), getName(), getDataObject(),
                        DroolsDomainAnnotations.PROPERTY_REACTIVE_ANNOTATION ).execute();
                propertyReactiveSelector.setValue( false );
            }
        }
    }

    // Validates the expires text as a timer expression before committing it;
    // on failure shows an error popup and restores the error styling.
    @UiHandler( "expires" )
    void expiresChanged( final ValueChangeEvent<String> event ) {
        if ( getDataObject() != null ) {
            // Set widgets to error popup for styling purposes etc.
            expiresLabel.setStyleName( DEFAULT_LABEL_CLASS );

            final Command afterCloseCommand = new Command() {
                @Override
                public void execute() {
                    expiresLabel.setStyleName( TEXT_ERROR_CLASS );
                    expires.selectAll();
                }
            };

            final String newValue = expires.getText();

            // Otherwise validate
            validatorService.isValidTimerInterval( newValue, new ValidatorCallback() {
                @Override
                public void onFailure() {
                    ErrorPopup.showMessage( Constants.INSTANCE.validation_error_invalid_timer_expression( newValue ), null, afterCloseCommand );
                }

                @Override
                public void onSuccess() {
                    commandBuilder.buildDataObjectAnnotationValueChangeCommand( getContext(), getName(), getDataObject(),
                            DroolsDomainAnnotations.EXPIRES_ANNOTATION, DroolsDomainAnnotations.VALUE_PARAM, DataModelerUtils.nullTrim( newValue ), true ).execute();
                }
            } );
        }
    }

    @UiHandler( "remotableSelector" )
    void remotableChanged( final ClickEvent event ) {
        if ( getDataObject() != null ) {

            final Boolean isChecked = remotableSelector.getValue();

            commandBuilder.buildDataObjectAddOrRemoveAnnotationCommand( getContext(), getName(), getDataObject(),
                    DroolsDomainAnnotations.REMOTABLE_ANNOTATION, isChecked ).execute();
        }
    }

    // @Duration may point to any numeric (integral) field of the data object.
    private void loadDurationSelector( DataObject dataObject ) {
        if ( dataObject == null ) {
            return;
        }

        List<String> types = new ArrayList<String>();
        types.add( "short" );
        types.add( "int" );
        types.add( "long" );
        types.add( "java.lang.Short" );
        types.add( "java.lang.Integer" );
        types.add( "java.lang.Long" );

        String defaultValue = null;
        Annotation annotation = dataObject.getAnnotation( DroolsDomainAnnotations.DURATION_ANNOTATION );
        if ( annotation != null ) {
            defaultValue = AnnotationValueHandler.getStringValue( annotation, DroolsDomainAnnotations.VALUE_PARAM );
        }

        loadPropertySelector( durationFieldSelector, dataObject, types, defaultValue );
    }

    // @Timestamp may point to a long or date-like field of the data object.
    private void loadTimestampSelector( DataObject dataObject ) {
        if ( dataObject == null ) {
            return;
        }

        List<String> types = new ArrayList<String>();
        types.add( "long" );
        types.add( "java.lang.Long" );
        types.add( "java.util.Date" );
        types.add( "java.sql.Timestamp" );

        String defaultValue = null;
        Annotation annotation = dataObject.getAnnotation( DroolsDomainAnnotations.TIMESTAMP_ANNOTATION );
        if ( annotation != null ) {
            defaultValue = AnnotationValueHandler.getStringValue( annotation, DroolsDomainAnnotations.VALUE_PARAM );
        }

        loadPropertySelector( timestampFieldSelector, dataObject, types, defaultValue );
    }

    // Fills a selector with the data object's properties of the given types
    // (sorted by name). The current annotation value is always included even
    // if the matching field no longer exists.
    private void loadPropertySelector( ListBox selector, DataObject dataObject, List<String> types, String defaultValue ) {
        if ( dataObject == null ) {
            return;
        }

        List<ObjectProperty> properties = DataModelerUtils.filterPropertiesByType( dataObject.getProperties(), types, true );
        SortedMap<String, String> propertyNames = new TreeMap<String, String>();

        for ( ObjectProperty property : properties ) {
            propertyNames.put( property.getName(), property.getName() );
        }

        if ( defaultValue != null && !"".equals( defaultValue ) ) {
            propertyNames.put( defaultValue, defaultValue );
        }

        selector.clear();
        selector.addItem( "", NOT_SELECTED );
        for ( Map.Entry<String, String> propertyName : propertyNames.entrySet() ) {
            selector.addItem( propertyName.getKey(), propertyName.getValue() );
        }
        selector.setSelectedValue( NOT_SELECTED );
    }

    // Resets every widget to its unselected/empty state.
    public void clean() {
        roleSelector.setSelectedValue( NOT_SELECTED );
        classReactiveSelector.setValue( false );
        propertyReactiveSelector.setValue( false );
        typeSafeSelector.setSelectedValue( NOT_SELECTED );
        expires.setText( null );
        durationFieldSelector.setSelectedValue( NOT_SELECTED );
        timestampFieldSelector.setSelectedValue( NOT_SELECTED );
        remotableSelector.setValue( false );
    }
}
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package org.openqa.selenium.firefox;

import static java.util.Arrays.stream;
import static java.util.concurrent.TimeUnit.SECONDS;
import static java.util.stream.Collectors.toList;
import static org.openqa.selenium.Platform.MAC;
import static org.openqa.selenium.Platform.UNIX;
import static org.openqa.selenium.Platform.WINDOWS;

import com.google.common.collect.ImmutableList;

import org.openqa.selenium.Platform;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.os.ExecutableFinder;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Represents the Firefox executable used to launch the browser. Resolution
 * order: explicit file, the {@code webdriver.firefox.bin} system property,
 * then platform-specific well-known install locations and the PATH.
 */
public class FirefoxBinary {

  /**
   * Enumerates Firefox channels, according to https://wiki.mozilla.org/RapidRelease
   */
  public enum Channel {
    ESR("esr"),
    RELEASE("release"),
    BETA("beta"),
    AURORA("aurora"),
    NIGHTLY("nightly");

    private String name;

    Channel(String name) {
      this.name = name;
    }

    public String toString() {
      return name;
    }

    /**
     * Gets a channel with the name matching the parameter.
     *
     * @param name the channel name
     * @return the Channel enum value matching the parameter
     */
    public static Channel fromString(String name) {
      final String lcName = name.toLowerCase();
      return stream(Channel.values())
          .filter(ch -> ch.name.equals(lcName))
          .findFirst().orElseThrow(() -> new WebDriverException("Unrecognized channel: " + name));
    }
  }

  private final List<String> extraOptions = new ArrayList<>();
  private final Executable executable;
  private long timeout = SECONDS.toMillis(45);

  /**
   * Locates a Firefox binary: system property first, then platform defaults.
   * Throws if none can be found.
   */
  public FirefoxBinary() {
    Executable systemBinary = locateFirefoxBinaryFromSystemProperty();
    if (systemBinary != null) {
      executable = systemBinary;
      return;
    }

    Executable platformBinary = locateFirefoxBinariesFromPlatform().findFirst().orElse(null);
    if (platformBinary != null) {
      executable = platformBinary;
      return;
    }

    throw new WebDriverException("Cannot find firefox binary in PATH. " +
                                 "Make sure firefox is installed. OS appears to be: " + Platform.getCurrent());
  }

  /**
   * Locates a Firefox binary belonging to the given release channel. A binary
   * named via the system property must match the channel or this throws.
   */
  public FirefoxBinary(Channel channel) {
    Executable systemBinary = locateFirefoxBinaryFromSystemProperty();
    if (systemBinary != null) {
      if (systemBinary.getChannel() == channel) {
        executable = systemBinary;
        return;
      } else {
        throw new WebDriverException(
          "Firefox executable specified by system property " + FirefoxDriver.SystemProperty.BROWSER_BINARY
          + " does not belong to channel '" + channel + "', it appears to be '"
          + systemBinary.getChannel() + "'");
      }
    }

    executable = locateFirefoxBinariesFromPlatform()
      .filter(e -> e.getChannel() == channel)
      .findFirst().orElseThrow(() -> new WebDriverException(
        String.format("Cannot find firefox binary for channel '%s' in PATH", channel)));
  }

  public FirefoxBinary(File pathToFirefoxBinary) {
    executable = new Executable(pathToFirefoxBinary);
  }

  public void addCommandLineOptions(String... options) {
    Collections.addAll(extraOptions, options);
  }

  // Merges the extra command-line options accumulated here into the options object.
  void amendOptions(FirefoxOptions options) {
    options.addArguments(extraOptions);
  }

  public File getFile() {
    return executable.getFile();
  }

  public String getPath() {
    return executable.getPath();
  }

  public List<String> getExtraOptions() {
    return extraOptions;
  }

  public long getTimeout() {
    return timeout;
  }

  public void setTimeout(long timeout) {
    this.timeout = timeout;
  }

  @Override
  public String toString() {
    return "FirefoxBinary(" + executable.getPath() + ")";
  }

  public String toJson() {
    return executable.getPath();
  }

  /**
   * Locates the firefox binary from a system property. Will throw an exception if the binary cannot
   * be found.
   */
  private static Executable locateFirefoxBinaryFromSystemProperty() {
    String binaryName = System.getProperty(FirefoxDriver.SystemProperty.BROWSER_BINARY);
    if (binaryName == null)
      return null;

    File binary = new File(binaryName);
    if (binary.exists() && !binary.isDirectory())
      return new Executable(binary);

    // fix up common shorthand forms per platform before retrying
    Platform current = Platform.getCurrent();
    if (current.is(WINDOWS)) {
      if (!binaryName.endsWith(".exe")) {
        binaryName += ".exe";
      }

    } else if (current.is(MAC)) {
      if (!binaryName.endsWith(".app")) {
        binaryName += ".app";
      }
      binaryName += "/Contents/MacOS/firefox-bin";
    }

    binary = new File(binaryName);
    if (binary.exists())
      return new Executable(binary);

    throw new WebDriverException(
        String.format("'%s' property set, but unable to locate the requested binary: %s",
                      FirefoxDriver.SystemProperty.BROWSER_BINARY, binaryName));
  }

  /**
   * Locates the firefox binary by platform.
   */
  private static Stream<Executable> locateFirefoxBinariesFromPlatform() {
    ImmutableList.Builder<Executable> executables = new ImmutableList.Builder<>();

    Platform current = Platform.getCurrent();
    if (current.is(WINDOWS)) {
      executables.addAll(Stream.of("Mozilla Firefox\\firefox.exe",
                                   "Firefox Developer Edition\\firefox.exe",
                                   "Nightly\\firefox.exe")
          .map(FirefoxBinary::getPathsInProgramFiles)
          .flatMap(List::stream)
          .map(File::new).filter(File::exists)
          .map(Executable::new).collect(toList()));

    } else if (current.is(MAC)) {
      // system
      File binary = new File("/Applications/Firefox.app/Contents/MacOS/firefox-bin");
      if (binary.exists()) {
        executables.add(new Executable(binary));
      }

      // user home
      // NOTE(review): relies on the previous absolute path starting with "/" so
      // the concatenation yields "<home>/Applications/Firefox.app/..."; confirm
      // this is the intended user-level install location.
      binary = new File(System.getProperty("user.home") + binary.getAbsolutePath());
      if (binary.exists()) {
        executables.add(new Executable(binary));
      }

    } else if (current.is(UNIX)) {
      String systemFirefoxBin = new ExecutableFinder().find("firefox-bin");
      if (systemFirefoxBin != null) {
        executables.add(new Executable(new File(systemFirefoxBin)));
      }
    }

    String systemFirefox = new ExecutableFinder().find("firefox");
    if (systemFirefox != null) {
      Path firefoxPath = new File(systemFirefox).toPath();
      if (Files.isSymbolicLink(firefoxPath)) {
        try {
          // resolve the symlink and look for the real binary next to it
          Path realPath = firefoxPath.toRealPath();
          File attempt1 = realPath.getParent().resolve("firefox").toFile();
          if (attempt1.exists()) {
            executables.add(new Executable(attempt1));
          } else {
            File attempt2 = realPath.getParent().resolve("firefox-bin").toFile();
            if (attempt2.exists()) {
              executables.add(new Executable(attempt2));
            }
          }
        } catch (IOException e) {
          // ignore this path
        }

      } else {
        executables.add(new Executable(new File(systemFirefox)));
      }
    }

    return executables.build().stream();
  }

  private static List<String> getPathsInProgramFiles(final String childPath) {
    return Stream.of(getProgramFilesPath(), getProgramFiles86Path())
        .map(parent -> new File(parent, childPath).getAbsolutePath())
        .collect(Collectors.toList());
  }

  /**
   * Returns the path to the Windows Program Files.  On non-English versions, this is not necessarily
   * "C:\Program Files".
   *
   * @return the path to the Windows Program Files
   */
  private static String getProgramFilesPath() {
    return getEnvVarPath("ProgramFiles", "C:\\Program Files").replace(" (x86)", "");
  }

  private static String getProgramFiles86Path() {
    return getEnvVarPath("ProgramFiles(x86)", "C:\\Program Files (x86)");
  }

  private static String getEnvVarPath(final String envVar, final String defaultValue) {
    return getEnvVarIgnoreCase(envVar)
        .map(File::new).filter(File::exists).map(File::getAbsolutePath)
        .orElseGet(() -> new File(defaultValue).getAbsolutePath());
  }

  private static Optional<String> getEnvVarIgnoreCase(String var) {
    return System.getenv().entrySet().stream()
        .filter(e -> e.getKey().equalsIgnoreCase(var))
        .findFirst().map(Map.Entry::getValue);
  }

  //------------------------

  /**
   * @deprecated DriverService is responsible for process management
   */
  @Deprecated
  public void waitFor() {
  }

  /**
   * @deprecated DriverService is responsible for process management
   */
  @Deprecated
  public void waitFor(long timeout) {
  }

  /**
   * @deprecated DriverService is responsible for process management
   */
  @Deprecated
  public String getConsoleOutput() {
    return null;
  }

  /**
   * @deprecated DriverService is responsible for process management
   */
  @Deprecated
  public void setOutputWatcher(OutputStream stream) {
  }

  /**
   * @deprecated DriverService is responsible for process management
   */
  @Deprecated
  public void quit() {
  }
}
/*
 * Copyright 2013 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp.newtypes;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;
import com.google.javascript.rhino.Node;

import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Pattern;

/**
 * A nominal (named) type: a raw class/interface definition plus an optional
 * instantiation of its type parameters.
 *
 * @author blickly@google.com (Ben Lickly)
 * @author dimvar@google.com (Dimitris Vardoulakis)
 */
public final class NominalType {
  // In the case of a generic type (rawType.typeParameters non-empty) either:
  // a) typeMap is empty, this is an uninstantiated generic type (Foo<T>), or
  // b) typeMap's keys exactly correspond to the type parameters of rawType;
  //    this represents a completely instantiated generic type (Foo<number>).
  private final ImmutableMap<String, JSType> typeMap;
  private final RawNominalType rawType;

  // Matches all-digit property names; used to special-case Array index access.
  private static final Pattern NUMERIC_PATTERN = Pattern.compile("\\d+");

  private NominalType(
      ImmutableMap<String, JSType> typeMap, RawNominalType rawType) {
    // Invariant: typeMap is either empty or its key set equals exactly the
    // raw type's type parameters (checked in both directions).
    Preconditions.checkState(typeMap.isEmpty()
        || typeMap.keySet().containsAll(rawType.typeParameters)
        && rawType.typeParameters.containsAll(typeMap.keySet()));
    this.typeMap = typeMap;
    this.rawType = rawType;
  }

  // This should only be called during GlobalTypeInfo.
  public RawNominalType getRawNominalType() {
    // Once the raw type is finalized, callers must not obtain it for mutation.
    Preconditions.checkState(!isFinalized());
    return this.rawType;
  }

  public JSType getInstanceAsJSType() {
    // For an instantiated generic, wrap this NominalType (which carries the
    // type map); otherwise delegate to the raw type's cached instance.
    return (rawType.isGeneric() && !typeMap.isEmpty())
        ? JSType.fromObjectType(ObjectType.fromNominalType(this))
        : rawType.getInstanceAsJSType();
  }

  ObjectKind getObjectKind() {
    return rawType.objectKind;
  }

  Map<String, JSType> getTypeMap() {
    return typeMap;
  }

  // "Classy" = a real user-visible class/interface, i.e. not the special
  // built-ins Function or Object.
  boolean isClassy() {
    return !isFunction() && !isObject();
  }

  boolean isFunction() {
    return "Function".equals(rawType.name);
  }

  private boolean isObject() {
    return "Object".equals(rawType.name);
  }

  public boolean isStruct() {
    return rawType.isStruct();
  }

  public boolean isDict() {
    return rawType.isDict();
  }

  public boolean isGeneric() {
    return rawType.isGeneric();
  }

  public boolean isUninstantiatedGenericType() {
    return rawType.isGeneric() && typeMap.isEmpty();
  }

  // Instantiates by position: types.get(i) maps to the i-th type parameter.
  NominalType instantiateGenerics(List<JSType> types) {
    Preconditions.checkState(types.size() == rawType.typeParameters.size());
    Map<String, JSType> typeMap = new LinkedHashMap<>();
    for (int i = 0; i < rawType.typeParameters.size(); i++) {
      typeMap.put(rawType.typeParameters.get(i), types.get(i));
    }
    return instantiateGenerics(typeMap);
  }

  // Instantiates by name. If this type is already (partially) instantiated,
  // newTypeMap is applied through the existing map via substituteGenerics.
  NominalType instantiateGenerics(Map<String, JSType> newTypeMap) {
    if (newTypeMap.isEmpty()) {
      return this;
    }
    if (!this.rawType.isGeneric()) {
      // Nothing to instantiate; return the canonical uninstantiated wrapper.
      return this.rawType.wrappedAsNominal;
    }
    ImmutableMap.Builder<String, JSType> builder = ImmutableMap.builder();
    ImmutableMap<String, JSType> resultMap;
    if (!typeMap.isEmpty()) {
      // Already instantiated: substitute inside each mapped type.
      for (String oldKey : typeMap.keySet()) {
        builder.put(oldKey, typeMap.get(oldKey).substituteGenerics(newTypeMap));
      }
      resultMap = builder.build();
    } else {
      // Uninstantiated: keep only the entries relevant to our parameters.
      for (String newKey : rawType.typeParameters) {
        if (newTypeMap.containsKey(newKey)) {
          builder.put(newKey, newTypeMap.get(newKey));
        }
      }
      resultMap = builder.build();
      if (resultMap.isEmpty()) {
        return this;
      }
      // This works around a bug in FunctionType, because we can't know where
      // FunctionType#receiverType is coming from.
      // If the condition is true, receiverType comes from a method declaration,
      // and we should not create a new type here.
      if (resultMap.size() < rawType.typeParameters.size()) {
        return this;
      }
    }
    return new NominalType(resultMap, this.rawType);
  }

  // Methods that delegate to RawNominalType

  public String getName() {
    return rawType.name;
  }

  public Node getDefsite() {
    return rawType.defSite;
  }

  // Only used for keys in GlobalTypeInfo
  public RawNominalType getId() {
    return rawType;
  }

  public boolean isClass() {
    return rawType.isClass();
  }

  public boolean isInterface() {
    return rawType.isInterface();
  }

  /** True iff it has all properties and the RawNominalType is immutable */
  public boolean isFinalized() {
    return rawType.isFinalized;
  }

  public ImmutableSet<String> getAllPropsOfInterface() {
    return rawType.getAllPropsOfInterface();
  }

  public ImmutableSet<String> getAllPropsOfClass() {
    return rawType.getAllPropsOfClass();
  }

  // Returns the superclass with this type's instantiation applied to it,
  // or null when there is no superclass.
  public NominalType getInstantiatedSuperclass() {
    Preconditions.checkState(rawType.isFinalized);
    if (rawType.superClass == null) {
      return null;
    }
    return rawType.superClass.instantiateGenerics(typeMap);
  }

  public JSType getPrototype() {
    Preconditions.checkState(rawType.isFinalized);
    return rawType.getCtorPropDeclaredType("prototype")
        .substituteGenerics(typeMap);
  }

  // Returns the implemented interfaces with this type's instantiation applied.
  public ImmutableSet<NominalType> getInstantiatedInterfaces() {
    Preconditions.checkState(rawType.isFinalized);
    ImmutableSet.Builder<NominalType> result = ImmutableSet.builder();
    for (NominalType interf : rawType.interfaces) {
      result.add(interf.instantiateGenerics(typeMap));
    }
    return result.build();
  }

  Property getProp(String pname) {
    // Special case: numeric index access on Array yields the element type
    // (the single type-map value), or unknown if Array is uninstantiated.
    if (this.rawType.name.equals("Array")
        && NUMERIC_PATTERN.matcher(pname).matches()) {
      if (typeMap.isEmpty()) {
        return Property.make(JSType.UNKNOWN, null);
      }
      Preconditions.checkState(typeMap.size() == 1);
      JSType elmType = Iterables.getOnlyElement(typeMap.values());
      return Property.make(elmType, null);
    }
    Property p = rawType.getProp(pname);
    return p == null ? null : p.substituteGenerics(typeMap);
  }

  public Node getPropDefsite(String pname) {
    // NOTE(review): assumes the property exists; getProp may return null
    // for unknown names, which would NPE here — callers presumably check.
    return getProp(pname).getDefsite();
  }

  public JSType getPropDeclaredType(String pname) {
    JSType type = rawType.getInstancePropDeclaredType(pname);
    if (type == null) {
      return null;
    }
    return type.substituteGenerics(typeMap);
  }

  public boolean hasConstantProp(String pname) {
    Property p = rawType.getProp(pname);
    return p != null && p.isConstant();
  }

  static JSType getConstructorObject(FunctionType ctorFn) {
    return ctorFn.nominalType.rawType.getConstructorObject(ctorFn);
  }

  boolean isSubtypeOf(NominalType other) {
    RawNominalType thisRaw = this.rawType;
    if (thisRaw == other.rawType) {
      // Same raw type: subtyping reduces to comparing the instantiations.
      return areTypeMapsCompatible(other);
    }
    if (other.isInterface()) {
      // If thisRaw is not finalized, thisRaw.interfaces may be null.
      for (NominalType i : thisRaw.getInterfaces()) {
        if (i.instantiateGenerics(this.typeMap).isSubtypeOf(other)) {
          return true;
        }
      }
    }
    // Note that other can still be an interface here (implemented by a superclass)
    return isClass() && thisRaw.superClass != null
        && thisRaw.superClass.instantiateGenerics(this.typeMap).isSubtypeOf(other);
  }

  // Precondition: same raw type. Checks whether this instantiation is a
  // subtype-compatible instantiation of other's.
  private boolean areTypeMapsCompatible(NominalType other) {
    Preconditions.checkState(rawType.equals(other.rawType));
    if (this.typeMap.isEmpty()) {
      return other.instantiationIsUnknownOrIdentity();
    }
    if (other.typeMap.isEmpty()) {
      return instantiationIsUnknownOrIdentity();
    }
    for (String typeVar : rawType.getTypeParameters()) {
      Preconditions.checkState(this.typeMap.containsKey(typeVar),
          "Type variable %s not in the domain: %s",
          typeVar, this.typeMap.keySet());
      Preconditions.checkState(other.typeMap.containsKey(typeVar),
          "Other (%s) doesn't contain mapping (%s->%s) from this (%s)",
          other, typeVar, this.typeMap.get(typeVar), this);
      JSType thisType = this.typeMap.get(typeVar);
      JSType otherType = other.typeMap.get(typeVar);
      if (allowCovariantGenerics(this)) {
        // Covariant comparison (currently only for Array).
        if (!thisType.isSubtypeOf(otherType)) {
          return false;
        }
      } else if (!thisType.equals(otherType)
          && JSType.unifyUnknowns(thisType, otherType) == null) {
        // Invariant comparison, modulo unknowns.
        return false;
      }
    }
    return true;
  }

  private static boolean allowCovariantGenerics(NominalType nt) {
    // TODO(dimvar): Add Object here when we handle parameterized Object.
    return nt.rawType.name.equals("Array");
  }

  // True when the instantiation maps every type variable either to unknown or
  // back to itself (i.e., it is effectively no instantiation at all).
  private boolean instantiationIsUnknownOrIdentity() {
    if (this.typeMap.isEmpty()) {
      return true;
    }
    for (String typeVar : this.rawType.getTypeParameters()) {
      Preconditions.checkState(this.typeMap.containsKey(typeVar),
          "Type variable %s not in the domain: %s",
          typeVar, this.typeMap.keySet());
      JSType t = this.typeMap.get(typeVar);
      if (!t.isUnknown() && !t.equals(JSType.fromTypeVar(typeVar))) {
        return false;
      }
    }
    return true;
  }

  // A special-case of join: the more general of the two, or null when
  // neither is a subtype of the other (or either argument is null).
  static NominalType pickSuperclass(NominalType c1, NominalType c2) {
    if (c1 == null || c2 == null) {
      return null;
    }
    if (c1.isSubtypeOf(c2)) {
      return c2;
    }
    return c2.isSubtypeOf(c1) ? c1 : null;
  }

  // A special-case of meet: the more specific of the two; null arguments act
  // as the identity.
  static NominalType pickSubclass(NominalType c1, NominalType c2) {
    if (c1 == null) {
      return c2;
    } else if (c2 == null) {
      return c1;
    }
    if (c1.isSubtypeOf(c2)) {
      return c1;
    }
    return c2.isSubtypeOf(c1) ? c2 : null;
  }

  // Unifies this (possibly containing type variables from typeParameters)
  // with the subtype other, accumulating candidate bindings in typeMultimap.
  boolean unifyWithSubtype(NominalType other, List<String> typeParameters,
      Multimap<String, JSType> typeMultimap) {
    // Walk up other's hierarchy to the ancestor with our raw type.
    other = other.findMatchingAncestorWith(this);
    if (other == null) {
      return false;
    }
    if (!isGeneric()) {
      // Non-generic nominal types don't contribute to the unification.
      return true;
    }
    // Most of the time, both nominal types are already instantiated when
    // unifyWith is called. Rarely, when we call a polymorphic function from the
    // body of a method of a polymorphic class, then other.typeMap is empty.
    // For now, don't do anything fancy in that case.
    Preconditions.checkState(!typeMap.isEmpty());
    if (other.typeMap.isEmpty()) {
      return true;
    }
    boolean hasUnified = true;
    for (String typeParam : this.rawType.typeParameters) {
      JSType fromOtherMap = other.typeMap.get(typeParam);
      Preconditions.checkNotNull(fromOtherMap,
          "Type variable %s not found in map %s",
          typeParam, other.typeMap);
      // NOTE: short-circuits — once one parameter fails, the remaining
      // parameters are not unified.
      hasUnified = hasUnified && this.typeMap.get(typeParam)
          .unifyWithSubtype(fromOtherMap, typeParameters, typeMultimap);
    }
    return hasUnified && (allowCovariantGenerics(this)
        || isInvariantWith(typeMultimap, other));
  }

  // Returns a type with the same raw type as other, but possibly different type maps.
  private NominalType findMatchingAncestorWith(NominalType other) {
    RawNominalType thisRaw = this.rawType;
    if (thisRaw == other.rawType) {
      return this;
    }
    if (other.isInterface()) {
      for (NominalType i : thisRaw.interfaces) {
        NominalType nt =
            i.instantiateGenerics(this.typeMap).findMatchingAncestorWith(other);
        if (nt != null) {
          return nt;
        }
      }
    }
    // Note that other can still be an interface here (implemented by a superclass)
    if (isClass() && thisRaw.superClass != null) {
      return thisRaw.superClass.instantiateGenerics(this.typeMap)
          .findMatchingAncestorWith(other);
    }
    return null;
  }

  // True when instantiating this with the (unique) bindings collected in
  // typeMultimap yields exactly other's type map. Any ambiguous binding
  // (more than one candidate per variable) fails the check.
  private boolean isInvariantWith(Multimap<String, JSType> typeMultimap, NominalType other) {
    Preconditions.checkState(isGeneric());
    Preconditions.checkState(this.rawType == other.rawType);
    Map<String, JSType> newTypeMap = new LinkedHashMap<>();
    for (String typeVar : typeMultimap.keySet()) {
      Collection<JSType> c = typeMultimap.get(typeVar);
      if (c.size() != 1) {
        return false;
      }
      newTypeMap.put(typeVar,
          Preconditions.checkNotNull(Iterables.getOnlyElement(c)));
    }
    NominalType instantiated = instantiateGenerics(newTypeMap);
    return Objects.equals(instantiated.typeMap, other.typeMap);
  }

  @Override
  public String toString() {
    return appendTo(new StringBuilder()).toString();
  }

  // Appends "Name" or "Name<T1,...>" to builder; see appendGenericSuffixTo.
  StringBuilder appendTo(StringBuilder builder) {
    builder.append(rawType.name);
    rawType.appendGenericSuffixTo(builder, typeMap);
    return builder;
  }

  @Override
  public int hashCode() {
    return Objects.hash(typeMap, rawType);
  }

  @Override
  public boolean equals(Object other) {
    if (other == null) {
      return false;
    }
    // Deliberately throws (rather than returning false) on foreign types;
    // within this package NominalType is only ever compared to NominalType.
    Preconditions.checkState(other instanceof NominalType);
    NominalType o = (NominalType) other;
    return Objects.equals(typeMap, o.typeMap) && rawType.equals(o.rawType);
  }

  /**
   * Represents a class or interface as defined in the code.
   * If the raw nominal type has an @template, then many nominal types can be
   * created from it by instantiation.
   */
  public static class RawNominalType extends Namespace {
    private final String name;
    // The function node (if any) that defines the type
    private final Node defSite;
    // Each instance of the class has these properties by default
    private PersistentMap<String, Property> classProps = PersistentMap.create();
    // The object pointed to by the prototype property of the constructor of
    // this class has these properties
    private PersistentMap<String, Property> protoProps = PersistentMap.create();
    // For @unrestricted, we are less strict about inexistent-prop warnings than
    // for @struct. We use this map to remember the names of props added outside
    // the constructor and the prototype methods.
    private PersistentMap<String, Property> randomProps = PersistentMap.create();
    // The "static" properties of the constructor are stored in the namespace.
    boolean isFinalized = false;
    // Consider a generic type A<T> which inherits from a generic type B<T>.
    // All instantiated A classes, such as A<number>, A<string>, etc,
    // have the same superclass and interfaces fields, because they have the
    // same raw type. You need to instantiate these fields to get the correct
    // type maps, eg, see NominalType#isSubtypeOf.
    private NominalType superClass = null;
    private ImmutableSet<NominalType> interfaces = null;
    private final boolean isInterface;
    // Used in GlobalTypeInfo to find type mismatches in the inheritance chain.
    private ImmutableSet<String> allProps = null;
    // In GlobalTypeInfo, we request (wrapped) RawNominalTypes in various
    // places. Create them here and cache them to save mem.
    private final NominalType wrappedAsNominal;
    private final JSType wrappedAsJSType;
    private final JSType wrappedAsNullableJSType;
    // Empty iff this type is not generic
    private final ImmutableList<String> typeParameters;
    private final ObjectKind objectKind;
    private FunctionType ctorFn;
    private JSType ctorFnWrappedAsJSType;
    private NominalType builtinFunction;

    private RawNominalType(
        Node defSite, String name, ImmutableList<String> typeParameters,
        boolean isInterface, ObjectKind objectKind) {
      Preconditions.checkNotNull(objectKind);
      Preconditions.checkState(defSite == null || defSite.isFunction());
      if (typeParameters == null) {
        typeParameters = ImmutableList.of();
      }
      this.name = name;
      this.defSite = defSite;
      this.typeParameters = typeParameters;
      this.isInterface = isInterface;
      this.objectKind = objectKind;
      this.wrappedAsNominal =
          new NominalType(ImmutableMap.<String, JSType>of(), this);
      ObjectType objInstance;
      switch (name) {
        case "Function":
          objInstance = ObjectType.fromFunction(
              FunctionType.TOP_FUNCTION, this.wrappedAsNominal);
          break;
        case "Object":
          // We do this to avoid having two instances of ObjectType that both
          // represent the top JS object.
          objInstance = ObjectType.TOP_OBJECT;
          break;
        default:
          objInstance = ObjectType.fromNominalType(this.wrappedAsNominal);
      }
      this.wrappedAsJSType = JSType.fromObjectType(objInstance);
      this.wrappedAsNullableJSType =
          JSType.join(JSType.NULL, this.wrappedAsJSType);
    }

    public static RawNominalType makeUnrestrictedClass(
        Node defSite, QualifiedName name, ImmutableList<String> typeParameters) {
      return new RawNominalType(
          defSite, name.toString(), typeParameters, false, ObjectKind.UNRESTRICTED);
    }

    public static RawNominalType makeStructClass(
        Node defSite, QualifiedName name, ImmutableList<String> typeParameters) {
      return new RawNominalType(
          defSite, name.toString(), typeParameters, false, ObjectKind.STRUCT);
    }

    public static RawNominalType makeDictClass(
        Node defSite, QualifiedName name, ImmutableList<String> typeParameters) {
      return new RawNominalType(
          defSite, name.toString(), typeParameters, false, ObjectKind.DICT);
    }

    public static RawNominalType makeInterface(
        Node defSite, QualifiedName name, ImmutableList<String> typeParameters) {
      // interfaces are struct by default
      return new RawNominalType(
          defSite, name.toString(), typeParameters, true, ObjectKind.STRUCT);
    }

    public String getName() {
      return name;
    }

    public boolean isClass() {
      return !isInterface;
    }

    public boolean isInterface() {
      return isInterface;
    }

    boolean isGeneric() {
      return !typeParameters.isEmpty();
    }

    public boolean isStruct() {
      return objectKind.isStruct();
    }

    public boolean isDict() {
      return objectKind.isDict();
    }

    ImmutableList<String> getTypeParameters() {
      return typeParameters;
    }

    public void setCtorFunction(
        FunctionType ctorFn, NominalType builtinFunction) {
      Preconditions.checkState(!isFinalized);
      this.ctorFn = ctorFn;
      this.builtinFunction = builtinFunction;
    }

    // True iff ancestor is this class or a (transitive) superclass of it.
    private boolean hasAncestorClass(RawNominalType ancestor) {
      Preconditions.checkState(ancestor.isClass());
      if (this == ancestor) {
        return true;
      } else if (this.superClass == null) {
        return false;
      } else {
        return this.superClass.rawType.hasAncestorClass(ancestor);
      }
    }

    /** @return Whether the superclass can be added without creating a cycle. */
    public boolean addSuperClass(NominalType superClass) {
      Preconditions.checkState(!isFinalized);
      Preconditions.checkState(this.superClass == null);
      if (superClass.rawType.hasAncestorClass(this)) {
        return false;
      }
      this.superClass = superClass;
      return true;
    }

    // True iff ancestor is this interface or a (transitive) superinterface.
    private boolean hasAncestorInterface(RawNominalType ancestor) {
      Preconditions.checkState(ancestor.isInterface);
      if (this == ancestor) {
        return true;
      } else if (this.interfaces == null) {
        return false;
      } else {
        for (NominalType superInter : interfaces) {
          if (superInter.rawType.hasAncestorInterface(ancestor)) {
            return true;
          }
        }
        return false;
      }
    }

    /** @return Whether the interface can be added without creating a cycle. */
    public boolean addInterfaces(ImmutableSet<NominalType> interfaces) {
      Preconditions.checkState(!isFinalized);
      Preconditions.checkState(this.interfaces == null);
      Preconditions.checkNotNull(interfaces);
      if (this.isInterface) {
        for (NominalType interf : interfaces) {
          if (interf.rawType.hasAncestorInterface(this)) {
            // On a cycle, record an empty set so this.interfaces is non-null.
            this.interfaces = ImmutableSet.of();
            return false;
          }
        }
      }
      this.interfaces = interfaces;
      return true;
    }

    public NominalType getSuperClass() {
      return superClass;
    }

    public ImmutableSet<NominalType> getInterfaces() {
      // interfaces may still be null before addInterfaces/finalize runs.
      return this.interfaces == null
          ? ImmutableSet.<NominalType>of() : this.interfaces;
    }

    // Lookup order: instance props, then "random" props, then proto props.
    private Property getOwnProp(String pname) {
      Property p = classProps.get(pname);
      if (p != null) {
        return p;
      }
      p = randomProps.get(pname);
      if (p != null) {
        return p;
      }
      return protoProps.get(pname);
    }

    private Property getPropFromClass(String pname) {
      Preconditions.checkState(!isInterface);
      Property p = getOwnProp(pname);
      if (p != null) {
        return p;
      }
      if (superClass != null) {
        p = superClass.getProp(pname);
        if (p != null) {
          return p;
        }
      }
      return null;
    }

    private Property getPropFromInterface(String pname) {
      Preconditions.checkState(isInterface);
      Property p = getOwnProp(pname);
      if (p != null) {
        return p;
      }
      if (interfaces != null) {
        for (NominalType interf : interfaces) {
          p = interf.getProp(pname);
          if (p != null) {
            return p;
          }
        }
      }
      return null;
    }

    private Property getProp(String pname) {
      if (isInterface) {
        return getPropFromInterface(pname);
      }
      return getPropFromClass(pname);
    }

    public boolean mayHaveOwnProp(String pname) {
      return getOwnProp(pname) != null;
    }

    public boolean mayHaveProp(String pname) {
      return getProp(pname) != null;
    }

    public JSType getInstancePropDeclaredType(String pname) {
      Property p = getProp(pname);
      if (p == null) {
        return null;
      } else if (p.getDeclaredType() == null && superClass != null) {
        // Inherited props may carry their declared type on the superclass.
        return superClass.getPropDeclaredType(pname);
      }
      return p.getDeclaredType();
    }

    public Set<String> getAllOwnProps() {
      Set<String> ownProps = new LinkedHashSet<>();
      ownProps.addAll(classProps.keySet());
      ownProps.addAll(protoProps.keySet());
      return ownProps;
    }

    // Lazily computed and cached in allProps; only valid after finalization.
    private ImmutableSet<String> getAllPropsOfInterface() {
      Preconditions.checkState(isInterface);
      Preconditions.checkState(isFinalized);
      if (allProps == null) {
        ImmutableSet.Builder<String> builder = ImmutableSet.builder();
        if (interfaces != null) {
          for (NominalType interf : interfaces) {
            builder.addAll(interf.rawType.getAllPropsOfInterface());
          }
        }
        allProps = builder.addAll(protoProps.keySet()).build();
      }
      return allProps;
    }

    // Lazily computed and cached in allProps; only valid after finalization.
    private ImmutableSet<String> getAllPropsOfClass() {
      Preconditions.checkState(!isInterface);
      Preconditions.checkState(isFinalized);
      if (allProps == null) {
        ImmutableSet.Builder<String> builder = ImmutableSet.builder();
        if (superClass != null) {
          builder.addAll(superClass.rawType.getAllPropsOfClass());
        }
        allProps = builder.addAll(classProps.keySet())
            .addAll(protoProps.keySet()).build();
      }
      return allProps;
    }

    // Records a "random" prop (added outside ctor/prototype); only meaningful
    // for @unrestricted classes, and never shadows a class/proto prop.
    public void addPropertyWhichMayNotBeOnAllInstances(String pname, JSType type) {
      Preconditions.checkState(!isFinalized);
      if (this.classProps.containsKey(pname)
          || this.protoProps.containsKey(pname)) {
        return;
      }
      if (this.objectKind == ObjectKind.UNRESTRICTED) {
        this.randomProps = this.randomProps.with(
            pname, Property.make(type == null ? JSType.UNKNOWN : type, type));
      }
    }

    //////////// Class Properties

    /** Add a new non-optional declared property to instances of this class */
    public void addClassProperty(String pname, Node defSite, JSType type, boolean isConstant) {
      Preconditions.checkState(!isFinalized);
      if (type == null && isConstant) {
        type = JSType.UNKNOWN;
      }
      this.classProps = this.classProps.with(pname, isConstant
          ? Property.makeConstant(defSite, type, type)
          : Property.makeWithDefsite(defSite, type, type));
      // Upgrade any proto props to declared, if present
      if (this.protoProps.containsKey(pname)) {
        addProtoProperty(pname, defSite, type, isConstant);
      }
      if (this.randomProps.containsKey(pname)) {
        this.randomProps = this.randomProps.without(pname);
      }
    }

    /** Add a new undeclared property to instances of this class */
    public void addUndeclaredClassProperty(String pname, Node defSite) {
      Preconditions.checkState(!isFinalized);
      // Only do so if there isn't a declared prop already.
      if (mayHaveProp(pname)) {
        return;
      }
      classProps = classProps.with(pname,
          Property.makeWithDefsite(defSite, JSType.UNKNOWN, null));
    }

    //////////// Prototype Properties

    /** Add a new non-optional declared prototype property to this class */
    public void addProtoProperty(String pname, Node defSite, JSType type, boolean isConstant) {
      Preconditions.checkState(!isFinalized);
      if (type == null && isConstant) {
        type = JSType.UNKNOWN;
      }
      // An undeclared class prop is superseded by the declared proto prop.
      if (this.classProps.containsKey(pname)
          && this.classProps.get(pname).getDeclaredType() == null) {
        this.classProps = this.classProps.without(pname);
      }
      if (this.randomProps.containsKey(pname)) {
        this.randomProps = this.randomProps.without(pname);
      }
      this.protoProps = this.protoProps.with(pname, isConstant
          ? Property.makeConstant(defSite, type, type)
          : Property.makeWithDefsite(defSite, type, type));
    }

    /** Add a new undeclared prototype property to this class */
    public void addUndeclaredProtoProperty(String pname, Node defSite) {
      Preconditions.checkState(!isFinalized);
      // Never downgrade an already-declared proto prop.
      if (!this.protoProps.containsKey(pname)
          || this.protoProps.get(pname).getDeclaredType() == null) {
        this.protoProps = this.protoProps.with(pname,
            Property.makeWithDefsite(defSite, JSType.UNKNOWN, null));
        if (this.randomProps.containsKey(pname)) {
          this.randomProps = this.randomProps.without(pname);
        }
      }
    }

    // Returns the object referred to by the prototype property of the
    // constructor of this class.
    private JSType createProtoObject() {
      return JSType.fromObjectType(ObjectType.makeObjectType(
          superClass, protoProps, null, false, ObjectKind.UNRESTRICTED));
    }

    //////////// Constructor Properties

    public boolean hasCtorProp(String pname) {
      return super.hasProp(pname);
    }

    /** Add a new non-optional declared property to this class's constructor */
    public void addCtorProperty(String pname, Node defSite, JSType type, boolean isConstant) {
      Preconditions.checkState(!isFinalized);
      super.addProperty(pname, defSite, type, isConstant);
    }

    /** Add a new undeclared property to this class's constructor */
    public void addUndeclaredCtorProperty(String pname, Node defSite) {
      Preconditions.checkState(!isFinalized);
      super.addUndeclaredProperty(pname, defSite, JSType.UNKNOWN, false);
    }

    public JSType getCtorPropDeclaredType(String pname) {
      return super.getPropDeclaredType(pname);
    }

    // Returns the (function) object referred to by the constructor of this class.
    // TODO(dimvar): this function shouldn't take any arguments; it should
    // construct and cache the result based on the fields.
    // But currently a couple of unit tests break because of "structural"
    // constructors with a different number of arguments.
    // For those, we should just be creating a basic function type, not be
    // adding all the static properties.
    private JSType getConstructorObject(FunctionType ctorFn) {
      Preconditions.checkState(isFinalized);
      if (this.ctorFn != ctorFn || this.ctorFnWrappedAsJSType == null) {
        JSType result = withNamedTypes(ObjectType.makeObjectType(
            this.builtinFunction, otherProps, ctorFn,
            ctorFn.isLoose(), ObjectKind.UNRESTRICTED));
        // Only cache the result for the canonical ctorFn.
        if (this.ctorFn == ctorFn) {
          this.ctorFnWrappedAsJSType = result;
        }
        return result;
      }
      return this.ctorFnWrappedAsJSType;
    }

    // Appends "<T1,T2,...>" (or "<concrete1,...>" when typeMap binds the
    // parameters); appends nothing for a non-generic type.
    private StringBuilder appendGenericSuffixTo(
        StringBuilder builder, Map<String, JSType> typeMap) {
      Preconditions.checkState(typeMap.isEmpty()
          || typeMap.keySet().containsAll(typeParameters));
      if (typeParameters.isEmpty()) {
        return builder;
      }
      boolean firstIteration = true;
      builder.append("<");
      for (String typeParam : typeParameters) {
        if (firstIteration) {
          firstIteration = false;
        } else {
          builder.append(',');
        }
        JSType concrete = typeMap.get(typeParam);
        if (concrete != null) {
          concrete.appendTo(builder);
        } else {
          builder.append(typeParam);
        }
      }
      builder.append('>');
      return builder;
    }

    // If we try to mutate the class after the AST-preparation phase, error.
    public RawNominalType finalizeNominalType() {
      Preconditions.checkState(this.ctorFn != null);
      // System.out.println("Class " + name +
      //     " created with class properties: " + classProps +
      //     " and prototype properties: " + protoProps);
      if (this.interfaces == null) {
        this.interfaces = ImmutableSet.of();
      }
      addCtorProperty("prototype", null, createProtoObject(), false);
      this.isFinalized = true;
      return this;
    }

    @Override
    public String toString() {
      StringBuilder builder = new StringBuilder(name);
      appendGenericSuffixTo(builder, ImmutableMap.<String, JSType>of());
      return builder.toString();
    }

    @Override
    public JSType toJSType() {
      Preconditions.checkState(this.isFinalized);
      return getConstructorObject(this.ctorFn);
    }

    public NominalType getAsNominalType() {
      return wrappedAsNominal;
    }

    // Don't confuse with the toJSType method, inherited from Namespace.
    // The namespace is represented by the constructor, so that method wraps the
    // constructor in a JSType, and this method wraps the instance.
    public JSType getInstanceAsJSType() {
      return wrappedAsJSType;
    }

    public JSType getInstanceAsNullableJSType() {
      return wrappedAsNullableJSType;
    }

    // equals and hashCode default to reference equality, which is what we want
  }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.redshift.model;

import java.io.Serializable;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * <p/>
 */
public class CreateHsmClientCertificateRequest extends AmazonWebServiceRequest
        implements Serializable, Cloneable {

    /**
     * <p>
     * The identifier to be assigned to the new HSM client certificate that the
     * cluster will use to connect to the HSM to use the database encryption
     * keys.
     * </p>
     */
    private String hsmClientCertificateIdentifier;

    /**
     * <p>
     * A list of tag instances.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<Tag> tags;

    /**
     * <p>
     * The identifier to be assigned to the new HSM client certificate that the
     * cluster will use to connect to the HSM to use the database encryption
     * keys.
     * </p>
     *
     * @param hsmClientCertificateIdentifier
     *        The identifier to be assigned to the new HSM client certificate
     *        that the cluster will use to connect to the HSM to use the
     *        database encryption keys.
     */
    public void setHsmClientCertificateIdentifier(
            String hsmClientCertificateIdentifier) {
        this.hsmClientCertificateIdentifier = hsmClientCertificateIdentifier;
    }

    /**
     * <p>
     * The identifier to be assigned to the new HSM client certificate that the
     * cluster will use to connect to the HSM to use the database encryption
     * keys.
     * </p>
     *
     * @return The identifier to be assigned to the new HSM client certificate
     *         that the cluster will use to connect to the HSM to use the
     *         database encryption keys.
     */
    public String getHsmClientCertificateIdentifier() {
        return this.hsmClientCertificateIdentifier;
    }

    /**
     * <p>
     * The identifier to be assigned to the new HSM client certificate that the
     * cluster will use to connect to the HSM to use the database encryption
     * keys.
     * </p>
     *
     * @param hsmClientCertificateIdentifier
     *        The identifier to be assigned to the new HSM client certificate
     *        that the cluster will use to connect to the HSM to use the
     *        database encryption keys.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateHsmClientCertificateRequest withHsmClientCertificateIdentifier(
            String hsmClientCertificateIdentifier) {
        setHsmClientCertificateIdentifier(hsmClientCertificateIdentifier);
        return this;
    }

    /**
     * <p>
     * A list of tag instances.
     * </p>
     *
     * @return A list of tag instances. Lazily initialized: never returns
     *         {@code null}.
     */
    public java.util.List<Tag> getTags() {
        if (this.tags == null) {
            this.tags = new com.amazonaws.internal.SdkInternalList<Tag>();
        }
        return this.tags;
    }

    /**
     * <p>
     * A list of tag instances.
     * </p>
     *
     * @param tags
     *        A list of tag instances. Passing {@code null} clears the current
     *        list.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        this.tags = (tags == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<Tag>(tags);
    }

    /**
     * <p>
     * A list of tag instances.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setTags(java.util.Collection)} or
     * {@link #withTags(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param tags
     *        A list of tag instances.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateHsmClientCertificateRequest withTags(Tag... tags) {
        if (this.tags == null) {
            // Pre-size the backing list for the values about to be appended.
            setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length));
        }
        for (Tag tag : tags) {
            this.tags.add(tag);
        }
        return this;
    }

    /**
     * <p>
     * A list of tag instances.
     * </p>
     *
     * @param tags
     *        A list of tag instances.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateHsmClientCertificateRequest withTags(
            java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        String certId = getHsmClientCertificateIdentifier();
        if (certId != null) {
            sb.append("HsmClientCertificateIdentifier: " + certId + ",");
        }
        java.util.List<Tag> tagList = getTags();
        if (tagList != null) {
            sb.append("Tags: " + tagList);
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so this also rejects null inputs.
        if (!(obj instanceof CreateHsmClientCertificateRequest)) {
            return false;
        }
        CreateHsmClientCertificateRequest that =
                (CreateHsmClientCertificateRequest) obj;
        String thisId = this.getHsmClientCertificateIdentifier();
        String thatId = that.getHsmClientCertificateIdentifier();
        if (thisId == null ? thatId != null : !thisId.equals(thatId)) {
            return false;
        }
        java.util.List<Tag> thisTags = this.getTags();
        java.util.List<Tag> thatTags = that.getTags();
        if (thisTags == null ? thatTags != null : !thisTags.equals(thatTags)) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        String certId = getHsmClientCertificateIdentifier();
        hashCode = prime * hashCode
                + ((certId == null) ? 0 : certId.hashCode());
        java.util.List<Tag> tagList = getTags();
        hashCode = prime * hashCode
                + ((tagList == null) ? 0 : tagList.hashCode());
        return hashCode;
    }

    @Override
    public CreateHsmClientCertificateRequest clone() {
        return (CreateHsmClientCertificateRequest) super.clone();
    }
}
// Integration tests for the Moco HTTP stub server's fluent Java API. Each test
// configures `server` (from AbstractMocoHttpTest) with request matchers and/or
// response handlers, then issues real HTTP calls through `helper` inside
// running(...), which starts the server, runs the Runnable, and stops it again.
package com.github.dreamhead.moco; import com.google.common.net.HttpHeaders; import org.apache.http.Header; import org.apache.http.HttpVersion; import org.apache.http.ProtocolVersion; import org.apache.http.client.HttpResponseException; import org.apache.http.client.fluent.Request; import org.junit.Test; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; import java.util.concurrent.TimeUnit; import static com.github.dreamhead.moco.HttpProtocolVersion.VERSION_1_0; import static com.github.dreamhead.moco.Moco.*; import static com.github.dreamhead.moco.Runner.running; import static com.github.dreamhead.moco.helper.RemoteTestUtils.remoteUrl; import static com.github.dreamhead.moco.helper.RemoteTestUtils.root; import static com.google.common.collect.ImmutableMultimap.of; import static com.google.common.io.Files.toByteArray; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.greaterThan; import static org.junit.Assert.assertThat; public class MocoTest extends AbstractMocoHttpTest { @Test public void should_return_expected_response() throws Exception { server.response("foo"); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.get(root()), is("foo")); } }); } @Test public void should_return_expected_response_with_text_api() throws Exception { server.response(text("foo")); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(root()), is("foo")); } }); } @Test public void should_return_expected_response_with_content_api() throws Exception { server.response(with("foo")); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(root()), is("foo")); } }); } @Test public void should_return_expected_response_from_file() throws Exception { server.response(file("src/test/resources/foo.response")); running(server, new Runnable() { @Override public void 
// Response-source tests: responses loaded from the filesystem or the classpath,
// optionally with an explicit charset (GBK fixtures compared byte-for-byte).
run() throws IOException { assertThat(helper.get(root()), is("foo.response")); } }); } @Test public void should_return_expected_response_from_file_with_charset() throws Exception { server.response(file("src/test/resources/gbk.response", Charset.forName("GBK"))); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.getAsBytes(root()), is(toByteArray(new File("src/test/resources/gbk.response")))); } }); } @Test public void should_return_expected_response_from_path_resource() throws Exception { server.response(pathResource("foo.response")); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.get(root()), is("foo.response")); } }); } @Test public void should_return_expected_response_from_path_resource_with_charset() throws Exception { server.response(pathResource("gbk.response", Charset.forName("GBK"))); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.getAsBytes(root()), is(toByteArray(new File("src/test/resources/gbk.response")))); } }); } @Test public void should_return_expected_response_based_on_path_resource() throws Exception { server.request(by(pathResource("foo.request"))).response("foo"); running(server, new Runnable() { @Override public void run() throws Exception { InputStream stream = this.getClass().getClassLoader().getResourceAsStream("foo.request"); assertThat(helper.postStream(root(), stream), is("foo")); } }); } @Test(expected = HttpResponseException.class) public void should_throw_exception_for_unknown_request() throws Exception { running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(root()), is("bar")); } }); } @Test public void should_return_expected_response_based_on_specified_request() throws Exception { server.request(by("foo")).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { 
// Request-matcher tests: by(content), by(uri), and composite matchers.
assertThat(helper.postContent(root(), "foo"), is("bar")); } }); } @Test public void should_return_expected_response_based_on_specified_request_with_text_api() throws Exception { server.request(by(text("foo"))).response(text("bar")); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.postContent(root(), "foo"), is("bar")); } }); } @Test public void should_return_expected_response_based_on_specified_uri() throws Exception { server.request(by(uri("/foo"))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(remoteUrl("/foo")), is("bar")); } }); } @Test public void should_match_request_based_on_multiple_matchers() throws Exception { server.request(and(by("foo"), by(uri("/foo")))).response(text("bar")); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.postContent(remoteUrl("/foo"), "foo"), is("bar")); } }); } @Test(expected = HttpResponseException.class) public void should_throw_exception_even_if_match_one_of_conditions() throws Exception { server.request(and(by("foo"), by(uri("/foo")))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { helper.get(remoteUrl("/foo")); } }); } @Test public void should_match_request_based_on_either_matcher() throws Exception { server.request(or(by("foo"), by(uri("/foo")))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(remoteUrl("/foo")), is("bar")); assertThat(helper.postContent(remoteUrl("/foo"), "foo"), is("bar")); } }); } @Test public void should_match_request_based_on_not_matcher() throws Exception { server.request(not(by(uri("/foo")))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(remoteUrl("/bar")), is("bar")); } }); } @Test public void 
// HTTP-method matching: request(...) matches any method; get/post(...) restrict it.
should_match_request_based_on_simplified_either_matcher() throws Exception { server.request(by("foo"), by(uri("/foo"))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(remoteUrl("/foo")), is("bar")); assertThat(helper.postContent(root(), "foo"), is("bar")); } }); } @Test public void should_match_get_method_by_method_api() throws Exception { server.request(and(by(uri("/foo")), by(method("get")))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(remoteUrl("/foo")), is("bar")); } }); } @Test(expected = HttpResponseException.class) public void should_not_response_for_get_while_http_method_is_not_get() throws Exception { server.get(by(uri("/foo"))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { helper.postContent(remoteUrl("/foo"), ""); } }); } @Test public void should_match_put_method_via_api() throws Exception { server.request(and(by(uri("/foo")), by(method("put")))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { String response = Request.Put(remoteUrl("/foo")).execute().returnContent().toString(); assertThat(response, is("bar")); } }); } @Test public void should_match_delete_method_via_api() throws Exception { server.request(and(by(uri("/foo")), by(method("delete")))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { String response = Request.Delete(remoteUrl("/foo")).execute().returnContent().toString(); assertThat(response, is("bar")); } }); } @Test(expected = HttpResponseException.class) public void should_not_response_for_post_while_http_method_is_not_post() throws Exception { server.post(by(uri("/foo"))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { helper.get(remoteUrl("/foo")); } }); } @Test public void 
// seq(...) tests: responses are served in order, then the last one repeats.
should_return_content_one_by_one() throws Exception { server.request(by(uri("/foo"))).response(seq("bar", "blah")); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(remoteUrl("/foo")), is("bar")); assertThat(helper.get(remoteUrl("/foo")), is("blah")); assertThat(helper.get(remoteUrl("/foo")), is("blah")); } }); } @Test public void should_return_content_one_by_one_with_text_api() throws Exception { server.request(by(uri("/foo"))).response(seq(text("bar"), text("blah"))); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(remoteUrl("/foo")), is("bar")); assertThat(helper.get(remoteUrl("/foo")), is("blah")); assertThat(helper.get(remoteUrl("/foo")), is("blah")); } }); } @Test public void should_return_response_one_by_one() throws Exception { server.request(by(uri("/foo"))).response(seq(status(302), status(302), status(200))); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.getForStatus(remoteUrl("/foo")), is(302)); assertThat(helper.getForStatus(remoteUrl("/foo")), is(302)); assertThat(helper.getForStatus(remoteUrl("/foo")), is(200)); } }); } @Test public void should_match() throws Exception { server.request(match(uri("/\\w*/foo"))).response("bar"); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.get(remoteUrl("/bar/foo")), is("bar")); assertThat(helper.get(remoteUrl("/blah/foo")), is("bar")); } }); } @Test public void should_match_header() throws Exception { server.request(match(header("foo"), "bar|blah")).response("header"); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.getWithHeader(root(), of("foo", "bar")), is("header")); assertThat(helper.getWithHeader(root(), of("foo", "blah")), is("header")); } }); } @Test public void should_exist_header() throws Exception { 
// String-shaped matchers on uri/header resources: exist, startsWith, endsWith, contain.
server.request(exist(header("foo"))).response("header"); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.getWithHeader(root(), of("foo", "bar")), is("header")); assertThat(helper.getWithHeader(root(), of("foo", "blah")), is("header")); } }); } @Test public void should_starts_with() throws Exception { server.request(startsWith(uri("/foo"))).response("bar"); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.get(remoteUrl("/foo/a")), is("bar")); assertThat(helper.get(remoteUrl("/foo/b")), is("bar")); } }); } @Test public void should_starts_with_for_resource() throws Exception { server.request(startsWith(header("foo"), "bar")).response("bar"); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.getWithHeader(root(), of("foo", "barA")), is("bar")); assertThat(helper.getWithHeader(root(), of("foo", "barB")), is("bar")); } }); } @Test public void should_ends_with() throws Exception { server.request(endsWith(uri("foo"))).response("bar"); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.get(remoteUrl("/a/foo")), is("bar")); assertThat(helper.get(remoteUrl("/b/foo")), is("bar")); } }); } @Test public void should_ends_with_for_resource() throws Exception { server.request(endsWith(header("foo"), "bar")).response("bar"); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.getWithHeader(root(), of("foo", "Abar")), is("bar")); assertThat(helper.getWithHeader(root(), of("foo", "Bbar")), is("bar")); } }); } @Test public void should_contain() throws Exception { server.request(contain(uri("foo"))).response("bar"); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.get(remoteUrl("/a/foo")), is("bar")); assertThat(helper.get(remoteUrl("/foo/a")), is("bar")); } }); } @Test public void 
// Header equality and query-parameter matching; query(...) matches against any
// of the repeated values for the same parameter name.
should_contain_for_resource() throws Exception { server.request(contain(header("foo"), "bar")).response("bar"); running(server, new Runnable() { @Override public void run() throws Exception { assertThat(helper.getWithHeader(root(), of("foo", "Abar")), is("bar")); assertThat(helper.getWithHeader(root(), of("foo", "barA")), is("bar")); } }); } @Test public void should_eq_header() throws Exception { server.request(eq(header("foo"), "bar")).response("blah"); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.getWithHeader(root(), of("foo", "bar")), is("blah")); } }); } @Test(expected = HttpResponseException.class) public void should_throw_exception_without_specified_header() throws Exception { server.request(eq(header("foo"), "bar")).response("blah"); running(server, new Runnable() { @Override public void run() throws IOException { helper.get(remoteUrl("/foo")); } }); } @Test public void should_return_expected_response_for_multiple_specified_query() throws Exception { server.request(and(by(uri("/foo")), eq(query("param"), "blah"))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(remoteUrl("/foo?param=multiple&param=blah")), is("bar")); } }); } @Test public void should_return_expected_response_for_specified_query() throws Exception { server.request(and(by(uri("/foo")), eq(query("param"), "blah"))).response("bar"); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.get(remoteUrl("/foo?param=blah")), is("bar")); } }); } @Test public void should_match_version() throws Exception { server.request(by(version(VERSION_1_0))).response("foo"); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.getWithVersion(root(), HttpVersion.HTTP_1_0), is("foo")); } }); } @Test public void should_return_expected_version() throws Exception { 
// Protocol version, status code and response-header assertions.
server.response(version(VERSION_1_0)); running(server, new Runnable() { @Override public void run() throws IOException { ProtocolVersion version = Request.Get(root()).execute().returnResponse().getProtocolVersion(); assertThat(version.getMajor(), is(1)); assertThat(version.getMinor(), is(0)); } }); } @Test public void should_return_excepted_version_with_version_api() throws Exception { server.response(version(VERSION_1_0)); running(server, new Runnable() { @Override public void run() throws IOException { ProtocolVersion version = Request.Get(root()).execute().returnResponse().getProtocolVersion(); assertThat(version.getMajor(), is(1)); assertThat(version.getMinor(), is(0)); } }); } @Test public void should_return_expected_status_code() throws Exception { server.response(status(200)); running(server, new Runnable() { @Override public void run() throws IOException { assertThat(helper.getForStatus(root()), is(200)); } }); } @Test public void should_return_expected_header() throws Exception { server.response(header(HttpHeaders.CONTENT_TYPE, "application/json")); running(server, new Runnable() { @Override public void run() throws IOException { String value = Request.Get(root()).execute().returnResponse().getFirstHeader(HttpHeaders.CONTENT_TYPE).getValue(); assertThat(value, is("application/json")); } }); } @Test public void should_return_multiple_expected_header() throws Exception { server.response(header(HttpHeaders.CONTENT_TYPE, "application/json"), header("foo", "bar")); running(server, new Runnable() { @Override public void run() throws IOException { String json = Request.Get(root()).execute().returnResponse().getFirstHeader(HttpHeaders.CONTENT_TYPE).getValue(); assertThat(json, is("application/json")); String bar = Request.Get(root()).execute().returnResponse().getFirstHeader("foo").getValue(); assertThat(bar, is("bar")); } }); } @Test public void should_wait_for_awhile() throws Exception { final long latency = 1000; final long delta = 200; 
// Latency tests: `delta` is slack added to the measured gap to avoid flaky timing.
server.response(latency(latency, TimeUnit.MILLISECONDS)); running(server, new Runnable() { @Override public void run() throws IOException { long start = System.currentTimeMillis(); helper.get(root()); int code = helper.getForStatus(root()); long stop = System.currentTimeMillis(); long gap = stop - start + delta; assertThat(gap, greaterThan(latency)); assertThat(code, is(200)); } }); } @Test public void should_wait_for_awhile_with_time_unit() throws Exception { final long delta = 200; server.response(latency(1, TimeUnit.SECONDS)); running(server, new Runnable() { @Override public void run() throws IOException { long start = System.currentTimeMillis(); helper.get(root()); int code = helper.getForStatus(root()); long stop = System.currentTimeMillis(); long gap = stop - start + delta; assertThat(gap, greaterThan(TimeUnit.SECONDS.toMillis(1))); assertThat(code, is(200)); } }); } @Test public void should_return_same_http_version_without_specified_version() throws Exception { server.response("foobar"); running(server, new Runnable() { @Override public void run() throws IOException { ProtocolVersion version10 = Request.Get(root()).version(HttpVersion.HTTP_1_0).execute().returnResponse().getProtocolVersion(); assertThat(version10.getMajor(), is(1)); assertThat(version10.getMinor(), is(0)); ProtocolVersion version11 = Request.Get(root()).version(HttpVersion.HTTP_1_1).execute().returnResponse().getProtocolVersion(); assertThat(version11.getMajor(), is(1)); assertThat(version11.getMinor(), is(1)); } }); } @Test public void should_return_same_http_version_without_specified_version_for_error_response() throws Exception { running(server, new Runnable() { @Override public void run() throws IOException { ProtocolVersion version10 = Request.Get(root()).version(HttpVersion.HTTP_1_0).execute().returnResponse().getProtocolVersion(); assertThat(version10.getMajor(), is(1)); assertThat(version10.getMinor(), is(0)); ProtocolVersion version11 = 
// Content-Type tests: default is text/plain; an explicit header(...) wins regardless
// of the order in which with(...) and header(...) are passed to response(...).
Request.Get(root()).version(HttpVersion.HTTP_1_1).execute().returnResponse().getProtocolVersion(); assertThat(version11.getMajor(), is(1)); assertThat(version11.getMinor(), is(1)); } }); } @Test public void should_return_default_content_type() throws Exception { server.response(with("foo")); running(server, new Runnable() { @Override public void run() throws Exception { Header header = Request.Get(root()).execute().returnResponse().getFirstHeader(HttpHeaders.CONTENT_TYPE); assertThat(header.getValue(), is("text/plain; charset=utf-8")); } }); } @Test public void should_return_specified_content_type() throws Exception { server.response(with("foo"), header(HttpHeaders.CONTENT_TYPE, "text/html")); running(server, new Runnable() { @Override public void run() throws Exception { Header header = Request.Get(root()).execute().returnResponse().getFirstHeader(HttpHeaders.CONTENT_TYPE); assertThat(header.getValue(), is("text/html")); } }); } @Test public void should_return_specified_content_type_no_matter_order() throws Exception { server.response(header(HttpHeaders.CONTENT_TYPE, "text/html"), with("foo")); running(server, new Runnable() { @Override public void run() throws Exception { Header header = Request.Get(root()).execute().returnResponse().getFirstHeader(HttpHeaders.CONTENT_TYPE); assertThat(header.getValue(), is("text/html")); } }); } }
// Elasticsearch core mapper for the "byte" field type. Bytes are stored and
// queried through Lucene's *int* trie encoding (NumericRangeQuery/Filter
// newIntRange, intToPrefixCoded), widening each byte to an int for indexing.
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.mapper.core; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.search.Filter; import org.apache.lucene.search.NumericRangeFilter; import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NumericIntegerAnalyzer; import org.elasticsearch.index.codec.docvaluesformat.DocValuesFormatProvider; import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider; import 
// Builder wires precision step, boost, null_value and codec/field-data providers
// into a new ByteFieldMapper; TypeParser reads the "byte" mapping JSON.
org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.mapper.*; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.search.NumericRangeFieldDataFilter; import org.elasticsearch.index.similarity.SimilarityProvider; import java.io.IOException; import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeByteValue; import static org.elasticsearch.index.mapper.MapperBuilders.byteField; import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField; /** * */ public class ByteFieldMapper extends NumberFieldMapper<Byte> { public static final String CONTENT_TYPE = "byte"; public static class Defaults extends NumberFieldMapper.Defaults { public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE); static { FIELD_TYPE.freeze(); } public static final Byte NULL_VALUE = null; } public static class Builder extends NumberFieldMapper.Builder<Builder, ByteFieldMapper> { protected Byte nullValue = Defaults.NULL_VALUE; public Builder(String name) { super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_8_BIT); builder = this; } public Builder nullValue(byte nullValue) { this.nullValue = nullValue; return this; } @Override public ByteFieldMapper build(BuilderContext context) { fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f); ByteFieldMapper fieldMapper = new ByteFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, ignoreMalformed(context), coerce(context), postingsProvider, docValuesProvider, similarity, normsLoading, fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); fieldMapper.includeInAll(includeInAll); return fieldMapper; } } public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder 
// TypeParser: delegates common numeric options to parseNumberField, then
// handles the byte-specific "null_value" mapping property.
parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException { ByteFieldMapper.Builder builder = byteField(name); parseNumberField(builder, name, node, parserContext); for (Map.Entry<String, Object> entry : node.entrySet()) { String propName = Strings.toUnderscoreCase(entry.getKey()); Object propNode = entry.getValue(); if (propName.equals("null_value")) { builder.nullValue(nodeByteValue(propNode)); } } return builder; } } private Byte nullValue; private String nullValueAsString; protected ByteFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues, Byte nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce, PostingsFormatProvider postingsProvider, DocValuesFormatProvider docValuesProvider, SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce, new NamedAnalyzer("_byte/" + precisionStep, new NumericIntegerAnalyzer(precisionStep)), new NamedAnalyzer("_byte/max", new NumericIntegerAnalyzer(Integer.MAX_VALUE)), postingsProvider, docValuesProvider, similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo); this.nullValue = nullValue; this.nullValueAsString = nullValue == null ? 
// value(): extracts a Byte from Number/BytesRef/String inputs; for a BytesRef it
// reads the first raw byte at `offset`, while parseValue() decodes a BytesRef as
// UTF-8 text instead — NOTE(review): the two BytesRef interpretations differ; this
// appears intentional (stored value vs. user-supplied term) but worth confirming.
// maxPrecisionStep() is 32, not 8, presumably because bytes are indexed via the
// 32-bit int encoding — TODO confirm against NumberFieldMapper.
null : nullValue.toString(); } @Override public FieldType defaultFieldType() { return Defaults.FIELD_TYPE; } @Override public FieldDataType defaultFieldDataType() { return new FieldDataType("byte"); } @Override protected int maxPrecisionStep() { return 32; } @Override public Byte value(Object value) { if (value == null) { return null; } if (value instanceof Number) { return ((Number) value).byteValue(); } if (value instanceof BytesRef) { return ((BytesRef) value).bytes[((BytesRef) value).offset]; } return Byte.parseByte(value.toString()); } @Override public BytesRef indexedValueForSearch(Object value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match return bytesRef.get(); } private byte parseValue(Object value) { if (value instanceof Number) { return ((Number) value).byteValue(); } if (value instanceof BytesRef) { return Byte.parseByte(((BytesRef) value).utf8ToString()); } return Byte.parseByte(value.toString()); } private int parseValueAsInt(Object value) { return parseValue(value); } @Override public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { byte iValue = Byte.parseByte(value); byte iSim = fuzziness.asByte(); return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, iValue - iSim, iValue + iSim, true, true); } @Override public Query termQuery(Object value, @Nullable QueryParseContext context) { int iValue = parseValue(value); return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, iValue, iValue, true, true); } @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { return NumericRangeQuery.newIntRange(names.indexName(), precisionStep, lowerTerm == null ? null : parseValueAsInt(lowerTerm), upperTerm == null ? 
// Filter variants mirror the queries; a null lower/upper term means an open-ended bound.
null : parseValueAsInt(upperTerm), includeLower, includeUpper); } @Override public Filter termFilter(Object value, @Nullable QueryParseContext context) { int iValue = parseValueAsInt(value); return NumericRangeFilter.newIntRange(names.indexName(), precisionStep, iValue, iValue, true, true); } @Override public Filter rangeFilter(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { return NumericRangeFilter.newIntRange(names.indexName(), precisionStep, lowerTerm == null ? null : parseValueAsInt(lowerTerm), upperTerm == null ? null : parseValueAsInt(upperTerm), includeLower, includeUpper); } @Override public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) { return NumericRangeFieldDataFilter.newByteRange((IndexNumericFieldData) parseContext.getForField(this), lowerTerm == null ? null : parseValue(lowerTerm), upperTerm == null ? 
// innerParseCreateField: resolves the byte value from an external value, a bare
// token, or a {"value": ..., "boost": ...} object; null/empty input falls back to
// null_value (or is skipped when none is configured); finally emits the indexed
// field and/or doc values. Values are parsed via parser.shortValue and narrowed
// with a (byte) cast — out-of-range handling depends on the coerce flag semantics
// in the parser, not on the cast itself.
null : parseValue(upperTerm), includeLower, includeUpper); } @Override public Filter nullValueFilter() { if (nullValue == null) { return null; } return NumericRangeFilter.newIntRange(names.indexName(), precisionStep, nullValue.intValue(), nullValue.intValue(), true, true); } @Override protected boolean customBoost() { return true; } @Override protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException { byte value; float boost = this.boost; if (context.externalValueSet()) { Object externalValue = context.externalValue(); if (externalValue == null) { if (nullValue == null) { return; } value = nullValue; } else if (externalValue instanceof String) { String sExternalValue = (String) externalValue; if (sExternalValue.length() == 0) { if (nullValue == null) { return; } value = nullValue; } else { value = Byte.parseByte(sExternalValue); } } else { value = ((Number) externalValue).byteValue(); } if (context.includeInAll(includeInAll, this)) { context.allEntries().addText(names.fullName(), Byte.toString(value), boost); } } else { XContentParser parser = context.parser(); if (parser.currentToken() == XContentParser.Token.VALUE_NULL || (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { if (nullValue == null) { return; } value = nullValue; if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) { context.allEntries().addText(names.fullName(), nullValueAsString, boost); } } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) { XContentParser.Token token; String currentFieldName = null; Byte objValue = nullValue; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else { if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { objValue = (byte) 
// merge() copies null_value from the incoming mapper; doXContentBody serializes
// the non-default mapping options back out.
parser.shortValue(coerce.value()); } } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { boost = parser.floatValue(); } else { throw new ElasticsearchIllegalArgumentException("unknown property [" + currentFieldName + "]"); } } } if (objValue == null) { // no value return; } value = objValue; } else { value = (byte) parser.shortValue(coerce.value()); if (context.includeInAll(includeInAll, this)) { context.allEntries().addText(names.fullName(), parser.text(), boost); } } } if (fieldType.indexed() || fieldType.stored()) { CustomByteNumericField field = new CustomByteNumericField(this, value, fieldType); field.setBoost(boost); fields.add(field); } if (hasDocValues()) { addDocValue(context, fields, value); } } @Override protected String contentType() { return CONTENT_TYPE; } @Override public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException { super.merge(mergeWith, mergeContext); if (!this.getClass().equals(mergeWith.getClass())) { return; } if (!mergeContext.mergeFlags().simulate()) { this.nullValue = ((ByteFieldMapper) mergeWith).nullValue; this.nullValueAsString = ((ByteFieldMapper) mergeWith).nullValueAsString; } } @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_8_BIT) { builder.field("precision_step", precisionStep); } if (includeDefaults || nullValue != null) { builder.field("null_value", nullValue); } if (includeInAll != null) { builder.field("include_in_all", includeInAll); } else if (includeDefaults) { builder.field("include_in_all", false); } } public static class CustomByteNumericField extends CustomNumericField { private final byte number; private final NumberFieldMapper mapper; public CustomByteNumericField(NumberFieldMapper mapper, byte number, 
// CustomByteNumericField: per-field token stream feeds the byte through the
// mapper's cached numeric (int) stream; null when the field is not indexed.
fieldType); this.mapper = mapper; this.number = number; } @Override public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { if (fieldType().indexed()) { return mapper.popCachedStream().setIntValue(number); } return null; } @Override public String numericAsString() { return Byte.toString(number); } } }
/* * File : CoAuthorAction.java.java * Date : 29.03.2010 * * * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright 2008 The KiWi Project. All rights reserved. * http://www.kiwi-project.eu * * The contents of this file are subject to the terms of either the GNU * General Public License Version 2 only ("GPL") or the Common * Development and Distribution License("CDDL") (collectively, the * "License"). You may not use this file except in compliance with the * License. You can obtain a copy of the License at * http://www.netbeans.org/cddl-gplv2.html * or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the * specific language governing permissions and limitations under the * License. When distributing the software, include this License Header * Notice in each file and include the License file at * nbbuild/licenses/CDDL-GPL-2-CP. KiWi designates this * particular file as subject to the "Classpath" exception as provided * by Sun in the GPL Version 2 section of the License file that * accompanied this code. If applicable, add the following below the * License Header, with the fields enclosed by brackets [] replaced by * your own identifying information: * "Portions Copyrighted [year] [name of copyright owner]" * * If you wish your version of this file to be governed by only the CDDL * or only the GPL Version 2, indicate your decision by adding * "[Contributor] elects to include this software in this distribution * under the [CDDL or GPL Version 2] license." If you do not indicate a * single choice of license, a recipient has the option to distribute * your version of this file under either the CDDL, the GPL Version 2 or * to extend the choice of license to its licensees as provided above. * However, if you add GPL Version 2 code and therefore, elected the GPL * Version 2 license, then the option applies only if the new code is * made subject to such option by the copyright holder. 
* * Contributor(s): */ package ideator.action.idea.wizard; import ideator.action.admin.MailAction; import ideator.action.register.PositionAction; import ideator.datamodel.IdeatorUserFacade; import ideator.service.IdeatorAutocompletionService; import ideator.service.IdeatorUserService; import ideator.utils.IdeatorUtils; import java.util.LinkedList; import java.util.List; import kiwi.api.user.UserService; import kiwi.exception.RegisterException; import kiwi.model.ontology.SKOSConcept; import kiwi.model.user.User; import org.hibernate.validator.Email; import org.jboss.seam.ScopeType; import org.jboss.seam.annotations.Create; import org.jboss.seam.annotations.End; import org.jboss.seam.annotations.In; import org.jboss.seam.annotations.Logger; import org.jboss.seam.annotations.Name; import org.jboss.seam.annotations.Scope; import org.jboss.seam.annotations.Transactional; import org.jboss.seam.faces.FacesMessages; import org.jboss.seam.log.Log; /** * @author Rolf Sint * @version 0.7 * @since 0.7 * */ @Name("ideator.coAuthorAction") @Scope(ScopeType.CONVERSATION) //@Transactional public class CoAuthorAction { @In public IdeaBean ideaBean; @In(create=true) private PositionAction positionAction; @Logger private static Log log; private String coAuthorsLogin; private String coAuthorsFirstName; private String coAuthorsLastName; private User selectedUser; @Email private String coAuthorsEmail; private LinkedList<IdeatorUserFacade> coAuthors; @In(value="ideator.ideaBean") private IdeaBean ideBean; @In(value="ideator.userService") private IdeatorUserService ideatorUserService; @In(value="ideator.autocompletionService") private IdeatorAutocompletionService autocompleteUserService; @In protected UserService userService; private List<User> recommendedUsers = new LinkedList<User>(); @Create public void init(){ positionAction.clear(); } @End public String addUser(){ CoAuthorTmp ct = new CoAuthorTmp(); if(isUser()) { ct.setUser(selectedUser); log.info(selectedUser.getPassword()); // 
mailAction.send(ct.getCoAuthorsEmail(), ct.getCoAuthorsFirstName()+" "+ct.getCoAuthorsLastName(), ct.getLogin(), ideaBean.getTitle(), ideaBean.getAuthor().getLogin()); } else { String pwd = IdeatorUtils.generatePassword(6); ct.setLogin(coAuthorsFirstName+coAuthorsLastName); ct.setCoAuthorsFirstName(coAuthorsFirstName); ct.setCoAuthorsLastName(coAuthorsLastName); ct.setCoAuthorsEmail(coAuthorsEmail); ct.setPwd(pwd); LinkedList<SKOSConcept> positionsTmp = positionAction.getChosenConcepts(); if(positionsTmp.size() < 1){ FacesMessages.instance().add("No Position selected"); return "coAuthorsPage"; } ct.setPositions(positionsTmp); // mailAction.send(ct.getCoAuthorsEmail(), ct.getCoAuthorsFirstName()+" "+ct.getCoAuthorsLastName(), ct.getLogin(), ideaBean.getTitle(), pwd, ideaBean.getAuthor().getLogin()); } ideaBean.getCoAuthors().add(ct); return "new_idea"; } private boolean isUser() { return userService.userExists(coAuthorsLogin); } public String setUserToSelection() { log.info("selected user #0", selectedUser.getLogin()); coAuthorsLogin = selectedUser.getFirstName(); coAuthorsFirstName = selectedUser.getFirstName(); coAuthorsLastName = selectedUser.getLastName(); coAuthorsEmail = selectedUser.getEmail(); IdeatorUserFacade iuser = ideatorUserService.getUser(selectedUser); LinkedList<SKOSConcept> ls = iuser.getPositions(); log.info(ls.size()); positionAction.setChosenConcepts(ls); return "coAuthorsPage"; } @End public String cancel(){ return "new_idea"; } public String selectUser( User user ) { //set user log.info("selected user #0", user.getLogin()); coAuthorsLogin = user.getLogin(); coAuthorsFirstName = user.getFirstName(); coAuthorsLastName = user.getLastName(); coAuthorsEmail = user.getEmail(); IdeatorUserFacade iuser = ideatorUserService.getUser(user); LinkedList<SKOSConcept> ls = iuser.getPositions(); log.info(ls.size()); positionAction.setChosenConcepts(ls); selectedUser = user; return "coAuthorsPage"; } public String unselectUser() { coAuthorsFirstName = ""; 
coAuthorsLastName = ""; coAuthorsEmail = ""; positionAction.setChosenConcepts(new LinkedList<SKOSConcept>()); selectedUser = null; return "coAuthorsPage"; } //return if list should be displayed public boolean autocompleteUsername() { if( coAuthorsFirstName == null || coAuthorsFirstName.length() < 3 ) return false; recommendedUsers = autocompleteUserService.getUsersByKeyword(coAuthorsFirstName); return recommendedUsers.size() > 0; } public LinkedList<IdeatorUserFacade> getCoAuthors() { return coAuthors; } public void setCoAuthors(LinkedList<IdeatorUserFacade> coAuthors) { this.coAuthors = coAuthors; } public String getCoAuthorsFirstName() { return coAuthorsFirstName; } public void setCoAuthorsFirstName(String coAuthorsFirstName) { this.coAuthorsFirstName = coAuthorsFirstName; } public String getCoAuthorsLastName() { return coAuthorsLastName; } public void setCoAuthorsLastName(String coAuthorsLastName) { this.coAuthorsLastName = coAuthorsLastName; } public String getCoAuthorsEmail() { return coAuthorsEmail; } public void setCoAuthorsEmail(String coAuthorsEmail) { this.coAuthorsEmail = coAuthorsEmail; } public void setRecommendedUsers(List<User> recommendedUsers) { this.recommendedUsers = recommendedUsers; } public List<User> getRecommendedUsers() { return recommendedUsers; } public boolean isExistingUser() { return selectedUser != null; } }
package modmuss50.M50;

import java.io.*;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;

/**
 * Interpreter for the small "M50" scripting language. Reads "script.m50"
 * from the working directory and executes it line by line. Supported
 * commands (colon-separated): print, goto/place, stop, input, var
 * (str/int/boo), and if ... else ... endIf.
 */
public class m50Main {

    public static File script;

    // place declarations: line number of the "place:" line -> place name
    public static HashMap<Integer, String> gotos = new HashMap<Integer, String>();
    // Variable tables, keyed by variable name.
    public static HashMap<String, String> strVars = new HashMap<String, String>();
    public static HashMap<String, Integer> intVars = new HashMap<String, Integer>();
    public static HashMap<String, Boolean> booVars = new HashMap<String, Boolean>();

    // Set once a goto has transferred control, so outer readers unwind.
    public static Boolean stopReading = false;

    public static void main(String[] args) throws IOException {
        m50Utils.print("Starting M50");
        File thisfolder = new File(".").getAbsoluteFile();
        script = new File(thisfolder, "script.m50");
        if (!script.exists()) {
            m50Utils.print("No script file found!");
            System.exit(-1);
        }
        m50Utils.print("Will now try to run " + Integer.toString(countLines(script)) + " lines of script");
        m50Utils.print();
        m50Utils.print();
        String scriptArgs;
        BufferedReader br = new BufferedReader(new FileReader(script));
        try {
            scriptArgs = br.readLine();
        } finally {
            br.close();
        }
        // FIX: an empty script file used to throw a NullPointerException here.
        if (scriptArgs == null || !scriptArgs.startsWith("#m50")) {
            m50Utils.print("this is not a script file");
            System.exit(-2);
        }
        if (!scriptArgs.contains("noPlaces")) {
            // Pre-scanning places allows goto to jump forward as well as back.
            loadPlaces();
        }
        readFrom(1);
        m50Utils.print();
        m50Utils.print();
        m50Utils.print("Script finished");
    }

    /**
     * Executes the script from the given 1-based line number until the end
     * of the file, a failing command, or a goto (stopReading).
     */
    public static void readFrom(int startLine) throws IOException {
        boolean process = false;
        int linenumber = 1;
        BufferedReader br = new BufferedReader(new FileReader(script));
        try {
            String line;
            while ((line = br.readLine()) != null) {
                if (stopReading) {
                    return;
                }
                if (linenumber == startLine) {
                    process = true;
                }
                if (process && !processLine(line, linenumber)) {
                    return;
                }
                linenumber += 1;
            }
        } finally {
            br.close(); // FIX: the reader used to leak on every early return
        }
    }

    /** Scans the whole script and registers every "place:<name>" declaration. */
    public static void loadPlaces() throws IOException {
        int linenumber = 1;
        BufferedReader br = new BufferedReader(new FileReader(script));
        try {
            String line;
            while ((line = br.readLine()) != null) {
                if (stopReading) {
                    return;
                }
                line = line.replaceAll(" ", "");
                if (line.startsWith("place")) {
                    String name = line.split(":")[1];
                    if (gotos.containsValue(name)) {
                        // FIX: message used to read "exits" instead of "exists"
                        m50Utils.print("A place exists with that name! :" + linenumber);
                    } else {
                        gotos.put(linenumber, name);
                    }
                }
                linenumber += 1;
            }
        } finally {
            br.close();
        }
    }

    /**
     * Interprets a single script line (spaces are stripped first).
     *
     * @return false to stop the surrounding reader, true to continue
     */
    public static boolean processLine(String line, int lineNumber) throws IOException {
        line = line.replaceAll(" ", "");
        if (line.startsWith("//") || line.startsWith("-")) {
            // Comment or disabled line - nothing to do.
            return true;
        }
        if (line.startsWith("print")) {
            String[] vars = line.split(":");
            if (vars[1].contains("\"")) {
                // Quoted literal: strip the quotes and the command prefix.
                m50Utils.print(line.replaceAll("\"", "").replace("print:", ""));
            } else {
                // Variable: print every value registered under that name
                // (the same name may exist in more than one type table).
                String s = strVars.get(vars[1]);
                if (s != null) {
                    m50Utils.print(s);
                }
                Integer i = intVars.get(vars[1]);
                if (i != null) {
                    m50Utils.print(i);
                }
                Boolean b = booVars.get(vars[1]);
                if (b != null) {
                    m50Utils.print(b);
                }
            }
        } else if (line.startsWith("goto")) {
            String name = line.split(":")[1];
            for (Map.Entry<Integer, String> entry : gotos.entrySet()) {
                if (entry.getValue().equals(name)) {
                    readFrom(entry.getKey());
                    stopReading = true;
                }
            }
        } else if (line.startsWith("stop")) {
            System.exit(-1);
        } else if (line.startsWith("input:")) {
            String[] vars = line.split(":");
            // Deliberately not closed: closing the Scanner would close System.in.
            Scanner reader = new Scanner(System.in);
            if (vars.length >= 3) {
                m50Utils.print(vars[2]);
            } else {
                m50Utils.print("Enter input:");
            }
            strVars.put(vars[1], reader.nextLine());
        } else if (line.startsWith("var")) {
            declareVariable(line.split(":"), lineNumber);
        } else if (line.startsWith("if")) {
            String[] vars = line.split(":");
            // FIX: the old guard (length <= 3) let 4-part lines through and
            // crashed with ArrayIndexOutOfBoundsException at vars[4].
            if (vars.length <= 4) {
                m50Utils.print("Error at " + lineNumber);
            } else if (vars[1].equals("str")) {
                String a = resolveString(vars[2]);
                String b = resolveString(vars[3]);
                Boolean condition = null;
                if (vars[4].equals("=")) {
                    condition = a.equals(b);
                } else if (vars[4].equals("!=")) {
                    condition = !a.equals(b);
                }
                evaluateBranch(condition, lineNumber);
            } else if (vars[1].equals("int")) {
                evaluateBranch(compareInts(vars[4], resolveInt(vars[2]), resolveInt(vars[3])), lineNumber);
            } else if (vars[1].equals("boo") || vars[1].equals("boolean")) {
                boolean a = resolveBoolean(vars[2]);
                boolean b = resolveBoolean(vars[3]);
                Boolean condition = null;
                if (vars[4].equals("=")) {
                    // FIX: this was written "if (var1 = (var2))" - an
                    // ASSIGNMENT - so "=" was taken whenever var2 was true.
                    condition = (a == b);
                } else if (vars[4].equals("!=")) {
                    condition = (a != b);
                }
                evaluateBranch(condition, lineNumber);
            }
        }
        return true;
    }

    /** Handles "var:<type>:<name>:<value>", rejecting duplicate names per type table. */
    private static void declareVariable(String[] vars, int lineNumber) {
        String type = vars[1];
        String name = vars[2];
        if (type.equals("str")) {
            if (strVars.containsKey(name)) {
                m50Utils.print("A variable with that name exists! :" + lineNumber);
            } else {
                strVars.put(name, vars[3]);
            }
        } else if (type.equals("int")) {
            if (intVars.containsKey(name)) {
                m50Utils.print("A variable with that name exists! :" + lineNumber);
            } else {
                intVars.put(name, Integer.parseInt(vars[3]));
            }
        } else if (type.equals("boo") || type.equals("boolean")) {
            if (booVars.containsKey(name)) {
                m50Utils.print("A variable with that name exists! :" + lineNumber);
            } else if (vars[3].equals("true")) {
                booVars.put(name, true);
            } else if (vars[3].equals("false")) {
                booVars.put(name, false);
            }
        }
    }

    /**
     * Resolves a string token: a quoted literal yields its text, otherwise
     * the named string variable (null when undefined, as before).
     */
    private static String resolveString(String token) {
        if (token.contains("\"")) {
            return token.replaceAll("\"", "");
        }
        return strVars.get(token);
    }

    /**
     * Resolves an int token: a named int variable if one exists, otherwise
     * the token parsed as a literal.
     */
    private static int resolveInt(String token) {
        Integer value = intVars.get(token);
        if (value != null) {
            // FIX: a variable holding 0 used to fall through and the variable
            // NAME was re-parsed as a number, throwing NumberFormatException.
            return value;
        }
        return Integer.parseInt(token);
    }

    /** Resolves a boolean token: literal true/false or a named boolean variable. */
    private static boolean resolveBoolean(String token) {
        if (token.equals("true")) {
            return true;
        }
        if (token.equals("false")) {
            return false;
        }
        Boolean value = booVars.get(token);
        return value != null && value;
    }

    /**
     * Evaluates an int comparison for the if command. Returns null for an
     * unrecognised operator (the statement is then ignored, as before).
     */
    private static Boolean compareInts(String op, int a, int b) {
        if (op.equals("=")) return a == b;
        if (op.equals("!=")) return a != b;
        if (op.equals(">")) return a > b;
        if (op.equals("<")) return a < b;
        if (op.equals(">=")) return a >= b;
        if (op.equals("<=")) return a <= b;
        if (op.equals("!>")) return !(a > b);
        if (op.equals("!<")) return !(a < b);
        if (op.equals(">=!")) return !(a >= b);
        if (op.equals("<=!")) return !(a <= b);
        return null;
    }

    /**
     * Runs the if body when the condition holds, or the else branch when one
     * exists. A null condition means "unknown operator": do nothing.
     */
    private static void evaluateBranch(Boolean condition, int lineNumber) throws IOException {
        int endIf = findEndIfWithElse(lineNumber);
        if (endIf == 0) {
            m50Utils.print("No end if found! at " + lineNumber);
            return;
        }
        if (condition == null) {
            return;
        }
        if (condition) {
            runIf(lineNumber + 1, endIf, false);
        } else if (hasElse(lineNumber)) {
            runIf(findElse(lineNumber) + 1, findEndIfWithoutElse(lineNumber), true);
        }
    }

    /** Counts '\n' characters; a non-empty file with none still counts as one line. */
    public static int countLines(File filename) throws IOException {
        InputStream is = new BufferedInputStream(new FileInputStream(filename));
        try {
            byte[] c = new byte[1024];
            int count = 0;
            int readChars = 0;
            boolean empty = true;
            while ((readChars = is.read(c)) != -1) {
                empty = false;
                for (int i = 0; i < readChars; ++i) {
                    if (c[i] == '\n') {
                        ++count;
                    }
                }
            }
            return (count == 0 && !empty) ? 1 : count;
        } finally {
            is.close();
        }
    }

    /**
     * Returns the number of the first line at/after startNum that starts
     * with one of the given prefixes, or 0 when there is none.
     */
    private static int findLineStartingWith(int startNum, String... prefixes) throws IOException {
        int linenumber = 1;
        BufferedReader br = new BufferedReader(new FileReader(script));
        try {
            String line;
            while ((line = br.readLine()) != null) {
                if (linenumber >= startNum) {
                    for (String prefix : prefixes) {
                        if (line.startsWith(prefix)) {
                            return linenumber;
                        }
                    }
                }
                linenumber += 1;
            }
        } finally {
            br.close(); // FIX: used to leak when a match caused an early return
        }
        return 0;
    }

    /** First "endIf" OR "else" line at/after startNum (0 if none). */
    public static int findEndIfWithElse(int startNum) throws IOException {
        return findLineStartingWith(startNum, "endIf", "else");
    }

    /** First "endIf" line at/after startNum (0 if none). */
    public static int findEndIfWithoutElse(int startNum) throws IOException {
        return findLineStartingWith(startNum, "endIf");
    }

    /** First "else" line at/after startNum (0 if none). */
    public static int findElse(int startNum) throws IOException {
        return findLineStartingWith(startNum, "else");
    }

    /**
     * Runs the lines in [startNum, endNum]. Leading '-' markers are removed
     * so "disabled" body lines execute inside a taken branch.
     */
    public static int runIf(int startNum, int endNum, boolean isElse) throws IOException {
        int linenumber = 1;
        BufferedReader br = new BufferedReader(new FileReader(script));
        try {
            String line;
            while ((line = br.readLine()) != null) {
                if (linenumber >= startNum && linenumber <= endNum) {
                    processLine(line.replace("-", ""), linenumber);
                }
                linenumber += 1;
            }
        } finally {
            br.close();
        }
        return 0;
    }

    /** @return true when the if starting at startNum has an else branch */
    public static boolean hasElse(int startNum) throws IOException {
        int lastNum = findEndIfWithElse(startNum);
        if (lastNum == 0) {
            return false;
        }
        int elseLine = findLineStartingWith(startNum, "else");
        return elseLine != 0 && elseLine <= lastNum;
    }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInspection.bytecodeAnalysis;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInspection.dataFlow.ControlFlowAnalyzer;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.ModificationTracker;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.ProjectScope;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.psi.util.PsiFormatUtil;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ConcurrentFactoryMap;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.Stack;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.security.MessageDigest;
import java.util.*;

import static com.intellij.codeInspection.bytecodeAnalysis.Direction.*;

/**
 * Project service that infers annotations (@NotNull/@Nullable/@Contract)
 * for compiled (library) methods and parameters by solving equations
 * produced from bytecode and stored in {@code BytecodeAnalysisIndex}.
 *
 * @author lambdamix
 */
public class ProjectBytecodeAnalysis {
  public static final Logger LOG = Logger.getInstance("#com.intellij.codeInspection.bytecodeAnalysis");
  // Marker placed on every annotation this service synthesizes.
  public static final Key<Boolean> INFERRED_ANNOTATION = Key.create("INFERRED_ANNOTATION");
  // Registry keys gating nullable-method inference (read once in the constructor).
  public static final String NULLABLE_METHOD = "java.annotations.inference.nullable.method";
  public static final String NULLABLE_METHOD_TRANSITIVITY = "java.annotations.inference.nullable.method.transitivity";
  // Hard cap on the number of equations pulled in per query (see collect* methods).
  public static final int EQUATIONS_LIMIT = 1000;

  private final Project myProject;
  private final boolean nullableMethod;
  private final boolean nullableMethodTransitivity;
  // Soft-value cache of deserialized equations per class digest; cleared on any PSI change (see constructor).
  private final Map<Bytes, List<HEquations>> myEquationCache = ContainerUtil.createConcurrentSoftValueMap();

  public static ProjectBytecodeAnalysis getInstance(@NotNull Project project) {
    return ServiceManager.getService(project, ProjectBytecodeAnalysis.class);
  }

  public ProjectBytecodeAnalysis(Project project) {
    myProject = project;
    nullableMethod = Registry.is(NULLABLE_METHOD);
    nullableMethodTransitivity = Registry.is(NULLABLE_METHOD_TRANSITIVITY);
    // Drop the equation cache whenever the PSI changes.
    myProject.getMessageBus().connect().subscribe(PsiModificationTracker.TOPIC, () -> myEquationCache.clear());
  }

  /**
   * Returns the inferred annotation with the given qualified name for a
   * compiled element, or null. Only @NotNull, @Nullable and @Contract are
   * ever inferred; any other FQN short-circuits to null.
   */
  @Nullable
  public PsiAnnotation findInferredAnnotation(@NotNull PsiModifierListOwner listOwner, @NotNull String annotationFQN) {
    if (!(listOwner instanceof PsiCompiledElement)) {
      return null;
    }
    if (annotationFQN.equals(AnnotationUtil.NOT_NULL)
        || annotationFQN.equals(AnnotationUtil.NULLABLE)
        || annotationFQN.equals(ControlFlowAnalyzer.ORG_JETBRAINS_ANNOTATIONS_CONTRACT)) {
      PsiAnnotation[] annotations = findInferredAnnotations(listOwner);
      for (PsiAnnotation annotation : annotations) {
        if (annotationFQN.equals(annotation.getQualifiedName())) {
          return annotation;
        }
      }
      return null;
    }
    else {
      return null;
    }
  }

  /**
   * Returns all inferred annotations for a compiled element, cached on the
   * element and recomputed when the element changes.
   */
  @NotNull
  public PsiAnnotation[] findInferredAnnotations(@NotNull final PsiModifierListOwner listOwner) {
    if (!(listOwner instanceof PsiCompiledElement)) {
      return PsiAnnotation.EMPTY_ARRAY;
    }
    return CachedValuesManager.getCachedValue(listOwner, () ->
      CachedValueProvider.Result.create(collectInferredAnnotations(listOwner), listOwner));
  }

  /**
   * Ignore inside android.jar because all class files there are dummy and contain no code at all.
   * Rely on the fact that it's always located at .../platforms/android-.../android.jar!/
   */
  private static boolean isInsideDummyAndroidJar(@Nullable PsiFile psiFile) {
    VirtualFile file = psiFile == null ? null : psiFile.getVirtualFile();
    if (file == null) return false;
    String path = file.getPath();
    int index = path.indexOf("/android.jar!/");
    return index > 0 && path.lastIndexOf("platforms/android-", index) > 0;
  }

  /**
   * Computes inferred annotations for a method or parameter by solving its
   * equations; returns an empty array for anything else or when the
   * equation limit is exceeded.
   */
  @NotNull
  private PsiAnnotation[] collectInferredAnnotations(PsiModifierListOwner listOwner) {
    if (isInsideDummyAndroidJar(listOwner.getContainingFile())) return PsiAnnotation.EMPTY_ARRAY;
    try {
      MessageDigest md = BytecodeAnalysisConverter.getMessageDigest();
      HKey primaryKey = getKey(listOwner, md);
      if (primaryKey == null) {
        return PsiAnnotation.EMPTY_ARRAY;
      }
      if (listOwner instanceof PsiMethod) {
        ArrayList<HKey> allKeys = collectMethodKeys((PsiMethod)listOwner, primaryKey);
        MethodAnnotations methodAnnotations = loadMethodAnnotations((PsiMethod)listOwner, primaryKey, allKeys);
        return toPsi(primaryKey, methodAnnotations);
      }
      else if (listOwner instanceof PsiParameter) {
        ParameterAnnotations parameterAnnotations = loadParameterAnnotations(primaryKey);
        return toPsi(parameterAnnotations);
      }
      return PsiAnnotation.EMPTY_ARRAY;
    }
    catch (EquationsLimitException e) {
      if (LOG.isDebugEnabled()) {
        String externalName = PsiFormatUtil.getExternalName(listOwner, false, Integer.MAX_VALUE);
        LOG.debug("Too many equations for " + externalName);
      }
      return PsiAnnotation.EMPTY_ARRAY;
    }
  }

  /**
   * Converts inferred method annotations to Psi annotations
   *
   * @param primaryKey primary compressed key for method
   * @param methodAnnotations inferred annotations
   * @return Psi annotations
   */
  @NotNull
  private PsiAnnotation[] toPsi(HKey primaryKey, MethodAnnotations methodAnnotations) {
    boolean notNull = methodAnnotations.notNulls.contains(primaryKey);
    boolean nullable = methodAnnotations.nullables.contains(primaryKey);
    boolean pure = methodAnnotations.pures.contains(primaryKey);
    String contractValues = methodAnnotations.contractsValues.get(primaryKey);
    String contractPsiText = null;
    // Merge contract clauses and purity into a single @Contract attribute text.
    if (contractValues != null) {
      contractPsiText = pure ? "value=" + contractValues + ",pure=true" : contractValues;
    }
    else if (pure) {
      contractPsiText = "pure=true";
    }
    PsiAnnotation psiAnnotation = contractPsiText == null ? null : createContractAnnotation(contractPsiText);
    if (notNull && psiAnnotation != null) {
      return new PsiAnnotation[]{getNotNullAnnotation(), psiAnnotation};
    }
    if (nullable && psiAnnotation != null) {
      return new PsiAnnotation[]{getNullableAnnotation(), psiAnnotation};
    }
    if (notNull) {
      return new PsiAnnotation[]{getNotNullAnnotation()};
    }
    if (nullable) {
      return new PsiAnnotation[]{getNullableAnnotation()};
    }
    if (psiAnnotation != null) {
      return new PsiAnnotation[]{psiAnnotation};
    }
    return PsiAnnotation.EMPTY_ARRAY;
  }

  /**
   * Converts inferred parameter annotations to Psi annotations
   *
   * @param parameterAnnotations inferred parameter annotations
   * @return Psi annotations
   */
  @NotNull
  private PsiAnnotation[] toPsi(ParameterAnnotations parameterAnnotations) {
    if (parameterAnnotations.notNull) {
      return new PsiAnnotation[]{getNotNullAnnotation()};
    }
    else if (parameterAnnotations.nullable) {
      return new PsiAnnotation[]{getNullableAnnotation()};
    }
    return PsiAnnotation.EMPTY_ARRAY;
  }

  // Shared, project-cached @NotNull annotation instance.
  public PsiAnnotation getNotNullAnnotation() {
    return CachedValuesManager.getManager(myProject).getCachedValue(myProject, () ->
      CachedValueProvider.Result.create(createAnnotationFromText("@" + AnnotationUtil.NOT_NULL), ModificationTracker.NEVER_CHANGED));
  }

  // Shared, project-cached @Nullable annotation instance.
  public PsiAnnotation getNullableAnnotation() {
    return CachedValuesManager.getManager(myProject).getCachedValue(myProject, () ->
      CachedValueProvider.Result.create(createAnnotationFromText("@" + AnnotationUtil.NULLABLE), ModificationTracker.NEVER_CHANGED));
  }

  // Builds (and caches per attribute text) a @Contract annotation.
  public PsiAnnotation createContractAnnotation(String contractValue) {
    Map<String, PsiAnnotation> cache = CachedValuesManager.getManager(myProject).getCachedValue(myProject, () -> {
      Map<String, PsiAnnotation> map =
        ConcurrentFactoryMap.createConcurrentMap(attrs -> createAnnotationFromText("@org.jetbrains.annotations.Contract(" + attrs + ")"));
      return CachedValueProvider.Result.create(map, ModificationTracker.NEVER_CHANGED);
    });
    return cache.get(contractValue);
  }

  /**
   * Computes the compressed primary key for a compiled method (Out
   * direction) or parameter (In direction with NOT_NULL_MASK); null for
   * anything else.
   */
  @Nullable
  public static HKey getKey(@NotNull PsiModifierListOwner owner, MessageDigest md) {
    LOG.assertTrue(owner instanceof PsiCompiledElement, owner);
    if (owner instanceof PsiMethod) {
      return BytecodeAnalysisConverter.psiKey((PsiMethod)owner, Out, md);
    }
    if (owner instanceof PsiParameter) {
      PsiElement parent = owner.getParent();
      if (parent instanceof PsiParameterList) {
        PsiElement gParent = parent.getParent();
        if (gParent instanceof PsiMethod) {
          final int index = ((PsiParameterList)parent).getParameterIndex((PsiParameter)owner);
          return BytecodeAnalysisConverter.psiKey((PsiMethod)gParent, new In(index, In.NOT_NULL_MASK), md);
        }
      }
    }
    return null;
  }

  /**
   * Collects all (starting) keys needed to infer all pieces of method annotations.
   *
   * @param method Psi method for which annotations are being inferred
   * @param primaryKey primary compressed key for this method
   * @return compressed keys for this method
   */
  public static ArrayList<HKey> collectMethodKeys(@NotNull PsiMethod method, HKey primaryKey) {
    return BytecodeAnalysisConverter.mkInOutKeys(method, primaryKey);
  }

  /**
   * Solves the @NotNull and @Nullable lattices for a parameter key.
   * NOTE(review): the nullable key is derived as dirKey + 1 - presumably the
   * adjacent direction encoding; confirm against BytecodeAnalysisConverter.
   */
  private ParameterAnnotations loadParameterAnnotations(@NotNull HKey notNullKey)
    throws EquationsLimitException {
    final Solver notNullSolver = new Solver(new ELattice<>(Value.NotNull, Value.Top), Value.Top);
    collectEquations(Collections.singletonList(notNullKey), notNullSolver);
    Map<HKey, Value> notNullSolutions = notNullSolver.solve();
    // subtle point: accept the solution under either stability of the key
    boolean notNull =
      (Value.NotNull == notNullSolutions.get(notNullKey)) || (Value.NotNull == notNullSolutions.get(notNullKey.mkUnstable()));

    final Solver nullableSolver = new Solver(new ELattice<>(Value.Null, Value.Top), Value.Top);
    final HKey nullableKey = new HKey(notNullKey.key, notNullKey.dirKey + 1, true, false);
    collectEquations(Collections.singletonList(nullableKey), nullableSolver);
    Map<HKey, Value> nullableSolutions = nullableSolver.solve();
    // subtle point: accept the solution under either stability of the key
    boolean nullable =
      (Value.Null == nullableSolutions.get(nullableKey)) || (Value.Null == nullableSolutions.get(nullableKey.mkUnstable()));
    return new ParameterAnnotations(notNull, nullable);
  }

  /**
   * Solves purity, failure, in/out contract and (optionally) nullable-method
   * equations for a method and folds the results into MethodAnnotations.
   */
  private MethodAnnotations loadMethodAnnotations(@NotNull PsiMethod owner, @NotNull HKey key, ArrayList<HKey> allKeys)
    throws EquationsLimitException {
    MethodAnnotations result = new MethodAnnotations();

    final PuritySolver puritySolver = new PuritySolver();
    collectPurityEquations(key.withDirection(Pure), puritySolver);
    Map<HKey, Set<HEffectQuantum>> puritySolutions = puritySolver.solve();

    int arity = owner.getParameterList().getParameters().length;
    BytecodeAnalysisConverter.addEffectAnnotations(puritySolutions, result, key, owner.isConstructor());

    HKey failureKey = key.withDirection(Throw);
    final Solver failureSolver = new Solver(new ELattice<>(Value.Fail, Value.Top), Value.Top);
    collectEquations(Collections.singletonList(failureKey), failureSolver);
    if (failureSolver.solve().get(failureKey) == Value.Fail) {
      // Always failing method: emit a "_,_,...->fail" contract for every argument
      result.contractsValues.put(key, StreamEx.constant("_", arity).joining(",", "\"", "->fail\""));
    }
    else {
      final Solver outSolver = new Solver(new ELattice<>(Value.Bot, Value.Top), Value.Top);
      collectEquations(allKeys, outSolver);
      Map<HKey, Value> solutions = outSolver.solve();
      BytecodeAnalysisConverter.addMethodAnnotations(solutions, result, key, arity);
    }

    if (nullableMethod) {
      final Solver nullableMethodSolver = new Solver(new ELattice<>(Value.Bot, Value.Null), Value.Bot);
      HKey nullableKey = key.withDirection(NullableOut);
      // Transitivity pulls in equations of callees; otherwise only this method's equation is used.
      if (nullableMethodTransitivity) {
        collectEquations(Collections.singletonList(nullableKey), nullableMethodSolver);
      }
      else {
        collectSingleEquation(nullableKey, nullableMethodSolver);
      }
      Map<HKey, Value> nullableSolutions = nullableMethodSolver.solve();
      if (nullableSolutions.get(nullableKey) == Value.Null || nullableSolutions.get(nullableKey.invertStability()) == Value.Null) {
        result.nullables.add(key);
      }
    }
    return result;
  }

  // Loads (and caches) the equations stored in the index for one class digest.
  // NOTE(review): get/put is not atomic on this concurrent map - two threads may
  // both compute and the last write wins; benign for a cache, but computeIfAbsent
  // would avoid the duplicated work.
  private List<HEquations> getEquations(Bytes key) {
    List<HEquations> result = myEquationCache.get(key);
    if (result == null) {
      myEquationCache.put(key, result = BytecodeAnalysisIndex.getEquations(ProjectScope.getLibrariesScope(myProject), key));
    }
    return result;
  }

  /**
   * Walks the call-dependency graph from the given key (stack-based
   * traversal), feeding purity equations into the solver. Throws
   * EquationsLimitException once more than EQUATIONS_LIMIT keys were queued.
   */
  private void collectPurityEquations(HKey key, PuritySolver puritySolver) throws EquationsLimitException {
    HashSet<HKey> queued = new HashSet<>();
    Stack<HKey> queue = new Stack<>();

    queue.push(key);
    queued.add(key);

    while (!queue.empty()) {
      if (queued.size() > EQUATIONS_LIMIT) {
        throw new EquationsLimitException();
      }
      ProgressManager.checkCanceled();
      HKey hKey = queue.pop();
      Bytes bytes = new Bytes(hKey.key);

      for (HEquations hEquations : getEquations(bytes)) {
        boolean stable = hEquations.stable;
        for (DirectionResultPair pair : hEquations.results) {
          int dirKey = pair.directionKey;
          if (dirKey == hKey.dirKey) {
            Set<HEffectQuantum> effects = ((HEffects)pair.hResult).effects;
            puritySolver.addEquation(new HKey(bytes.bytes, dirKey, stable, false), effects);
            // Every called method is a dependency whose purity we also need.
            for (HEffectQuantum effect : effects) {
              if (effect instanceof HEffectQuantum.CallQuantum) {
                HKey depKey = ((HEffectQuantum.CallQuantum)effect).key;
                if (!queued.contains(depKey)) {
                  queue.push(depKey);
                  queued.add(depKey);
                }
              }
            }
          }
        }
      }
    }
  }

  /**
   * Same traversal as collectPurityEquations, but for value equations: the
   * dependencies of a pending result (its delta components) are queued.
   */
  private void collectEquations(List<HKey> keys, Solver solver) throws EquationsLimitException {
    HashSet<HKey> queued = new HashSet<>();
    Stack<HKey> queue = new Stack<>();

    for (HKey key : keys) {
      queue.push(key);
      queued.add(key);
    }

    while (!queue.empty()) {
      if (queued.size() > EQUATIONS_LIMIT) {
        throw new EquationsLimitException();
      }
      ProgressManager.checkCanceled();
      HKey hKey = queue.pop();
      Bytes bytes = new Bytes(hKey.key);

      for (HEquations hEquations : getEquations(bytes)) {
        boolean stable = hEquations.stable;
        for (DirectionResultPair pair : hEquations.results) {
          int dirKey = pair.directionKey;
          if (dirKey == hKey.dirKey) {
            HResult result = pair.hResult;
            solver.addEquation(new HEquation(new HKey(bytes.bytes, dirKey, stable, false), result));
            if (result instanceof HPending) {
              HPending pending = (HPending)result;
              for (HComponent component : pending.delta) {
                for (HKey depKey : component.ids) {
                  if (!queued.contains(depKey)) {
                    queue.push(depKey);
                    queued.add(depKey);
                  }
                }
              }
            }
          }
        }
      }
    }
  }

  /** Adds only the equation for the given key itself - no dependency traversal. */
  private void collectSingleEquation(HKey hKey, Solver solver) throws EquationsLimitException {
    ProgressManager.checkCanceled();
    Bytes bytes = new Bytes(hKey.key);

    for (HEquations hEquations : getEquations(bytes)) {
      boolean stable = hEquations.stable;
      for (DirectionResultPair pair : hEquations.results) {
        int dirKey = pair.directionKey;
        if (dirKey == hKey.dirKey) {
          HResult result = pair.hResult;
          solver.addEquation(new HEquation(new HKey(bytes.bytes, dirKey, stable, false), result));
        }
      }
    }
  }

  /**
   * Parses annotation text into a PSI annotation, tags it as inferred and
   * makes its backing virtual file read-only.
   */
  @NotNull
  private PsiAnnotation createAnnotationFromText(@NotNull final String text) throws IncorrectOperationException {
    PsiAnnotation annotation = JavaPsiFacade.getElementFactory(myProject).createAnnotationFromText(text, null);
    annotation.putUserData(INFERRED_ANNOTATION, Boolean.TRUE);
    ((LightVirtualFile)annotation.getContainingFile().getViewProvider().getVirtualFile()).setWritable(false);
    return annotation;
  }
}

/** Mutable accumulator for everything inferred about one method. */
class MethodAnnotations {
  // @NotNull keys
  final Set<HKey> notNulls = new HashSet<>(1);
  // @Nullable keys
  final Set<HKey> nullables = new HashSet<>(1);
  // @Contract(pure=true) part of contract
  final Set<HKey> pures = new HashSet<>(1);
  // @Contracts
  final Map<HKey, String> contractsValues = new HashMap<>();
}

/** Immutable result of parameter inference: at most one of the flags is meaningful. */
class ParameterAnnotations {
  final boolean notNull;
  final boolean nullable;

  ParameterAnnotations(boolean notNull, boolean nullable) {
    this.notNull = notNull;
    this.nullable = nullable;
  }
}

/** Thrown when a query would pull in more than EQUATIONS_LIMIT equations. */
class EquationsLimitException extends Exception {}
/* * Copyright 2009 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.javascript.rhino.jstype.JSTypeNative.ARRAY_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.BOOLEAN_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.NO_OBJECT_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.NULL_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.NUMBER_STRING; import static com.google.javascript.rhino.jstype.JSTypeNative.NUMBER_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.OBJECT_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.STRING_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.UNKNOWN_TYPE; import static com.google.javascript.rhino.jstype.JSTypeNative.VOID_TYPE; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.javascript.jscomp.Scope.Var; import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import com.google.javascript.rhino.jstype.FunctionType; import com.google.javascript.rhino.jstype.JSType; import com.google.javascript.rhino.jstype.JSTypeNative; import com.google.javascript.rhino.jstype.JSTypeRegistry; import com.google.javascript.rhino.jstype.ObjectType; import java.text.MessageFormat; import java.util.Iterator; 
import java.util.List;

/**
 * A central reporter for all type violations: places where the programmer
 * has annotated a variable (or property) with one type, but has assigned
 * another type to it.
 *
 * Also doubles as a central repository for all type violations, so that
 * type-based optimizations (like AmbiguateProperties) can be fault-tolerant.
 *
 * @author nicksantos@google.com (Nick Santos)
 */
class TypeValidator {
  private final AbstractCompiler compiler;
  private final JSTypeRegistry typeRegistry;
  // Union of the primitive value types; used by expectBitwiseable.
  private final JSType allValueTypes;
  // When false, mismatches are still recorded in 'mismatches' but no warnings are reported.
  private boolean shouldReport = true;

  // TODO(nicksantos): Provide accessors to better filter the list of type
  // mismatches. For example, if we pass (Cake|null) where only Cake is
  // allowed, that doesn't mean we should invalidate all Cakes.
  private final List<TypeMismatch> mismatches = Lists.newArrayList();

  // User warnings
  private static final String FOUND_REQUIRED =
      "{0}\n" +
      "found : {1}\n" +
      "required: {2}";

  static final DiagnosticType INVALID_CAST =
      DiagnosticType.warning("JSC_INVALID_CAST",
          "invalid cast - must be a subtype or supertype\n" +
          "from: {0}\n" +
          "to : {1}");

  static final DiagnosticType TYPE_MISMATCH_WARNING =
      DiagnosticType.warning(
          "JSC_TYPE_MISMATCH",
          "{0}");

  static final DiagnosticType MISSING_EXTENDS_TAG_WARNING =
      DiagnosticType.warning(
          "JSC_MISSING_EXTENDS_TAG",
          "Missing @extends tag on type {0}");

  static final DiagnosticType DUP_VAR_DECLARATION =
      DiagnosticType.warning("JSC_DUP_VAR_DECLARATION",
          "variable {0} redefined with type {1}, " +
          "original definition at {2}:{3} with type {4}");

  static final DiagnosticType HIDDEN_PROPERTY_MISMATCH =
      DiagnosticType.warning("JSC_HIDDEN_PROPERTY_MISMATCH",
          "mismatch of the {0} property type and the type " +
          "of the property it overrides from superclass {1}\n" +
          "original: {2}\n" +
          "override: {3}");

  static final DiagnosticType INTERFACE_METHOD_NOT_IMPLEMENTED =
      DiagnosticType.warning(
          "JSC_INTERFACE_METHOD_NOT_IMPLEMENTED",
          "property {0} on interface {1} is not implemented by type {2}");

  // Every diagnostic this class can emit, grouped so callers can enable/disable them together.
  static final DiagnosticGroup ALL_DIAGNOSTICS = new DiagnosticGroup(
      INVALID_CAST,
      TYPE_MISMATCH_WARNING,
      MISSING_EXTENDS_TAG_WARNING,
      DUP_VAR_DECLARATION,
      HIDDEN_PROPERTY_MISMATCH,
      INTERFACE_METHOD_NOT_IMPLEMENTED);

  TypeValidator(AbstractCompiler compiler) {
    this.compiler = compiler;
    this.typeRegistry = compiler.getTypeRegistry();
    this.allValueTypes = typeRegistry.createUnionType(
        STRING_TYPE, NUMBER_TYPE, BOOLEAN_TYPE, NULL_TYPE, VOID_TYPE);
  }

  /**
   * Gets a list of type violations.
   *
   * For each violation, one element is the expected type and the other is
   * the type that is actually found. Order is not significant.
   */
  Iterable<TypeMismatch> getMismatches() {
    return mismatches;
  }

  /** Enables or disables warning output; mismatch recording is unaffected. */
  void setShouldReport(boolean report) {
    this.shouldReport = report;
  }

  // All non-private methods should have the form:
  // expectCondition(NodeTraversal t, Node n, ...);
  // If there is a mismatch, the {@code expect} method should issue
  // a warning and attempt to correct the mismatch, when possible.

  /**
   * Expect the type to be an object, or a type convertible to object. If the
   * expectation is not met, issue a warning at the provided node's source code
   * position.
   * @return True if there was no warning, false if there was a mismatch.
   */
  boolean expectObject(NodeTraversal t, Node n, JSType type, String msg) {
    if (!type.matchesObjectContext()) {
      mismatch(t, n, msg, type, OBJECT_TYPE);
      return false;
    }
    return true;
  }

  /**
   * Expect the type to be an object. Unlike expectObject, a type convertible
   * to object is not acceptable.
   */
  void expectActualObject(NodeTraversal t, Node n, JSType type, String msg) {
    if (!type.isObject()) {
      mismatch(t, n, msg, type, OBJECT_TYPE);
    }
  }

  /**
   * Expect the type to contain an object sometimes. If the expectation is
   * not met, issue a warning at the provided node's source code position.
   */
  void expectAnyObject(NodeTraversal t, Node n, JSType type, String msg) {
    JSType anyObjectType = getNativeType(NO_OBJECT_TYPE);
    if (!anyObjectType.isSubtype(type)) {
      mismatch(t, n, msg, type, anyObjectType);
    }
  }

  /**
   * Expect the type to be a string, or a type convertible to string. If the
   * expectation is not met, issue a warning at the provided node's source code
   * position.
   */
  void expectString(NodeTraversal t, Node n, JSType type, String msg) {
    if (!type.matchesStringContext()) {
      mismatch(t, n, msg, type, STRING_TYPE);
    }
  }

  /**
   * Expect the type to be a number, or a type convertible to number. If the
   * expectation is not met, issue a warning at the provided node's source code
   * position.
   */
  void expectNumber(NodeTraversal t, Node n, JSType type, String msg) {
    if (!type.matchesNumberContext()) {
      mismatch(t, n, msg, type, NUMBER_TYPE);
    }
  }

  /**
   * Expect the type to be a valid operand to a bitwise operator. This includes
   * numbers, any type convertible to a number, or any other primitive type
   * (undefined|null|boolean|string).
   */
  void expectBitwiseable(NodeTraversal t, Node n, JSType type, String msg) {
    if (!type.matchesNumberContext() && !type.isSubtype(allValueTypes)) {
      mismatch(t, n, msg, type, allValueTypes);
    }
  }

  /**
   * Expect the type to be a number or string, or a type convertible to a number
   * or string. If the expectation is not met, issue a warning at the provided
   * node's source code position.
   */
  void expectStringOrNumber(
      NodeTraversal t, Node n, JSType type, String msg) {
    if (!type.matchesNumberContext() && !type.matchesStringContext()) {
      mismatch(t, n, msg, type, NUMBER_STRING);
    }
  }

  /**
   * Expect the type to be anything but the void type. If the expectation is not
   * met, issue a warning at the provided node's source code position. Note that
   * a union type that includes the void type and at least one other type meets
   * the expectation.
   * @return Whether the expectation was met.
   */
  boolean expectNotVoid(
      NodeTraversal t, Node n, JSType type, String msg, JSType expectedType) {
    if (type.isVoidType()) {
      mismatch(t, n, msg, type, expectedType);
      return false;
    }
    return true;
  }

  /**
   * Expect that the type of a switch condition matches the type of its
   * case condition.
   */
  void expectSwitchMatchesCase(NodeTraversal t, Node n, JSType switchType,
      JSType caseType) {
    // ECMA-262, page 68, step 3 of evaluation of CaseBlock,
    // but allowing extra autoboxing.
    // TODO(user): remove extra conditions when type annotations
    // in the code base have adapted to the change in the compiler.
    if (!switchType.canTestForShallowEqualityWith(caseType) &&
        (caseType.autoboxesTo() == null ||
            !caseType.autoboxesTo().isSubtype(switchType))) {
      mismatch(t, n.getFirstChild(),
          "case expression doesn't match switch",
          caseType, switchType);
    }
  }

  /**
   * Expect that the first type can be addressed with GETELEM syntax,
   * and that the second type is the right type for an index into the
   * first type.
   *
   * @param t The node traversal.
   * @param n The node to issue warnings on.
   * @param objType The type of the left side of the GETELEM.
   * @param indexType The type inside the brackets of the GETELEM.
   */
  void expectIndexMatch(NodeTraversal t, Node n, JSType objType,
      JSType indexType) {
    if (objType.isUnknownType()) {
      // Unknown receiver: only require a string-or-number index.
      expectStringOrNumber(t, n, indexType, "property access");
    } else if (objType.toObjectType() != null &&
        objType.toObjectType().getIndexType() != null) {
      // Receiver declares an explicit index type; check against it.
      expectCanAssignTo(t, n, indexType, objType.toObjectType().getIndexType(),
          "restricted index type");
    } else if (objType.isArrayType()) {
      expectNumber(t, n, indexType, "array access");
    } else if (objType.matchesObjectContext()) {
      expectString(t, n, indexType, "property access");
    } else {
      mismatch(t, n, "only arrays or objects can be accessed", objType,
          typeRegistry.createUnionType(ARRAY_TYPE, OBJECT_TYPE));
    }
  }

  /**
   * Expect that the first type can be assigned to a symbol of the second
   * type.
   *
   * @param t The node traversal.
   * @param n The node to issue warnings on.
   * @param rightType The type on the RHS of the assign.
   * @param leftType The type of the symbol on the LHS of the assign.
   * @param owner The owner of the property being assigned to.
   * @param propName The name of the property being assigned to.
   * @return True if the types matched, false otherwise.
   */
  boolean expectCanAssignToPropertyOf(NodeTraversal t, Node n, JSType rightType,
      JSType leftType, Node owner, String propName) {
    // The NoType check is a hack to make typedefs work ok.
    if (!leftType.isNoType() && !rightType.canAssignTo(leftType)) {
      if (bothIntrinsics(rightType, leftType)) {
        // We have a superior warning for this mistake, which gives you
        // the line numbers of both types.
        registerMismatch(rightType, leftType);
      } else {
        mismatch(t, n, "assignment to property " + propName + " of " +
            getReadableJSTypeName(owner, true), rightType, leftType);
      }
      return false;
    }
    return true;
  }

  /**
   * Expect that the first type can be assigned to a symbol of the second
   * type.
   *
   * @param t The node traversal.
   * @param n The node to issue warnings on.
   * @param rightType The type on the RHS of the assign.
   * @param leftType The type of the symbol on the LHS of the assign.
   * @param msg An extra message for the mismatch warning, if necessary.
   * @return True if the types matched, false otherwise.
   */
  boolean expectCanAssignTo(NodeTraversal t, Node n, JSType rightType,
      JSType leftType, String msg) {
    if (!rightType.canAssignTo(leftType)) {
      if (bothIntrinsics(rightType, leftType)) {
        // We have a superior warning for this mistake, which gives you
        // the line numbers of both types.
        registerMismatch(rightType, leftType);
      } else {
        mismatch(t, n, msg, rightType, leftType);
      }
      return false;
    }
    return true;
  }

  /** True if both sides are constructors or enums (handled by a better warning elsewhere). */
  private boolean bothIntrinsics(JSType rightType, JSType leftType) {
    return (leftType.isConstructor() || leftType.isEnumType()) &&
        (rightType.isConstructor() || rightType.isEnumType());
  }

  /**
   * Expect that the type of an argument matches the type of the parameter
   * that it's fulfilling.
   *
   * @param t The node traversal.
   * @param n The node to issue warnings on.
   * @param argType The type of the argument.
   * @param paramType The type of the parameter.
   * @param callNode The call node, to help with the warning message.
   * @param ordinal The argument ordinal, to help with the warning message.
   */
  void expectArgumentMatchesParameter(NodeTraversal t, Node n, JSType argType,
      JSType paramType, Node callNode, int ordinal) {
    if (!argType.canAssignTo(paramType)) {
      mismatch(t, n,
          String.format("actual parameter %d of %s does not match " +
              "formal parameter", ordinal,
              getReadableJSTypeName(callNode.getFirstChild(), false)),
          argType, paramType);
    }
  }

  /**
   * Expect that the first type can override a property of the second
   * type.
   *
   * @param t The node traversal.
   * @param n The node to issue warnings on.
   * @param overridingType The overriding type.
   * @param hiddenType The type of the property being overridden.
   * @param propertyName The name of the property, for use in the
   *     warning message.
   * @param ownerType The type of the owner of the property, for use
   *     in the warning message.
   */
  void expectCanOverride(NodeTraversal t, Node n, JSType overridingType,
      JSType hiddenType, String propertyName, JSType ownerType) {
    if (!overridingType.canAssignTo(hiddenType)) {
      registerMismatch(overridingType, hiddenType);
      if (shouldReport) {
        compiler.report(
            t.makeError(n, HIDDEN_PROPERTY_MISMATCH, propertyName,
                ownerType.toString(), hiddenType.toString(),
                overridingType.toString()));
      }
    }
  }

  /**
   * Expect that the first type is the direct superclass of the second type.
   *
   * @param t The node traversal.
   * @param n The node where warnings should point to.
   * @param superObject The expected super instance type.
   * @param subObject The sub instance type.
   */
  void expectSuperType(NodeTraversal t, Node n, ObjectType superObject,
      ObjectType subObject) {
    FunctionType subCtor = subObject.getConstructor();
    // The declared super type is two prototype hops up from the instance.
    ObjectType declaredSuper =
        subObject.getImplicitPrototype().getImplicitPrototype();
    if (!declaredSuper.equals(superObject)) {
      if (declaredSuper.equals(getNativeType(OBJECT_TYPE))) {
        // Defaulting to Object means the @extends tag is simply missing.
        if (shouldReport) {
          compiler.report(
              t.makeError(n, MISSING_EXTENDS_TAG_WARNING, subObject.toString()));
        }
        registerMismatch(superObject, declaredSuper);
      } else {
        mismatch(t.getSourceName(), n,
            "mismatch in declaration of superclass type", superObject,
            declaredSuper);
      }

      // Correct the super type.
      if (!subCtor.hasCachedValues()) {
        subCtor.setPrototypeBasedOn(superObject);
      }
    }
  }

  /**
   * Expect that the first type can be cast to the second type. The first type
   * should be either a subtype or supertype of the second.
   *
   * @param t The node traversal.
   * @param n The node where warnings should point.
   * @param type The type being cast from.
   * @param castType The type being cast to.
   */
  void expectCanCast(NodeTraversal t, Node n, JSType type, JSType castType) {
    // Compare with null/undefined stripped so casts differing only in
    // nullability are not flagged.
    castType = castType.restrictByNotNullOrUndefined();
    type = type.restrictByNotNullOrUndefined();

    if (!type.canAssignTo(castType) && !castType.canAssignTo(type)) {
      if (shouldReport) {
        compiler.report(
            t.makeError(n, INVALID_CAST,
                castType.toString(), type.toString()));
      }
      registerMismatch(type, castType);
    }
  }

  /**
   * Expect that the given variable has not been declared with a type.
   *
   * @param sourceName The name of the source file we're in.
   * @param n The node where warnings should point to.
   * @param parent The parent of {@code n}.
   * @param var The variable that we're checking.
   * @param variableName The name of the variable.
   * @param newType The type being applied to the variable. Mostly just here
   *     for the benefit of the warning.
   */
  void expectUndeclaredVariable(String sourceName, Node n, Node parent, Var var,
      String variableName, JSType newType) {
    boolean allowDupe = false;
    if (n.getType() == Token.GETPROP) {
      JSDocInfo info = n.getJSDocInfo();
      if (info == null) {
        info = parent.getJSDocInfo();
      }
      allowDupe =
          info != null && info.getSuppressions().contains("duplicate");
    }

    JSType varType = var.getType();

    // Only report duplicate declarations that have types. Other duplicates
    // will be reported by the syntactic scope creator later in the
    // compilation process.
    if (varType != null &&
        varType != typeRegistry.getNativeType(UNKNOWN_TYPE) &&
        newType != null &&
        newType != typeRegistry.getNativeType(UNKNOWN_TYPE)) {
      // If there are two typed declarations of the same variable, that
      // is an error and the second declaration is ignored, except in the
      // case of native types. A null input type means that the declaration
      // was made in TypedScopeCreator#createInitialScope and is a
      // native type.
      if (var.input == null) {
        n.setJSType(varType);
        if (parent.getType() == Token.VAR) {
          if (n.getFirstChild() != null) {
            n.getFirstChild().setJSType(varType);
          }
        } else {
          Preconditions.checkState(parent.getType() == Token.FUNCTION);
          parent.setJSType(varType);
        }
      } else {
        // Always warn about duplicates if the overridden type does not
        // match the original type.
        //
        // If the types match, suppress the warning iff there was a @suppress
        // tag, or if the original declaration was a stub.
        if (!(allowDupe ||
            var.getParentNode().getType() == Token.EXPR_RESULT) ||
            !newType.equals(varType)) {
          if (shouldReport) {
            compiler.report(
                JSError.make(sourceName, n, DUP_VAR_DECLARATION,
                    variableName, newType.toString(), var.getInputName(),
                    String.valueOf(var.nameNode.getLineno()),
                    varType.toString()));
          }
        }
      }
    }
  }

  /**
   * Expect that all properties on interfaces that this type implements are
   * implemented.
   */
  void expectAllInterfacePropertiesImplemented(FunctionType type) {
    ObjectType instance = type.getInstanceType();
    for (ObjectType implemented : type.getAllImplementedInterfaces()) {
      if (implemented.getImplicitPrototype() != null) {
        for (String prop :
            implemented.getImplicitPrototype().getOwnPropertyNames()) {
          if (!instance.hasProperty(prop)) {
            Node source = type.getSource();
            Preconditions.checkNotNull(source);
            String sourceName = (String) source.getProp(Node.SOURCENAME_PROP);
            sourceName = sourceName == null ? "" : sourceName;
            if (shouldReport) {
              compiler.report(JSError.make(sourceName, source,
                  INTERFACE_METHOD_NOT_IMPLEMENTED,
                  prop, implemented.toString(), instance.toString()));
            }
            registerMismatch(instance, implemented);
          }
        }
      }
    }
  }

  /**
   * Report a type mismatch
   */
  private void mismatch(NodeTraversal t, Node n,
      String msg, JSType found, JSType required) {
    mismatch(t.getSourceName(), n, msg, found, required);
  }

  /** Overload that resolves a native required type before reporting. */
  private void mismatch(NodeTraversal t, Node n,
      String msg, JSType found, JSTypeNative required) {
    mismatch(t, n, msg, found, getNativeType(required));
  }

  /** Records the mismatch and, if enabled, reports a TYPE_MISMATCH_WARNING. */
  private void mismatch(String sourceName, Node n,
      String msg, JSType found, JSType required) {
    registerMismatch(found, required);
    if (shouldReport) {
      compiler.report(
          JSError.make(sourceName, n, TYPE_MISMATCH_WARNING,
              formatFoundRequired(msg, found, required)));
    }
  }

  /**
   * Records a mismatch for type-based optimizations. For function types,
   * also recursively records mismatches of corresponding parameter and
   * return types.
   */
  private void registerMismatch(JSType found, JSType required) {
    // Don't register a mismatch for differences in null or undefined or if the
    // code didn't downcast.
    found = found.restrictByNotNullOrUndefined();
    required = required.restrictByNotNullOrUndefined();
    if (found.canAssignTo(required) || required.canAssignTo(found)) {
      return;
    }

    mismatches.add(new TypeMismatch(found, required));
    if (found instanceof FunctionType &&
        required instanceof FunctionType) {
      FunctionType fnTypeA = ((FunctionType) found);
      FunctionType fnTypeB = ((FunctionType) required);
      Iterator<Node> paramItA = fnTypeA.getParameters().iterator();
      Iterator<Node> paramItB = fnTypeB.getParameters().iterator();
      while (paramItA.hasNext() && paramItB.hasNext()) {
        registerIfMismatch(paramItA.next().getJSType(),
            paramItB.next().getJSType());
      }
      registerIfMismatch(fnTypeA.getReturnType(), fnTypeB.getReturnType());
    }
  }

  /** Registers a mismatch only when both types are known and incompatible. */
  private void registerIfMismatch(JSType found, JSType required) {
    if (found != null && required != null &&
        !found.canAssignTo(required)) {
      registerMismatch(found, required);
    }
  }

  /**
   * Formats a found/required error message.
   */
  private String formatFoundRequired(String description, JSType found,
      JSType required) {
    return MessageFormat.format(FOUND_REQUIRED, description, found, required);
  }

  /**
   * Given a node, get a human-readable name for the type of that node so
   * that will be easy for the programmer to find the original declaration.
   *
   * For example, if SubFoo's property "bar" might have the human-readable
   * name "Foo.prototype.bar".
   *
   * @param n The node.
   * @param dereference If true, the type of the node will be dereferenced
   *     to an Object type, if possible.
   */
  String getReadableJSTypeName(Node n, boolean dereference) {
    // If we're analyzing a GETPROP, the property may be inherited by the
    // prototype chain. So climb the prototype chain and find out where
    // the property was originally defined.
    if (n.getType() == Token.GETPROP) {
      ObjectType objectType = getJSType(n.getFirstChild()).dereference();
      if (objectType != null) {
        String propName = n.getLastChild().getString();
        while (objectType != null && !objectType.hasOwnProperty(propName)) {
          objectType = objectType.getImplicitPrototype();
        }

        // Don't show complex function names or anonymous types.
        // Instead, try to get a human-readable type name.
        if (objectType != null &&
            (objectType.getConstructor() != null ||
                objectType.isFunctionPrototypeType())) {
          return objectType.toString() + "." + propName;
        }
      }
    }

    JSType type = getJSType(n);
    if (dereference) {
      ObjectType dereferenced = type.dereference();
      if (dereferenced != null) {
        type = dereferenced;
      }
    }

    String qualifiedName = n.getQualifiedName();
    if (type.isFunctionPrototypeType() ||
        (type.toObjectType() != null &&
            type.toObjectType().getConstructor() != null)) {
      return type.toString();
    } else if (qualifiedName != null) {
      return qualifiedName;
    } else if (type instanceof FunctionType) {
      // Don't show complex function names.
      return "function";
    } else {
      return type.toString();
    }
  }

  /**
   * This method gets the JSType from the Node argument and verifies that it is
   * present.
   */
  private JSType getJSType(Node n) {
    JSType jsType = n.getJSType();
    if (jsType == null) {
      // TODO(user): This branch indicates a compiler bug, not worthy of
      // halting the compilation but we should log this and analyze to track
      // down why it happens. This is not critical and will be resolved over
      // time as the type checker is extended.
      return getNativeType(UNKNOWN_TYPE);
    } else {
      return jsType;
    }
  }

  /** Convenience accessor for a native type from the registry. */
  private JSType getNativeType(JSTypeNative typeId) {
    return typeRegistry.getNativeType(typeId);
  }

  /**
   * Signals that the first type and the second type have been
   * used interchangeably.
   *
   * Type-based optimizations should take this into account
   * so that they don't wreck code with type warnings.
   */
  static class TypeMismatch {
    final JSType typeA;
    final JSType typeB;

    /**
     * It's the responsibility of the class that creates the
     * {@code TypeMismatch} to ensure that {@code a} and {@code b} are
     * non-matching types.
     */
    TypeMismatch(JSType a, JSType b) {
      this.typeA = a;
      this.typeB = b;
    }

    // Symmetric equality: (a, b) equals (b, a) as well.
    @Override public boolean equals(Object object) {
      if (object instanceof TypeMismatch) {
        TypeMismatch that = (TypeMismatch) object;
        return (that.typeA.equals(this.typeA) &&
            that.typeB.equals(this.typeB)) ||
            (that.typeB.equals(this.typeA) &&
                that.typeA.equals(this.typeB));
      }
      return false;
    }

    // NOTE(review): hashCode is order-sensitive while equals is symmetric —
    // equal mismatches with swapped operands may hash differently. Presumably
    // benign for the list-based usage above; verify before using in a HashSet.
    @Override public int hashCode() {
      return Objects.hashCode(typeA, typeB);
    }

    @Override public String toString() {
      return "(" + typeA + ", " + typeB + ")";
    }
  }
}
/* * Copyright 2010 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.cli; import java.util.*; import java.util.regex.Pattern; /** * <p>A command-line parser which supports a command/sub-command style command-line interface. Supports the following * syntax:</p> * <pre> * &lt;option>* (&lt;sub-command> &lt;sub-command-option>*)* * </pre> * * <ul> <li>Short options are a '-' followed by a single character. For example: {@code -a}.</li> * * <li>Long options are '--' followed by multiple characters. For example: {@code --long-option}.</li> * * <li>Options can take arguments. The argument follows the option. For example: {@code -a arg} or {@code --long * arg}.</li> * * <li>Arguments can be attached to the option using '='. For example: {@code -a=arg} or {@code --long=arg}.</li> * * <li>Arguments can be attached to short options. For example: {@code -aarg}.</li> * * <li>Short options can be combined. For example {@code -ab} is equivalent to {@code -a -b}.</li> * * <li>Anything else is treated as an extra argument. This includes a single {@code -} character.</li> * * <li>'--' indicates the end of the options. Anything following is not parsed and is treated as extra arguments.</li> * * <li>The parser is forgiving, and allows '--' to be used with short options and '-' to be used with long * options.</li> * * <li>The set of options must be known at parse time. Sub-commands and their options do not need to be known at parse * time. 
Use {@link ParsedCommandLine#getExtraArguments()} to obtain the non-option command-line arguments.</li> * * </ul> */ public class CommandLineParser { private static final Pattern OPTION_NAME_PATTERN = Pattern.compile("(\\?|\\p{Alnum}[\\p{Alnum}-_]*)"); private Map<String, CommandLineOption> optionsByString = new HashMap<String, CommandLineOption>(); private boolean allowMixedOptions; private boolean allowUnknownOptions; /** * Parses the given command-line. * * @param commandLine The command-line. * @return The parsed command line. * @throws org.gradle.cli.CommandLineArgumentException * On parse failure. */ public ParsedCommandLine parse(String... commandLine) throws CommandLineArgumentException { return parse(Arrays.asList(commandLine)); } /** * Parses the given command-line. * * @param commandLine The command-line. * @return The parsed command line. * @throws org.gradle.cli.CommandLineArgumentException * On parse failure. */ public ParsedCommandLine parse(Iterable<String> commandLine) throws CommandLineArgumentException { ParsedCommandLine parsedCommandLine = new ParsedCommandLine(new HashSet<CommandLineOption>(optionsByString.values())); ParserState parseState = new BeforeFirstSubCommand(parsedCommandLine); for (String arg : commandLine) { if (parseState.maybeStartOption(arg)) { if (arg.equals("--")) { parseState = new AfterOptions(parsedCommandLine); } else if (arg.matches("--[^=]+")) { OptionParserState parsedOption = parseState.onStartOption(arg, arg.substring(2)); parseState = parsedOption.onStartNextArg(); } else if (arg.matches("(?s)--[^=]+=.*")) { int endArg = arg.indexOf('='); OptionParserState parsedOption = parseState.onStartOption(arg, arg.substring(2, endArg)); parseState = parsedOption.onArgument(arg.substring(endArg + 1)); } else if (arg.matches("(?s)-[^=]=.*")) { OptionParserState parsedOption = parseState.onStartOption(arg, arg.substring(1, 2)); parseState = parsedOption.onArgument(arg.substring(3)); } else { assert arg.matches("(?s)-[^-].*"); 
String option = arg.substring(1); if (optionsByString.containsKey(option)) { OptionParserState parsedOption = parseState.onStartOption(arg, option); parseState = parsedOption.onStartNextArg(); } else { String option1 = arg.substring(1, 2); OptionParserState parsedOption; if (optionsByString.containsKey(option1)) { parsedOption = parseState.onStartOption("-" + option1, option1); if (parsedOption.getHasArgument()) { parseState = parsedOption.onArgument(arg.substring(2)); } else { parseState = parsedOption.onComplete(); for (int i = 2; i < arg.length(); i++) { String optionStr = arg.substring(i, i + 1); parsedOption = parseState.onStartOption("-" + optionStr, optionStr); parseState = parsedOption.onComplete(); } } } else { if (allowUnknownOptions) { // if we are allowing unknowns, just pass through the whole arg parsedOption = parseState.onStartOption(arg, option); parseState = parsedOption.onComplete(); } else { // We are going to throw a CommandLineArgumentException below, but want the message // to reflect that we didn't recognise the first char (i.e. the option specifier) parsedOption = parseState.onStartOption("-" + option1, option1); parseState = parsedOption.onComplete(); } } } } } else { parseState = parseState.onNonOption(arg); } } parseState.onCommandLineEnd(); return parsedCommandLine; } public CommandLineParser allowMixedSubcommandsAndOptions() { allowMixedOptions = true; return this; } public CommandLineParser allowUnknownOptions() { allowUnknownOptions = true; return this; } /** * Specifies that the given set of options are mutually-exclusive. Only one of the given options will be selected. * The parser ignores all but the last of these options. */ public CommandLineParser allowOneOf(String... 
// NOTE(review): this chunk begins mid-method — the signature of the method below is outside
// the visible range. From its body it groups a set of already-registered options together
// (mutually displacing one another when parsed) and returns this parser for chaining.
options) {
    Set<CommandLineOption> commandLineOptions = new HashSet<CommandLineOption>();
    for (String option : options) {
        commandLineOptions.add(optionsByString.get(option));
    }
    // Tell every option in the group about the full group, so that parsing one of them
    // can later remove the others (see KnownOptionParserState.onComplete()).
    for (CommandLineOption commandLineOption : commandLineOptions) {
        commandLineOption.groupWith(commandLineOptions);
    }
    return this;
}

/**
 * Prints a usage message to the given stream.
 *
 * @param out The output stream to write to.
 */
public void printUsage(Appendable out) {
    Formatter formatter = new Formatter(out);
    // Order options by their "smallest" option string, case-insensitively (see OptionComparator).
    Set<CommandLineOption> orderedOptions = new TreeSet<CommandLineOption>(new OptionComparator());
    orderedOptions.addAll(optionsByString.values());
    // key = rendered option strings ("-x, --long"), value = description; LinkedHashMap keeps the sorted order.
    Map<String, String> lines = new LinkedHashMap<String, String>();
    for (CommandLineOption option : orderedOptions) {
        // Single-character options come first and get a single-dash prefix.
        Set<String> orderedOptionStrings = new TreeSet<String>(new OptionStringComparator());
        orderedOptionStrings.addAll(option.getOptions());
        List<String> prefixedStrings = new ArrayList<String>();
        for (String optionString : orderedOptionStrings) {
            if (optionString.length() == 1) {
                prefixedStrings.add("-" + optionString);
            } else {
                prefixedStrings.add("--" + optionString);
            }
        }
        String key = join(prefixedStrings, ", ");
        String value = option.getDescription();
        if (value == null || value.length() == 0) {
            value = "";
        }
        lines.put(key, value);
    }
    // Pad the option column to the width of the longest entry so descriptions line up.
    int max = 0;
    for (String optionStr : lines.keySet()) {
        max = Math.max(max, optionStr.length());
    }
    for (Map.Entry<String, String> entry : lines.entrySet()) {
        if (entry.getValue().length() == 0) {
            formatter.format("%s%n", entry.getKey());
        } else {
            formatter.format("%-" + max + "s %s%n", entry.getKey(), entry.getValue());
        }
    }
    formatter.flush();
}

/**
 * Joins the {@code toString()} forms of the given objects with the given separator.
 * A {@code null} separator is treated as the empty string.
 */
private static String join(Collection<?> things, String separator) {
    StringBuffer buffer = new StringBuffer();
    boolean first = true;
    if (separator == null) {
        separator = "";
    }
    for (Object thing : things) {
        if (!first) {
            buffer.append(separator);
        }
        buffer.append(thing.toString());
        first = false;
    }
    return buffer.toString();
}

/**
 * Defines a new option. By default, the option takes no arguments and has no description.
 *
 * @param options The options values.
 * @return The option, which can be further configured.
 */
public CommandLineOption option(String... options) {
    // Validate all aliases before registering any of them, so a failure leaves the parser unchanged.
    for (String option : options) {
        if (optionsByString.containsKey(option)) {
            throw new IllegalArgumentException(String.format("Option '%s' is already defined.", option));
        }
        if (option.startsWith("-")) {
            throw new IllegalArgumentException(String.format("Cannot add option '%s' as an option cannot start with '-'.", option));
        }
        if (!OPTION_NAME_PATTERN.matcher(option).matches()) {
            throw new IllegalArgumentException(String.format("Cannot add option '%s' as an option can only contain alphanumeric characters or '-' or '_'.", option));
        }
    }
    CommandLineOption option = new CommandLineOption(Arrays.asList(options));
    // Register the same option object under each of its aliases.
    for (String optionStr : option.getOptions()) {
        optionsByString.put(optionStr, option);
    }
    return option;
}

/**
 * Pairs a bare option name with the raw command-line argument it was parsed from,
 * so error messages can echo the prefix style the user actually typed.
 */
private static class OptionString {
    private final String arg;    // raw argument, e.g. "--foo" or "-f"
    private final String option; // bare option name, e.g. "foo" or "f"

    private OptionString(String arg, String option) {
        this.arg = arg;
        this.option = option;
    }

    public String getDisplayName() {
        // Re-attach whichever prefix ("--" vs "-") the original argument used.
        return arg.startsWith("--") ? "--" + option : "-" + option;
    }

    @Override
    public String toString() {
        return getDisplayName();
    }
}

/**
 * A state of the argument-scanning state machine. Each incoming argument either starts
 * an option (handing control to an {@link OptionParserState}) or is a plain value.
 */
private static abstract class ParserState {
    public abstract boolean maybeStartOption(String arg);

    boolean isOption(String arg) {
        // (?s) lets the dot match newlines, so an argument containing '\n' is still classified correctly.
        return arg.matches("(?s)-.+");
    }

    public abstract OptionParserState onStartOption(String arg, String option);

    public abstract ParserState onNonOption(String arg);

    // Called when the command line is exhausted; default is a no-op.
    public void onCommandLineEnd() {
    }
}

/** Base state for positions on the command line where options are still recognised. */
private abstract class OptionAwareParserState extends ParserState {
    protected final ParsedCommandLine commandLine;

    protected OptionAwareParserState(ParsedCommandLine commandLine) {
        this.commandLine = commandLine;
    }

    @Override
    public boolean maybeStartOption(String arg) {
        return isOption(arg);
    }

    @Override
    public ParserState onNonOption(String arg) {
        commandLine.addExtraValue(arg);
        // After the first plain value, options are only still accepted when mixing is allowed.
        return allowMixedOptions ? new AfterFirstSubCommand(commandLine) : new AfterOptions(commandLine);
    }
}

/** State before any non-option value has been seen; unknown options may be rejected here. */
private class BeforeFirstSubCommand extends OptionAwareParserState {
    private BeforeFirstSubCommand(ParsedCommandLine commandLine) {
        super(commandLine);
    }

    @Override
    public OptionParserState onStartOption(String arg, String option) {
        OptionString optionString = new OptionString(arg, option);
        CommandLineOption commandLineOption = optionsByString.get(option);
        if (commandLineOption == null) {
            if (allowUnknownOptions) {
                return new UnknownOptionParserState(arg, commandLine, this);
            } else {
                throw new CommandLineArgumentException(String.format("Unknown command-line option '%s'.", optionString));
            }
        }
        return new KnownOptionParserState(optionString, commandLineOption, commandLine, this);
    }
}

/**
 * State after the first non-option value when mixed options are allowed; unlike
 * {@link BeforeFirstSubCommand}, unknown options are always passed through here.
 */
private class AfterFirstSubCommand extends OptionAwareParserState {
    private AfterFirstSubCommand(ParsedCommandLine commandLine) {
        super(commandLine);
    }

    @Override
    public OptionParserState onStartOption(String arg, String option) {
        CommandLineOption commandLineOption = optionsByString.get(option);
        if (commandLineOption == null) {
            return new UnknownOptionParserState(arg, commandLine, this);
        }
        return new KnownOptionParserState(new OptionString(arg, option), commandLineOption, commandLine, this);
    }
}

/** Terminal state: everything from here on is treated as an extra value, never as an option. */
private static class AfterOptions extends ParserState {
    private final ParsedCommandLine commandLine;

    private AfterOptions(ParsedCommandLine commandLine) {
        this.commandLine = commandLine;
    }

    @Override
    public boolean maybeStartOption(String arg) {
        return false;
    }

    @Override
    public OptionParserState onStartOption(String arg, String option) {
        return new UnknownOptionParserState(arg, commandLine, this);
    }

    @Override
    public ParserState onNonOption(String arg) {
        commandLine.addExtraValue(arg);
        return this;
    }
}

/**
 * State entered when an option that allows an argument has been seen but no argument yet;
 * the next token is either that argument or the start of another option.
 */
private static class MissingOptionArgState extends ParserState {
    private final OptionParserState option;

    private MissingOptionArgState(OptionParserState option) {
        this.option = option;
    }

    @Override
    public boolean maybeStartOption(String arg) {
        return isOption(arg);
    }

    @Override
    public OptionParserState onStartOption(String arg, String option) {
        // The pending option completes without an argument, then the new option is dispatched.
        return this.option.onComplete().onStartOption(arg, option);
    }

    @Override
    public ParserState onNonOption(String arg) {
        return option.onArgument(arg);
    }

    @Override
    public void onCommandLineEnd() {
        option.onComplete();
    }
}

/** Sub-state machine for parsing a single option and its (optional) argument. */
private static abstract class OptionParserState {
    public abstract ParserState onStartNextArg();

    public abstract ParserState onArgument(String argument);

    public abstract boolean getHasArgument();

    public abstract ParserState onComplete();
}

/** Parses an option that is registered with this parser, enforcing its argument rules. */
private class KnownOptionParserState extends OptionParserState {
    private final OptionString optionString;
    private final CommandLineOption option;
    private final ParsedCommandLine commandLine;
    private final ParserState state; // state to resume once this option is complete
    private final List<String> values = new ArrayList<String>();

    private KnownOptionParserState(OptionString optionString, CommandLineOption option, ParsedCommandLine commandLine, ParserState state) {
        this.optionString = optionString;
        this.option = option;
        this.commandLine = commandLine;
        this.state = state;
    }

    @Override
    public ParserState onArgument(String argument) {
        if (!getHasArgument()) {
            throw new CommandLineArgumentException(String.format("Command-line option '%s' does not take an argument.", optionString));
        }
        if (argument.length() == 0) {
            throw new CommandLineArgumentException(String.format("An empty argument was provided for command-line option '%s'.", optionString));
        }
        values.add(argument);
        return onComplete();
    }

    @Override
    public ParserState onStartNextArg() {
        // An argument-taking option with no attached value may consume the next token.
        if (option.getAllowsArguments() && values.isEmpty()) {
            return new MissingOptionArgState(this);
        }
        return onComplete();
    }

    @Override
    public boolean getHasArgument() {
        return option.getAllowsArguments();
    }

    @Override
    public ParserState onComplete() {
        if (getHasArgument() && values.isEmpty()) {
            throw new CommandLineArgumentException(String.format("No argument was provided for command-line option '%s'.", optionString));
        }
        ParsedCommandLineOption parsedOption = commandLine.addOption(optionString.option, option);
        // Count previously-recorded values too, so repeated occurrences of a single-valued option fail.
        if (values.size() + parsedOption.getValues().size() > 1 && !option.getAllowsMultipleArguments()) {
            throw new CommandLineArgumentException(String.format("Multiple arguments were provided for command-line option '%s'.", optionString));
        }
        for (String value : values) {
            parsedOption.addArgument(value);
        }
        // Options grouped with this one are mutually exclusive: drop any previously parsed members.
        for (CommandLineOption otherOption : option.getGroupWith()) {
            commandLine.removeOption(otherOption);
        }
        return state;
    }
}

/** Parses an unregistered option by recording the raw argument as an extra value. */
private static class UnknownOptionParserState extends OptionParserState {
    private final ParserState state;
    private final String arg;
    private final ParsedCommandLine commandLine;

    private UnknownOptionParserState(String arg, ParsedCommandLine commandLine, ParserState state) {
        this.arg = arg;
        this.commandLine = commandLine;
        this.state = state;
    }

    @Override
    public boolean getHasArgument() {
        return true;
    }

    @Override
    public ParserState onStartNextArg() {
        return onComplete();
    }

    @Override
    public ParserState onArgument(String argument) {
        return onComplete();
    }

    @Override
    public ParserState onComplete() {
        commandLine.addExtraValue(arg);
        return state;
    }
}

/** Orders options by their minimal option string (short aliases first), case-insensitively. */
private static final class OptionComparator implements Comparator<CommandLineOption> {
    public int compare(CommandLineOption option1, CommandLineOption option2) {
        String min1 = Collections.min(option1.getOptions(), new OptionStringComparator());
        String min2 = Collections.min(option2.getOptions(), new OptionStringComparator());
        return new CaseInsensitiveStringComparator().compare(min1, min2);
    }
}

/** Case-insensitive comparison with a case-sensitive tiebreak, for a stable total order. */
private static final class CaseInsensitiveStringComparator implements Comparator<String> {
    public int compare(String option1, String option2) {
        int diff = option1.compareToIgnoreCase(option2);
        if (diff != 0) {
            return diff;
        }
        return option1.compareTo(option2);
    }
}

/** Orders option strings so single-character (short) options sort before long ones. */
private static final class OptionStringComparator implements Comparator<String> {
    public int compare(String option1, String option2) {
        boolean short1 = option1.length() == 1;
        boolean short2 = option2.length() == 1;
        if (short1 && !short2) {
            return -1;
        }
        if (!short1 && short2) {
            return 1;
        }
        return new CaseInsensitiveStringComparator().compare(option1, option2);
    }
}
}
/* * Copyright 2005-2007 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krad.service.impl; import com.thoughtworks.xstream.XStream; import com.thoughtworks.xstream.converters.MarshallingContext; import com.thoughtworks.xstream.converters.UnmarshallingContext; import com.thoughtworks.xstream.converters.collections.CollectionConverter; import com.thoughtworks.xstream.converters.reflection.ObjectAccessException; import com.thoughtworks.xstream.converters.reflection.PureJavaReflectionProvider; import com.thoughtworks.xstream.converters.reflection.ReflectionConverter; import com.thoughtworks.xstream.converters.reflection.ReflectionProvider; import com.thoughtworks.xstream.io.HierarchicalStreamReader; import com.thoughtworks.xstream.io.HierarchicalStreamWriter; import com.thoughtworks.xstream.mapper.Mapper; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.ojb.broker.core.proxy.ListProxyDefaultImpl; import org.apache.ojb.broker.core.proxy.SetProxyDefaultImpl; import org.kuali.rice.krad.service.KRADServiceLocator; import org.kuali.rice.krad.service.PersistenceService; import org.kuali.rice.krad.service.XmlObjectSerializerService; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; /** * This class is the service implementation for the XmlObjectSerializer structure. 
This is the default implementation that gets
 * delivered with Kuali. It utilizes the XStream open source libraries and framework.
 *
 */
public class XmlObjectSerializerServiceImpl implements XmlObjectSerializerService {
    private static final Log LOG = LogFactory.getLog(XmlObjectSerializerServiceImpl.class);

    private PersistenceService persistenceService;

    // Single configured XStream instance, shared by toXml/fromXml.
    private XStream xstream;

    public XmlObjectSerializerServiceImpl() {
        xstream = new XStream(new ProxyAwareJavaReflectionProvider());
        // CGLIB proxies are resolved to their underlying objects before marshalling.
        xstream.registerConverter(new ProxyConverter(xstream.getMapper(), xstream.getReflectionProvider()));
        // register converters so that ListProxyDefaultImpl and SetProxyDefaultImpl are
        // serialized as ArrayLists and HashSets
        xstream.registerConverter(new ListProxyDefaultImplConverter(xstream.getMapper()));
        xstream.registerConverter(new SetProxyDefaultImplConverter(xstream.getMapper()));
    }

    /**
     * @see org.kuali.rice.krad.service.XmlObjectSerializerService#toXml(java.lang.Object)
     */
    public String toXml(Object object) {
        // NOTE(review): when debug logging is on, the object is marshalled twice (once for the log).
        if ( LOG.isDebugEnabled() ) {
            LOG.debug( "toXml(" + object + ") : \n" + xstream.toXML(object) );
        }
        return xstream.toXML(object);
    }

    /**
     * @see org.kuali.rice.krad.service.XmlObjectSerializerService#fromXml(java.lang.String)
     */
    public Object fromXml(String xml) {
        if ( LOG.isDebugEnabled() ) {
            LOG.debug( "fromXml() : \n" + xml );
        }
        // Strip CGLIB enhancer suffixes from class names so proxied class names resolve to the real classes.
        if ( xml != null ) {
            xml = xml.replaceAll( "--EnhancerByCGLIB--[0-9a-f]{0,8}", "" );
        }
        return xstream.fromXML(xml);
    }

    /**
     * This custom converter only handles proxies for BusinessObjects.  List-type proxies are handled by configuring XStream to treat
     * ListProxyDefaultImpl as ArrayLists (see constructor for this service).
     */
    public class ProxyConverter extends ReflectionConverter {
        public ProxyConverter(Mapper mapper, ReflectionProvider reflectionProvider) {
            super(mapper, reflectionProvider);
        }

        @Override
        // since the ReflectionConverter supertype defines canConvert without using a parameterized Class type, we must declare
        // the overridden version the same way
        @SuppressWarnings("unchecked")
        public boolean canConvert(Class clazz) {
            // Matches any CGLIB-generated proxy class by its name.
            return clazz.getName().contains("CGLIB");
        }

        @Override
        public void marshal(Object obj, HierarchicalStreamWriter writer, MarshallingContext context) {
            // Resolve the proxy to the real object, then let the standard reflection converter marshal it.
            super.marshal(getPersistenceService().resolveProxy(obj), writer, context);
        }

        // we shouldn't need an unmarshal method because all proxy metadata is taken out of the XML, so we'll reserialize as a base BO.
    }

    /**
     * Reflection provider that resolves OJB proxies field-by-field while visiting an
     * object's serializable fields, so marshalled output never contains live proxies.
     */
    public class ProxyAwareJavaReflectionProvider extends PureJavaReflectionProvider {
        public ProxyAwareJavaReflectionProvider() {
            super();
        }

        /**
         * @see com.thoughtworks.xstream.converters.reflection.PureJavaReflectionProvider#visitSerializableFields(java.lang.Object, com.thoughtworks.xstream.converters.reflection.ReflectionProvider.Visitor)
         */
        @Override
        public void visitSerializableFields(Object object, Visitor visitor) {
            for (Iterator iterator = fieldDictionary.fieldsFor(object.getClass()); iterator.hasNext();) {
                Field field = (Field) iterator.next();
                if (!fieldModifiersSupported(field)) {
                    continue;
                }
                validateFieldAccess(field);
                Object value = null;
                try {
                    value = field.get(object);
                    // Replace proxied field values with the resolved underlying object.
                    if (value != null && getPersistenceService().isProxied(value)) {
                        value = getPersistenceService().resolveProxy(value);
                    }
                } catch (IllegalArgumentException e) {
                    // NOTE(review): field.getClass() is always java.lang.reflect.Field here;
                    // field.getDeclaringClass() was probably intended in these messages — confirm.
                    throw new ObjectAccessException("Could not get field " + field.getClass() + "." + field.getName(), e);
                } catch (IllegalAccessException e) {
                    throw new ObjectAccessException("Could not get field " + field.getClass() + "." + field.getName(), e);
                }
                visitor.visit(field.getName(), field.getType(), field.getDeclaringClass(), value);
            }
        }
    }

    // Lazily resolves the PersistenceService from the service locator on first use.
    public PersistenceService getPersistenceService() {
        if ( persistenceService == null ) {
            persistenceService = KRADServiceLocator.getPersistenceService();
        }
        return persistenceService;
    }

    /**
     * Custom {@link com.thoughtworks.xstream.converters.Converter} that moves elements from a
     * {@link ListProxyDefaultImpl} into an {@link ArrayList} and marshals that instead.
     */
    private static class ListProxyDefaultImplConverter extends CollectionConverter {

        ListProxyDefaultImplConverter(Mapper mapper) {
            super(mapper);
        }

        @Override
        @SuppressWarnings("unchecked")
        public boolean canConvert(Class type) {
            return ListProxyDefaultImpl.class.equals(type);
        }

        /**
         * moves elements from a {@link ListProxyDefaultImpl} into an {@link ArrayList} and marshals that instead.
         */
        @Override
        @SuppressWarnings("unchecked")
        public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
            // move data to an ArrayList
            ArrayList altered = new ArrayList((List)source);
            // and marshal that
            super.marshal(altered, writer, context);
        }

        /**
         * NOTE: using this class to unmarshal a ListProxyDefaultImpl inside Rice is unexpected, since those should
         * have been converted to ArrayLists during marshalling. However, in the interest of attempting to
         * provide a full Converter implementation here, we'll attempt to unmarshal it to an ArrayList by throwing
         * away all the funny fields that a ListProxyDefaultImpl contains (Helpfully, their names all start with "_")
         */
        @Override
        @SuppressWarnings("unchecked")
        public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
            ArrayList result = null;
            while (reader.hasMoreChildren()) {
                try {
                    reader.moveDown();
                    if (reader.getNodeName().startsWith("_")) {
                        // do nothing — skip ListProxyDefaultImpl's internal bookkeeping fields
                    } else {
                        if (result == null) { result = new ArrayList(); } // lazy init
                        addCurrentElementToCollection(reader, context, result, result);
                    }
                } finally {
                    reader.moveUp();
                }
            }
            // NOTE(review): returns null (not an empty list) when no element children were found.
            return result;
        }
    }

    /**
     * Custom {@link com.thoughtworks.xstream.converters.Converter} that moves elements from a
     * {@link SetProxyDefaultImpl} into a {@link HashSet} and marshals that instead.
     */
    private static class SetProxyDefaultImplConverter extends CollectionConverter {

        SetProxyDefaultImplConverter(Mapper mapper) {
            super(mapper);
        }

        @Override
        @SuppressWarnings("unchecked")
        public boolean canConvert(Class type) {
            return SetProxyDefaultImpl.class.equals(type);
        }

        /**
         * moves elements from a {@link SetProxyDefaultImpl} into a {@link HashSet} and marshals that instead.
         */
        @Override
        @SuppressWarnings("unchecked")
        public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
            // move data to a HashSet
            HashSet altered = new HashSet((Set)source);
            // and marshal that
            super.marshal(altered, writer, context);
        }

        /**
         * NOTE: using this class to unmarshal a SetProxyDefaultImpl inside Rice is unexpected, since those should
         * have been converted to HashSets during marshalling. However, in the interest of attempting to
         * provide a full Converter implementation here, we'll attempt to unmarshal it to an HashSet by throwing
         * away all the funny fields that a SetProxyDefaultImpl contains (Helpfully, their names all start with "_")
         */
        @Override
        @SuppressWarnings("unchecked")
        public Object unmarshal(HierarchicalStreamReader reader, UnmarshallingContext context) {
            HashSet result = null;
            while (reader.hasMoreChildren()) {
                try {
                    reader.moveDown();
                    if (reader.getNodeName().startsWith("_")) {
                        // do nothing — skip SetProxyDefaultImpl's internal bookkeeping fields
                    } else {
                        if (result == null) { result = new HashSet(); } // lazy init
                        addCurrentElementToCollection(reader, context, result, result);
                    }
                } finally {
                    reader.moveUp();
                }
            }
            // NOTE(review): returns null (not an empty set) when no element children were found.
            return result;
        }
    }
}
/* * Javolution - Java(tm) Solution for Real-Time and Embedded Systems * Copyright (c) 2012, Javolution (http://javolution.org/) * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package com.intellij.util.text;

import com.intellij.openapi.util.text.CharSequenceWithStringHash;
import com.intellij.openapi.util.text.Strings;
import org.jetbrains.annotations.NotNull;

/**
 * A pruned and optimized version of javolution.text.Text
 *
 * <p> This class represents an immutable character sequence with
 * fast {@link #concat concatenation}, {@link #insert insertion} and
 * {@link #delete deletion} capabilities (O[Log(n)] instead of
 * O[n] for StringBuffer/StringBuilder).</p>
 *
 * <p><i> Implementation Note: To avoid expensive copy operations,
 * {@link ImmutableText} instances are broken down into smaller immutable
 * sequences, they form a minimal-depth binary tree.
 * The tree is maintained balanced automatically through <a
 * href="http://en.wikipedia.org/wiki/Tree_rotation">tree rotations</a>.
 * Insertion/deletions are performed in {@code O[Log(n)]}
 * instead of {@code O[n]} for
 * {@code StringBuffer/StringBuilder}.</i></p>
 *
 * @author <a href="mailto:jean-marie@dautelle.com">Jean-Marie Dautelle</a>
 * @author Wilfried Middleton
 * @version 5.3, January 10, 2007
 */
@SuppressWarnings("UnnecessaryThis")
final class ImmutableText extends ImmutableCharSequence implements CharArrayExternalizable, CharSequenceWithStringHash {
  /**
   * Holds the default size for primitive blocks of characters.
   */
  private static final int BLOCK_SIZE = 1 << 6;

  /**
   * Holds the mask used to ensure a block boundary cesures.
   */
  private static final int BLOCK_MASK = -BLOCK_SIZE;

  // visible for tests
  // Here (String | CompositeNode | ByteArrayCharSequence) is stored
  final CharSequence myNode;

  private ImmutableText(CharSequence node) {
    myNode = node;
  }

  /**
   * Returns the text representing the specified object.
   *
   * @param obj the object to represent as text.
   * @return the textual representation of the specified object.
   */
  static ImmutableText valueOf(@NotNull Object obj) {
    if (obj instanceof ImmutableText) return (ImmutableText)obj;
    if (obj instanceof CharSequence) return valueOf((CharSequence)obj);
    return valueOf(String.valueOf(obj));
  }

  private static ImmutableText valueOf(@NotNull CharSequence str) {
    if (str instanceof ByteArrayCharSequence) {
      // ByteArrayCharSequence is kept as-is (compact byte-backed storage).
      return new ImmutableText(str);
    }
    if (str.length() == 0) {
      return EMPTY;
    }
    // toString() snapshots the sequence so this instance is truly immutable.
    return new ImmutableText(str.toString());
  }

  /**
   * When first loaded, ImmutableText contents are stored as a single large array. This saves memory but isn't
   * modification-friendly as it disallows slightly changed texts to retain most of the internal structure of the
   * original document. Whoever retains old non-chunked version will use more memory than really needed.
   *
   * @return a copy of the myNode better prepared for small modifications to fully enable structure-sharing capabilities
   */
  private CharSequence ensureChunked() {
    if (length() > BLOCK_SIZE && !(myNode instanceof CompositeNode)) {
      return nodeOf(myNode, 0, length());
    }
    return myNode;
  }

  // Recursively splits [offset, offset+length) into a balanced tree of <= BLOCK_SIZE leaves.
  private static CharSequence nodeOf(@NotNull CharSequence node, int offset, int length) {
    if (length <= BLOCK_SIZE) {
      // Use toString to avoid referencing the original byte[] array in case if node is ByteArrayCharSequence
      return node.subSequence(offset, offset + length).toString();
    }
    // Splits on a block boundary.
    int half = ((length + BLOCK_SIZE) >> 1) & BLOCK_MASK;
    return new CompositeNode(nodeOf(node, offset, half), nodeOf(node, offset + half, length - half));
  }

  private static final ImmutableText EMPTY = new ImmutableText("");

  /**
   * Returns the length of this text.
   *
   * @return the number of characters (16-bits Unicode) composing this text.
   */
  @Override
  public int length() {
    return myNode.length();
  }

  /**
   * Concatenates the specified text to the end of this text.
   * This method is very fast (faster even than
   * {@code StringBuffer.append(String)}) and still returns
   * a text instance with an internal binary tree of minimal depth!
   *
   * @param that the text that is concatenated.
   * @return {@code this + that}
   */
  private ImmutableText concat(ImmutableText that) {
    return that.length() == 0 ? this :
           length() == 0 ? that :
           new ImmutableText(concatNodes(ensureChunked(), that.ensureChunked()));
  }

  @Override
  public ImmutableText concat(@NotNull CharSequence sequence) {
    return concat(valueOf(sequence));
  }

  /**
   * Returns a portion of this text.
   *
   * @param start the index of the first character inclusive.
   * @return the sub-text starting at the specified position.
   * @throws IndexOutOfBoundsException if {@code (start < 0) ||
   *         (start > this.length())}
   */
  private ImmutableText subtext(int start) {
    return subtext(start, length());
  }

  @Override
  public ImmutableCharSequence replace(int start, int end, @NotNull CharSequence seq) {
    if (start == end) return insert(start, seq);
    if (seq.length() == 0) return delete(start, end);
    if (start > end) {
      throw new IndexOutOfBoundsException();
    }
    // Replacement is modelled as prefix + seq + suffix; structure sharing keeps this cheap.
    return subtext(0, start).concat(valueOf(seq)).concat(subtext(end));
  }

  @Override
  public ImmutableText insert(int index, @NotNull CharSequence seq) {
    if (seq.length() == 0) return this;
    return subtext(0, index).concat(valueOf(seq)).concat(subtext(index));
  }

  /**
   * Returns the text without the characters between the specified indexes.
   *
   * @param start the beginning index, inclusive.
   * @param end the ending index, exclusive.
   * @return {@code subtext(0, start).concat(subtext(end))}
   * @throws IndexOutOfBoundsException if {@code (start < 0) || (end < 0) ||
   *         (start > end) || (end > this.length()}
   */
  @Override
  public ImmutableText delete(int start, int end) {
    if (start == end) return this;
    if (start > end) {
      throw new IndexOutOfBoundsException();
    }
    return subtext(0, start).concat(subtext(end));
  }

  @Override
  public CharSequence subSequence(final int start, final int end) {
    if (start == 0 && end == length()) return this;
    // A lightweight view; no copying of the underlying tree.
    return new CharSequenceSubSequence(this, start, end);
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof ImmutableText)) {
      return false;
    }
    return CharArrayUtil.regionMatches(this, 0, (ImmutableText)obj);
  }

  // Cached hash; 0 means "not computed yet" (benign race: recomputation is idempotent).
  private transient int hash;

  /**
   * Returns the hash code for this text.
   *
   * @return the hash code value.
   */
  @Override
  public int hashCode() {
    int h = hash;
    if (h == 0) {
      hash = h = Strings.stringHashCode(this, 0, length());
    }
    return h;
  }

  @Override
  public char charAt(int index) {
    // Cache the last accessed leaf: sequential charAt scans hit the same leaf repeatedly.
    InnerLeaf leaf = myLastLeaf;
    if (leaf == null || index < leaf.start || index >= leaf.end) {
      myLastLeaf = leaf = findLeaf(index);
    }
    return leaf.leafNode.charAt(index - leaf.start);
  }

  private InnerLeaf myLastLeaf;

  // Walks the tree from the root down to the leaf containing the given absolute index.
  private InnerLeaf findLeaf(int index) {
    if (index < 0) throw outOfRange(index);
    CharSequence node = myNode;
    int nodeLength = node.length();
    int offset = 0; // absolute offset of 'node' within the whole text
    while (true) {
      if (index >= nodeLength) {
        throw outOfRange(index);
      }
      if (!(node instanceof CompositeNode)) {
        return new InnerLeaf(node, offset, offset + nodeLength);
      }
      CompositeNode composite = (CompositeNode)node;
      int headLength = composite.head.length();
      if (index < headLength) {
        node = composite.head;
        nodeLength = headLength;
      }
      else {
        offset += headLength;
        index -= headLength;
        node = composite.tail;
        nodeLength -= headLength;
      }
    }
  }

  private static IndexOutOfBoundsException outOfRange(int index) {
    return new IndexOutOfBoundsException("Index out of range: " + index);
  }

  // Immutable record of a leaf node and its absolute [start, end) range in the text.
  private static final class InnerLeaf {
    final CharSequence leafNode;
    final int start;
    final int end;

    private InnerLeaf(@NotNull CharSequence leafNode, int start, int end) {
      this.leafNode = leafNode;
      this.start = start;
      this.end = end;
    }
  }

  /**
   * Returns a portion of this text.
   *
   * @param start the index of the first character inclusive.
   * @param end the index of the last character exclusive.
   * @return the sub-text starting at the specified start position and
   *         ending just before the specified end position.
   * @throws IndexOutOfBoundsException if {@code (start < 0) || (end < 0) ||
   *         (start > end) || (end > this.length())}
   */
  @Override
  public ImmutableText subtext(int start, int end) {
    if (start < 0 || start > end || end > length()) {
      throw new IndexOutOfBoundsException();
    }
    if (start == 0 && end == length()) {
      return this;
    }
    if (start == end) {
      return EMPTY;
    }
    return new ImmutableText(myNode.subSequence(start, end));
  }

  /**
   * Copies the characters from this text into the destination
   * character array.
   *
   * @param start the index of the first character to copy.
   * @param end the index after the last character to copy.
   * @param dest the destination array.
   * @param destPos the start offset in the destination array.
   * @throws IndexOutOfBoundsException if {@code (start < 0) || (end < 0) ||
   *         (start > end) || (end > this.length())}
   */
  @Override
  public void getChars(int start, int end, char @NotNull [] dest, int destPos) {
    getChars(myNode, start, end, dest, destPos);
  }

  // Dispatches on the three node representations (String | ByteArrayCharSequence | CompositeNode).
  private static void getChars(CharSequence cs, int start, int end, char @NotNull [] dest, int destPos) {
    if (cs instanceof String) {
      ((String)cs).getChars(start, end, dest, destPos);
    }
    else if (cs instanceof ByteArrayCharSequence) {
      ((ByteArrayCharSequence)cs).getChars(start, end, dest, destPos);
    }
    else {
      ((CompositeNode)cs).getChars(start, end, dest, destPos);
    }
  }

  /**
   * Returns the {@code String} representation of this text.
   *
   * @return the {@code java.lang.String} for this text.
   */
  @Override
  @NotNull
  public String toString() {
    return myNode.toString();
  }

  @NotNull
  private static CharSequence concatNodes(@NotNull CharSequence node1, @NotNull CharSequence node2) {
    // All Text instances are maintained balanced:
    //   (head < tail * 2) & (tail < head * 2)
    final int length = node1.length() + node2.length();
    if (length <= BLOCK_SIZE) { // Merges to primitive.
      // module is still targeted to Java 8, so plus-concatenation is compiled via StringBuilder
      // here concat() looks preferred
      return node1.toString().concat(node2.toString());
    }

    // Returns a composite.
    CharSequence head = node1;
    CharSequence tail = node2;
    if (shouldRebalance(head, tail)) {
      // head too small, returns (head + tail/2) + (tail/2)
      do {
        if (((CompositeNode)tail).head.length() > ((CompositeNode)tail).tail.length()) {
          // Rotates to concatenate with smaller part.
          tail = ((CompositeNode)tail).rightRotation();
        }
        head = concatNodes(head, ((CompositeNode)tail).head);
        tail = ((CompositeNode)tail).tail;
      }
      while (shouldRebalance(head, tail));
    }
    else if (shouldRebalance(tail, head)) {
      // tail too small, returns (head/2) + (head/2 concat tail)
      do {
        if (((CompositeNode)head).tail.length() > ((CompositeNode)head).head.length()) {
          // Rotates to concatenate with smaller part.
          head = ((CompositeNode)head).leftRotation();
        }
        tail = concatNodes(((CompositeNode)head).tail, tail);
        head = ((CompositeNode)head).head;
      }
      while (shouldRebalance(tail, head));
    }
    return new CompositeNode(head, tail);
  }

  // True when 'shorter' is less than half of 'longer' and 'longer' can be split further.
  private static boolean shouldRebalance(CharSequence shorter, CharSequence longer) {
    return (shorter.length() << 1) < longer.length() && longer instanceof CompositeNode;
  }

  // An internal binary-tree node: the concatenation of 'head' followed by 'tail'.
  static final class CompositeNode implements CharSequence {
    final int count; // total length, cached so length() is O(1)
    final CharSequence head;
    final CharSequence tail;

    CompositeNode(CharSequence head, CharSequence tail) {
      count = head.length() + tail.length();
      this.head = head;
      this.tail = tail;
    }

    @Override
    public int length() {
      return count;
    }

    @Override
    public char charAt(int index) {
      int headLength = head.length();
      return index < headLength ? head.charAt(index) : tail.charAt(index - headLength);
    }

    CompositeNode rightRotation() {
      // See: http://en.wikipedia.org/wiki/Tree_rotation
      CharSequence P = this.head;
      if (!(P instanceof CompositeNode)) {
        return this; // Head not a composite, cannot rotate.
      }
      CharSequence A = ((CompositeNode)P).head;
      CharSequence B = ((CompositeNode)P).tail;
      //noinspection UnnecessaryLocalVariable
      CharSequence C = this.tail;
      return new CompositeNode(A, new CompositeNode(B, C));
    }

    CompositeNode leftRotation() {
      // See: http://en.wikipedia.org/wiki/Tree_rotation
      CharSequence Q = this.tail;
      if (!(Q instanceof CompositeNode)) {
        return this; // Tail not a composite, cannot rotate.
      }
      CharSequence B = ((CompositeNode)Q).head;
      CharSequence C = ((CompositeNode)Q).tail;
      //noinspection UnnecessaryLocalVariable
      CharSequence A = this.head;
      return new CompositeNode(new CompositeNode(A, B), C);
    }

    void getChars(int start, int end, char @NotNull [] dest, int destPos) {
      final int cesure = head.length();
      if (end <= cesure) {
        ImmutableText.getChars(head, start, end, dest, destPos);
      }
      else if (start >= cesure) {
        ImmutableText.getChars(tail, start - cesure, end - cesure, dest, destPos);
      }
      else { // Overlaps head and tail.
        ImmutableText.getChars(head, start, cesure, dest, destPos);
        ImmutableText.getChars(tail, 0, end - cesure, dest, destPos + cesure - start);
      }
    }

    @Override
    public CharSequence subSequence(int start, int end) {
      final int cesure = head.length();
      if (end <= cesure) {
        return head.subSequence(start, end);
      }
      if (start >= cesure) {
        return tail.subSequence(start - cesure, end - cesure);
      }
      if (start == 0 && end == count) {
        return this;
      }
      // Overlaps head and tail.
      if (end - start < BLOCK_SIZE) {
        // Small overlap: cheaper to flatten into a single String than to build a composite.
        char[] data = new char[end - start];
        ImmutableText.getChars(head, start, cesure, data, 0);
        ImmutableText.getChars(tail, 0, end - cesure, data, cesure - start);
        return new String(data);
      }
      return concatNodes(head.subSequence(start, cesure), tail.subSequence(0, end - cesure));
    }

    @NotNull
    @Override
    public String toString() {
      int len = length();
      char[] data = new char[len];
      getChars(0, len, data, 0);
      return new String(data);
    }
  }
}
/**
 * XML-based importer for SPRaceTracker data objects.
 */
package com.github.jearls.SPRaceTracker.data.importexport;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;

import javax.persistence.Entity;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.stream.XMLStreamReader;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;

import com.github.jearls.SPRaceTracker.data.DataStore;
import com.github.jearls.SPRaceTracker.data.importexport.FieldInfo.ObjectIdentityInfo;
import com.github.jearls.SPRaceTracker.data.importexport.StructuredObjectData.StructuredObjectDataMap;
import com.github.jearls.SPRaceTracker.data.importexport.StructuredObjectData.DirectObjectData;
import com.github.jearls.SPRaceTracker.data.importexport.StructuredObjectData.ReferencedObjectData;

/**
 * @author jearls
 *
 */
public class XMLImporter extends DataObjectImporter {
    public static final long serialVersionUID = 1L;

    /** Importer-specific exception type; mirrors every ImporterExporterException constructor. */
    public class XMLImporterException extends ImporterExporterException {
        public static final long serialVersionUID = 1L;

        public XMLImporterException() {
            super();
        }

        public XMLImporterException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
            super(message, cause, enableSuppression, writableStackTrace);
        }

        public XMLImporterException(String message, Throwable cause) {
            super(message, cause);
        }

        public XMLImporterException(String message) {
            super(message);
        }

        public XMLImporterException(Throwable cause) {
            super(cause);
        }
    }

    /**
     * Adapts a DOM {@link NodeList} to the Iterable/Iterator protocols, silently skipping
     * "ignorable" nodes (comments and whitespace-only text nodes).
     * NOTE(review): iterator() returns this, so the iterable is single-use.
     */
    class NodeListIterator implements Iterable<Node>, Iterator<Node> {
        NodeList nodeList;
        int idx, len;

        public NodeListIterator(NodeList nodeList) {
            this.nodeList = nodeList;
            idx = 0;
            len = nodeList.getLength();
        }

        public Iterator<Node> iterator() {
            return this;
        }

        // A node is ignorable if it is a comment or a text node containing only whitespace.
        boolean nodeIsIgnorable(Node node) {
            switch (node.getNodeType()) {
                case Node.COMMENT_NODE:
                    return true;
                case Node.TEXT_NODE:
                    if (node.getNodeValue().trim().length() == 0) {
                        return true;
                    }
                    break;
            }
            return false;
        }

        public boolean hasNext() {
            // Advance past ignorable nodes so hasNext() agrees with what next() will return.
            while ((idx < len) && (nodeIsIgnorable(nodeList.item(idx)))) {
                idx += 1;
            }
            return (idx < len);
        }

        public Node next() {
            Node nextNode;
            do {
                if (idx == len) {
                    throw new NoSuchElementException();
                }
                nextNode = nodeList.item(idx);
                idx += 1;
            } while (nodeIsIgnorable(nextNode));
            return nextNode;
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    // Input source for the XML document and the logical document name used by the importer.
    Reader in;
    String docName;
    XMLStreamReader xmlIn;

    public XMLImporter(DataStore dataStore, Reader in, String docName) throws ImporterExporterException {
        super(dataStore);
        this.docName = docName;
        this.in = in;
    }

    public XMLImporter(DataStore dataStore, InputStream in, String docName) throws ImporterExporterException {
        super(dataStore);
        this.docName = docName;
        // NOTE(review): uses the platform default charset; specify one explicitly if the XML declares an encoding.
        this.in = new InputStreamReader(in);
    }

    public XMLImporter(DataStore dataStore, File in, String docName) throws ImporterExporterException {
        super(dataStore);
        this.docName = docName;
        try {
            this.in = new FileReader(in);
        } catch (FileNotFoundException e) {
            throw new ImporterExporterException(e);
        }
    }

    /**
     * Records a deferred relation: objects of {@code objectClass} reachable through
     * {@code objectField}, with the related objects' field data collected for later resolution.
     */
    static class SavedObjectRelations {
        Class<?> objectClass = null;
        FieldInfo objectField = null;
        List<StructuredObjectDataMap> relatedObjects = null;

        SavedObjectRelations(Class<?> objectClass, FieldInfo objectField) {
            this(objectClass, objectField, new LinkedList<StructuredObjectDataMap>());
        }

        public SavedObjectRelations(Class<?> objectClass, FieldInfo objectField, List<StructuredObjectDataMap> relatedObjects) {
            this.objectClass = objectClass;
            this.objectField = objectField;
this.relatedObjects = relatedObjects; } } List<SavedObjectRelations> savedObjectRelationsList = new LinkedList<SavedObjectRelations>(); static class ObjectDataAndRelations { Class<?> objectClass = null; StructuredObjectDataMap objectFields = null; Map<FieldInfo, List<StructuredObjectDataMap>> objectRelations = null; ObjectDataAndRelations(Class<?> objectClass) { this.objectClass = objectClass; objectFields = new StructuredObjectDataMap(); objectRelations = new HashMap<FieldInfo, List<StructuredObjectDataMap>>(); } } ObjectDataAndRelations importObjectNodeData(Node objectNode, Class<?> objectClass, ClassAnalysis analysis, FieldInfo referencedFrom) throws ImporterExporterException { ObjectDataAndRelations objectData = new ObjectDataAndRelations(objectClass); for (Node fieldNode : new NodeListIterator(objectNode.getChildNodes())) { importFieldNode(fieldNode, objectClass, analysis, objectData, referencedFrom); } return objectData; } void importFieldNode(Node fieldNode, Class<?> objectClass, ClassAnalysis analysis, ObjectDataAndRelations objectData, FieldInfo referencedFrom) throws ImporterExporterException { String fieldNodeName = fieldNode.getNodeName(); try { Field field = objectClass.getField(fieldNodeName); if (analysis.fieldMap.containsKey(field)) { FieldInfo fieldInfo = new FieldInfo(analysis.fieldMap.get(field)); fieldInfo.referencedFrom = referencedFrom; if (fieldInfo.relatedObject == null) { String text = fieldNode.getTextContent(); Object fieldVal = convertStringToObject(text, field.getType()); // System.err.println(fieldInfo + " -> " + // fieldVal.toString()); objectData.objectFields.put(fieldInfo, new DirectObjectData(fieldVal)); } else { // System.err.println(fieldInfo + " has subfields..."); ObjectDataAndRelations subfieldData = new ObjectDataAndRelations(field.getType()); for (Node subfieldNode : new NodeListIterator( fieldNode.getChildNodes())) { // special case: there will never be // related-object-lists in a subfield, so we pass in // null for the 
objectRelations parameter importFieldNode(subfieldNode, field.getType(), ClassAnalysis.analyzeClass(field.getType()), subfieldData, fieldInfo); } objectData.objectFields .put(fieldInfo, new ReferencedObjectData( subfieldData.objectFields)); } } else if (analysis.relationMap.containsKey(field)) { FieldInfo fieldInfo = analysis.relationMap.get(field); List<StructuredObjectDataMap> relatedObjects = new LinkedList<StructuredObjectDataMap>(); // System.err.println(fieldInfo + // " is a collection of related objects..."); for (Node relatedObjectNode : new NodeListIterator( fieldNode.getChildNodes())) { relatedObjects .add(importObjectNode(relatedObjectNode, fieldInfo.relatedObject.objectClass, fieldInfo).objectFields); } objectData.objectRelations.put(fieldInfo, relatedObjects); } else { throw new XMLImporterException("Unknown " + objectClass.getSimpleName() + " field " + fieldNodeName + " - analysis = " + analysis); } } catch (NoSuchFieldException e) { throw new XMLImporterException("Unknown " + objectClass.getSimpleName() + " field " + fieldNodeName); } } ObjectDataAndRelations importObjectNode(Node objectNode, Class<?> objectClass, FieldInfo referencedFrom) throws ImporterExporterException { ClassAnalysis analysis = ClassAnalysis.analyzeClass(objectClass); String objectNodeName = objectNode.getNodeName(); if (!objectNodeName.equals(objectClass.getSimpleName())) { throw new XMLImporterException("Expected " + objectClass.getSimpleName() + " object node; found " + objectNodeName); } ObjectDataAndRelations objectData = importObjectNodeData(objectNode, objectClass, analysis, referencedFrom); ; // System.err.println("Class " + objectClass.getSimpleName() + " data " // + objectFields.objectData + " relations " + // objectFields.objectRelations); return objectData; } List<ObjectDataAndRelations> importObjectListNode(Node objectListNode) throws ImporterExporterException { String objectListName = objectListNode.getNodeName(); if (!objectListName.endsWith("List")) { throw new 
XMLImporterException("Expected class list node; found " + objectListName); } String objectClassName = objectListName.substring(0, objectListName.length() - 4); try { Class<?> objectClass = Class.forName(dataStore.getClass().getPackage().getName() + "." + objectClassName); if (objectClass.getAnnotation(Entity.class) == null) { throw new XMLImporterException( "Invalid class for class list node " + objectListName); } List<ObjectDataAndRelations> objects = new LinkedList<ObjectDataAndRelations>(); for (Node objectNode : new NodeListIterator( objectListNode.getChildNodes())) { ObjectDataAndRelations objectData = importObjectNode(objectNode, objectClass, null); objects.add(objectData); } return objects; } catch (ClassNotFoundException e) { throw new XMLImporterException("Unknown class for class list node " + objectListName); } } void importDocument(Element docRoot) throws ImporterExporterException { if (!docRoot.getNodeName().equals(docName)) { throw new XMLImporterException("Expected document root " + docName + "; found " + docRoot.getNodeName()); } List<ObjectDataAndRelations> allObjects = new LinkedList<ObjectDataAndRelations>(); for (Node objectListNode : new NodeListIterator(docRoot.getChildNodes())) { allObjects.addAll(importObjectListNode(objectListNode)); } for (ObjectDataAndRelations objectData : allObjects) { ObjectIdentityInfo oidInfo = new ObjectIdentityInfo(objectData.objectClass); StructuredObjectDataMap objectIdentity = new StructuredObjectDataMap(); for (FieldInfo idInfo : oidInfo.identityFieldMap.values()) { objectIdentity.put(idInfo, objectData.objectFields.get(idInfo)); } importDataObjectValues(objectData.objectClass, objectIdentity, objectData.objectFields); for (Entry<FieldInfo, List<StructuredObjectDataMap>> objectRelation : objectData.objectRelations .entrySet()) { importDataObjectRelations(objectData.objectClass, objectIdentity, objectRelation.getKey(), objectRelation.getValue()); } } } public void importData() throws ImporterExporterException { 
try { Document xmlDoc = DocumentBuilderFactory.newInstance().newDocumentBuilder() .parse(new InputSource(in)); initializeImport(); importDocument(xmlDoc.getDocumentElement()); finalizeImport(); } catch (SAXException e) { throw new XMLImporterException(e); } catch (IOException e) { throw new XMLImporterException(e); } catch (ParserConfigurationException e) { throw new XMLImporterException(e); } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.records.impl.pb;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerRetryContext;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto;
import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto;
import org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto;
import org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto;
import org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto;

import com.google.protobuf.ByteString;
import com.google.protobuf.TextFormat;

/**
 * Protocol-buffer-backed implementation of {@link ContainerLaunchContext}.
 * <p>
 * Lifecycle: reads lazily materialize local Java-side caches (the private
 * map/list/buffer fields) from the proto; writes mutate only those caches.
 * {@link #getProto()} merges every non-null cache back into the builder and
 * freezes the result ({@code viaProto = true}) until the next write calls
 * {@code maybeInitBuilder()}. Statement order inside that merge path is
 * significant and must not be rearranged.
 */
@Private
@Unstable
public class ContainerLaunchContextPBImpl 
extends ContainerLaunchContext {
  ContainerLaunchContextProto proto = 
      ContainerLaunchContextProto.getDefaultInstance();
  ContainerLaunchContextProto.Builder builder = null;
  // True while 'proto' is authoritative; false once 'builder' has
  // uncommitted modifications.
  boolean viaProto = false;
  
  // Java-side caches; null means "not yet read from / written over proto".
  private Map<String, LocalResource> localResources = null;
  private ByteBuffer tokens = null;
  private ByteBuffer tokensConf = null;
  private Map<String, ByteBuffer> serviceData = null;
  private Map<String, String> environment = null;
  private List<String> commands = null;
  private Map<ApplicationAccessType, String> applicationACLS = null;
  private ContainerRetryContext containerRetryContext = null;
  
  public ContainerLaunchContextPBImpl() {
    builder = ContainerLaunchContextProto.newBuilder();
  }

  public ContainerLaunchContextPBImpl(ContainerLaunchContextProto proto) {
    this.proto = proto;
    viaProto = true;
  }
  
  /**
   * Merges all local modifications into the proto and returns it.
   * After this call the instance is again in "viaProto" state.
   */
  public ContainerLaunchContextProto getProto() {
      
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }
  
  @Override
  public boolean equals(Object other) {
    if (other == null)
      return false;
    // NOTE(review): this isAssignableFrom check is asymmetric with respect
    // to subclasses; it is the established pattern across the YARN PB
    // impls, so it is preserved as-is.
    if (other.getClass().isAssignableFrom(this.getClass())) {
      return this.getProto().equals(this.getClass().cast(other).getProto());
    }
    return false;
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  protected final ByteBuffer convertFromProtoFormat(ByteString byteString) {
    return ProtoUtils.convertFromProtoFormat(byteString);
  }

  protected final ByteString convertToProtoFormat(ByteBuffer byteBuffer) {
    return ProtoUtils.convertToProtoFormat(byteBuffer);
  }

  // Pushes every non-null local cache into the builder.
  private void mergeLocalToBuilder() {
    if (this.localResources != null) {
      addLocalResourcesToProto();
    }
    if (this.tokens != null) {
      builder.setTokens(convertToProtoFormat(this.tokens));
    }
    if (this.tokensConf != null) {
      builder.setTokensConf(convertToProtoFormat(this.tokensConf));
    }
    if (this.serviceData != null) {
      addServiceDataToProto();
    }
    if (this.environment != null) {
      addEnvToProto();
    }
    if (this.commands != null) {
      addCommandsToProto();
    }
    if (this.applicationACLS != null) {
      addApplicationACLs();
    }
    if (this.containerRetryContext != null) {
      builder.setContainerRetryContext(
          convertToProtoFormat(this.containerRetryContext));
    }
  }
  
  private void mergeLocalToProto() {
    if (viaProto) 
      maybeInitBuilder();
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  // Ensures 'builder' is writable, seeding it from 'proto' when needed.
  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = ContainerLaunchContextProto.newBuilder(proto);
    }
    viaProto = false;
  }
  
  @Override
  public List<String> getCommands() {
    initCommands();
    return this.commands;
  }

  private void initCommands() {
    if (this.commands != null) {
      return;
    }
    ContainerLaunchContextProtoOrBuilder p = viaProto ? proto : builder;
    List<String> list = p.getCommandList();
    this.commands = new ArrayList<String>();

    for (String c : list) {
      this.commands.add(c);
    }
  }
  
  @Override
  public void setCommands(final List<String> commands) {
    if (commands == null)
      return;
    initCommands();
    this.commands.clear();
    this.commands.addAll(commands);
  }
  
  private void addCommandsToProto() {
    maybeInitBuilder();
    builder.clearCommand();
    if (this.commands == null) 
      return;
    builder.addAllCommand(this.commands);
  }
  
  @Override
  public Map<String, LocalResource> getLocalResources() {
    initLocalResources();
    return this.localResources;
  }
  
  private void initLocalResources() {
    if (this.localResources != null) {
      return;
    }
    ContainerLaunchContextProtoOrBuilder p = viaProto ? proto : builder;
    List<StringLocalResourceMapProto> list = p.getLocalResourcesList();
    this.localResources = new HashMap<String, LocalResource>();

    for (StringLocalResourceMapProto c : list) {
      this.localResources.put(c.getKey(), convertFromProtoFormat(c.getValue()));
    }
  }
  
  @Override
  public void setLocalResources(
      final Map<String, LocalResource> localResources) {
    if (localResources == null)
      return;
    checkLocalResources(localResources);
    initLocalResources();
    this.localResources.clear();
    this.localResources.putAll(localResources);
  }
  
  /**
   * Rejects entries with a null resource or null resource URL up front,
   * rather than failing later during proto conversion.
   */
  private void checkLocalResources(Map<String, LocalResource> localResources) {
    for (Map.Entry<String, LocalResource> rsrcEntry : localResources
        .entrySet()) {
      if (rsrcEntry.getValue() == null
          || rsrcEntry.getValue().getResource() == null) {
        throw new NullPointerException(
            "Null resource URL for local resource " + rsrcEntry.getKey()
                + " : " + rsrcEntry.getValue());
      }
    }
  }

  private void addLocalResourcesToProto() {
    maybeInitBuilder();
    builder.clearLocalResources();
    if (localResources == null)
      return;
    // Lazily converts map entries to protos as the builder consumes them,
    // avoiding an intermediate list.
    Iterable<StringLocalResourceMapProto> iterable =
        new Iterable<StringLocalResourceMapProto>() {
      
      @Override
      public Iterator<StringLocalResourceMapProto> iterator() {
        return new Iterator<StringLocalResourceMapProto>() {
          
          Iterator<String> keyIter = localResources.keySet().iterator();
          
          @Override
          public void remove() {
            throw new UnsupportedOperationException();
          }
          
          @Override
          public StringLocalResourceMapProto next() {
            String key = keyIter.next();
            return StringLocalResourceMapProto.newBuilder().setKey(key).
                setValue(convertToProtoFormat(localResources.get(key))).build();
          }
          
          @Override
          public boolean hasNext() {
            return keyIter.hasNext();
          }
        };
      }
    };
    builder.addAllLocalResources(iterable);
  }
  
  @Override
  public ByteBuffer getTokens() {
    ContainerLaunchContextProtoOrBuilder p = viaProto ? proto : builder;
    if (this.tokens != null) {
      return this.tokens;
    }
    if (!p.hasTokens()) {
      return null;
    }
    this.tokens =  convertFromProtoFormat(p.getTokens());
    return this.tokens;
  }

  @Override
  public void setTokens(ByteBuffer tokens) {
    maybeInitBuilder();
    if (tokens == null) {
      builder.clearTokens();
    }
    this.tokens = tokens;
  }

  @Override
  public ByteBuffer getTokensConf() {
    ContainerLaunchContextProtoOrBuilder p = viaProto ? proto : builder;
    if (this.tokensConf != null) {
      return this.tokensConf;
    }
    if (!p.hasTokensConf()) {
      return null;
    }
    this.tokensConf = convertFromProtoFormat(p.getTokensConf());
    return this.tokensConf;
  }

  @Override
  public void setTokensConf(ByteBuffer tokensConf) {
    maybeInitBuilder();
    if (tokensConf == null) {
      builder.clearTokensConf();
    }
    this.tokensConf = tokensConf;
  }

  @Override
  public Map<String, ByteBuffer> getServiceData() {
    initServiceData();
    return this.serviceData;
  }
  
  private void initServiceData() {
    if (this.serviceData != null) {
      return;
    }
    ContainerLaunchContextProtoOrBuilder p = viaProto ? proto : builder;
    List<StringBytesMapProto> list = p.getServiceDataList();
    this.serviceData = new HashMap<String, ByteBuffer>();

    for (StringBytesMapProto c : list) {
      this.serviceData.put(c.getKey(), convertFromProtoFormat(c.getValue()));
    }
  }
  
  @Override
  public void setServiceData(final Map<String, ByteBuffer> serviceData) {
    if (serviceData == null)
      return;
    initServiceData();
    // Clear first so the setter replaces the map instead of merging into
    // it, consistent with setCommands/setEnvironment/setLocalResources/
    // setApplicationACLs.
    this.serviceData.clear();
    this.serviceData.putAll(serviceData);
  }
  
  private void addServiceDataToProto() {
    maybeInitBuilder();
    builder.clearServiceData();
    if (serviceData == null)
      return;
    Iterable<StringBytesMapProto> iterable =
        new Iterable<StringBytesMapProto>() {
      
      @Override
      public Iterator<StringBytesMapProto> iterator() {
        return new Iterator<StringBytesMapProto>() {
          
          Iterator<String> keyIter = serviceData.keySet().iterator();
          
          @Override
          public void remove() {
            throw new UnsupportedOperationException();
          }
          
          @Override
          public StringBytesMapProto next() {
            String key = keyIter.next();
            return StringBytesMapProto.newBuilder().setKey(key).setValue(
                convertToProtoFormat(serviceData.get(key))).build();
          }
          
          @Override
          public boolean hasNext() {
            return keyIter.hasNext();
          }
        };
      }
    };
    builder.addAllServiceData(iterable);
  }
  
  @Override
  public Map<String, String> getEnvironment() {
    initEnv();
    return this.environment;
  }
  
  private void initEnv() {
    if (this.environment != null) {
      return;
    }
    ContainerLaunchContextProtoOrBuilder p = viaProto ? proto : builder;
    List<StringStringMapProto> list = p.getEnvironmentList();
    this.environment = new HashMap<String, String>();

    for (StringStringMapProto c : list) {
      this.environment.put(c.getKey(), c.getValue());
    }
  }
  
  @Override
  public void setEnvironment(final Map<String, String> env) {
    if (env == null)
      return;
    initEnv();
    this.environment.clear();
    this.environment.putAll(env);
  }
  
  private void addEnvToProto() {
    maybeInitBuilder();
    builder.clearEnvironment();
    if (environment == null)
      return;
    Iterable<StringStringMapProto> iterable =
        new Iterable<StringStringMapProto>() {
      
      @Override
      public Iterator<StringStringMapProto> iterator() {
        return new Iterator<StringStringMapProto>() {
          
          Iterator<String> keyIter = environment.keySet().iterator();
          
          @Override
          public void remove() {
            throw new UnsupportedOperationException();
          }
          
          @Override
          public StringStringMapProto next() {
            String key = keyIter.next();
            String value = environment.get(key);

            // Proto string fields are null-hostile; map null to "".
            if (value == null) {
              value = "";
            }

            return StringStringMapProto.newBuilder().setKey(key)
                .setValue((value)).build();
          }
          
          @Override
          public boolean hasNext() {
            return keyIter.hasNext();
          }
        };
      }
    };
    builder.addAllEnvironment(iterable);
  }
  
  @Override
  public Map<ApplicationAccessType, String> getApplicationACLs() {
    initApplicationACLs();
    return this.applicationACLS;
  }

  private void initApplicationACLs() {
    if (this.applicationACLS != null) {
      return;
    }
    ContainerLaunchContextProtoOrBuilder p = viaProto ? proto : builder;
    List<ApplicationACLMapProto> list = p.getApplicationACLsList();
    this.applicationACLS = new HashMap<ApplicationAccessType, String>(list
        .size());

    for (ApplicationACLMapProto aclProto : list) {
      this.applicationACLS.put(ProtoUtils.convertFromProtoFormat(aclProto
          .getAccessType()), aclProto.getAcl());
    }
  }

  private void addApplicationACLs() {
    maybeInitBuilder();
    builder.clearApplicationACLs();
    if (applicationACLS == null) {
      return;
    }
    Iterable<? extends ApplicationACLMapProto> values =
        new Iterable<ApplicationACLMapProto>() {

      @Override
      public Iterator<ApplicationACLMapProto> iterator() {
        return new Iterator<ApplicationACLMapProto>() {
          Iterator<ApplicationAccessType> aclsIterator = applicationACLS
              .keySet().iterator();

          @Override
          public boolean hasNext() {
            return aclsIterator.hasNext();
          }

          @Override
          public ApplicationACLMapProto next() {
            ApplicationAccessType key = aclsIterator.next();
            return ApplicationACLMapProto.newBuilder().setAcl(
                applicationACLS.get(key)).setAccessType(
                ProtoUtils.convertToProtoFormat(key)).build();
          }

          @Override
          public void remove() {
            throw new UnsupportedOperationException();
          }
        };
      }
    };
    this.builder.addAllApplicationACLs(values);
  }

  @Override
  public void setApplicationACLs(
      final Map<ApplicationAccessType, String> appACLs) {
    if (appACLs == null)
      return;
    initApplicationACLs();
    this.applicationACLS.clear();
    this.applicationACLS.putAll(appACLs);
  }

  public ContainerRetryContext getContainerRetryContext() {
    ContainerLaunchContextProtoOrBuilder p = viaProto ? proto : builder;
    if (this.containerRetryContext != null) {
      return this.containerRetryContext;
    }
    if (!p.hasContainerRetryContext()) {
      return null;
    }
    this.containerRetryContext = convertFromProtoFormat(
        p.getContainerRetryContext());
    return this.containerRetryContext;
  }

  public void setContainerRetryContext(ContainerRetryContext retryContext) {
    maybeInitBuilder();
    if (retryContext == null) {
      builder.clearContainerRetryContext();
    }
    this.containerRetryContext = retryContext;
  }

  private LocalResourcePBImpl convertFromProtoFormat(LocalResourceProto p) {
    return new LocalResourcePBImpl(p);
  }

  private LocalResourceProto convertToProtoFormat(LocalResource t) {
    return ((LocalResourcePBImpl)t).getProto();
  }

  private ContainerRetryContextPBImpl convertFromProtoFormat(
      ContainerRetryContextProto p) {
    return new ContainerRetryContextPBImpl(p);
  }

  private ContainerRetryContextProto convertToProtoFormat(
      ContainerRetryContext t) {
    return ((ContainerRetryContextPBImpl)t).getProto();
  }
}
/**
 *============================================================================
 *  Copyright The Ohio State University Research Foundation, The University of Chicago - 
 *	Argonne National Laboratory, Emory University, SemanticBits LLC, and 
 *	Ekagra Software Technologies Ltd.
 *
 *  Distributed under the OSI-approved BSD 3-Clause License.
 *  See http://ncip.github.com/cagrid-general/LICENSE.txt for details.
 *============================================================================
 **/
package test.gov.nih.nci.security.instancelevel;

import gov.nih.nci.logging.api.user.UserInfoHelper;
import gov.nih.nci.security.AuthorizationManager;
import gov.nih.nci.security.SecurityServiceProvider;
import gov.nih.nci.security.authorization.attributeLevel.AttributeSecuritySessionInterceptor;
import gov.nih.nci.security.authorization.attributeLevel.UserClassAttributeMapCache;
import gov.nih.nci.security.authorization.instancelevel.InstanceLevelSecurityHelper;
import gov.nih.nci.security.exceptions.CSConfigurationException;
import gov.nih.nci.security.exceptions.CSException;
import gov.nih.nci.security.util.StringUtilities;

import java.lang.reflect.Method;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.List;

import junit.framework.TestCase;

import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.dao.DataRetrievalFailureException;

import test.gov.nih.nci.security.instancelevel.domainobjects.Card;

/**
 * JUnit 3 integration test for CSM instance-level security filters against
 * the "instance42" application context. Each test builds its own Hibernate
 * SessionFactory, installs the CSM filters, queries the Card table, and
 * asserts that exactly 52 cards (the full deck, excluding the joker) are
 * visible to the configured user/groups.
 */
public class InstanceLevelSecurityTest42 extends TestCase {

	DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");

	// properties for configuration
	String csmApplicationContext = "instance42";
	String hibernateCfgFileName = "instanceleveltest.hibernate.cfg.xml";
	boolean instanceLevelSecurityForGroups = true;
	boolean instanceLevelSecurityForUser = true;
	boolean attributeLevelSecurityForGroups = true;
	boolean attributeLevelSecurityForUser = true;
	String userName = "user9";//SecurityContextHolder.getContext().getAuthentication().getName();
	String[] groupNames = {"Group0","Group1","Group3"};

	AuthorizationManager authorizationManager = null;

	public static void main(String[] args) {
	}

	public InstanceLevelSecurityTest42(String arg0) {
		super(arg0);
	}

	/**
	 * Obtains the AuthorizationManager for the test application context.
	 * Configuration failures are rethrown as unchecked Spring DAO
	 * exceptions so the test errors out rather than running unsecured.
	 */
	protected void setUp() {
		try {
			authorizationManager = SecurityServiceProvider.getUserProvisioningManager(csmApplicationContext);
		} catch (CSConfigurationException e) {
			e.printStackTrace();
			throw new DataRetrievalFailureException(e.getMessage(), e);
		} catch (CSException e) {
			e.printStackTrace();
			throw new DataAccessResourceFailureException(e.getMessage(), e);
		}
	}

	protected void tearDown() throws Exception {
		super.tearDown();
	}

	/*
	private void testUnSecured(){
		SessionFactory sf=null;
		Configuration configuration = null;
		if(null == sf || sf.isClosed()){
			configuration = new Configuration().configure(hibernateCfgFileName);
			sf = configuration.buildSessionFactory();
		}
		Session session = null;
		session = sf.openSession();
		Criteria criteria = session.createCriteria(Card.class);
		List l = criteria.list();
		int size = l.size();
		System.out.println("============= UNSECURED SYSTEM ==================");
		System.out.println("Total no of Cards on which user has access= "+l.size());
		System.out.println("------------------------------------------------------");
		session.close();
		sf.close();
		assertEquals("Incorrect number of cards retrieved",size, 53); // Expecting all cards in the deck including the joker.
	}
	*/

	/**
	 * Verifies that with user-level instance filters installed, user9 sees
	 * exactly 52 cards.
	 */
	public void testInstanceLevelSecurityForUser() throws Exception {
		// A fresh factory is built per test; CSM filters must be added to
		// the Configuration before the factory is created.
		Configuration configuration = new Configuration().configure(hibernateCfgFileName);
		InstanceLevelSecurityHelper.addFilters(authorizationManager, configuration);
		SessionFactory sf = configuration.buildSessionFactory();

		Session session = sf.openSession();
		if (instanceLevelSecurityForUser) {
			// Activates the previously-added filters for this user's session.
			InstanceLevelSecurityHelper.initializeFilters(userName, session, authorizationManager);
		}

		System.out.println("Starting Instance Level Security Query for User : " + dateFormat.format(new java.util.Date()));
		Criteria criteria = session.createCriteria(Card.class);
		List results = criteria.list();
		int size = results.size();
		System.out.println("Done Instance Level Security Query for User : " + dateFormat.format(new java.util.Date()));

		System.out.println("============= INSTANCE LEVEL ONLY - FOR USER ONLY ==================");
		System.out.println("Total no of Cards on which user has access= " + results.size());
		System.out.println("------------------------------------------------------");

		session.close();
		sf.close();
		// JUnit 3 signature is assertEquals(message, expected, actual):
		// the expected constant comes before the observed size.
		assertEquals("Incorrect number of cards retrieved", 52, size); // Expecting all cards in the deck
	}

	/**
	 * Verifies that with group-level instance filters installed, the
	 * configured groups see exactly 52 cards.
	 */
	public void testInstanceLevelSecurityForGroups() throws Exception {
		Configuration configuration = new Configuration().configure(hibernateCfgFileName);
		InstanceLevelSecurityHelper.addFiltersForGroups(authorizationManager, configuration);
		SessionFactory sf = configuration.buildSessionFactory();

		Session session = sf.openSession();
		if (instanceLevelSecurityForGroups) {
			InstanceLevelSecurityHelper.initializeFiltersForGroups(groupNames, session, authorizationManager);
		}

		System.out.println("Starting Instance Level Security Query for Groups : " + dateFormat.format(new java.util.Date()));
		Criteria criteria = session.createCriteria(Card.class);
		List results = criteria.list();
		int size = results.size();
		System.out.println("Done Instance Level Security Query for Groups : " + dateFormat.format(new java.util.Date()));

		System.out.println("============= INSTANCE LEVEL - FOR GROUPS ONLY ==================");
		System.out.println("Total no of Cards on which groups have access : " + results.size());
		System.out.println("------------------------------------------------------");

		session.close();
		sf.close();
		// expected first, actual second (see note in the user test).
		assertEquals("Incorrect number of cards retrieved", 52, size); // Expecting all cards in the deck
	}

	/**
	 * Debug helper: reflectively prints every getter value of the object.
	 * Collections are summarized by size. Retained for ad-hoc diagnosis.
	 *
	 * @param obj   instance to dump
	 * @param klass class whose getters are invoked
	 */
	private void printObject(Object obj, Class klass) throws Exception {
		System.out.println("Printing " + klass.getName());
		Method[] methods = klass.getMethods();
		for (Method method : methods) {
			if (method.getName().startsWith("get") && !method.getName().equals("getClass")) {
				System.out.print("\t" + method.getName().substring(3) + ":");
				Object val = method.invoke(obj, (Object[]) null);
				if (val instanceof java.util.Set)
					System.out.println("size=" + ((Collection) val).size());
				else
					System.out.println(val);
			}
		}
	}
}
package com.gmail.boiledorange73.and4.ut.dl;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.SocketTimeoutException;
import java.util.List;
import java.util.ResourceBundle;

import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.HttpParams;

import android.app.Activity;
import android.app.IntentService;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager.NameNotFoundException;
import android.net.Uri;
import android.os.Bundle;

/**
 * IntentService that downloads one remote file to a local path, resuming
 * partial downloads via HTTP Range requests, and reports progress both
 * through the notification area and through {@link #BROADCAST_ACTION}
 * broadcast intents carrying the XKEY_* extras.
 */
public class DownloaderService extends IntentService {
    public static final int ST_NONE = -1;
    public static final int ST_DOWNLOADING = 1;
    public static final int ST_FINISHED = 2;
    public static final int ST_CANCEL = 3;
    // NOTE(review): ST_ERROR collides with ST_NONE (both -1), so broadcast
    // receivers cannot tell "no status" from "error". Left as-is because
    // these constants are public API; confirm before renumbering.
    public static final int ST_ERROR = -1;

    public static final String XKEY_REMOTE = "remote";
    public static final String XKEY_LOCAL = "local";
    public static final String XKEY_TITLE = "title";
    public static final String XKEY_CALLBACK_ACTIVITY_CLASS = "callback_activity_class";
    public static final String XKEY_HTTP_TIMEOUT_MS = "http_timeout_ms";
    public static final String XKEY_SOCKET_TIMEOUT_MS = "socket_timeout_ms";
    public static final String XKEY_STATUS = "status";
    public static final String XKEY_FILESIZE = "filesize";
    public static final String XKEY_PROGRESS = "progress";
    public static final String XKEY_MESSAGE = "message";
    public static final String BROADCAST_ACTION =
            "com.gmail.boiledorange73.and4.ut.dl.DownloaderService.CHANGE_STATUS";

    private static final String DEFAULT_USERAGENT_NAME = "Downloader";
    private static final String DEFAULT_USERAGENT_VERSION = "0.9";
    private static final int DEFAULT_ID_NOTIFICATION = 1;
    private static final int BUFFERSIZE = 1024 * 1024 * 4;
    private static final int DEFAULT_SOCKET_TIMEOUT_MS = 10000;
    private static final int DEFAULT_HTTP_TIMEOUT_MS = 10000;

    // Total size reported by the server, or -1 when unknown.
    private long mContentLength = -1;
    // Bytes written to the local file so far, or -1 before initialization.
    private long mProgress = -1;
    // BUGFIX: written from the main thread (onDestroy) and polled on the
    // worker thread; volatile guarantees the cancel flag becomes visible.
    private volatile boolean mWorking = false;
    private ResourceBundle mResourceBundle;

    // ----------------
    // Constructors.
    // ----------------
    /**
     * Constructor. Simply passes to the one of the super class.
     *
     * @param name
     *            Used to name the worker thread, important only for debugging.
     */
    public DownloaderService(String name) {
        super(name);
        // Java resource bundle holding the user-visible message strings.
        this.mResourceBundle = ResourceBundle
                .getBundle("com.gmail.boiledorange73.and4.ut.dl.messages");
    }

    /**
     * Default constructor. Thread name must be "DownloaderService".
     */
    public DownloaderService() {
        this("DownloaderService");
    }

    // ----------------
    // Public methods.
    // ----------------
    /**
     * Gets notification id which is unique in the application (not system).
     *
     * @return Notification id.
     */
    public int getNotificationId() {
        return DownloaderService.DEFAULT_ID_NOTIFICATION;
    }

    /**
     * Gets the user-agent text. User-agent is set to
     * "(Application Name)/(Application Version)". If you want to use another
     * user-agent text, override this.
     *
     * @return User-agent text.
     */
    public String getUserAgent() {
        ApplicationInfo appinfo = this.getApplicationInfo();
        if (appinfo != null && appinfo.name != null) {
            String userAgentName = appinfo.name;
            try {
                String userAgentVersion = this.getPackageManager()
                        .getPackageInfo(this.getPackageName(), 0).versionName;
                return userAgentName + "/" + userAgentVersion;
            } catch (NameNotFoundException e) {
                // Fall back to the bare name when the version is unavailable.
                e.printStackTrace();
                return userAgentName;
            }
        } else {
            return DownloaderService.DEFAULT_USERAGENT_NAME + "/"
                    + DownloaderService.DEFAULT_USERAGENT_VERSION;
        }
    }

    // ----------------
    // Overriding methods.
    // ----------------
    /**
     * Called when the service is destroyed. Clears the working flag so the
     * download loop notices the cancellation at its next checkpoint.
     */
    @Override
    public void onDestroy() {
        this.mWorking = false;
        super.onDestroy();
    }

    /**
     * Called on the worker thread for each start request; extracts the
     * download parameters from the intent and runs the download.
     *
     * @param intent
     *            The intent.
     */
    @Override
    protected void onHandleIntent(Intent intent) {
        String remote = intent.getStringExtra(DownloaderService.XKEY_REMOTE);
        String local = intent.getStringExtra(DownloaderService.XKEY_LOCAL);
        String title = intent.getStringExtra(DownloaderService.XKEY_TITLE);
        // BUGFIX: the original read XKEY_SOCKET_TIMEOUT_MS for BOTH values
        // (making XKEY_HTTP_TIMEOUT_MS dead) and swapped the two defaults.
        int http_timeout_ms = intent.getIntExtra(
                DownloaderService.XKEY_HTTP_TIMEOUT_MS,
                DownloaderService.DEFAULT_HTTP_TIMEOUT_MS);
        int socket_timeout_ms = intent.getIntExtra(
                DownloaderService.XKEY_SOCKET_TIMEOUT_MS,
                DownloaderService.DEFAULT_SOCKET_TIMEOUT_MS);
        Class<? extends Activity> callbackActivityClass = this
                .calculateCallbackActivityClass(intent);
        this.doDownload(this.getUserAgent(), title, remote, local,
                callbackActivityClass, http_timeout_ms, socket_timeout_ms);
    }

    // ----------------
    // Private methods, which handle notification.
    // ----------------
    /**
     * Shows notification and broadcasts the message.
     *
     * @param notification_id
     *            Notification ID, which is unique in the application (NOT in
     *            the system).
     * @param icon_id
     *            ID of icon for notification.
     * @param title
     *            Title of data file.
     * @param text
     *            The message.
     * @param callbackActivityClass
     *            Activity which is activated when notification cell is tapped.
     */
    private void showNotifycation(int notification_id, int icon_id,
            String title, String text,
            Class<? extends Activity> callbackActivityClass) {
        Notification notification = new Notification(icon_id, text,
                System.currentTimeMillis());
        PendingIntent contentIntent = null;
        if (callbackActivityClass != null) {
            // The PendingIntent to launch our activity if the user selects
            // this notification.
            contentIntent = PendingIntent.getActivity(this, 0, new Intent(this,
                    callbackActivityClass), 0);
        }
        notification.setLatestEventInfo(this, title, text, contentIntent);
        ((NotificationManager) this
                .getSystemService(Context.NOTIFICATION_SERVICE)).notify(
                notification_id, notification);
    }

    /**
     * Shows notification and broadcasts that download is canceled.
     *
     * @param title
     *            Title of data file.
     * @param callbackActivityClass
     *            Activity which is activated when notification cell is tapped.
     */
    private void showCancel(String title,
            Class<? extends Activity> callbackActivityClass) {
        String text = title
                + " "
                + this.mResourceBundle
                        .getString("DownloaderServiceBase.S_CANCELED");
        this.showNotifycation(this.getNotificationId(),
                android.R.drawable.stat_notify_error, this.mResourceBundle
                        .getString("DownloaderServiceBase.W_CANCEL"), text,
                callbackActivityClass);
        Intent intent = new Intent();
        intent.setAction(DownloaderService.BROADCAST_ACTION);
        intent.putExtra(DownloaderService.XKEY_STATUS,
                DownloaderService.ST_CANCEL);
        intent.putExtra(DownloaderService.XKEY_FILESIZE, this.mContentLength);
        intent.putExtra(DownloaderService.XKEY_PROGRESS, this.mProgress);
        this.sendBroadcast(intent);
    }

    /**
     * Shows notification and broadcasts that an error occurred.
     *
     * @param text
     *            Error message.
     * @param callbackActivityClass
     *            Activity which is activated when notification cell is tapped.
     */
    private void showError(String text,
            Class<? extends Activity> callbackActivityClass) {
        this.showNotifycation(
                this.getNotificationId(),
                android.R.drawable.stat_notify_error,
                this.mResourceBundle.getString("DownloaderServiceBase.W_ERROR"),
                text, callbackActivityClass);
        Intent intent = new Intent();
        intent.setAction(DownloaderService.BROADCAST_ACTION);
        intent.putExtra(DownloaderService.XKEY_STATUS,
                DownloaderService.ST_ERROR);
        intent.putExtra(DownloaderService.XKEY_MESSAGE, text);
        this.sendBroadcast(intent);
    }

    /**
     * Shows notification and broadcasts that a download error occurred. If
     * both of reason1 and reason2 are filled, reason is both of them separated
     * with white-space. If both are null, no reason is printed. This method
     * finally calls {@link #showError(String, Class)}.
     *
     * @param title
     *            Title of data file.
     * @param reason1
     *            A part of reason.
     * @param reason2
     *            A part of reason.
     * @param callbackActivityClass
     *            Activity which is activated when notification cell is tapped.
     */
    private void showDownloadError(String title, String reason1,
            String reason2, Class<? extends Activity> callbackActivityClass) {
        String mess = title
                + " "
                + this.mResourceBundle
                        .getString("DownloaderServiceBase.S_FAILED_DOWNLOAD");
        String reason = null;
        if (reason1 != null) {
            reason = reason1;
        }
        if (reason2 != null) {
            if (reason == null) {
                reason = reason2;
            } else {
                reason = reason + " " + reason2;
            }
        }
        if (reason != null) {
            mess = mess + " (" + reason + ")";
        }
        this.showError(mess, callbackActivityClass);
    }

    /**
     * Shows notification and broadcasts that download is working.
     *
     * @param title
     *            Title of data file.
     * @param callbackActivityClass
     *            Activity which is activated when notification cell is tapped.
     */
    private void showDownloading(String title,
            Class<? extends Activity> callbackActivityClass) {
        String text = title;
        if (this.mContentLength > 0) {
            int percent = (int) ((double) this.mProgress * 100.0 / (double) this.mContentLength);
            text = text + " " + String.valueOf(percent) + "%";
        }
        this.showNotifycation(this.getNotificationId(),
                android.R.drawable.stat_sys_download, this.mResourceBundle
                        .getString("DownloaderServiceBase.W_DOWNLOADING"),
                text, callbackActivityClass);
        Intent intent = new Intent();
        intent.setAction(DownloaderService.BROADCAST_ACTION);
        intent.putExtra(DownloaderService.XKEY_STATUS,
                DownloaderService.ST_DOWNLOADING);
        intent.putExtra(DownloaderService.XKEY_FILESIZE, this.mContentLength);
        intent.putExtra(DownloaderService.XKEY_PROGRESS, this.mProgress);
        this.sendBroadcast(intent);
    }

    /**
     * Shows notification and broadcasts that download is finished.
     *
     * @param fileName
     *            file name text, not full-path.
     * @param callbackActivityClass
     *            Activity which is activated when notification cell is tapped.
     */
    private void showFinished(String fileName,
            Class<? extends Activity> callbackActivityClass) {
        String text = fileName
                + " "
                + this.mResourceBundle
                        .getString("DownloaderServiceBase.S_DOWNLOAD_FINISHED");
        this.showNotifycation(
                this.getNotificationId(),
                android.R.drawable.stat_sys_download_done,
                this.mResourceBundle
                        .getString("DownloaderServiceBase.W_DOWNLOAD_FINISHED"),
                text, callbackActivityClass);
        Intent intent = new Intent();
        intent.setAction(DownloaderService.BROADCAST_ACTION);
        intent.putExtra(DownloaderService.XKEY_STATUS,
                DownloaderService.ST_FINISHED);
        // On completion the bytes written equal the file size.
        intent.putExtra(DownloaderService.XKEY_FILESIZE, this.mProgress);
        intent.putExtra(DownloaderService.XKEY_PROGRESS, this.mProgress);
        this.sendBroadcast(intent);
    }

    // ----------------
    // Main routine
    // ----------------
    /**
     * Downloads the file.
     *
     * @param userAgent
     *            The user-agent text which is passed to the server.
     * @param title
     *            Title of data file.
     * @param remoteUriText
     *            Remote URI text.
     * @param local
     *            Local file path.
     * @param callbackActivityClass
     *            Activity which is activated when notification cell is tapped.
     * @param connection_timeout_ms
     *            Connection timeout milliseconds. If this is negative,
     *            connection timeout is not set.
     * @param socket_timeout_ms
     *            Timeout milliseconds between data arrivals.
     */
    private void doDownload(String userAgent, String title,
            String remoteUriText, String local,
            Class<? extends Activity> callbackActivityClass,
            int connection_timeout_ms, int socket_timeout_ms) {
        // init
        this.mWorking = true;
        this.mContentLength = -1;
        this.mProgress = -1;
        // Derive a title from the last URI path segment when none was given.
        if (title == null) {
            Uri remoteUri = Uri.parse(remoteUriText);
            List<String> pathSegments = remoteUri.getPathSegments();
            if (pathSegments != null && pathSegments.size() > 0) {
                title = pathSegments.get(pathSegments.size() - 1);
            }
            if (title == null) {
                title = this.mResourceBundle
                        .getString("DownloaderServiceBase.S_NONAME");
            }
        }
        // creates directory
        if (this.makeDir(local, callbackActivityClass) == false) {
            return;
        }
        // Gets current file size, used as the resume offset.
        File localFile = new File(local);
        if (localFile.exists()) {
            this.mProgress = localFile.length();
        } else {
            this.mProgress = 0;
        }
        // notification
        this.showDownloading(title, callbackActivityClass);
        // checkpoint
        if (this.mWorking == false) {
            this.showCancel(title, callbackActivityClass);
            return;
        }
        // -------- HTTP
        // create client
        HttpClient client = new DefaultHttpClient();
        // client / set params
        HttpParams sentParams = client.getParams();
        if (sentParams != null && userAgent != null && userAgent.length() > 0) {
            sentParams.setParameter("http.useragent", userAgent);
        }
        // client / set timeout (ms)
        if (connection_timeout_ms > 0) {
            client.getParams().setParameter("http.connection.timeout",
                    Integer.valueOf(connection_timeout_ms));
        }
        if (socket_timeout_ms > 0) {
            client.getParams().setParameter("http.socket.timeout",
                    Integer.valueOf(socket_timeout_ms));
        }
        // HEAD request: learn the total content length for progress display.
        HttpUriRequest headReq = new HttpHead(remoteUriText);
        HttpResponse headRes;
        try {
            headRes = client.execute(headReq);
            if (headRes != null
                    && headRes.getStatusLine().getStatusCode() == 200) {
                Header[] contentLengthHeaders = headRes
                        .getHeaders("Content-Length");
                if (contentLengthHeaders != null) {
                    for (Header h : contentLengthHeaders) {
                        if (h != null) {
                            String hvs = h.getValue();
                            if (hvs != null) {
                                long hv = Long.parseLong(hvs);
                                if (this.mContentLength < 0
                                        || hv > this.mContentLength) {
                                    this.mContentLength = hv;
                                }
                            }
                        }
                    }
                }
            }
        } catch (ClientProtocolException e) {
            // HEAD failure is non-fatal: proceed without a known length.
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } catch (NumberFormatException e) {
            e.printStackTrace();
        }
        // Already fully downloaded?
        if (this.mContentLength > 0 && this.mProgress >= this.mContentLength) {
            this.showFinished(title, callbackActivityClass);
            return;
        }
        // checkpoint
        if (this.mWorking == false) {
            this.showCancel(title, callbackActivityClass);
            return;
        }
        // gets content.
        // notification
        this.showDownloading(title, callbackActivityClass);
        // GET request; resume with a Range header when a partial file exists.
        HttpUriRequest contentReq = new HttpGet(remoteUriText);
        if (this.mProgress > 0) {
            contentReq.addHeader("Range",
                    String.format("bytes=%d-", this.mProgress));
        }
        HttpResponse contentRes = null;
        try {
            contentRes = client.execute(contentReq);
        } catch (ClientProtocolException e) {
            this.showError(e.getMessage(), callbackActivityClass);
            e.printStackTrace();
            return;
        } catch (IOException e) {
            this.showError(e.getMessage(), callbackActivityClass);
            e.printStackTrace();
            return;
        }
        if (contentRes == null) {
            this.showDownloadError(title, null, null, callbackActivityClass);
            // BUGFIX: the original fell through here and dereferenced the
            // null response on the next line; abort instead.
            return;
        }
        // check status. 200 - success, 206 - partial
        int statusCode = contentRes.getStatusLine().getStatusCode();
        if (statusCode != 200 && statusCode != 206) {
            this.showDownloadError(title,
                    statusCode + " "
                            + contentRes.getStatusLine().getReasonPhrase(),
                    null, callbackActivityClass);
            return;
        }
        // gets input stream.
        InputStream is = null;
        try {
            is = contentRes.getEntity().getContent();
        } catch (IllegalStateException e) {
            this.showDownloadError(title, e.getMessage(), null,
                    callbackActivityClass);
            e.printStackTrace();
            return;
        } catch (IOException e) {
            this.showDownloadError(title, e.getMessage(), null,
                    callbackActivityClass);
            e.printStackTrace();
            return;
        }
        // sets up buffer
        byte[] buff = new byte[DownloaderService.BUFFERSIZE];
        // gets output stream.
        FileOutputStream os = null;
        try {
            // Append when resuming (206), overwrite on a full download (200).
            os = new FileOutputStream(localFile, statusCode == 206);
            int read;
            // checkpoint
            if (this.mWorking == false) {
                this.showCancel(title, callbackActivityClass);
                return;
            }
            // reading loop
            while ((read = is.read(buff, 0, DownloaderService.BUFFERSIZE)) >= 0) {
                // checkpoint
                if (this.mWorking == false) {
                    this.showCancel(title, callbackActivityClass);
                    return;
                }
                if (read > 0) {
                    os.write(buff, 0, read);
                    this.mProgress += read;
                    // notification
                    this.showDownloading(title, callbackActivityClass);
                }
            }
            this.showFinished(title, callbackActivityClass);
        } catch (FileNotFoundException e) {
            this.showDownloadError(title, e.getMessage(), null,
                    callbackActivityClass);
            e.printStackTrace();
            return;
        } catch (SocketTimeoutException e) {
            // NOTE(review): this key (and the ones in makeDir) use the
            // "DownloaderServieBase." prefix while all others use
            // "DownloaderServiceBase." -- verify against the properties file
            // before unifying; the string must match the bundle.
            this.showDownloadError(title, this.mResourceBundle
                    .getString("DownloaderServieBase.S_CONNECTION_DOWN"), e
                    .getMessage(), callbackActivityClass);
            e.printStackTrace();
            return;
        } catch (IOException e) {
            this.showDownloadError(title, e.getMessage(), null,
                    callbackActivityClass);
            e.printStackTrace();
            return;
        } finally {
            // closes output stream and input stream.
            if (os != null) {
                try {
                    os.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if (is != null) {
                try {
                    is.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    // ----------------
    /**
     * Calculates callback activity class, checking that the received extra
     * really is a {@code Class} object for an {@link Activity} subclass.
     *
     * @param intent
     *            Intent.
     * @return Class of Activity if received instance is suitable. Otherwise,
     *         returns null.
     */
    private Class<? extends Activity> calculateCallbackActivityClass(
            Intent intent) {
        if (intent == null) {
            return null;
        }
        Bundle extras = intent.getExtras();
        if (extras == null) {
            return null;
        }
        if (!extras.containsKey(DownloaderService.XKEY_CALLBACK_ACTIVITY_CLASS)) {
            return null;
        }
        Object obj = intent
                .getSerializableExtra(DownloaderService.XKEY_CALLBACK_ACTIVITY_CLASS);
        if (obj == null) {
            return null;
        }
        // BUGFIX: asSubclass() never returns null -- it throws
        // ClassCastException for a non-Activity class, and the original also
        // cast before checking that the extra was a Class at all. Check
        // safely and return null for unsuitable extras as documented.
        if (obj instanceof Class<?>) {
            Class<?> cls = (Class<?>) obj;
            if (Activity.class.isAssignableFrom(cls)) {
                return cls.asSubclass(Activity.class);
            }
        }
        return null;
    }

    /**
     * Creates every missing directory component of {@code filePath} (the last
     * segment is treated as the file name and is not created).
     *
     * @param filePath
     *            Local file path whose parent directories are ensured.
     * @param callbackActivityClass
     *            Activity which is activated when notification cell is tapped.
     * @return true on success (or nothing to do), false when a component
     *         exists as a non-directory or mkdir fails (an error notification
     *         has then already been shown).
     */
    private boolean makeDir(String filePath,
            Class<? extends Activity> callbackActivityClass) {
        if (filePath == null || !(filePath.length() > 0)) {
            return true;
        }
        String curPath = "";
        int ix;
        while (filePath.length() > 0
                && (ix = filePath.indexOf(File.separator)) >= 0) {
            curPath = curPath + filePath.substring(0, ix);
            filePath = filePath.substring(ix + File.separator.length());
            if (curPath.length() > 0) {
                File curFile = new File(curPath);
                if (!curFile.isDirectory()) {
                    // not directory / no such file or directory
                    if (curFile.exists()) {
                        // exists but is not a directory
                        String fmt = this.mResourceBundle
                                .getString("DownloaderServieBase.F_1_IS_NOT_DIRECTORY");
                        String mess = String.format(fmt, curPath);
                        this.showError(mess, callbackActivityClass);
                        return false;
                    } else {
                        // no such file or directory
                        if (curFile.mkdir() == false) {
                            // fails to create
                            String fmt = this.mResourceBundle
                                    .getString("DownloaderServieBase.F_FAIL_MKDIR_1");
                            String mess = String.format(fmt, curPath);
                            this.showError(mess, callbackActivityClass);
                            return false;
                        }
                    }
                }
            }
            curPath = curPath + File.separator;
        }
        return true;
    }
}
/*
 * Copyright (C) 2012 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.googlecode.eyesfree.brailleback;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceActivity;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import android.util.Log;

import com.googlecode.eyesfree.braille.display.Display;
import com.googlecode.eyesfree.braille.display.DisplayClient;
import com.googlecode.eyesfree.braille.translate.TableInfo;
import com.googlecode.eyesfree.brailleback.utils.PreferenceUtils;
import com.googlecode.eyesfree.utils.LogUtils;

import java.text.CollationKey;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Activity used to set BrailleBack's service preferences.
 */
public class BrailleBackPreferencesActivity extends PreferenceActivity
        implements Display.OnConnectionStateChangeListener,
        Display.OnConnectionChangeProgressListener,
        TranslatorManager.OnTablesChangedListener,
        Preference.OnPreferenceChangeListener,
        Preference.OnPreferenceClickListener {

    /** Message from BrailleBackService, indicating that grade pref changed, so update display. */
    public static final String INTENT_REFRESH_DISPLAY =
            "com.googlecode.eyesfree.brailleback.REFRESH_DISPLAY";

    private final TableInfoComparator mTableInfoComparator = new TableInfoComparator();

    private DisplayClient mDisplay;
    private TranslatorManager mTranslatorManager;
    private Preference mStatusPreference;
    private ListPreference mBrailleTypePreference;
    private ListPreference mSixDotTablePreference;
    private ListPreference mEightDotTablePreference;
    private Preference mOverlayPreference;
    private Preference mOverlayTutorialPreference;
    private Preference mLicensesPreference;
    private ListPreference mLogLevelPreference;
    private int mConnectionState = Display.STATE_NOT_CONNECTED;
    private String mConnectionProgress = null;
    private List<TableInfo> mTables;

    @SuppressWarnings("deprecation")
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        addPreferencesFromResource(R.xml.preferences);
        mStatusPreference = findPreferenceByResId(R.string.pref_connection_status_key);
        mStatusPreference.setOnPreferenceClickListener(this);
        assignKeyBindingsIntent();
        mBrailleTypePreference = (ListPreference) findPreferenceByResId(
                R.string.pref_braille_type_key);
        mBrailleTypePreference.setOnPreferenceChangeListener(this);
        mSixDotTablePreference = (ListPreference) findPreferenceByResId(
                R.string.pref_six_dot_braille_table_key);
        mSixDotTablePreference.setOnPreferenceChangeListener(this);
        mEightDotTablePreference = (ListPreference) findPreferenceByResId(
                R.string.pref_eight_dot_braille_table_key);
        mEightDotTablePreference.setOnPreferenceChangeListener(this);
        mOverlayPreference = findPreferenceByResId(
                R.string.pref_braille_overlay_key);
        mOverlayPreference.setOnPreferenceChangeListener(this);
        mOverlayTutorialPreference = findPreferenceByResId(
                R.string.pref_braille_overlay_tutorial_key);
        mOverlayTutorialPreference.setOnPreferenceClickListener(this);
        mLicensesPreference = findPreferenceByResId(R.string.pref_os_license_key);
        mLicensesPreference.setOnPreferenceClickListener(this);
        mLogLevelPreference = (ListPreference) findPreferenceByResId(
                R.string.pref_log_level_key);
        mLogLevelPreference.setOnPreferenceChangeListener(this);
        if (BuildConfig.DEBUG) {
            // In debug builds the log level is fixed; show it but disallow
            // changes.
            int logLevel = PreferenceUtils.getLogLevel(this);
            updateListPreferenceSummary(mLogLevelPreference,
                    Integer.toString(logLevel));
            mLogLevelPreference.setEnabled(false);
        }
    }

    @Override
    public void onResume() {
        super.onResume();
        mDisplay = new DisplayClient(this);
        mDisplay.setOnConnectionStateChangeListener(this);
        mDisplay.setOnConnectionChangeProgressListener(this);
        mTranslatorManager = new TranslatorManager(this);
        mTranslatorManager.addOnTablesChangedListener(this);
        onConnectionStateChanged(Display.STATE_NOT_CONNECTED);
        refreshPreferences();
        registerReceiver(mRefreshReceiver,
                new IntentFilter(INTENT_REFRESH_DISPLAY));
    }

    // Re-reads the braille-type preference whenever the service broadcasts
    // that it changed it outside of this activity.
    private BroadcastReceiver mRefreshReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            if (TextUtils.equals(intent.getAction(), INTENT_REFRESH_DISPLAY)) {
                refreshPreferences();
            }
        }
    };

    /**
     * Refresh displayed preference value for braille type, because it may be
     * modified outside the preferences activity.
     */
    private void refreshPreferences() {
        SharedPreferences sharedPrefs =
                PreferenceManager.getDefaultSharedPreferences(this);
        final String prefKey = getString(R.string.pref_braille_type_key);
        final String prefDefault = getString(R.string.pref_braille_type_six_dot_value);
        final String prefValue = sharedPrefs.getString(prefKey, prefDefault);
        updateListPreferenceSummary(mBrailleTypePreference, prefValue);
    }

    @Override
    public void onPause() {
        super.onPause();
        unregisterReceiver(mRefreshReceiver);
        mTranslatorManager.removeOnTablesChangedListener(this);
        mTranslatorManager.shutdown();
        mDisplay.shutdown();
    }

    @Override
    public void onConnectionStateChanged(int state) {
        mConnectionState = state;
        CharSequence summary;
        boolean enableBindings = false;
        switch (state) {
            case Display.STATE_CONNECTED:
                summary = getText(R.string.connstate_connected);
                enableBindings = true;
                break;
            case Display.STATE_NOT_CONNECTED:
                summary = getText(R.string.connstate_not_connected);
                break;
            default:
                summary = getText(R.string.connstate_error);
                break;
        }
        // Key bindings are only meaningful while a display is connected.
        Preference bindingsPref =
                findPreferenceByResId(R.string.pref_key_bindings_key);
        bindingsPref.setEnabled(enableBindings);
        if (mConnectionProgress == null) {
            mStatusPreference.setSummary(summary);
            announceConnectionState(summary);
        }
    }

    @Override
    public void onConnectionChangeProgress(String description) {
        mConnectionProgress = description;
        if (description == null) {
            // Progress finished; fall back to showing the connection state.
            onConnectionStateChanged(mConnectionState);
            return;
        }
        // The description is localized by the server.
        mStatusPreference.setSummary(description);
        announceConnectionState(description);
    }

    private void announceConnectionState(CharSequence state) {
        // TODO: Ideally, this announcement would be sent from the
        // view of the actual preference, if there only was a way to get
        // to that node.
        getWindow().getDecorView().announceForAccessibility(state);
    }

    @Override
    public void onTablesChanged() {
        mTables = mTranslatorManager.getTranslatorClient().getTables();
        addTableList(mSixDotTablePreference, false);
        addTableList(mEightDotTablePreference, true);
    }

    /**
     * Assigns the appropriate intent to the key bindings preference.
     */
    private void assignKeyBindingsIntent() {
        Preference pref = findPreferenceByResId(R.string.pref_key_bindings_key);
        final Intent intent = new Intent(this, KeyBindingsActivity.class);
        pref.setIntent(intent);
    }

    /**
     * Returns the preference associated with the specified resource identifier.
     *
     * @param resId A string resource identifier.
     * @return The preference associated with the specified resource identifier.
     */
    @SuppressWarnings("deprecation")
    private Preference findPreferenceByResId(int resId) {
        return findPreference(getString(resId));
    }

    /**
     * Populates {@code pref} with the translation tables matching
     * {@code eightDot}, prepending a "default" entry.
     */
    private void addTableList(ListPreference pref, boolean eightDot) {
        ArrayList<TableInfo> tables = new ArrayList<TableInfo>();
        for (TableInfo info : mTables) {
            if (eightDot == info.isEightDot()) {
                tables.add(info);
            }
        }
        Collections.sort(tables, mTableInfoComparator);
        // One extra slot for the leading "default" entry.
        CharSequence[] entryValues = new CharSequence[tables.size() + 1];
        CharSequence[] entries = new CharSequence[tables.size() + 1];
        int index = 0;
        TableInfo defaultInfo = mTranslatorManager.findDefaultTableInfo(
                eightDot);
        if (defaultInfo != null) {
            entries[index] = getString(
                    R.string.pref_braille_table_default_label,
                    createTableDisplayName(defaultInfo));
        } else {
            entries[index] = getText(
                    R.string.pref_braille_table_default_none_label);
        }
        entryValues[index] = getString(R.string.table_value_default);
        ++index;
        for (TableInfo info : tables) {
            entries[index] = createTableDisplayName(info);
            entryValues[index] = info.getId();
            ++index;
        }
        pref.setEntries(entries);
        pref.setEntryValues(entryValues);
        index = pref.findIndexOfValue(pref.getValue());
        if (index < 0 || index >= entries.length) {
            LogUtils.log(this, Log.ERROR,
                    "Unknown preference value for %s: %s",
                    pref.getKey(), pref.getValue());
        } else {
            pref.setSummary(entries[index]);
        }
    }

    /**
     * Builds a human-readable name for a table, adding variant and grade only
     * when needed to disambiguate among related tables.
     */
    private String createTableDisplayName(TableInfo tableInfo) {
        String localeDisplayName = tableInfo.getLocale().getDisplayName();
        if (tableInfo.isEightDot()) {
            // The fact that this is computer braille is obvious
            // from context.
            return localeDisplayName;
        }
        List<TableInfo> related = mTranslatorManager.getRelatedTables(tableInfo);
        int gradeCount = 0;
        for (TableInfo relatedInfo : related) {
            if (relatedInfo.isEightDot()) {
                continue;
            }
            ++gradeCount;
        }
        if (gradeCount <= 1) {
            // Only one of our kind.
            if (TextUtils.isEmpty(tableInfo.getVariant())) {
                return localeDisplayName;
            } else {
                return getString(R.string.table_name_variant,
                        localeDisplayName, tableInfo.getVariant());
            }
        }
        if (TextUtils.isEmpty(tableInfo.getVariant())) {
            return getString(R.string.table_name_grade, localeDisplayName,
                    tableInfo.getGrade());
        } else {
            return getString(R.string.table_name_variant_grade,
                    localeDisplayName, tableInfo.getVariant(),
                    tableInfo.getGrade());
        }
    }

    @Override
    public boolean onPreferenceChange(Preference preference, Object newValue) {
        // Always update the summary based on the list preference value.
        if (preference instanceof ListPreference) {
            boolean updated = updateListPreferenceSummary(
                    (ListPreference) preference, (String) newValue);
            if (!updated) {
                return false;
            }
        }
        // If the overlay was turned on for the first time, launch the
        // tutorial. (Boolean.TRUE.equals is the null-safe form of the
        // original newValue.equals(true).)
        if (preference == mOverlayPreference && Boolean.TRUE.equals(newValue)) {
            OverlayTutorialActivity.startIfFirstTime(this);
        }
        // If the log level was changed, update it in LogUtils.
        if (preference == mLogLevelPreference) {
            try {
                int logLevel = Integer.parseInt((String) newValue);
                return PreferenceUtils.updateLogLevel(logLevel);
            } catch (IllegalArgumentException e) {
                // NumberFormatException is an IllegalArgumentException, so
                // this also covers unparsable input.
                LogUtils.log(this, Log.ERROR, "illegal log level: %s",
                        newValue);
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean onPreferenceClick(Preference pref) {
        if (pref == mStatusPreference) {
            mDisplay.poll();
            return true;
        } else if (pref == mOverlayTutorialPreference) {
            startActivity(new Intent(this, OverlayTutorialActivity.class));
            return true;
        } else if (pref == mLicensesPreference) {
            Intent intent = WebViewDialog.getIntent(this,
                    R.string.pref_os_license_title,
                    "file:///android_asset/licenses.html");
            startActivity(intent);
            // BUGFIX: the original fell through to "return false" even
            // though the click was handled, unlike the sibling branches.
            return true;
        }
        return false;
    }

    /**
     * Sets the list preference's summary to the entry matching
     * {@code newValue}.
     *
     * @return true when the value was found, false (with an error log)
     *         otherwise.
     */
    private boolean updateListPreferenceSummary(
            ListPreference listPreference, String newValue) {
        int index = listPreference.findIndexOfValue(newValue);
        CharSequence[] entries = listPreference.getEntries();
        if (index < 0 || index >= entries.length) {
            LogUtils.log(this, Log.ERROR,
                    "Unknown preference value for %s: %s",
                    listPreference.getKey(), newValue);
            return false;
        }
        listPreference.setSummary(entries[index]);
        return true;
    }

    /**
     * Orders tables by collated locale display name, then six-dot before
     * eight-dot, then by grade. Collation keys are cached per table.
     */
    private static class TableInfoComparator implements Comparator<TableInfo> {
        private static final int KEY_MAP_INITIAL_CAPACITY = 50;
        private final Collator mCollator = Collator.getInstance();
        private final Map<TableInfo, CollationKey> mCollationKeyMap =
                new HashMap<TableInfo, CollationKey>(KEY_MAP_INITIAL_CAPACITY);

        @Override
        public int compare(TableInfo first, TableInfo second) {
            if (first.equals(second)) {
                return 0;
            }
            int ret = getCollationKey(first).compareTo(getCollationKey(second));
            if (ret == 0 && first.isEightDot() != second.isEightDot()) {
                ret = first.isEightDot() ? 1 : -1;
            }
            if (ret == 0) {
                ret = first.getGrade() - second.getGrade();
            }
            return ret;
        }

        private CollationKey getCollationKey(TableInfo tableInfo) {
            CollationKey key = mCollationKeyMap.get(tableInfo);
            if (key == null) {
                key = mCollator.getCollationKey(
                        tableInfo.getLocale().getDisplayName());
                mCollationKeyMap.put(tableInfo, key);
            }
            return key;
        }
    }

    // NOTE(review): this matches PreferenceActivity.isValidFragment (API 19+)
    // but carries no @Override, presumably to keep compiling against older
    // SDKs -- confirm the build target before annotating.
    protected boolean isValidFragment(String fragmentName) {
        return false;
    }
}
/* * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package com.redhat.ceylon.langtools.tools.javac.tree; import com.redhat.ceylon.langtools.tools.javac.code.*; import com.redhat.ceylon.langtools.tools.javac.code.Symbol.*; import com.redhat.ceylon.langtools.tools.javac.code.Type.*; import com.redhat.ceylon.langtools.tools.javac.util.*; import com.redhat.ceylon.langtools.tools.javac.util.JCDiagnostic.DiagnosticPosition; import com.redhat.ceylon.langtools.tools.javac.tree.JCTree.*; import static com.redhat.ceylon.langtools.tools.javac.code.Flags.*; import static com.redhat.ceylon.langtools.tools.javac.code.Kinds.*; import static com.redhat.ceylon.langtools.tools.javac.code.TypeTag.*; /** Factory class for trees. * * <p><b>This is NOT part of any supported API. * If you write code that depends on this, you do so at your own risk. 
* This code and its internal interfaces are subject to change or * deletion without notice.</b> */ public class TreeMaker implements JCTree.Factory { /** The context key for the tree factory. */ protected static final Context.Key<TreeMaker> treeMakerKey = new Context.Key<TreeMaker>(); /** Get the TreeMaker instance. */ public static TreeMaker instance(Context context) { TreeMaker instance = context.get(treeMakerKey); if (instance == null) instance = new TreeMaker(context); return instance; } /** The position at which subsequent trees will be created. */ public int pos = Position.NOPOS; /** The toplevel tree to which created trees belong. */ public JCCompilationUnit toplevel; /** The current name table. */ Names names; Types types; /** The current symbol table. */ Symtab syms; /** Create a tree maker with null toplevel and NOPOS as initial position. */ protected TreeMaker(Context context) { context.put(treeMakerKey, this); this.pos = Position.NOPOS; this.toplevel = null; this.names = Names.instance(context); this.syms = Symtab.instance(context); this.types = Types.instance(context); } /** Create a tree maker with a given toplevel and FIRSTPOS as initial position. */ protected TreeMaker(JCCompilationUnit toplevel, Names names, Types types, Symtab syms) { this.pos = Position.FIRSTPOS; this.toplevel = toplevel; this.names = names; this.types = types; this.syms = syms; } /** Create a new tree maker for a given toplevel. */ public TreeMaker forToplevel(JCCompilationUnit toplevel) { return new TreeMaker(toplevel, names, types, syms); } /** Reassign current position. */ public TreeMaker at(int pos) { this.pos = pos; return this; } /** Reassign current position. */ public TreeMaker at(DiagnosticPosition pos) { this.pos = (pos == null ? Position.NOPOS : pos.getStartPosition()); return this; } /** * Create given tree node at current position. 
* @param defs a list of ClassDef, Import, and Skip */ public JCCompilationUnit TopLevel(List<JCAnnotation> packageAnnotations, JCExpression pid, List<JCTree> defs) { Assert.checkNonNull(packageAnnotations); for (JCTree node : defs) Assert.check(node instanceof JCClassDecl || node instanceof JCImport || node instanceof JCSkip || node instanceof JCErroneous || (node instanceof JCExpressionStatement && ((JCExpressionStatement)node).expr instanceof JCErroneous), node.getClass().getSimpleName()); JCCompilationUnit tree = new JCCompilationUnit(packageAnnotations, pid, defs, null, null, null, null); tree.pos = pos; return tree; } public JCImport Import(JCTree qualid, boolean importStatic) { JCImport tree = new JCImport(qualid, importStatic); tree.pos = pos; return tree; } public JCClassDecl ClassDef(JCModifiers mods, Name name, List<JCTypeParameter> typarams, JCExpression extending, List<JCExpression> implementing, List<JCTree> defs) { JCClassDecl tree = new JCClassDecl(mods, name, typarams, extending, implementing, defs, null); tree.pos = pos; return tree; } public JCMethodDecl MethodDef(JCModifiers mods, Name name, JCExpression restype, List<JCTypeParameter> typarams, List<JCVariableDecl> params, List<JCExpression> thrown, JCBlock body, JCExpression defaultValue) { return MethodDef( mods, name, restype, typarams, null, params, thrown, body, defaultValue); } public JCMethodDecl MethodDef(JCModifiers mods, Name name, JCExpression restype, List<JCTypeParameter> typarams, JCVariableDecl recvparam, List<JCVariableDecl> params, List<JCExpression> thrown, JCBlock body, JCExpression defaultValue) { JCMethodDecl tree = new JCMethodDecl(mods, name, restype, typarams, recvparam, params, thrown, body, defaultValue, null); tree.pos = pos; return tree; } public JCVariableDecl VarDef(JCModifiers mods, Name name, JCExpression vartype, JCExpression init) { JCVariableDecl tree = new JCVariableDecl(mods, name, vartype, init, null); tree.pos = pos; return tree; } public JCVariableDecl 
ReceiverVarDef(JCModifiers mods, JCExpression name, JCExpression vartype) { JCVariableDecl tree = new JCVariableDecl(mods, name, vartype); tree.pos = pos; return tree; } public JCSkip Skip() { JCSkip tree = new JCSkip(); tree.pos = pos; return tree; } public JCBlock Block(long flags, List<JCStatement> stats) { JCBlock tree = new JCBlock(flags, stats); tree.pos = pos; return tree; } public JCDoWhileLoop DoLoop(JCStatement body, JCExpression cond) { JCDoWhileLoop tree = new JCDoWhileLoop(body, cond); tree.pos = pos; return tree; } public JCWhileLoop WhileLoop(JCExpression cond, JCStatement body) { JCWhileLoop tree = new JCWhileLoop(cond, body); tree.pos = pos; return tree; } public JCForLoop ForLoop(List<JCStatement> init, JCExpression cond, List<JCExpressionStatement> step, JCStatement body) { JCForLoop tree = new JCForLoop(init, cond, step, body); tree.pos = pos; return tree; } public JCEnhancedForLoop ForeachLoop(JCVariableDecl var, JCExpression expr, JCStatement body) { JCEnhancedForLoop tree = new JCEnhancedForLoop(var, expr, body); tree.pos = pos; return tree; } public JCLabeledStatement Labelled(Name label, JCStatement body) { JCLabeledStatement tree = new JCLabeledStatement(label, body); tree.pos = pos; return tree; } public JCSwitch Switch(JCExpression selector, List<JCCase> cases) { JCSwitch tree = new JCSwitch(selector, cases); tree.pos = pos; return tree; } public JCCase Case(JCExpression pat, List<JCStatement> stats) { JCCase tree = new JCCase(pat, stats); tree.pos = pos; return tree; } public JCSynchronized Synchronized(JCExpression lock, JCBlock body) { JCSynchronized tree = new JCSynchronized(lock, body); tree.pos = pos; return tree; } public JCTry Try(JCBlock body, List<JCCatch> catchers, JCBlock finalizer) { return Try(List.<JCTree>nil(), body, catchers, finalizer); } public JCTry Try(List<JCTree> resources, JCBlock body, List<JCCatch> catchers, JCBlock finalizer) { JCTry tree = new JCTry(resources, body, catchers, finalizer); tree.pos = pos; return 
tree; } public JCCatch Catch(JCVariableDecl param, JCBlock body) { JCCatch tree = new JCCatch(param, body); tree.pos = pos; return tree; } public JCConditional Conditional(JCExpression cond, JCExpression thenpart, JCExpression elsepart) { JCConditional tree = new JCConditional(cond, thenpart, elsepart); tree.pos = pos; return tree; } public JCIf If(JCExpression cond, JCStatement thenpart, JCStatement elsepart) { JCIf tree = new JCIf(cond, thenpart, elsepart); tree.pos = pos; return tree; } public JCExpressionStatement Exec(JCExpression expr) { JCExpressionStatement tree = new JCExpressionStatement(expr); tree.pos = pos; return tree; } public JCBreak Break(Name label) { JCBreak tree = new JCBreak(label, null); tree.pos = pos; return tree; } public JCContinue Continue(Name label) { JCContinue tree = new JCContinue(label, null); tree.pos = pos; return tree; } public JCReturn Return(JCExpression expr) { JCReturn tree = new JCReturn(expr); tree.pos = pos; return tree; } public JCThrow Throw(JCExpression expr) { JCThrow tree = new JCThrow(expr); tree.pos = pos; return tree; } public JCAssert Assert(JCExpression cond, JCExpression detail) { JCAssert tree = new JCAssert(cond, detail); tree.pos = pos; return tree; } public JCMethodInvocation Apply(List<JCExpression> typeargs, JCExpression fn, List<JCExpression> args) { JCMethodInvocation tree = new JCMethodInvocation(typeargs, fn, args); tree.pos = pos; return tree; } public JCNewClass NewClass(JCExpression encl, List<JCExpression> typeargs, JCExpression clazz, List<JCExpression> args, JCClassDecl def) { JCNewClass tree = new JCNewClass(encl, typeargs, clazz, args, def); tree.pos = pos; return tree; } public JCNewArray NewArray(JCExpression elemtype, List<JCExpression> dims, List<JCExpression> elems) { JCNewArray tree = new JCNewArray(elemtype, dims, elems); tree.pos = pos; return tree; } public JCLambda Lambda(List<JCVariableDecl> params, JCTree body) { JCLambda tree = new JCLambda(params, body); tree.pos = pos; return 
tree; } public JCParens Parens(JCExpression expr) { JCParens tree = new JCParens(expr); tree.pos = pos; return tree; } public JCAssign Assign(JCExpression lhs, JCExpression rhs) { JCAssign tree = new JCAssign(lhs, rhs); tree.pos = pos; return tree; } public JCAssignOp Assignop(JCTree.Tag opcode, JCTree lhs, JCTree rhs) { JCAssignOp tree = new JCAssignOp(opcode, lhs, rhs, null); tree.pos = pos; return tree; } public JCUnary Unary(JCTree.Tag opcode, JCExpression arg) { JCUnary tree = new JCUnary(opcode, arg); tree.pos = pos; return tree; } public JCBinary Binary(JCTree.Tag opcode, JCExpression lhs, JCExpression rhs) { JCBinary tree = new JCBinary(opcode, lhs, rhs, null); tree.pos = pos; return tree; } public JCTypeCast TypeCast(JCTree clazz, JCExpression expr) { JCTypeCast tree = new JCTypeCast(clazz, expr); tree.pos = pos; return tree; } public JCInstanceOf TypeTest(JCExpression expr, JCTree clazz) { JCInstanceOf tree = new JCInstanceOf(expr, clazz); tree.pos = pos; return tree; } public JCArrayAccess Indexed(JCExpression indexed, JCExpression index) { JCArrayAccess tree = new JCArrayAccess(indexed, index); tree.pos = pos; return tree; } public JCFieldAccess Select(JCExpression selected, Name selector) { JCFieldAccess tree = new JCFieldAccess(selected, selector, null); tree.pos = pos; return tree; } public JCMemberReference Reference(JCMemberReference.ReferenceMode mode, Name name, JCExpression expr, List<JCExpression> typeargs) { JCMemberReference tree = new JCMemberReference(mode, name, expr, typeargs); tree.pos = pos; return tree; } public JCIdent Ident(Name name) { JCIdent tree = new JCIdent(name, null); tree.pos = pos; return tree; } public JCLiteral Literal(TypeTag tag, Object value) { JCLiteral tree = new JCLiteral(tag, value); tree.pos = pos; return tree; } public JCPrimitiveTypeTree TypeIdent(TypeTag typetag) { JCPrimitiveTypeTree tree = new JCPrimitiveTypeTree(typetag); tree.pos = pos; return tree; } public JCArrayTypeTree TypeArray(JCExpression elemtype) 
{ JCArrayTypeTree tree = new JCArrayTypeTree(elemtype); tree.pos = pos; return tree; } public JCTypeApply TypeApply(JCExpression clazz, List<JCExpression> arguments) { JCTypeApply tree = new JCTypeApply(clazz, arguments); tree.pos = pos; return tree; } public JCTypeUnion TypeUnion(List<JCExpression> components) { JCTypeUnion tree = new JCTypeUnion(components); tree.pos = pos; return tree; } public JCTypeIntersection TypeIntersection(List<JCExpression> components) { JCTypeIntersection tree = new JCTypeIntersection(components); tree.pos = pos; return tree; } public JCTypeParameter TypeParameter(Name name, List<JCExpression> bounds) { return TypeParameter(name, bounds, List.<JCAnnotation>nil()); } public JCTypeParameter TypeParameter(Name name, List<JCExpression> bounds, List<JCAnnotation> annos) { JCTypeParameter tree = new JCTypeParameter(name, bounds, annos); tree.pos = pos; return tree; } public JCWildcard Wildcard(TypeBoundKind kind, JCTree type) { JCWildcard tree = new JCWildcard(kind, type); tree.pos = pos; return tree; } public TypeBoundKind TypeBoundKind(BoundKind kind) { TypeBoundKind tree = new TypeBoundKind(kind); tree.pos = pos; return tree; } public JCAnnotation Annotation(JCTree annotationType, List<JCExpression> args) { JCAnnotation tree = new JCAnnotation(Tag.ANNOTATION, annotationType, args); tree.pos = pos; return tree; } public JCAnnotation TypeAnnotation(JCTree annotationType, List<JCExpression> args) { JCAnnotation tree = new JCAnnotation(Tag.TYPE_ANNOTATION, annotationType, args); tree.pos = pos; return tree; } public JCModifiers Modifiers(long flags, List<JCAnnotation> annotations) { JCModifiers tree = new JCModifiers(flags, annotations); boolean noFlags = (flags & (Flags.ModifierFlags | Flags.ANNOTATION)) == 0; tree.pos = (noFlags && annotations.isEmpty()) ? 
Position.NOPOS : pos; return tree; } public JCModifiers Modifiers(long flags) { return Modifiers(flags, List.<JCAnnotation>nil()); } public JCAnnotatedType AnnotatedType(List<JCAnnotation> annotations, JCExpression underlyingType) { JCAnnotatedType tree = new JCAnnotatedType(annotations, underlyingType); tree.pos = pos; return tree; } public JCErroneous Erroneous() { return Erroneous(List.<JCTree>nil()); } public JCErroneous Erroneous(List<? extends JCTree> errs) { JCErroneous tree = new JCErroneous(errs); tree.pos = pos; return tree; } public LetExpr LetExpr(List<JCStatement> defs, JCTree expr) { LetExpr tree = new LetExpr(defs, expr); tree.pos = pos; return tree; } /* *************************************************************************** * Derived building blocks. ****************************************************************************/ public JCClassDecl AnonymousClassDef(JCModifiers mods, List<JCTree> defs) { return ClassDef(mods, names.empty, List.<JCTypeParameter>nil(), null, List.<JCExpression>nil(), defs); } public LetExpr LetExpr(JCVariableDecl def, JCTree expr) { LetExpr tree = new LetExpr(List.<JCStatement>of(def), expr); tree.pos = pos; return tree; } public LetExpr LetExpr(JCVariableDecl def, List<JCStatement> stats, JCTree expr) { LetExpr tree = new LetExpr(new ListBuffer<JCStatement>().append(def).appendList(stats).toList(), expr); tree.pos = pos; return tree; } public LetExpr LetExpr(List<JCVariableDecl> defs, List<JCStatement> stats, JCTree expr) { LetExpr tree = new LetExpr(new ListBuffer<JCStatement>().appendList((List)defs).appendList(stats).toList(), expr); tree.pos = pos; return tree; } public LetExpr LetExpr(List<JCStatement> prestats, List<JCVariableDecl> defs, List<JCStatement> stats, JCTree expr) { LetExpr tree = new LetExpr(new ListBuffer<JCStatement>() .appendList(prestats) .appendList((List)defs) .appendList(stats).toList(), expr); tree.pos = pos; return tree; } /** Create an identifier from a symbol. 
*/ public JCIdent Ident(Symbol sym) { return (JCIdent)new JCIdent((sym.name != names.empty) ? sym.name : sym.flatName(), sym) .setPos(pos) .setType(sym.type); } /** Create a selection node from a qualifier tree and a symbol. * @param base The qualifier tree. */ public JCExpression Select(JCExpression base, Symbol sym) { return new JCFieldAccess(base, sym.name, sym).setPos(pos).setType(sym.type); } /** Create a qualified identifier from a symbol, adding enough qualifications * to make the reference unique. */ public JCExpression QualIdent(Symbol sym) { return isUnqualifiable(sym) ? Ident(sym) : Select(QualIdent(sym.owner), sym); } /** Create an identifier that refers to the variable declared in given variable * declaration. */ public JCExpression Ident(JCVariableDecl param) { return Ident(param.sym); } /** Create a list of identifiers referring to the variables declared * in given list of variable declarations. */ public List<JCExpression> Idents(List<JCVariableDecl> params) { ListBuffer<JCExpression> ids = new ListBuffer<JCExpression>(); for (List<JCVariableDecl> l = params; l.nonEmpty(); l = l.tail) ids.append(Ident(l.head)); return ids.toList(); } /** Create a tree representing `this', given its type. */ public JCExpression This(Type t) { return Ident(new VarSymbol(FINAL, names._this, t, t.tsym)); } /** Create a tree representing a class literal. */ public JCExpression ClassLiteral(ClassSymbol clazz) { return ClassLiteral(clazz.type); } /** Create a tree representing a class literal. */ public JCExpression ClassLiteral(Type t) { VarSymbol lit = new VarSymbol(STATIC | PUBLIC | FINAL, names._class, t, t.tsym); return Select(Type(t), lit); } /** Create a tree representing `super', given its type and owner. */ public JCIdent Super(Type t, TypeSymbol owner) { return Ident(new VarSymbol(FINAL, names._super, t, owner)); } /** * Create a method invocation from a method tree and a list of * argument trees. 
*/ public JCMethodInvocation App(JCExpression meth, List<JCExpression> args) { return Apply(null, meth, args).setType(meth.type.getReturnType()); } /** * Create a no-arg method invocation from a method tree */ public JCMethodInvocation App(JCExpression meth) { return Apply(null, meth, List.<JCExpression>nil()).setType(meth.type.getReturnType()); } /** Create a method invocation from a method tree and a list of argument trees. */ public JCExpression Create(Symbol ctor, List<JCExpression> args) { Type t = ctor.owner.erasure(types); JCNewClass newclass = NewClass(null, null, Type(t), args, null); newclass.constructor = ctor; newclass.setType(t); return newclass; } /** Create a tree representing given type. */ public JCExpression Type(Type t) { if (t == null) return null; JCExpression tp; switch (t.getTag()) { case BYTE: case CHAR: case SHORT: case INT: case LONG: case FLOAT: case DOUBLE: case BOOLEAN: case VOID: tp = TypeIdent(t.getTag()); break; case TYPEVAR: tp = Ident(t.tsym); break; case WILDCARD: { WildcardType a = ((WildcardType) t); tp = Wildcard(TypeBoundKind(a.kind), Type(a.type)); break; } case CLASS: Type outer = t.getEnclosingType(); JCExpression clazz = outer.hasTag(CLASS) && t.tsym.owner.kind == TYP ? Select(Type(outer), t.tsym) : QualIdent(t.tsym); tp = t.getTypeArguments().isEmpty() ? clazz : TypeApply(clazz, Types(t.getTypeArguments())); break; case ARRAY: tp = TypeArray(Type(types.elemtype(t))); break; case ERROR: tp = TypeIdent(ERROR); break; default: throw new AssertionError("unexpected type: " + t); } return tp.setType(t); } /** Create a list of trees representing given list of types. */ public List<JCExpression> Types(List<Type> ts) { ListBuffer<JCExpression> lb = new ListBuffer<JCExpression>(); for (List<Type> l = ts; l.nonEmpty(); l = l.tail) lb.append(Type(l.head)); return lb.toList(); } /** Create a variable definition from a variable symbol and an initializer * expression. 
*/ public JCVariableDecl VarDef(VarSymbol v, JCExpression init) { return (JCVariableDecl) new JCVariableDecl( Modifiers(v.flags(), Annotations(v.getRawAttributes())), v.name, Type(v.type), init, v).setPos(pos).setType(v.type); } /** Create annotation trees from annotations. */ public List<JCAnnotation> Annotations(List<Attribute.Compound> attributes) { if (attributes == null) return List.nil(); ListBuffer<JCAnnotation> result = new ListBuffer<JCAnnotation>(); for (List<Attribute.Compound> i = attributes; i.nonEmpty(); i=i.tail) { Attribute a = i.head; result.append(Annotation(a)); } return result.toList(); } public JCLiteral Literal(Object value) { JCLiteral result = null; if (value instanceof String) { result = Literal(CLASS, value). setType(syms.stringType.constType(value)); } else if (value instanceof Integer) { result = Literal(INT, value). setType(syms.intType.constType(value)); } else if (value instanceof Long) { result = Literal(LONG, value). setType(syms.longType.constType(value)); } else if (value instanceof Byte) { result = Literal(BYTE, value). setType(syms.byteType.constType(value)); } else if (value instanceof Character) { int v = (int) (((Character) value).toString().charAt(0)); result = Literal(CHAR, value). setType(syms.charType.constType(v)); } else if (value instanceof Double) { result = Literal(DOUBLE, value). setType(syms.doubleType.constType(value)); } else if (value instanceof Float) { result = Literal(FLOAT, value). setType(syms.floatType.constType(value)); } else if (value instanceof Short) { result = Literal(SHORT, value). setType(syms.shortType.constType(value)); } else if (value instanceof Boolean) { int v = ((Boolean) value) ? 1 : 0; result = Literal(BOOLEAN, v). 
setType(syms.booleanType.constType(v)); } else { throw new AssertionError(value); } return result; } class AnnotationBuilder implements Attribute.Visitor { JCExpression result = null; public void visitConstant(Attribute.Constant v) { result = Literal(v.type.getTag(), v.value); } public void visitClass(Attribute.Class clazz) { result = ClassLiteral(clazz.classType).setType(syms.classType); } public void visitEnum(Attribute.Enum e) { result = QualIdent(e.value); } public void visitError(Attribute.Error e) { result = Erroneous(); } public void visitCompound(Attribute.Compound compound) { if (compound instanceof Attribute.TypeCompound) { result = visitTypeCompoundInternal((Attribute.TypeCompound) compound); } else { result = visitCompoundInternal(compound); } } public JCAnnotation visitCompoundInternal(Attribute.Compound compound) { ListBuffer<JCExpression> args = new ListBuffer<JCExpression>(); for (List<Pair<Symbol.MethodSymbol,Attribute>> values = compound.values; values.nonEmpty(); values=values.tail) { Pair<MethodSymbol,Attribute> pair = values.head; JCExpression valueTree = translate(pair.snd); args.append(Assign(Ident(pair.fst), valueTree).setType(valueTree.type)); } return Annotation(Type(compound.type), args.toList()); } public JCAnnotation visitTypeCompoundInternal(Attribute.TypeCompound compound) { ListBuffer<JCExpression> args = new ListBuffer<JCExpression>(); for (List<Pair<Symbol.MethodSymbol,Attribute>> values = compound.values; values.nonEmpty(); values=values.tail) { Pair<MethodSymbol,Attribute> pair = values.head; JCExpression valueTree = translate(pair.snd); args.append(Assign(Ident(pair.fst), valueTree).setType(valueTree.type)); } return TypeAnnotation(Type(compound.type), args.toList()); } public void visitArray(Attribute.Array array) { ListBuffer<JCExpression> elems = new ListBuffer<JCExpression>(); for (int i = 0; i < array.values.length; i++) elems.append(translate(array.values[i])); result = NewArray(null, List.<JCExpression>nil(), 
elems.toList()).setType(array.type); } JCExpression translate(Attribute a) { a.accept(this); return result; } JCAnnotation translate(Attribute.Compound a) { return visitCompoundInternal(a); } JCAnnotation translate(Attribute.TypeCompound a) { return visitTypeCompoundInternal(a); } } AnnotationBuilder annotationBuilder = new AnnotationBuilder(); /** Create an annotation tree from an attribute. */ public JCAnnotation Annotation(Attribute a) { return annotationBuilder.translate((Attribute.Compound)a); } public JCAnnotation TypeAnnotation(Attribute a) { return annotationBuilder.translate((Attribute.TypeCompound) a); } /** Create a method definition from a method symbol and a method body. */ public JCMethodDecl MethodDef(MethodSymbol m, JCBlock body) { return MethodDef(m, m.type, body); } /** Create a method definition from a method symbol, method type * and a method body. */ public JCMethodDecl MethodDef(MethodSymbol m, Type mtype, JCBlock body) { return (JCMethodDecl) new JCMethodDecl( Modifiers(m.flags(), Annotations(m.getRawAttributes())), m.name, Type(mtype.getReturnType()), TypeParams(mtype.getTypeArguments()), null, // receiver type Params(mtype.getParameterTypes(), m), Types(mtype.getThrownTypes()), body, null, m).setPos(pos).setType(mtype); } /** Create a type parameter tree from its name and type. */ public JCTypeParameter TypeParam(Name name, TypeVar tvar) { return (JCTypeParameter) TypeParameter(name, Types(types.getBounds(tvar))).setPos(pos).setType(tvar); } /** Create a list of type parameter trees from a list of type variables. */ public List<JCTypeParameter> TypeParams(List<Type> typarams) { ListBuffer<JCTypeParameter> tparams = new ListBuffer<JCTypeParameter>(); for (List<Type> l = typarams; l.nonEmpty(); l = l.tail) tparams.append(TypeParam(l.head.tsym.name, (TypeVar)l.head)); return tparams.toList(); } /** Create a value parameter tree from its name, type, and owner. 
*/ public JCVariableDecl Param(Name name, Type argtype, Symbol owner) { return VarDef(new VarSymbol(PARAMETER, name, argtype, owner), null); } /** Create a a list of value parameter trees x0, ..., xn from a list of * their types and an their owner. */ public List<JCVariableDecl> Params(List<Type> argtypes, Symbol owner) { ListBuffer<JCVariableDecl> params = new ListBuffer<JCVariableDecl>(); MethodSymbol mth = (owner.kind == MTH) ? ((MethodSymbol)owner) : null; if (mth != null && mth.params != null && argtypes.length() == mth.params.length()) { for (VarSymbol param : ((MethodSymbol)owner).params) params.append(VarDef(param, null)); } else { int i = 0; for (List<Type> l = argtypes; l.nonEmpty(); l = l.tail) params.append(Param(paramName(i++), l.head, owner)); } return params.toList(); } /** Wrap a method invocation in an expression statement or return statement, * depending on whether the method invocation expression's type is void. */ public JCStatement Call(JCExpression apply) { return apply.type.hasTag(VOID) ? Exec(apply) : Return(apply); } /** Construct an assignment from a variable symbol and a right hand side. */ public JCStatement Assignment(Symbol v, JCExpression rhs) { return Exec(Assign(Ident(v), rhs).setType(v.type)); } /** Construct an index expression from a variable and an expression. */ public JCArrayAccess Indexed(Symbol v, JCExpression index) { JCArrayAccess tree = new JCArrayAccess(QualIdent(v), index); tree.type = ((ArrayType)v.type).elemtype; return tree; } /** Make an attributed type cast expression. */ public JCTypeCast TypeCast(Type type, JCExpression expr) { return (JCTypeCast)TypeCast(Type(type), expr).setType(type); } /* *************************************************************************** * Helper methods. ****************************************************************************/ /** Can given symbol be referred to in unqualified form? 
*/
    boolean isUnqualifiable(Symbol sym) {
        // Unnamed symbols and symbols owned by a method or variable scope are
        // always local, so they never need (and cannot take) qualification.
        if (sym.name == names.empty ||
            sym.owner == null ||
            // sym.owner == syms.rootPackage ||
            sym.owner.kind == MTH || sym.owner.kind == VAR) {
            return true;
        } else if (sym.kind == TYP && toplevel != null) {
            // A type may be written unqualified only if looking its simple name
            // up in the toplevel's scopes resolves to exactly this symbol and
            // nothing else shadows it: checked in priority order -- named
            // (single-type) imports, then the current package's members, then
            // star imports.  `e.next().scope == null` means there is no second
            // entry with the same name, i.e. the resolution is unambiguous.
            Scope.Entry e;
            e = toplevel.namedImportScope.lookup(sym.name);
            if (e.scope != null) {
                return
                  e.sym == sym &&
                  e.next().scope == null;
            }
            e = toplevel.packge.members().lookup(sym.name);
            if (e.scope != null) {
                return
                  e.sym == sym &&
                  e.next().scope == null;
            }
            e = toplevel.starImportScope.lookup(sym.name);
            if (e.scope != null) {
                return
                  e.sym == sym &&
                  e.next().scope == null;
            }
        }
        // Not found in any importable scope: must be written qualified.
        return false;
    }

    /** The name of synthetic parameter number `i'.
     */
    public Name paramName(int i) {
        return names.fromString("x" + i);
    }

    /** The name of synthetic type parameter number `i'.
     */
    public Name typaramName(int i) {
        return names.fromString("A" + i);
    }
}
// Copyright (c) Microsoft Corporation. // All rights reserved. // // This code is licensed under the MIT License. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files(the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and / or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions : // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
package com.microsoft.aad.adal;

import android.annotation.SuppressLint;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.pm.Signature;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import android.util.Base64;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URLEncoder;
import java.security.MessageDigest;

import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Instrumented tests for {@code com.microsoft.aad.adal.PackageHelper}.
 *
 * <p>The class under test is not public, so every call goes through
 * reflection: the instance is created via {@link #getInstance(Context)} and
 * methods are resolved with {@code ReflectionUtils.getTestMethod}.  Package
 * lookups are satisfied by a mocked {@link Context}/{@link PackageManager}
 * pair built in {@link #getMockContext(Signature, String, int)}.
 */
@RunWith(AndroidJUnit4.class)
public class PackageHelperTests {

    /** Arbitrary UID stubbed into the mocked ApplicationInfo. */
    private static final int TEST_UID = 13;

    /** Raw bytes of the first signature of the instrumentation package. */
    private byte[] mTestSignature;

    /** Base64(SHA digest of {@link #mTestSignature}), the tag ADAL computes. */
    private String mTestTag;

    private Context mContext;

    @SuppressLint("PackageManagerGetSignatures")
    @Before
    public void setUp() throws Exception {
        mContext = InstrumentationRegistry.getContext();
        System.setProperty("dexmaker.dexcache", mContext.getCacheDir().getPath());
        if (AuthenticationSettings.INSTANCE.getSecretKeyData() == null) {
            // use same key for tests
            final int iterationCount = 100;
            final int keyLength = 256;
            SecretKeyFactory keyFactory = SecretKeyFactory
                    .getInstance("PBEWithSHA256And256BitAES-CBC-BC");
            SecretKey tempkey = keyFactory.generateSecret(new PBEKeySpec("test".toCharArray(),
                    "abcdedfdfd".getBytes("UTF-8"), iterationCount, keyLength));
            SecretKey secretKey = new SecretKeySpec(tempkey.getEncoded(), "AES");
            AuthenticationSettings.INSTANCE.setSecretKey(secretKey.getEncoded());
        }
        AuthenticationSettings.INSTANCE.setBrokerPackageName("invalid_do_not_switch");
        AuthenticationSettings.INSTANCE.setBrokerSignature("invalid_do_not_switch");
        // ADAL is set to this signature for now
        PackageInfo info = mContext.getPackageManager().getPackageInfo(mContext.getPackageName(),
                PackageManager.GET_SIGNATURES);
        // Broker App can be signed with multiple certificates. It will look
        // all of them until it finds the correct one for ADAL broker.
        // NOTE(review): only the first signature is captured here (the loop
        // breaks immediately) — the tests only need one valid signature/tag.
        for (Signature signature : info.signatures) {
            mTestSignature = signature.toByteArray();
            MessageDigest md = MessageDigest.getInstance("SHA");
            md.update(mTestSignature);
            mTestTag = Base64.encodeToString(md.digest(), Base64.NO_WRAP);
            break;
        }
    }

    @After
    public void tearDown() throws Exception {
        Logger.getInstance().setExternalLogger(null);
    }

    /**
     * getCurrentSignatureForPackage should return the Base64-encoded digest
     * for a known package and null when the package name is null.
     */
    @Test
    public void testGetCurrentSignatureForPackage() throws NameNotFoundException,
            IllegalArgumentException, ClassNotFoundException, NoSuchMethodException,
            InstantiationException, IllegalAccessException, InvocationTargetException {
        Context mockContext = getMockContext(new Signature(mTestSignature),
                mContext.getPackageName(), 0);
        Object packageHelper = getInstance(mockContext);
        Method m = ReflectionUtils.getTestMethod(packageHelper, "getCurrentSignatureForPackage",
                String.class);

        // act
        String actual = (String) m.invoke(packageHelper, mContext.getPackageName());

        // assert
        assertEquals("should be same info", mTestTag, actual);

        // act
        actual = (String) m.invoke(packageHelper, (String) null);

        // assert
        assertNull("should return null", actual);
    }

    /**
     * getUIDForPackage should return the UID reported by PackageManager and
     * fall back to 0 for a null package name.
     */
    @Test
    public void testGetUIDForPackage() throws NameNotFoundException, IllegalArgumentException,
            ClassNotFoundException, NoSuchMethodException, InstantiationException,
            IllegalAccessException, InvocationTargetException {
        Context mockContext = getMockContext(new Signature(mTestSignature),
                mContext.getPackageName(), TEST_UID);
        Object packageHelper = getInstance(mockContext);
        Method m = ReflectionUtils.getTestMethod(packageHelper, "getUIDForPackage", String.class);

        // act
        int actual = (Integer) m.invoke(packageHelper, mContext.getPackageName());

        // assert
        assertEquals("should be same UID", TEST_UID, actual);

        // act
        actual = (Integer) m.invoke(packageHelper, (String) null);

        // assert
        assertEquals("should return 0", 0, actual);
    }

    /**
     * getBrokerRedirectUrl should embed both the package name and the
     * URL-encoded signature tag.
     */
    @Test
    public void testRedirectUrl() throws NameNotFoundException, IllegalArgumentException,
            ClassNotFoundException, NoSuchMethodException, InstantiationException,
            IllegalAccessException, InvocationTargetException, UnsupportedEncodingException {
        Context mockContext = getMockContext(new Signature(mTestSignature),
                mContext.getPackageName(), 0);
        Object packageHelper = getInstance(mockContext);
        Method m = ReflectionUtils.getTestMethod(packageHelper, "getBrokerRedirectUrl",
                String.class, String.class);

        // act
        String actual = (String) m.invoke(packageHelper, mContext.getPackageName(), mTestTag);

        // assert
        assertTrue("should have packagename", actual.contains(mContext.getPackageName()));
        assertTrue("should have signature url encoded",
                actual.contains(URLEncoder.encode(mTestTag,
                        AuthenticationConstants.ENCODING_UTF8)));
    }

    /**
     * Instantiates the package-private PackageHelper via reflection.
     */
    private static Object getInstance(Context mockContext) throws IllegalArgumentException,
            ClassNotFoundException, NoSuchMethodException, InstantiationException,
            IllegalAccessException, InvocationTargetException {
        Class<?> c = Class.forName("com.microsoft.aad.adal.PackageHelper");
        Constructor<?> constructorParams = c.getDeclaredConstructor(Context.class);
        constructorParams.setAccessible(true);
        return constructorParams.newInstance(mockContext);
    }

    /**
     * Builds a mock Context whose PackageManager reports the given signature,
     * package name and calling UID.
     */
    private Context getMockContext(final Signature signature, final String packageName,
            final int callingUID) throws NameNotFoundException {
        Context mockContext = mock(Context.class);
        // insert packagemanager mocks
        PackageManager mockPackageManager = getPackageManager(signature, packageName, callingUID);
        when(mockContext.getPackageManager()).thenReturn(mockPackageManager);
        when(mockContext.getPackageName()).thenReturn(packageName);
        return mockContext;
    }

    /**
     * Builds a mock PackageManager that returns a PackageInfo carrying the
     * given signature and an ApplicationInfo carrying the given name/UID.
     */
    @SuppressLint("PackageManagerGetSignatures")
    private PackageManager getPackageManager(final Signature signature, final String packageName,
            final int callingUID) throws NameNotFoundException {
        PackageManager mockPackage = mock(PackageManager.class);
        PackageInfo info = new PackageInfo();
        Signature[] signatures = new Signature[1];
        signatures[0] = signature;
        info.signatures = signatures;
        ApplicationInfo appInfo = new ApplicationInfo();
        appInfo.name = packageName;
        appInfo.uid = callingUID;
        when(mockPackage.getPackageInfo(packageName, PackageManager.GET_SIGNATURES))
                .thenReturn(info);
        when(mockPackage.getApplicationInfo(packageName, 0)).thenReturn(appInfo);
        // NOTE(review): the original also created a throwaway mock(Context)
        // here and stubbed its getPackageManager(); that mock was never
        // returned or referenced, so it was removed as dead code.
        return mockPackage;
    }
}
// jTDS JDBC Driver for Microsoft SQL Server and Sybase // Copyright (C) 2004 The jTDS Project // // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA // package net.sourceforge.jtds.jdbc; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.EOFException; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.RandomAccessFile; import java.net.Socket; import java.net.SocketException; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.LinkedList; import net.sourceforge.jtds.ssl.SocketFactories; import net.sourceforge.jtds.ssl.Ssl; import net.sourceforge.jtds.util.Logger; /** * This class mananges the physical connection to the SQL Server and * serialises its use amongst a number of virtual sockets. * This allows one physical connection to service a number of concurrent * statements. * <p> * Constraints and assumptions: * <ol> * <li>Callers will not attempt to read from the server without issuing a request first. * <li>The end of a server reply can be identified as byte 2 of the header is non zero. * </ol> * </p> * Comments: * <ol> * <li>This code will discard unread server data if a new request is issued. 
 * Currently the higher levels of the driver attempt to do this but may be
 * we can just rely on this code instead.
 * <li>A cancel can be issued by a caller only if the server is currently sending
 * data for the caller otherwise the cancel is ignored.
 * <li>Cancel packets on their own are returned as extra records appended to the
 * previous packet so that the TdsCore module can process them.
 * </ol>
 * This version of the class will start to cache results to disk once a predetermined
 * maximum buffer memory threshold has been passed. Small result sets that will fit
 * within a specified limit (default 8 packets) will continue to be held in memory
 * (even if the memory threshold has been passed) in the interests of efficiency.
 *
 * @author Mike Hutchinson.
 * @version $Id: SharedSocket.java,v 1.22 2005/01/04 17:12:53 alin_sinpalean Exp $
 */
class SharedSocket {
    /**
     * This inner class contains the state information for the virtual socket.
     * One instance exists per request/response stream multiplexed over the
     * shared physical socket.
     */
    private static class VirtualSocket {
        /**
         * The stream ID of the stream objects owning this state.
         */
        int owner;
        /**
         * Memory resident packet queue.
         */
        LinkedList pktQueue;
        /**
         * True to discard network data.
         * NOTE(review): appears unread in this class — confirm before removing.
         */
        boolean flushInput;
        /**
         * True if output is complete TDS packet.
         * NOTE(review): appears unread in this class — confirm before removing.
         */
        boolean complete;
        /**
         * File object for disk packet queue.
         */
        File queueFile;
        /**
         * I/O Stream for disk packet queue.
         */
        RandomAccessFile diskQueue;
        /**
         * Number of packets cached to disk.
         */
        int pktsOnDisk;
        /**
         * Total of input packets in memory or disk.
         */
        int inputPkts;

        /**
         * Construct object to hold state information for each caller.
         * @param streamId the Response/Request stream id.
         */
        VirtualSocket(int streamId) {
            this.owner = streamId;
            this.pktQueue = new LinkedList();
            this.flushInput = false;
            this.complete = false;
            this.queueFile = null;
            this.diskQueue = null;
            this.pktsOnDisk = 0;
            this.inputPkts = 0;
        }
    }

    /**
     * The shared network socket.
     */
    private Socket socket;
    /**
     * Output stream for network socket.
     */
    private DataOutputStream out;
    /**
     * Input stream for network socket.
     */
    private DataInputStream in;
    /**
     * Current maximum input buffer size.
     */
    private int maxBufSize = TdsCore.MIN_PKT_SIZE;
    /**
     * Table of stream objects sharing this socket.
     * Indexed by stream id; freed slots are set to null and reused.
     */
    private ArrayList socketTable = new ArrayList();
    /**
     * The Stream ID of the object that is expecting a response from the server.
     * -1 means no response is currently pending.
     */
    private int responseOwner = -1;
    /**
     * Buffer for packet header.
     */
    private byte hdrBuf[] = new byte[8];
    /**
     * Total memory usage in all instances of the driver
     * NB. Access to this field should probably be synchronized
     * but in practice lost updates will not matter much and I think
     * all VMs tend to do atomic saves to integer variables.
     */
    private static int globalMemUsage = 0;
    /**
     * Peak memory usage for debug purposes.
     */
    private static int peakMemUsage = 0;
    /**
     * Max memory limit to use for buffers.
     * Only when this limit is exceeded will the driver
     * start caching to disk.
     */
    private static int memoryBudget = 100000; // 100K
    /**
     * Minimum number of packets that will be cached in memory
     * before the driver tries to write to disk even if
     * memoryBudget has been exceeded.
     */
    private static int minMemPkts = 8;
    /**
     * Global flag to indicate that security constraints mean
     * that attempts to create work files will fail.
     */
    private static boolean securityViolation;
    /**
     * Tds protocol version.
     */
    private int tdsVersion;
    /**
     * The server type, one of Driver.SQLSERVER or Driver.SYBASE.
     */
    private int serverType;
    /**
     * The character set to use for converting strings to/from bytes.
     */
    private CharsetInfo charsetInfo;
    /**
     * Count of packets received.
     */
    private int packetCount;
    /**
     * TDS done token.
     */
    private static final byte TDS_DONE_TOKEN = (byte) 253;

    // No-op constructor: lets subclasses construct an instance without
    // opening a network socket.
    protected SharedSocket() {
    }

    /**
     * Construct a <code>SharedSocket</code> object specifying host name and
     * port.
     *
     * @param host       the SQL Server host name
     * @param port       the connection port eg 1433
     * @param tdsVersion the TDS protocol version
     * @param serverType the server type, one of Driver.SQLSERVER or Driver.SYBASE
     * @param tcpNoDelay <code>true</code> to enable TCP_NODELAY on the
     *                   underlying socket; <code>false</code> to disable
     * @param ssl        the SSL setting (see {@link Ssl}); selects a plain or
     *                   SSL-wrapped socket
     * @param instance   the database instance name; needed if SSL is on
     * @throws IOException if socket open fails
     * @throws UnknownHostException if the host name cannot be resolved
     */
    SharedSocket(String host, int port, int tdsVersion, int serverType,
            boolean tcpNoDelay, String ssl, String instance)
            throws IOException, UnknownHostException {
        setTdsVersion(tdsVersion);
        setServerType(serverType);
        this.socket = createSocket(host, port, ssl, instance);
        setOut(new DataOutputStream(socket.getOutputStream()));
        setIn(new DataInputStream(socket.getInputStream()));
        this.socket.setTcpNoDelay(tcpNoDelay);
    }

    /**
     * Set the character set descriptor to be used to translate byte arrays to
     * or from Strings.
     *
     * @param charsetInfo the character set descriptor
     */
    void setCharsetInfo(CharsetInfo charsetInfo) {
        this.charsetInfo = charsetInfo;
    }

    /**
     * Retrieve the character set descriptor used to translate byte arrays to
     * or from Strings.
     *
     * @return the current <code>CharsetInfo</code> descriptor
     */
    CharsetInfo getCharsetInfo() {
        return charsetInfo;
    }

    /**
     * Retrieve the character set name used to translate byte arrays to
     * or from Strings.
     *
     * @return the character set name as a <code>String</code>
     */
    String getCharset() {
        return charsetInfo.getCharset();
    }

    /**
     * Obtain an instance of a server request stream for this socket.
     *
     * @return the server request stream as a <code>RequestStream</code>
     */
    RequestStream getRequestStream() {
        synchronized (socketTable) {
            // Reuse the first free (null) slot in the table, or append.
            int id;
            for (id = 0; id < socketTable.size(); id++) {
                if (socketTable.get(id) == null) {
                    break;
                }
            }

            VirtualSocket vsock = new VirtualSocket(id);

            if (id >= socketTable.size()) {
                socketTable.add(vsock);
            } else {
                socketTable.set(id, vsock);
            }

            return new RequestStream(this, id);
        }
    }

    /**
     * Obtain an instance of a server response stream for this socket.
     * NB. getRequestStream() must be used first to obtain the RequestStream
     * needed as a parameter for this method.
     *
     * @param requestStream an existing server request stream object obtained
     *                      from this <code>SharedSocket</code>
     * @return the server response stream as a <code>ResponseStream</code>
     */
    ResponseStream getResponseStream(RequestStream requestStream) {
        return new ResponseStream(this, requestStream.getStreamId());
    }

    /**
     * Retrieve the TDS version that is active on the connection
     * supported by this socket.
     *
     * @return the TDS version as an <code>int</code>
     */
    int getTdsVersion() {
        return tdsVersion;
    }

    /**
     * Set the TDS version field.
     *
     * @param tdsVersion the TDS version as an <code>int</code>
     */
    protected void setTdsVersion(int tdsVersion) {
        this.tdsVersion = tdsVersion;
    }

    /**
     * Retrieve the SQL Server type that is associated with the connection
     * supported by this socket.
     * <ol>
     * <li>Microsoft SQL Server.
     * <li>Sybase SQL Server.
     * </ol>
     * @return the SQL Server type as an <code>int</code>
     */
    int getServerType() {
        return serverType;
    }

    /**
     * Set the SQL Server type field.
     *
     * @param serverType the SQL Server type as an <code>int</code>
     */
    protected void setServerType(int serverType) {
        this.serverType = serverType;
    }

    /**
     * Set the global buffer memory limit for all instances of this driver.
     *
     * @param memoryBudget the global memory budget
     */
    static void setMemoryBudget(int memoryBudget) {
        SharedSocket.memoryBudget = memoryBudget;
    }

    /**
     * Get the global buffer memory limit for all instances of this driver.
     *
     * @return the memory limit as an <code>int</code>
     */
    static int getMemoryBudget() {
        return SharedSocket.memoryBudget;
    }

    /**
     * Set the minimum number of packets to cache in memory before
     * writing to disk.
     *
     * @param minMemPkts the minimum number of packets to cache
     */
    static void setMinMemPkts(int minMemPkts) {
        SharedSocket.minMemPkts = minMemPkts;
    }

    /**
     * Get the minimum number of memory cached packets.
     *
     * @return minimum memory packets as an <code>int</code>
     */
    static int getMinMemPkts() {
        return SharedSocket.minMemPkts;
    }

    /**
     * Get the connected status of this socket.
     *
     * @return <code>true</code> if the underlying socket is connected
     */
    boolean isConnected() {
        return this.socket != null;
    }

    /**
     * Send a TDS cancel packet to the server.
     *
     * @param streamId the <code>RequestStream</code> id
     */
    void cancel(int streamId) {
        //
        // Only send if response pending for the caller.
        // Caller must have acquired connection mutex first.
        // NB. This method will not work with local named pipes
        // as this thread will be blocked in the write until the
        // reading thread has returned from the read.
        //
        if (responseOwner == streamId) {
            try {
                //
                // Send a cancel packet.
                //
                byte[] cancel = new byte[8];
                cancel[0] = TdsCore.CANCEL_PKT;
                cancel[1] = 1;
                cancel[2] = 0;
                cancel[3] = 8;
                cancel[4] = 0;
                cancel[5] = 0;
                // TDS 7.0+ sets this header byte to 1.
                cancel[6] = (getTdsVersion() >= Driver.TDS70) ? (byte) 1 : 0;
                cancel[7] = 0;
                getOut().write(cancel, 0, 8);
                getOut().flush();

                if (Logger.isActive()) {
                    Logger.logPacket(streamId, false, cancel);
                }
            } catch (IOException e) {
                // Ignore error as network is probably dead anyway
            }
        }
    }

    /**
     * Close the socket and release all resources.
     *
     * @throws IOException if the socket close fails
     */
    void close() throws IOException {
        if (Logger.isActive()) {
            Logger.println("TdsSocket: Max buffer memory used = "
                    + (peakMemUsage / 1024) + "KB");
        }

        synchronized (socketTable) {
            // See if any temporary files need deleting
            for (int i = 0; i < socketTable.size(); i++) {
                VirtualSocket vsock = (VirtualSocket) socketTable.get(i);

                if (vsock != null && vsock.diskQueue != null) {
                    try {
                        vsock.diskQueue.close();
                        vsock.queueFile.delete();
                    } catch (IOException ioe) {
                        // Ignore errors
                    }
                }
            }
            // Close physical socket
            if (socket != null) {
                socket.close();
            }
        }
    }

    /**
     * Force close the socket causing any pending reads/writes to fail.
     * <p>
     * Used by the login timer to abort a login attempt.
     */
    void forceClose() {
        if (socket != null) {
            try {
                socket.close();
            } catch (IOException ioe) {
                // Ignore
            } finally {
                socket = null;
            }
        }
    }

    /**
     * Deallocate a stream linked to this socket.
     *
     * @param streamId the <code>ResponseStream</code> id
     */
    void closeStream(int streamId) {
        synchronized (socketTable) {
            VirtualSocket vsock = lookup(streamId);

            if (vsock.diskQueue != null) {
                // Delete the stream's backing temp file along with its state.
                try {
                    vsock.diskQueue.close();
                    vsock.queueFile.delete();
                } catch (IOException ioe) {
                    // Ignore errors
                }
            }

            // Free the slot for reuse by getRequestStream().
            socketTable.set(streamId, null);
        }
    }

    /**
     * Send a network packet. If output for another virtual socket is
     * in progress this packet will be sent later.
     *
     * @param streamId the originating <code>RequestStream</code> object
     * @param buffer   the data to send
     * @return the same buffer received if emptied or another buffer w/ the
     *         same size if the incoming buffer is cached (to avoid copying)
     * @throws IOException if an I/O error occurs
     */
    byte[] sendNetPacket(int streamId, byte buffer[]) throws IOException {
        synchronized (socketTable) {
            VirtualSocket vsock = lookup(streamId);

            while (vsock.inputPkts > 0) {
                //
                // There is unread data in the input buffers.
                // As we are sending another packet we can just discard it now.
                //
                if (Logger.isActive()) {
                    Logger.println("TdsSocket: Unread data in input packet queue");
                }

                dequeueInput(vsock);
            }

            if (responseOwner != -1) {
                //
                // Complex case there is another stream's data in the network pipe
                // or we had our own incomplete request to discard first
                // Read and store other stream's data or flush our own.
                //
                VirtualSocket other = (VirtualSocket)socketTable.get(responseOwner);
                byte[] tmpBuf = null;
                boolean ourData = (other.owner == streamId);

                do {
                    // Reuse the buffer if it's our data; we don't need it
                    tmpBuf = readPacket(ourData ? tmpBuf : null);

                    if (!ourData) {
                        // We need to save this input as it belongs to
                        // another thread.
                        enqueueInput(other, tmpBuf);
                    }
                    // Any of our input is discarded.
                } while (tmpBuf[1] == 0); // Read all data to complete TDS packet

                responseOwner = -1;
            }

            //
            // At this point we know that we are able to send the first
            // or subsequent packet of a new request.
            //
            getOut().write(buffer, 0, getPktLen(buffer, 2));

            if (buffer[1] != 0) {
                // Last packet of the request: flush it and take ownership
                // of the coming response.
                getOut().flush();
                // We are the response owner now
                responseOwner = streamId;
            }

            return buffer;
        }
    }

    /**
     * Get a network packet. This may be read from the network directly or from
     * previously cached buffers.
     *
     * @param streamId the originating ResponseStream object
     * @param buffer   the data buffer to receive the object (may be replaced)
     * @return the data in a <code>byte[]</code> buffer
     * @throws IOException if an I/O error occurs, or if this stream has no
     *         pending response to read
     */
    byte[] getNetPacket(int streamId, byte buffer[]) throws IOException {
        synchronized (socketTable) {
            VirtualSocket vsock = lookup(streamId);

            //
            // Return any cached input
            //
            if (vsock.inputPkts > 0) {
                return dequeueInput(vsock);
            }

            //
            // Nothing cached see if we are expecting network data
            //
            if (responseOwner == -1) {
                throw new IOException("Stream " + streamId +
                        " attempting to read when no request has been sent");
            }

            //
            // OK There should be data, check that it is for this stream
            //
            if (responseOwner != streamId) {
                // Error we are trying to read another thread's request.
                throw new IOException("Stream " + streamId +
                        " is trying to read data that belongs to stream " +
                        responseOwner);
            }

            //
            // Simple case we are reading our input directly from the server
            //
            buffer = readPacket(buffer);

            if (buffer[1] != 0) {
                // End of response connection now free
                responseOwner = -1;
            }

            return buffer;
        }
    }

    /**
     * Save a packet buffer in a memory queue or to a disk queue if the global
     * memory limit for the driver has been exceeded.
     *
     * @param vsock  the virtual socket owning this data
     * @param buffer the data to queue
     */
    private void enqueueInput(VirtualSocket vsock, byte[] buffer)
            throws IOException {
        //
        // Check to see if we should start caching to disk
        //
        if (globalMemUsage + buffer.length > memoryBudget
                && vsock.pktQueue.size() >= minMemPkts
                && !securityViolation
                && vsock.diskQueue == null) {
            // Try to create a disk file for the queue
            try {
                vsock.queueFile = File.createTempFile("jtds", ".tmp");
                vsock.queueFile.deleteOnExit();
                vsock.diskQueue = new RandomAccessFile(vsock.queueFile, "rw");

                // Write current cache contents to disk and free memory
                // NOTE(review): globalMemUsage is not decremented for the
                // buffers spilled to disk here, although dequeueInput() only
                // decrements it for memory-queued buffers — the accounting
                // appears to stay inflated. Confirm before changing.
                byte[] tmpBuf;

                while (vsock.pktQueue.size() > 0) {
                    tmpBuf = (byte[]) vsock.pktQueue.removeFirst();
                    vsock.diskQueue.write(tmpBuf, 0, getPktLen(tmpBuf, 2));
                    vsock.pktsOnDisk++;
                }
            } catch (java.lang.SecurityException se) {
                // Not allowed to cache to disk so carry on in memory
                securityViolation = true;
                vsock.queueFile = null;
                vsock.diskQueue = null;
            }
        }

        if (vsock.diskQueue != null) {
            // Cache file exists so append buffer to it
            vsock.diskQueue.write(buffer, 0, getPktLen(buffer, 2));
            vsock.pktsOnDisk++;
        } else {
            // Will cache in memory
            vsock.pktQueue.addLast(buffer);
            globalMemUsage += buffer.length;

            if (globalMemUsage > peakMemUsage) {
                peakMemUsage = globalMemUsage;
            }
        }

        vsock.inputPkts++;

        return;
    }

    /**
     * Read a cached packet from the in memory queue or from a disk based queue.
     *
     * @param vsock the virtual socket owning this data
     * @return a buffer containing the packet
     */
    private byte[] dequeueInput(VirtualSocket vsock) throws IOException {
        byte[] buffer = null;

        if (vsock.pktsOnDisk > 0) {
            // Data is cached on disk
            if (vsock.diskQueue.getFilePointer() == vsock.diskQueue.length()) {
                // First read so rewind() file
                vsock.diskQueue.seek(0L);
            }

            vsock.diskQueue.readFully(hdrBuf, 0, 8);

            int len = getPktLen(hdrBuf, 2);

            buffer = new byte[len];
            System.arraycopy(hdrBuf, 0, buffer, 0, 8);
            vsock.diskQueue.readFully(buffer, 8, len - 8);
            vsock.pktsOnDisk--;

            if (vsock.pktsOnDisk < 1) {
                // File now empty so close and delete it
                try {
                    vsock.diskQueue.close();
                    vsock.queueFile.delete();
                } finally {
                    vsock.queueFile = null;
                    vsock.diskQueue = null;
                }
            }
        } else if (vsock.pktQueue.size() > 0) {
            // Data is cached in memory; release its memory accounting.
            buffer = (byte[]) vsock.pktQueue.removeFirst();
            globalMemUsage -= buffer.length;
        }

        if (buffer != null) {
            vsock.inputPkts--;
        }

        return buffer;
    }

    /**
     * Read a physical TDS packet from the network.
     *
     * @param buffer a buffer to read the data into (if it fits) or null
     * @return either the incoming buffer if it was large enough or a newly
     *         allocated buffer with the read packet
     */
    private byte[] readPacket(byte buffer[]) throws IOException {
        do {
            //
            // Read rest of header
            try {
                getIn().readFully(hdrBuf);
            } catch (EOFException e) {
                throw new IOException("DB server closed connection.");
            }

            byte packetType = hdrBuf[0];

            if (packetType != TdsCore.LOGIN_PKT
                    && packetType != TdsCore.QUERY_PKT
                    && packetType != TdsCore.REPLY_PKT) {
                throw new IOException("Unknown packet type 0x"
                        + Integer.toHexString(packetType));
            }

            // figure out how many bytes are remaining in this packet.
            int len = getPktLen(hdrBuf, 2);

            if (len < 8 || len > 65536) {
                throw new IOException("Invalid network packet length " + len);
            }

            if (buffer == null || len > buffer.length) {
                // Create or expand the buffer as required
                buffer = new byte[len];

                if (len > maxBufSize) {
                    maxBufSize = len;
                }
            }

            // Preserve the packet header in the buffer
            System.arraycopy(hdrBuf, 0, buffer, 0, 8);

            try {
                getIn().readFully(buffer, 8, len - 8);
            } catch (EOFException e) {
                throw new IOException("DB server closed connection.");
            }

            //
            // SQL Server 2000 < SP3 does not set the last packet
            // flag in the NT challenge packet.
            // If this is the first packet and the length is correct
            // force the last packet flag on.
            //
            if (++packetCount == 1
                    && serverType == Driver.SQLSERVER
                    && "NTLMSSP".equals(new String(buffer, 11, 7))) {
                buffer[1] = 1;
            }
        } while (isCancelAck(buffer));
        // Discard stray cancel packets

        return buffer;
    }

    /**
     * Identify isolated cancel packets so that we can count them.
     *
     * @param buffer the packet to check whether it's a cancel ACK or not
     * @return <code>true</code> if the packet is a lone cancel acknowledgement
     */
    private boolean isCancelAck(byte[] buffer) {
        if (buffer[1] == 0) {
            return false; // Not complete TDS packet
        }

        if (getPktLen(buffer, 2) != 17) {
            return false; // Too short to contain cancel or has other stuff
        }

        if (buffer[8] != TDS_DONE_TOKEN
                || (buffer[9] & TdsCore.DONE_CANCEL) == 0) {
            return false; // Not a cancel packet
        }

        if (Logger.isActive()) {
            Logger.println("TdsSocket: Cancel packet read");
        }

        return true;
    }

    /**
     * Retrieves the virtual socket with the given id.
* * @param streamId id of the virtual socket to retrieve */ private VirtualSocket lookup(int streamId) { if (streamId < 0 || streamId > socketTable.size()) { throw new IllegalArgumentException("Invalid parameter stream ID " + streamId); } VirtualSocket vsock = (VirtualSocket)socketTable.get(streamId); if (vsock.owner != streamId) { throw new IllegalStateException("Internal error: bad stream ID " + streamId); } return vsock; } /** * Convert two bytes (in network byte order) in a byte array into a Java * short integer. * * @param buf array of data * @param offset index into the buf array where the short integer is * stored * @return the 16 bit unsigned value as an <code>int</code> */ static int getPktLen(byte buf[], int offset) { int lo = ((int) buf[offset + 1] & 0xff); int hi = (((int) buf[offset] & 0xff) << 8); return hi | lo; } /** * Set the socket timeout. * * @param timeout the timeout value in milliseconds */ protected void setTimeout(int timeout) throws SocketException { socket.setSoTimeout(timeout); } /** * Getter for {@link SharedSocket#in} field. * * @return {@link InputStream} used for communication. */ protected DataInputStream getIn() { return in; } /** * Setter for {@link SharedSocket#in} field. * * @param in The {@link InputStream} to be used for communication. */ protected void setIn(DataInputStream in) { this.in = in; } /** * Getter for {@link SharedSocket#out} field. * * @return {@link OutputStream} used for communication. */ protected DataOutputStream getOut() { return out; } /** * Setter for {@link SharedSocket#out} field. * * @param out The {@link OutputStream} to be used for communication. */ protected void setOut(DataOutputStream out) { this.out = out; } /** * Returns the socket factory appropriate for the security settings. 
* * @param ssl the security setting * @param instance the DB instance name; needed if SSL is on * @return a <code>SocketFactory</code> instance */ private static Socket createSocket(String host, int port, String ssl, String instance) throws UnknownHostException, IOException { if (ssl.equals(Ssl.SSL_OFF)){ return new Socket(host, port); } else { return SocketFactories.getSocketFactory(ssl, instance) .createSocket(host, port); } } }
package cmput301f17t01.bronzify; import android.drm.DrmStore; import android.support.test.espresso.Espresso; import android.support.test.espresso.contrib.DrawerActions; import android.support.test.espresso.contrib.NavigationViewActions; import android.support.test.espresso.contrib.PickerActions; import android.support.test.filters.LargeTest; import android.support.test.rule.ActivityTestRule; import android.support.test.runner.AndroidJUnit4; import android.support.v7.app.ActionBarDrawerToggle; import android.util.Log; import android.widget.DatePicker; import android.widget.EditText; import android.widget.TextView; import com.google.firebase.database.ThrowOnExtraProperties; import org.hamcrest.Matchers; import org.junit.FixMethodOrder; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.MethodSorters; import java.util.Calendar; import java.util.Date; import cmput301f17t01.bronzify.activities.LoginActivity; import cmput301f17t01.bronzify.activities.MyHomeActivity; import static android.support.test.espresso.Espresso.onData; import static android.support.test.espresso.Espresso.onView; import static android.support.test.espresso.action.ViewActions.clearText; import static android.support.test.espresso.action.ViewActions.click; import static android.support.test.espresso.action.ViewActions.closeSoftKeyboard; import static android.support.test.espresso.action.ViewActions.replaceText; import static android.support.test.espresso.action.ViewActions.typeText; import static android.support.test.espresso.assertion.ViewAssertions.matches; import static android.support.test.espresso.matcher.RootMatchers.isDialog; import static android.support.test.espresso.matcher.ViewMatchers.hasSibling; import static android.support.test.espresso.matcher.ViewMatchers.isClickable; import static android.support.test.espresso.matcher.ViewMatchers.isDisplayed; import static android.support.test.espresso.matcher.ViewMatchers.isSelected; import 
static android.support.test.espresso.matcher.ViewMatchers.withClassName;
import static android.support.test.espresso.matcher.ViewMatchers.withContentDescription;
import static android.support.test.espresso.matcher.ViewMatchers.withId;
import static android.support.test.espresso.matcher.ViewMatchers.withInputType;
import static android.support.test.espresso.matcher.ViewMatchers.withText;
import static android.text.InputType.TYPE_CLASS_TEXT;
import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.anything;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.core.StringEndsWith.endsWith;
import static org.hamcrest.core.StringStartsWith.startsWith;

/**
 * Created by noahkryzanowski on 2017-12-03.
 *
 * End-to-end Espresso UI tests for the Bronzify app. Test methods are run in
 * name order (NAME_ASCENDING) because later tests depend on state created by
 * earlier ones (users, habit types).
 */
@RunWith(AndroidJUnit4.class)
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@LargeTest
public class EspressoTesting {

    // Launches LoginActivity before each test.
    @Rule
    public ActivityTestRule<LoginActivity> mActivityRule = new ActivityTestRule(LoginActivity.class);

    /**
     * This test case will create the users with the names "test_user_001",
     * "follow_user_001" and "follow_user_002" (doc fix: the comment previously
     * listed "test_user_001" twice; the code below creates these three users).
     * It is commented out, because the users are already created.
     * It can easily be run to create the users in a single step.
     *
     */
    @Test
    public void test0CreateUsers() {
        //Create user "test_user_001"
        onView(withId(R.id.enter_id)).perform(typeText("test_user_001"));
        onView(withId(R.id.enter_id)).check(matches(withText("test_user_001")));
        onView(withId(R.id.login_button)).perform(closeSoftKeyboard());
        onView(withId(R.id.register_button)).perform(click());
        onView(withId(R.id.enter_id)).perform(clearText());

        //Create user "follow_user_001"
        onView(withId(R.id.enter_id)).perform(typeText("follow_user_001"));
        onView(withId(R.id.enter_id)).check(matches(withText("follow_user_001")));
        onView(withId(R.id.login_button)).perform(closeSoftKeyboard());
        onView(withId(R.id.register_button)).perform(click());
        onView(withId(R.id.enter_id)).perform(clearText());

        //Create user "follow_user_002"
        onView(withId(R.id.enter_id)).perform(typeText("follow_user_002"));
        onView(withId(R.id.enter_id)).check(matches(withText("follow_user_002")));
        onView(withId(R.id.login_button)).perform(closeSoftKeyboard());
        onView(withId(R.id.register_button)).perform(click());
    }

    /**
     * This test case will create a new habit type, with the name Running. It will
     * log in and log out, and ensure that the data entered is correct
     *
     */
    @Test
    public void test1CreateNewHabitType() {
        //Login
        onView(withId(R.id.enter_id)).perform(typeText("test_user_001"));
        onView(withId(R.id.enter_id)).check(matches(withText("test_user_001")));
        onView(withId(R.id.login_button)).perform(closeSoftKeyboard());
        onView(withId(R.id.login_button)).perform(click());
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.MyHabits));

        //Create a new habit named "Running", starting 2017-12-05,
        //recurring Monday/Wednesday/Friday.
        onView(withId(R.id.createNewHabit)).perform(click());
        onView(withId(R.id.textHabitName)).perform(typeText("Running"));
        onView(withId(R.id.textHabitName)).check(matches(withText("Running")));
        onView(withId(R.id.textHabitReason)).perform(typeText("Get fit"));
        onView(withId(R.id.textHabitReason)).check(matches(withText("Get fit")));
        onView(withId(R.id.buttonSelectDate)).perform(click());
        onView(withClassName(Matchers.equalTo(DatePicker.class.getName()))).perform(PickerActions.setDate(2017, 12, 5));
        onView(withText("OK")).perform(click());
        onView(withId(R.id.buttonMonday)).perform(click());
        onView(withId(R.id.buttonWednesday)).perform(click());
        onView(withId(R.id.buttonFriday)).perform(click());
        onView(withId(R.id.buttonCreate)).perform(closeSoftKeyboard());
        onView(withId(R.id.buttonCreate)).perform(click());

        //Logout
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.LogOut));
    }

    /**
     * This test case will edit the habit type that was created in test1. It will change the name
     * of the habit from "Running" to "Skating", as well as removing Wednesday from the recurring
     * days every week.
     *
     */
    @Test
    public void test2EditHabitType() {
        //Login
        onView(withId(R.id.enter_id)).perform(typeText("test_user_001"));
        onView(withId(R.id.enter_id)).check(matches(withText("test_user_001")));
        onView(withId(R.id.login_button)).perform(closeSoftKeyboard());
        onView(withId(R.id.login_button)).perform(click());
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.MyHabits));

        //Click on the correct habit in the list (created as "Running" in test1)
        onView(withId(R.id.habitTypeRow)).check(matches(withText(containsString("Running"))));
        onView(withId(R.id.habitTypeRow)).perform(click());

        //Edit the parameters of the habit: rename to "Skating" and
        //toggle Wednesday off.
        onView(withId(R.id.buttonEdit)).perform(click());
        //onData(withId(R.id.textHabitName)).atPosition(0).perform(click());
        onView(withId(R.id.textHabitName)).perform(replaceText("Skating"));
        //onView(withText("Running")).perform(click());
        //onView(withText("Running")).perform(replaceText("Skating"));
        onView(withId(R.id.buttonEdit)).perform(closeSoftKeyboard());
        onView(withId(R.id.buttonWednesday)).perform(click());
        onView(withId(R.id.buttonEdit)).perform(click());

        //Logout
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.LogOut));
    }

    /**
     * This test case will delete the habit type that was created in test2. The habit event skating
     * will no longer exist after it has been run.
     *
     */
    @Test
    public void test3DeleteHabitType() {
        //Login
        onView(withId(R.id.enter_id)).perform(typeText("test_user_001"));
        onView(withId(R.id.enter_id)).check(matches(withText("test_user_001")));
        onView(withId(R.id.login_button)).perform(closeSoftKeyboard());
        onView(withId(R.id.login_button)).perform(click());
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.MyHabits));

        //Click on the correct habit in the list (renamed to "Skating" in test2)
        onView(withId(R.id.habitTypeRow)).check(matches(withText(containsString("Skating"))));
        onView(withId(R.id.habitTypeRow)).perform(click());

        //Delete the habit
        onView(withId(R.id.buttonEdit)).perform(click());
        onView(withId(R.id.buttonDelete)).perform(click());

        //Logout
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.LogOut));
    }

    /**
     * This test case will log in as "follow_user_001" and will follow the user "test_user_001".
     * Then "test_user_001" will log into their account to accept the follow request.
     *
     */
    @Test
    public void test4FollowUser() {
        //Login with the first user
        onView(withId(R.id.enter_id)).perform(typeText("follow_user_001"));
        onView(withId(R.id.enter_id)).check(matches(withText("follow_user_001")));
        onView(withId(R.id.login_button)).perform(closeSoftKeyboard());
        onView(withId(R.id.register_button)).perform(click());
        onView(withId(R.id.login_button)).perform(click());
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.MyProfile));
        onView(withId(R.id.followButton)).perform(click());
        // NOTE(review): the javadoc above says "test_user_001" is followed, but
        // the code enters "followed_user_002", which no earlier test creates
        // (test0 creates "follow_user_002") — confirm which id is intended.
        onView(allOf(withClassName(endsWith("EditText")))).perform(typeText("followed_user_002"));
        onView(withText("OK")).perform(click());

        //Logout from the first users account
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.LogOut));

        //Login with the second user (registered on the fly here)
        //onView(withId(R.id.enter_id)).perform(typeText("followed_user_002"));
        //onView(withId(R.id.enter_id)).check(matches(withText("followed_user_002")));
        onView(withId(R.id.enter_id)).perform(typeText("followed_user_002"));
        onView(withId(R.id.enter_id)).check(matches(withText("followed_user_002")));
        onView(withId(R.id.login_button)).perform(closeSoftKeyboard());
        onView(withId(R.id.register_button)).perform(click());
        onView(withId(R.id.login_button)).perform(click());
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.MyProfile));
        //onData(allOf(withText("Tommy"), hasSibling(withText("Tommy")))).atPosition(0).perform(click());
        //onData(allOf(withId(R.id.acceptFollow), hasSibling(withText("Tommy")))).atPosition(0).perform(click());
        //onData(allOf(withId(R.id.acceptFollow), hasSibling(withText("1")), hasSibling(withId(R.id.followReqRecycler)))).perform(click());
        onView(withId(R.id.acceptFollow)).perform(click());
        //onData(withText("Tommy")).perform(click());
        //onView(withId(R.id.habitTypeRow)).perform(click());
    }

    /**
     * This test case will add two new events to the user "test_user_002" and demonstrate that
     * all the events that will occur today will display on the MyHome tab. The first event will
     * not be completed and the second event will be completed.
     *
     */
    @Test
    public void test5Today() {
        //Login (register_button first, so the user is created if missing)
        onView(withId(R.id.enter_id)).perform(typeText("test_user_002"));
        onView(withId(R.id.enter_id)).check(matches(withText("test_user_002")));
        onView(withId(R.id.login_button)).perform(closeSoftKeyboard());
        onView(withId(R.id.register_button)).perform(click());
        onView(withId(R.id.login_button)).perform(click());
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.MyHabits));

        //Create the first habit
        onView(withId(R.id.createNewHabit)).perform(click());
        onView(withId(R.id.textHabitName)).perform(typeText("Rowing"));
        onView(withId(R.id.textHabitName)).check(matches(withText("Rowing")));
        onView(withId(R.id.textHabitReason)).perform(typeText("Get fit"));
        onView(withId(R.id.textHabitReason)).check(matches(withText("Get fit")));
        onView(withId(R.id.buttonSelectDate)).perform(click());
        // NOTE(review): year and month are hard-coded to 2017-12; only the day
        // comes from the device clock, so this test is date-brittle.
        Calendar cal = Calendar.getInstance();
        int day = cal.get(Calendar.DAY_OF_MONTH);
        onView(withClassName(Matchers.equalTo(DatePicker.class.getName()))).perform(PickerActions.setDate(2017, 12, day));
        onView(withText("OK")).perform(click());
        onView(withId(R.id.buttonMonday)).perform(click());
        onView(withId(R.id.buttonCreate)).perform(closeSoftKeyboard());
        onView(withId(R.id.buttonCreate)).perform(click());

        //Create the second habit
        onView(withId(R.id.createNewHabit)).perform(click());
        onView(withId(R.id.textHabitName)).perform(typeText("Swimming"));
        onView(withId(R.id.textHabitName)).check(matches(withText("Swimming")));
        onView(withId(R.id.textHabitReason)).perform(typeText("Too cold outside"));
        onView(withId(R.id.textHabitReason)).check(matches(withText("Too cold outside")));
        onView(withId(R.id.buttonSelectDate)).perform(click());
        onView(withClassName(Matchers.equalTo(DatePicker.class.getName()))).perform(PickerActions.setDate(2017, 12, day));
        onView(withText("OK")).perform(click());
        onView(withId(R.id.buttonMonday)).perform(click());
        onView(withId(R.id.buttonCreate)).perform(closeSoftKeyboard());
        onView(withId(R.id.buttonCreate)).perform(click());

        //Change to MyHome
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.MyHome));

        //Logout
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.LogOut));
    }

    /**
     * This test case will show all of the past events that the user has had, completed or not.
     *
     */
    @Test
    public void test6HabitHistory() {
        //Login
        onView(withId(R.id.enter_id)).perform(typeText("test_user_001"));
        onView(withId(R.id.enter_id)).check(matches(withText("test_user_001")));
        onView(withId(R.id.login_button)).perform(closeSoftKeyboard());
        onView(withId(R.id.login_button)).perform(click());
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.MyHistory));

        //Logout
        onView(withId(R.id.drawer_layout)).perform(DrawerActions.open());
        onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.LogOut));
    }

    /**
     * This test case will add and delete a user, validating that the user account is
     * successfully deleted.
* */ @Test public void test7AddDeleteUser() { //Login onView(withId(R.id.enter_id)).perform(typeText("test_user_003")); onView(withId(R.id.enter_id)).check(matches(withText("test_user_003"))); onView(withId(R.id.register_button)).perform(closeSoftKeyboard()); onView(withId(R.id.register_button)).perform(click()); onView(withId(R.id.login_button)).perform(click()); onView(withId(R.id.drawer_layout)).perform(DrawerActions.open()); onView(withId(R.id.nav_view)).perform(NavigationViewActions.navigateTo(R.id.MyProfile)); onView(withId(R.id.deleteButton)).perform(click()); onView(withText("OK")).perform(click()); onView(withId(R.id.enter_id)).perform(typeText("test_user_003")); onView(withId(R.id.enter_id)).check(matches(withText("test_user_003"))); onView(withId(R.id.login_button)).perform(closeSoftKeyboard()); onView(withId(R.id.login_button)).perform(click()); onView(withClassName(endsWith("TextView"))).check(matches(withText("Invalid User ID"))); } }
package com.redhat.ceylon.compiler.java.runtime.metamodel;

import java.util.List;

import ceylon.language.Anything;
import ceylon.language.Iterator;
import ceylon.language.Sequential;
import ceylon.language.empty_;
import ceylon.language.finished_;
import ceylon.language.meta.declaration.FunctionDeclaration$impl;
import ceylon.language.meta.declaration.OpenType;

import com.redhat.ceylon.compiler.java.Util;
import com.redhat.ceylon.compiler.java.metadata.Ceylon;
import com.redhat.ceylon.compiler.java.metadata.Defaulted;
import com.redhat.ceylon.compiler.java.metadata.Ignore;
import com.redhat.ceylon.compiler.java.metadata.Name;
import com.redhat.ceylon.compiler.java.metadata.Sequenced;
import com.redhat.ceylon.compiler.java.metadata.TypeInfo;
import com.redhat.ceylon.compiler.java.metadata.TypeParameter;
import com.redhat.ceylon.compiler.java.metadata.TypeParameters;
import com.redhat.ceylon.compiler.java.metadata.Variance;
import com.redhat.ceylon.compiler.java.runtime.model.TypeDescriptor;
import com.redhat.ceylon.compiler.typechecker.model.Functional;
import com.redhat.ceylon.compiler.typechecker.model.Method;
import com.redhat.ceylon.compiler.typechecker.model.Parameter;
import com.redhat.ceylon.compiler.typechecker.model.ParameterList;
import com.redhat.ceylon.compiler.typechecker.model.ProducedType;
import com.redhat.ceylon.compiler.typechecker.model.ProducedTypedReference;
import com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration;

/**
 * Runtime metamodel implementation of {@code FunctionDeclaration}: wraps a
 * typechecker {@code TypedDeclaration} describing a function and exposes its
 * type parameters, parameter declarations, open return type, and the
 * {@code apply}/{@code memberApply}/{@code invoke}/{@code memberInvoke}
 * operations that reify it into an applicable function or method model.
 */
@Ceylon(major = 7)
@com.redhat.ceylon.compiler.java.metadata.Class
public class FreeFunction extends FreeFunctionOrValue
        implements ceylon.language.meta.declaration.FunctionDeclaration, AnnotationBearing {

    @Ignore
    public static final TypeDescriptor $TypeDescriptor$ = TypeDescriptor.klass(FreeFunction.class);

    // Eagerly-built wrappers around the underlying typechecker model
    // (see FIXMEs in the constructor about making these lazy).
    private Sequential<? extends ceylon.language.meta.declaration.TypeParameter> typeParameters;
    // Open (unapplied) return type of the declaration.
    private OpenType type;
    // Declarations of the first (and only considered) parameter list.
    private Sequential<? extends ceylon.language.meta.declaration.FunctionOrValueDeclaration> parameterList;

    /**
     * Builds the metamodel wrapper for the given function declaration,
     * materialising its type-parameter and parameter-declaration sequences.
     */
    public FreeFunction(com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration declaration) {
        super(declaration);
        // FIXME: make lazy
        // FIXME: share with ClassOrInterface
        List<com.redhat.ceylon.compiler.typechecker.model.TypeParameter> typeParameters = ((Functional) declaration).getTypeParameters();
        ceylon.language.meta.declaration.TypeParameter[] typeParametersArray = new ceylon.language.meta.declaration.TypeParameter[typeParameters.size()];
        int i=0;
        for(com.redhat.ceylon.compiler.typechecker.model.TypeParameter tp : typeParameters){
            typeParametersArray[i++] = new com.redhat.ceylon.compiler.java.runtime.metamodel.FreeTypeParameter(tp);
        }
        this.typeParameters = Util.sequentialWrapper(ceylon.language.meta.declaration.TypeParameter.$TypeDescriptor$, typeParametersArray);
        this.type = Metamodel.getMetamodel(declaration.getType());
        List<ParameterList> parameterLists = ((Functional)declaration).getParameterLists();
        // Only the first parameter list is exposed through the metamodel here.
        ParameterList parameterList = parameterLists.get(0);
        List<Parameter> modelParameters = parameterList.getParameters();
        ceylon.language.meta.declaration.FunctionOrValueDeclaration[] parameters = new ceylon.language.meta.declaration.FunctionOrValueDeclaration[modelParameters.size()];
        i=0;
        for(Parameter modelParameter : modelParameters){
            parameters[i] = (ceylon.language.meta.declaration.FunctionOrValueDeclaration)Metamodel.getOrCreateMetamodel(modelParameter.getModel());
            i++;
        }
        this.parameterList = Util.sequentialWrapper(ceylon.language.meta.declaration.FunctionOrValueDeclaration.$TypeDescriptor$, parameters);
    }

    // Ceylon interface $impl accessor; unused at runtime, hence null.
    @Override
    @Ignore
    public FunctionDeclaration$impl $ceylon$language$meta$declaration$FunctionDeclaration$impl() {
        return null;
    }

    /** Returns the declarations of this function's (first) parameter list. */
    @Override
    @TypeInfo("ceylon.language::Sequential<ceylon.language.meta.declaration::FunctionOrValueDeclaration>")
    public Sequential<? extends ceylon.language.meta.declaration.FunctionOrValueDeclaration> getParameterDeclarations(){
        return parameterList;
    }

    /**
     * Finds a parameter declaration by name via linear search;
     * returns null when no parameter has the given name.
     */
    @Override
    @TypeInfo("ceylon.language.meta.declaration::FunctionOrValueDeclaration|ceylon.language::Null")
    public ceylon.language.meta.declaration.FunctionOrValueDeclaration getParameterDeclaration(@Name("name") String name){
        // Ceylon iterators signal exhaustion with the `finished` singleton.
        Iterator<?> iterator = parameterList.iterator();
        Object o;
        while((o = iterator.next()) != finished_.get_()){
            ceylon.language.meta.declaration.FunctionOrValueDeclaration pd = (ceylon.language.meta.declaration.FunctionOrValueDeclaration) o;
            if(pd.getName().equals(name))
                return pd;
        }
        return null;
    }

    /** Returns this function's type parameter declarations. */
    @Override
    @TypeInfo("ceylon.language::Sequential<ceylon.language.meta.declaration::TypeParameter>")
    public Sequential<? extends ceylon.language.meta.declaration.TypeParameter> getTypeParameterDeclarations() {
        return typeParameters;
    }

    /**
     * Finds a type parameter declaration by name via linear search;
     * returns null when no type parameter has the given name.
     */
    @Override
    @TypeInfo("ceylon.language.meta.declaration::TypeParameter|ceylon.language::Null")
    public ceylon.language.meta.declaration.TypeParameter getTypeParameterDeclaration(@Name("name") String name) {
        Iterator<? extends ceylon.language.meta.declaration.TypeParameter> iterator = typeParameters.iterator();
        Object it;
        while((it = iterator.next()) != finished_.get_()){
            ceylon.language.meta.declaration.TypeParameter tp = (ceylon.language.meta.declaration.TypeParameter) it;
            if(tp.getName().equals(name))
                return tp;
        }
        return null;
    }

    // Convenience overload: apply with no explicit type arguments.
    @SuppressWarnings({ "unchecked", "rawtypes" })
    @Ignore
    @Override
    public <Return extends Object, Arguments extends Sequential<? extends Object>> ceylon.language.meta.model.Function<Return, Arguments> apply(TypeDescriptor $reifiedReturn, TypeDescriptor $reifiedArguments){
        return apply($reifiedReturn,$reifiedArguments,(Sequential)empty_.get_());
    }

    /**
     * Applies this toplevel function declaration with the given type arguments,
     * producing an invokable {@code Function} model.
     *
     * @throws ceylon.language.meta.model.TypeApplicationException if this is a
     *         member declaration (use {@code memberApply}) or, via the
     *         Metamodel checks, if the type arguments are invalid.
     */
    @Override
    @TypeInfo("ceylon.language.meta.model::Function<Return,Arguments>")
    @TypeParameters({
        @TypeParameter("Return"),
        @TypeParameter(value = "Arguments", satisfies = "ceylon.language::Sequential<ceylon.language::Anything>")
    })
    public <Return extends Object, Arguments extends Sequential<? extends Object>> ceylon.language.meta.model.Function<Return, Arguments> apply(
            @Ignore TypeDescriptor $reifiedReturn,
            @Ignore TypeDescriptor $reifiedArguments,
            @Name("typeArguments") @TypeInfo("ceylon.language::Sequential<ceylon.language.meta.model::Type<ceylon.language::Anything>>") @Sequenced Sequential<? extends ceylon.language.meta.model.Type<?>> typeArguments){
        if(!getToplevel())
            throw new ceylon.language.meta.model.TypeApplicationException("Cannot apply a member declaration with no container type: use memberApply");
        List<com.redhat.ceylon.compiler.typechecker.model.ProducedType> producedTypes = Metamodel.getProducedTypes(typeArguments);
        // Validate the type arguments against the declaration's constraints.
        Metamodel.checkTypeArguments(null, declaration, producedTypes);
        com.redhat.ceylon.compiler.typechecker.model.ProducedReference appliedFunction = declaration.getProducedReference(null, producedTypes);
        TypeDescriptor reifiedType = Metamodel.getTypeDescriptorForFunction(appliedFunction);
        TypeDescriptor reifiedArguments = Metamodel.getTypeDescriptorForArguments(declaration.getUnit(), (Functional) declaration, appliedFunction);
        // Check the caller-supplied reified Return/Arguments against the
        // actual applied signature (Return is covariant, Arguments contravariant).
        Metamodel.checkReifiedTypeArgument("apply", "Function<$1,$2>",
                Variance.OUT, declaration.getUnit().getCallableReturnType(appliedFunction.getFullType()), $reifiedReturn,
                Variance.IN, Metamodel.getProducedTypeForArguments(declaration.getUnit(), (Functional)declaration, appliedFunction), $reifiedArguments);
        return new AppliedFunction<Return,Arguments>(reifiedType, reifiedArguments, appliedFunction, this, null, null);
    }

    // Convenience overload: memberApply with no explicit type arguments.
    @SuppressWarnings({ "rawtypes", "unchecked" })
    @Ignore
    @Override
    public <Container, Return, Arguments extends Sequential<? extends Object>> ceylon.language.meta.model.Method<Container, Return, Arguments> memberApply(TypeDescriptor $reifiedContainer, TypeDescriptor $reifiedReturn, TypeDescriptor $reifiedArguments, ceylon.language.meta.model.Type<? extends Container> containerType){
        return this.<Container, Return, Arguments>memberApply($reifiedContainer, $reifiedReturn, $reifiedArguments, containerType, (Sequential)empty_.get_());
    }

    /**
     * Applies this member function declaration to a container type with the
     * given type arguments, producing a {@code Method} model.
     *
     * @throws ceylon.language.meta.model.TypeApplicationException if this is a
     *         toplevel declaration (use {@code apply}).
     */
    @TypeInfo("ceylon.language.meta.model::Method<Container,Return,Arguments>")
    @TypeParameters({
        @TypeParameter("Container"),
        @TypeParameter("Return"),
        @TypeParameter(value = "Arguments", satisfies = "ceylon.language::Sequential<ceylon.language::Anything>")
    })
    @Override
    public <Container, Return, Arguments extends Sequential<? extends Object>> ceylon.language.meta.model.Method<Container, Return, Arguments> memberApply(
            @Ignore TypeDescriptor $reifiedContainer,
            @Ignore TypeDescriptor $reifiedReturn,
            @Ignore TypeDescriptor $reifiedArguments,
            @Name("containerType") ceylon.language.meta.model.Type<? extends Container> containerType,
            @Name("typeArguments") @Sequenced Sequential<? extends ceylon.language.meta.model.Type<?>> typeArguments){
        if(getToplevel())
            throw new ceylon.language.meta.model.TypeApplicationException("Cannot apply a toplevel declaration to a container type: use apply");
        return getAppliedMethod($reifiedContainer, $reifiedReturn, $reifiedArguments, typeArguments, containerType);
    }

    /**
     * Shared implementation for {@code memberApply}: validates the qualifying
     * container type and type arguments, resolves the member against the
     * proper supertype of the container, and builds the applied method model.
     */
    <Container, Type, Arguments extends ceylon.language.Sequential<? extends Object>> ceylon.language.meta.model.Method<Container, Type, Arguments> getAppliedMethod(@Ignore TypeDescriptor $reifiedContainer, @Ignore TypeDescriptor $reifiedType, @Ignore TypeDescriptor $reifiedArguments, Sequential<? extends ceylon.language.meta.model.Type<?>> typeArguments, ceylon.language.meta.model.Type<? extends Object> container){
        List<com.redhat.ceylon.compiler.typechecker.model.ProducedType> producedTypes = Metamodel.getProducedTypes(typeArguments);
        ProducedType containerType = Metamodel.getModel(container);
        Metamodel.checkQualifyingType(containerType, declaration);
        Metamodel.checkTypeArguments(containerType, declaration, producedTypes);
        // find the proper qualifying type
        ProducedType memberQualifyingType = containerType.getSupertype((TypeDeclaration) declaration.getContainer());
        final ProducedTypedReference appliedFunction = ((com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration)declaration).getProducedTypedReference(memberQualifyingType, producedTypes);
        TypeDescriptor reifiedType = Metamodel.getTypeDescriptorForFunction(appliedFunction);
        TypeDescriptor reifiedArguments = Metamodel.getTypeDescriptorForArguments(declaration.getUnit(), (Functional) declaration, appliedFunction);
        TypeDescriptor reifiedContainer = Metamodel.getTypeDescriptorForProducedType(containerType);
        // Check caller-supplied reified types: Container and Arguments are
        // contravariant, the return Type is covariant.
        Metamodel.checkReifiedTypeArgument("memberApply", "Method<$1,$2,$3>",
                Variance.IN, containerType, $reifiedContainer,
                Variance.OUT, appliedFunction.getType(), $reifiedType,
                Variance.IN, Metamodel.getProducedTypeForArguments(declaration.getUnit(), (Functional)declaration, appliedFunction), $reifiedArguments);
        return new AppliedMethod<Container, Type, Arguments>(reifiedContainer, reifiedType, reifiedArguments, appliedFunction, this, container);
    }

    /** Returns the open (unapplied) return type of this declaration. */
    @Override
    @TypeInfo("ceylon.language.meta.declaration::OpenType")
    public OpenType getOpenType() {
        return type;
    }

    /** True if this declaration is an annotation constructor/function. */
    @Override
    public boolean getAnnotation(){
        return declaration.isAnnotation();
    }

    // Defaulted-argument helpers for invoke(): default type arguments = empty.
    @SuppressWarnings({ "unchecked", "rawtypes" })
    @Ignore
    @Override
    public ceylon.language.Sequential<? extends ceylon.language.meta.model.Type<?>> invoke$typeArguments(){
        return (ceylon.language.Sequential<? extends ceylon.language.meta.model.Type<?>>)(Sequential)empty_.get_();
    }

    @SuppressWarnings({ "unchecked", "rawtypes" })
    @Ignore
    @Override
    public java.lang.Object invoke(){
        return invoke((ceylon.language.Sequential<? extends ceylon.language.meta.model.Type<?>>)(Sequential)empty_.get_());
    }

    @Ignore
    @Override
    public java.lang.Object invoke(
            ceylon.language.Sequential<? extends ceylon.language.meta.model.Type<?>> typeArguments){
        return invoke(typeArguments, empty_.get_());
    }

    /**
     * Applies this toplevel function with the given type arguments and invokes
     * it with the given argument values, returning the result.
     */
    @TypeInfo("ceylon.language::Anything")
    @Override
    public java.lang.Object invoke(
            @Name("typeArguments") @Defaulted @TypeInfo("ceylon.language::Sequential<ceylon.language.meta.model::Type<ceylon.language::Anything>>") ceylon.language.Sequential<? extends ceylon.language.meta.model.Type<?>> typeArguments,
            @Name("arguments") @Sequenced @TypeInfo("ceylon.language::Sequential<ceylon.language::Anything>") ceylon.language.Sequential<?> arguments){
        return apply(Anything.$TypeDescriptor$, TypeDescriptor.NothingType, typeArguments).apply(arguments);
    }

    // Defaulted-argument helpers for memberInvoke(): default type arguments = empty.
    @SuppressWarnings({ "unchecked", "rawtypes" })
    @Ignore
    @Override
    public ceylon.language.Sequential<? extends ceylon.language.meta.model.Type<?>> memberInvoke$typeArguments(java.lang.Object container){
        return (ceylon.language.Sequential<? extends ceylon.language.meta.model.Type<?>>)(Sequential)empty_.get_();
    }

    @SuppressWarnings({ "unchecked", "rawtypes" })
    @Ignore
    @Override
    public java.lang.Object memberInvoke(java.lang.Object container){
        return memberInvoke(container, (ceylon.language.Sequential<? extends ceylon.language.meta.model.Type<?>>)(Sequential)empty_.get_());
    }

    @Ignore
    @Override
    public java.lang.Object memberInvoke(
            java.lang.Object container,
            ceylon.language.Sequential<? extends ceylon.language.meta.model.Type<?>> typeArguments){
        return memberInvoke(container, typeArguments, empty_.get_());
    }

    /**
     * Applies this member function to the runtime type of {@code container},
     * binds it to that instance and invokes it with the given arguments.
     */
    @SuppressWarnings("unchecked")
    @TypeInfo("ceylon.language::Anything")
    @Override
    public java.lang.Object memberInvoke(
            @Name("container") @TypeInfo("ceylon.language::Anything") java.lang.Object container,
            @Name("typeArguments") @Defaulted @TypeInfo("ceylon.language::Sequential<ceylon.language.meta.model::Type<ceylon.language::Anything>>") ceylon.language.Sequential<? extends ceylon.language.meta.model.Type<?>> typeArguments,
            @Name("arguments") @Sequenced @TypeInfo("ceylon.language::Sequential<ceylon.language::Anything>") ceylon.language.Sequential<?> arguments){
        // Recover the container's type model from its runtime type descriptor.
        ceylon.language.meta.model.Type<?> containerType = Metamodel.getAppliedMetamodel(Metamodel.getTypeDescriptor(container));
        return memberApply(TypeDescriptor.NothingType, Anything.$TypeDescriptor$, TypeDescriptor.NothingType,
                containerType, typeArguments).bind(container).apply(arguments);
    }

    @Override
    public int hashCode() {
        return Metamodel.hashCode(this, "function");
    }

    @Override
    public boolean equals(Object obj) {
        if(obj == null)
            return false;
        if(obj == this)
            return true;
        if(obj instanceof FreeFunction == false)
            return false;
        return Metamodel.equalsForSameType(this, (FreeFunction)obj);
    }

    @Override
    public String toString() {
        return "function "+super.toString();
    }

    @Ignore
    @Override
    public TypeDescriptor $getType$() {
        return $TypeDescriptor$;
    }

    /** Java annotations of the underlying method, for annotation queries. */
    @Ignore
    @Override
    public java.lang.annotation.Annotation[] $getJavaAnnotations$() {
        // FIXME: this could be a FunctionalParameter!
        return Metamodel.getJavaMethod((Method) declaration).getAnnotations();
    }
}
package br.ufsc.lehmann.msm.artigo.problems; import java.sql.Array; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics; import com.google.common.collect.Multimap; import com.google.common.collect.MultimapBuilder; import br.ufsc.core.trajectory.EqualsDistanceFunction; import br.ufsc.core.trajectory.Semantic; import br.ufsc.core.trajectory.SemanticTrajectory; import br.ufsc.core.trajectory.SpatialDistanceFunction; import br.ufsc.core.trajectory.StopSemantic; import br.ufsc.core.trajectory.TPoint; import br.ufsc.core.trajectory.TemporalDuration; import br.ufsc.core.trajectory.semantic.AttributeDescriptor; import br.ufsc.core.trajectory.semantic.AttributeType; import br.ufsc.core.trajectory.semantic.Move; import br.ufsc.core.trajectory.semantic.Stop; import br.ufsc.core.trajectory.semantic.StopMove; import br.ufsc.db.source.DataRetriever; import br.ufsc.db.source.DataSource; import br.ufsc.db.source.DataSourceType; import br.ufsc.lehmann.AngleDistance; import br.ufsc.lehmann.DTWDistance; import br.ufsc.lehmann.EllipsesDistance; import br.ufsc.lehmann.MoveSemantic; import br.ufsc.lehmann.NumberDistance; import br.ufsc.lehmann.msm.artigo.StopMoveSemantic; import br.ufsc.utils.Angle; import br.ufsc.utils.Distance; import br.ufsc.utils.EuclideanDistanceFunction; import smile.math.Math; public class GeolifeUniversityDatabaseReader implements IDataReader { private static final int SAMPLING_RATE = 1; private static final SpatialDistanceFunction GEO_DISTANCE_FUNCTION = new EuclideanDistanceFunction(); private static final 
SpatialDistanceFunction DISTANCE_FUNCTION = GEO_DISTANCE_FUNCTION; private static int SEMANTICS_COUNTER = 3; public static final BasicSemantic<Integer> USER_ID = new BasicSemantic<>(SEMANTICS_COUNTER++); public static final BasicSemantic<String> TRANSPORTATION_MODE = new BasicSemantic<>(SEMANTICS_COUNTER++); public static final BasicSemantic<String> REGION_INTEREST = new BasicSemantic<>(SEMANTICS_COUNTER++); public static final BasicSemantic<String> PATH = new BasicSemantic<>(SEMANTICS_COUNTER++); public static final BasicSemantic<String> DIRECTION = new BasicSemantic<>(SEMANTICS_COUNTER++); public static final StopSemantic STOP_REGION_SEMANTIC = new StopSemantic(SEMANTICS_COUNTER, new AttributeDescriptor<Stop, String>(AttributeType.STOP_REGION, new EqualsDistanceFunction<String>())); public static final StopSemantic STOP_CENTROID_SEMANTIC = new StopSemantic(SEMANTICS_COUNTER, new AttributeDescriptor<Stop, TPoint>(AttributeType.STOP_CENTROID, GEO_DISTANCE_FUNCTION)); public static final StopSemantic STOP_STREET_NAME_SEMANTIC = new StopSemantic(SEMANTICS_COUNTER++, new AttributeDescriptor<Stop, String>(AttributeType.STOP_STREET_NAME, new EqualsDistanceFunction<String>())); public static final MoveSemantic MOVE_ANGLE_SEMANTIC = new MoveSemantic(SEMANTICS_COUNTER, new AttributeDescriptor<Move, Double>(AttributeType.MOVE_ANGLE, new AngleDistance())); public static final MoveSemantic MOVE_DISTANCE_SEMANTIC = new MoveSemantic(SEMANTICS_COUNTER, new AttributeDescriptor<Move, Number>(AttributeType.MOVE_TRAVELLED_DISTANCE, new NumberDistance())); public static final MoveSemantic MOVE_POINTS_SEMANTIC = new MoveSemantic(SEMANTICS_COUNTER, new AttributeDescriptor<Move, TPoint[]>(AttributeType.MOVE_POINTS, new DTWDistance(GEO_DISTANCE_FUNCTION))); public static final MoveSemantic MOVE_ELLIPSES_SEMANTIC = new MoveSemantic(SEMANTICS_COUNTER++, new AttributeDescriptor<Move, TPoint[]>(AttributeType.MOVE_POINTS, new EllipsesDistance(GEO_DISTANCE_FUNCTION))); public static final 
StopMoveSemantic STOP_MOVE_COMBINED = new StopMoveSemantic(STOP_STREET_NAME_SEMANTIC, MOVE_ANGLE_SEMANTIC, new AttributeDescriptor<StopMove, Object>(AttributeType.STOP_STREET_NAME_MOVE_ANGLE, new EqualsDistanceFunction<Object>())); public static final BasicSemantic<String> PATH_WITH_DIRECTION = new BasicSemantic<String>(SEMANTICS_COUNTER++) { @Override public String getData(SemanticTrajectory p, int i) { return DIRECTION.getData(p, i) + "/" + PATH.getData(p, i); } }; public static final BasicSemantic<Number> SPATIAL_X = new BasicSemantic<>(SEMANTICS_COUNTER++, new NumberDistance()); public static final BasicSemantic<Number> SPATIAL_Y = new BasicSemantic<>(SEMANTICS_COUNTER++, new NumberDistance()); private boolean onlyStops; private String pointsTable; private String moveTable; private String stopTable; private Boolean normalized; public GeolifeUniversityDatabaseReader(boolean onlyStops) { this(onlyStops, false); } public GeolifeUniversityDatabaseReader(boolean onlyStops, boolean withTransportation) { this.onlyStops = onlyStops; stopTable = "stops_moves.geolife_inside_university_stop_5_pois"; moveTable = "stops_moves.geolife_inside_university_move_5_pois"; pointsTable = "geolife.geolife_inside_university_5_pois"; } public GeolifeUniversityDatabaseReader(boolean onlyStops, String stopTable, String moveTable, String pointsTable) { this(onlyStops, stopTable, moveTable, pointsTable, Boolean.FALSE); } public GeolifeUniversityDatabaseReader(boolean onlyStops, String stopTable, String moveTable, String pointsTable, Boolean normalized) { this.onlyStops = onlyStops; this.stopTable = stopTable; this.moveTable = moveTable; this.pointsTable = pointsTable; this.normalized = normalized; } public List<SemanticTrajectory> read() { try { DataSource source = new DataSource("postgres", "postgres", "localhost", 5432, "postgis", DataSourceType.PGSQL, "stops_moves.geolife_with_pois_university_stop", null, null); DataRetriever retriever = source.getRetriever(); 
System.out.println("Executing SQL..."); Connection conn = retriever.getConnection(); List<SemanticTrajectory> ret = null; try { conn.setAutoCommit(false); Statement st = conn.createStatement(); st.setFetchSize(1000); // ResultSet stopsData = st.executeQuery( "SELECT stop_id, start_lat, start_lon, begin, end_lat, end_lon, length, centroid_lat, " + // "centroid_lon, start_time, end_time, street, \"POI\" " + // "FROM " + stopTable); Map<Integer, Stop> stops = new HashMap<>(); while (stopsData.next()) { int stopId = stopsData.getInt("stop_id"); Stop stop = stops.get(stopId); if (stop == null) { stop = new Stop(stopId, stopsData.getString("POI"), // stopsData.getTimestamp("start_time").getTime(), // stopsData.getTimestamp("end_time").getTime(), // new TPoint(stopsData.getDouble("start_lat"), stopsData.getDouble("start_lon")), // stopsData.getInt("begin"), // new TPoint(stopsData.getDouble("end_lat"), stopsData.getDouble("end_lon")), // stopsData.getInt("length"), // new TPoint(stopsData.getDouble("centroid_lat"), stopsData.getDouble("centroid_lon")), // stopsData.getString("POI"), // stopsData.getString("street")// ); stops.put(stopId, stop); } } Map<Integer, Move> moves = new HashMap<>(); ResultSet movesData = st .executeQuery("SELECT move_id, start_time, start_stop_id, begin, end_time, end_stop_id, length " + // "FROM " + moveTable); while (movesData.next()) { int moveId = movesData.getInt("move_id"); Move move = moves.get(moveId); if (move == null) { int startStopId = movesData.getInt("start_stop_id"); if (movesData.wasNull()) { startStopId = -1; } int endStopId = movesData.getInt("end_stop_id"); if (movesData.wasNull()) { endStopId = -1; } move = new Move(moveId, // stops.get(startStopId), // stops.get(endStopId), // movesData.getTimestamp("start_time").getTime(), // movesData.getTimestamp("end_time").getTime(), // movesData.getInt("begin"), // movesData.getInt("length"), // null); moves.put(moveId, move); } } st.close(); List<Move> usedMoves = new 
ArrayList<Move>(); if (onlyStops) { ret = readStopsTrajectories(null, conn, stops, moves, usedMoves); } else { ret = readRawPoints(null, conn, stops, moves); } compute(usedMoves); } finally { conn.close(); } if(false) { Multimap<String, SemanticTrajectory> trajs = MultimapBuilder.hashKeys().arrayListValues().build(); for (SemanticTrajectory semanticTrajectory : ret) { trajs.put(PATH_WITH_DIRECTION.getData(semanticTrajectory, 0), semanticTrajectory); } Map<String, Collection<SemanticTrajectory>> asMap = trajs.asMap(); List<String> toResize = asMap.entrySet().stream().filter(entry -> entry.getValue().size() > 100).map(Map.Entry::getKey).collect(Collectors.toList()); List<String> toRemove = asMap.entrySet().stream().filter(entry -> entry.getValue().size() < 1).map(Map.Entry::getKey).collect(Collectors.toList()); ret = new ArrayList<>(); for (Map.Entry<String, Collection<SemanticTrajectory>> entry : asMap.entrySet()) { if(!toRemove.contains(entry.getKey())) { List<SemanticTrajectory> t = new ArrayList<>(entry.getValue()); if(!toResize.contains(entry.getKey())) { ret.addAll(t); } else { ret.addAll(t.stream().skip(100).collect(Collectors.toList())); } } } } return ret; } catch (Exception e) { throw new RuntimeException(e); } } public List<Stop> exportStops(String... 
zones) throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { DataSource source = new DataSource("postgres", "postgres", "localhost", 5432, "lehmann", DataSourceType.PGSQL, "stops_moves.geolife_with_pois_university_stop", null, null); DataRetriever retriever = source.getRetriever(); System.out.println("Executing SQL..."); // Connection conn = retriever.getConnection(); conn.setAutoCommit(false); String sql = "SELECT stop_id, start_lat, start_lon, begin, end_lat, end_lon, length, centroid_lat, " + // "centroid_lon, start_time, end_time, street " + // "FROM " + stopTable; if(zones != null && zones.length > 0) { sql += " where stop_id in (select semantic_stop_id from " + pointsTable + " where trim(\"POI\") in (SELECT * FROM unnest(?))) "; } PreparedStatement st = conn.prepareStatement(sql); st.setFetchSize(1000); if(zones != null && zones.length > 0) { Array array = conn.createArrayOf("varchar", zones); st.setArray(1, array); } ResultSet stopsData = st.executeQuery(); Map<Integer, Stop> stops = new HashMap<>(); while (stopsData.next()) { int stopId = stopsData.getInt("stop_id"); Stop stop = stops.get(stopId); if (stop == null) { stop = new Stop(stopId, null, // stopsData.getTimestamp("start_time").getTime(), // stopsData.getTimestamp("end_time").getTime(), // new TPoint(stopsData.getDouble("start_lat"), stopsData.getDouble("start_lon")), // stopsData.getInt("begin"), // new TPoint(stopsData.getDouble("end_lat"), stopsData.getDouble("end_lon")), // stopsData.getInt("length"), // new TPoint(stopsData.getDouble("centroid_lat"), stopsData.getDouble("centroid_lon")),// stopsData.getString("street")// ); stops.put(stopId, stop); } } return new ArrayList<>(stops.values()); } private List<SemanticTrajectory> readStopsTrajectories(String[] zones, Connection conn, Map<Integer, Stop> stops, Map<Integer, Move> moves, List<Move> usedMoves) throws SQLException { String transportationColumn = "'NONE'"; // String sql = "select tid, gid, time, 
lat," + " lon, folder_id as user_id, " + transportationColumn + " as transporationMode, \"POI\", semantic_stop_id, semantic_move_id, path, direction " + "from " + pointsTable// + " where 1=1 ";// if(zones != null && zones.length > 0) { sql += "and POI in (SELECT * FROM unnest(?)) "; } // sql += "and (direction, path) in (select direction, path from "+ pointsTable + " group by direction, path having count(distinct tid) > 3) "; // // sql += " and tid in (16302," + // "15474," + // "13671," + // "13862," + // "12764," + // "21675" + // ")"; // sql += "order by tid, time, gid"; PreparedStatement preparedStatement = conn.prepareStatement(sql); if(zones != null && zones.length > 0) { Array array = conn.createArrayOf("varchar", zones); preparedStatement.setArray(1, array); } ResultSet data = preparedStatement.executeQuery(); Multimap<Integer, GeolifeRecord> records = MultimapBuilder.hashKeys().linkedListValues().build(); System.out.println("Fetching..."); while(data.next()) { Integer stop = data.getInt("semantic_stop_id"); if(data.wasNull()) { stop = null; } Integer move = data.getInt("semantic_move_id"); if(data.wasNull()) { move = null; } GeolifeRecord record = new GeolifeRecord( data.getInt("tid"), data.getInt("gid"), data.getTimestamp("time"), data.getDouble("lon"), data.getDouble("lat"), data.getInt("user_id"), data.getString("transporationMode"), data.getString("POI"), data.getString("path"), data.getString("direction"), stop, move ); records.put(record.getTid(), record); } System.out.printf("Loaded %d GPS points from database\n", records.size()); System.out.printf("Loaded %d trajectories from database\n", records.keySet().size()); List<SemanticTrajectory> ret = new ArrayList<>(); Set<Integer> keys = records.keySet(); DescriptiveStatistics stats = new DescriptiveStatistics(); for (Integer trajId : keys) { SemanticTrajectory s = new SemanticTrajectory(trajId, SEMANTICS_COUNTER); Collection<GeolifeRecord> collection = records.get(trajId); int i = 0; for (GeolifeRecord 
record : collection) { TPoint point = new TPoint(record.getLatitude(), record.getLongitude(), record.getTime()); if(record.getSemanticStop() != null) { Stop stop = stops.get(record.getSemanticStop()); if(stop == null) { throw new RuntimeException("Stop does not found"); } stop.addPoint(point); if(i > 0) { if(STOP_CENTROID_SEMANTIC.getData(s, i - 1) == stop) { continue; } Stop previousStop = STOP_CENTROID_SEMANTIC.getData(s, i - 1); if(previousStop != null && previousStop.getNextMove() == null) { Move move = new Move(-1, previousStop, stop, previousStop.getEndTime(), stop.getStartTime(), stop.getBegin() - 1, 0, new TPoint[0], Angle.getAngle(previousStop.getEndPoint(), stop.getStartPoint()), Distance.getDistance(new TPoint[] {previousStop.getEndPoint(), stop.getStartPoint()}, DISTANCE_FUNCTION)); previousStop.setNextMove(move); stop.setPreviousMove(move); } } s.addData(i, STOP_CENTROID_SEMANTIC, stop); s.addData(i, Semantic.TEMPORAL, new TemporalDuration(Instant.ofEpochMilli(stop.getStartTime()), Instant.ofEpochMilli(stop.getEndTime()))); s.addData(i, Semantic.GID, record.getGid()); s.addData(i, SPATIAL_X, stop.getCentroid().getX()); s.addData(i, SPATIAL_Y, stop.getCentroid().getY()); s.addData(i, Semantic.SPATIAL_LATLON, stop.getCentroid()); s.addData(i, USER_ID, record.getUserId()); s.addData(i, REGION_INTEREST, record.getPOI()); s.addData(i, PATH, record.getPath()); s.addData(i, DIRECTION, record.getDirection()); s.addData(i, PATH_WITH_DIRECTION, PATH_WITH_DIRECTION.getData(s, i)); stop.setRegion(record.getPOI()); i++; } else if(record.getSemanticMoveId() != null) { Move move = moves.get(record.getSemanticMoveId()); if(move == null) { throw new RuntimeException("Move does not found"); } if(!usedMoves.contains(move)) { usedMoves.add(move); } move.getStart().setNextMove(move); move.getEnd().setPreviousMove(move); TPoint[] points = (TPoint[]) move.getAttribute(AttributeType.MOVE_POINTS); List<TPoint> a = new ArrayList<TPoint>(points == null ? 
Collections.emptyList() : Arrays.asList(points)); if(a.isEmpty()) { a.add(point); } else { TPoint tPoint = a.get(a.size() - 1); if(tPoint.getTime() + (SAMPLING_RATE * 1000) < record.getTime().getTime()) { a.add(point); } } move.setAttribute(AttributeType.MOVE_POINTS, a.toArray(new TPoint[a.size()])); move.setAttribute(AttributeType.TRAJECTORY, s); } } stats.addValue(s.length()); ret.add(s); } System.out.printf("Semantic Trajectories statistics: mean - %.2f, min - %.2f, max - %.2f, sd - %.2f\n", stats.getMean(), stats.getMin(), stats.getMax(), stats.getStandardDeviation()); return ret; } private List<SemanticTrajectory> readRawPoints(String[] zones, Connection conn, Map<Integer, Stop> stops, Map<Integer, Move> moves) throws SQLException { String transportationColumn = "'NONE'"; // String sql = "select tid, gid, time, lon, lat, folder_id as user_id, " + transportationColumn + " as transportationMode, \"POI\", semantic_stop_id, semantic_move_id, path, direction " + "from " + pointsTable// + " where 1=1 ";// if(zones != null && zones.length > 0) { sql += "and POI in (SELECT * FROM unnest(?)) "; } // sql += "and (direction, path) in (select direction, path from "+ pointsTable + " group by direction, path having count(distinct tid) > 3) "; // sql += "order by tid, time, gid"; PreparedStatement preparedStatement = conn.prepareStatement(sql); if(zones != null && zones.length > 0) { Array array = conn.createArrayOf("varchar", zones); preparedStatement.setArray(1, array); } ResultSet data = preparedStatement.executeQuery(); Multimap<Integer, GeolifeRecord> records = MultimapBuilder.hashKeys().linkedListValues().build(); System.out.println("Fetching..."); while(data.next()) { Integer stop = data.getInt("semantic_stop_id"); if(data.wasNull()) { stop = null; } Integer move = data.getInt("semantic_move_id"); if(data.wasNull()) { move = null; } GeolifeRecord record = new GeolifeRecord( data.getInt("tid"), data.getInt("gid"), data.getTimestamp("time"), data.getDouble("lon"), 
data.getDouble("lat"), data.getInt("user_id"), data.getString("transportationMode"), data.getString("POI"), data.getString("path"), data.getString("direction"), stop, move ); records.put(record.getTid(), record); } System.out.printf("Loaded %d GPS points from database\n", records.size()); System.out.printf("Loaded %d trajectories from database\n", records.keySet().size()); List<SemanticTrajectory> ret = new ArrayList<>(); Set<Integer> keys = records.keySet(); DescriptiveStatistics stats = new DescriptiveStatistics(); for (Integer trajId : keys) { SemanticTrajectory s = new SemanticTrajectory(trajId, SEMANTICS_COUNTER); Collection<GeolifeRecord> collection = records.get(trajId); Map<String, double[]> dimensionValues = new HashMap<>(); dimensionValues.put("x", new double[collection.size()]); dimensionValues.put("y", new double[collection.size()]); int j = 0; for (Iterator iterator = collection.iterator(); iterator.hasNext();j++) { GeolifeRecord record = (GeolifeRecord) iterator.next(); dimensionValues.get("x")[j] = record.getLongitude(); dimensionValues.get("y")[j] = record.getLatitude(); } if(normalized) { Math.standardize(dimensionValues.get("x")); Math.standardize(dimensionValues.get("y")); } int i = 0; for (GeolifeRecord record : collection) { if(i > 0) { TPoint p = Semantic.SPATIAL.getData(s, i - 1); if(p.getTime() + (SAMPLING_RATE * 1000) > record.getTime().getTime()) { continue; } } s.addData(i, Semantic.GID, record.getGid()); double x = dimensionValues.get("x")[i]; double y = dimensionValues.get("y")[i]; TPoint point = new TPoint(x, y, record.getTime()); s.addData(i, Semantic.SPATIAL, point); s.addData(i, Semantic.TEMPORAL, new TemporalDuration(Instant.ofEpochMilli(record.getTime().getTime()), Instant.ofEpochMilli(record.getTime().getTime()))); s.addData(i, SPATIAL_X, point.getX()); s.addData(i, SPATIAL_Y, point.getY()); s.addData(i, USER_ID, record.getUserId()); s.addData(i, TRANSPORTATION_MODE, record.getTransportationMode()); s.addData(i, REGION_INTEREST, 
record.getPOI()); s.addData(i, DIRECTION, record.getDirection()); s.addData(i, PATH, record.getPath()); s.addData(i, PATH_WITH_DIRECTION, PATH_WITH_DIRECTION.getData(s, i)); if(record.getSemanticStop() != null) { Stop stop = stops.get(record.getSemanticStop()); s.addData(i, STOP_CENTROID_SEMANTIC, stop); stop.setRegion(record.getPOI()); } if(record.getSemanticMoveId() != null) { Move move = moves.get(record.getSemanticMoveId()); TPoint[] points = (TPoint[]) move.getAttribute(AttributeType.MOVE_POINTS); List<TPoint> a = new ArrayList<TPoint>(points == null ? Collections.emptyList() : Arrays.asList(points)); a.add(point); move.setAttribute(AttributeType.MOVE_POINTS, a.toArray(new TPoint[a.size()])); s.addData(i, MOVE_ANGLE_SEMANTIC, move); } i++; } stats.addValue(s.length()); ret.add(s); } System.out.printf("Semantic Trajectories statistics: mean - %.2f, min - %.2f, max - %.2f, sd - %.2f\n", stats.getMean(), stats.getMin(), stats.getMax(), stats.getStandardDeviation()); return ret; } private void compute(Collection<Move> moves) { for (Move move : moves) { List<TPoint> points = new ArrayList<>(); if(move.getStart() != null) { points.add(move.getStart().getEndPoint()); } if(move.getPoints() != null) { points.addAll(Arrays.asList(move.getPoints())); } if(move.getEnd() != null) { points.add(move.getEnd().getStartPoint()); } move.setAttribute(AttributeType.MOVE_ANGLE, Angle.getAngle(points.get(0), points.get(points.size() - 1))); double distance = Distance.getDistance(points.toArray(new TPoint[points.size()]), DISTANCE_FUNCTION); move.setAttribute(AttributeType.MOVE_TRAVELLED_DISTANCE, distance); } } }
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.vcs.changes; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.fileEditor.OpenFileDescriptor; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.NotNullComputable; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vcs.AbstractVcs; import com.intellij.openapi.vcs.FilePath; import com.intellij.openapi.vcs.FileStatus; import com.intellij.openapi.vcs.ProjectLevelVcsManager; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VfsUtilCore; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.pom.Navigatable; import com.intellij.util.ObjectUtils; import com.intellij.util.containers.ContainerUtil; import com.intellij.vcsUtil.VcsUtil; import gnu.trove.THashSet; import gnu.trove.TObjectHashingStrategy; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.util.*; import java.util.stream.Stream; import static com.intellij.util.containers.ContainerUtil.newArrayList; import static com.intellij.util.containers.ContainerUtil.newTroveSet; import static java.util.stream.Collectors.*; /** * 
@author max
 */
public class ChangesUtil {
  private static final Key<Boolean> INTERNAL_OPERATION_KEY = Key.create("internal vcs operation");

  /** Compares {@link FilePath}s by their textual path only. */
  public static final TObjectHashingStrategy<FilePath> FILE_PATH_BY_PATH_ONLY_HASHING_STRATEGY = new TObjectHashingStrategy<FilePath>() {
    @Override
    public int computeHashCode(@NotNull FilePath path) {
      return path.getPath().hashCode();
    }

    @Override
    public boolean equals(@NotNull FilePath path1, @NotNull FilePath path2) {
      return StringUtil.equals(path1.getPath(), path2.getPath());
    }
  };

  private ChangesUtil() {}

  /**
   * Returns the path a change currently points at: the after-revision path when present,
   * otherwise the before-revision path (a change always has at least one revision).
   */
  @NotNull
  public static FilePath getFilePath(@NotNull Change change) {
    ContentRevision revision = change.getAfterRevision();
    if (revision == null) {
      revision = change.getBeforeRevision();
      assert revision != null;
    }
    return revision.getFile();
  }

  /** Returns the before-revision path, or null for an added file. */
  @Nullable
  public static FilePath getBeforePath(@NotNull Change change) {
    ContentRevision revision = change.getBeforeRevision();
    return revision == null ? null : revision.getFile();
  }

  /** Returns the after-revision path, or null for a deleted file. */
  @Nullable
  public static FilePath getAfterPath(@NotNull Change change) {
    ContentRevision revision = change.getAfterRevision();
    return revision == null ? null : revision.getFile();
  }

  /**
   * Resolves the VCS responsible for a change: first via the change list manager,
   * falling back to a path-based lookup.
   */
  @Nullable
  public static AbstractVcs getVcsForChange(@NotNull Change change, @NotNull Project project) {
    AbstractVcs result = ChangeListManager.getInstance(project).getVcsFor(change);
    return result != null ? result : ProjectLevelVcsManager.getInstance(project).getVcsFor(getFilePath(change));
  }

  /** Returns the distinct set of VCSes responsible for the given changes. */
  @NotNull
  public static Set<AbstractVcs> getAffectedVcses(@NotNull Collection<Change> changes, @NotNull Project project) {
    return ContainerUtil.map2SetNotNull(changes, change -> getVcsForChange(change, project));
  }

  @Nullable
  public static AbstractVcs getVcsForFile(@NotNull VirtualFile file, @NotNull Project project) {
    return ProjectLevelVcsManager.getInstance(project).getVcsFor(file);
  }

  @Nullable
  public static AbstractVcs getVcsForFile(@NotNull File file, @NotNull Project project) {
    return ProjectLevelVcsManager.getInstance(project).getVcsFor(VcsUtil.getFilePath(file));
  }

  /**
   * Collects all before/after paths of the changes, de-duplicated by path text
   * (see {@link #FILE_PATH_BY_PATH_ONLY_HASHING_STRATEGY}).
   */
  @NotNull
  public static List<FilePath> getPaths(@NotNull Collection<Change> changes) {
    THashSet<FilePath> distinctPaths = getAllPaths(changes.stream())
      .collect(toCollection(() -> newTroveSet(FILE_PATH_BY_PATH_ONLY_HASHING_STRATEGY)));
    return newArrayList(distinctPaths);
  }

  /** Collects the distinct {@link File}s referenced by the changes' before/after paths. */
  @NotNull
  public static List<File> getIoFilesFromChanges(@NotNull Collection<Change> changes) {
    return getAllPaths(changes.stream())
      .map(FilePath::getIOFile)
      .distinct()
      .collect(toList());
  }

  /** Streams every non-null before and after path of the given changes. */
  @NotNull
  public static Stream<FilePath> getAllPaths(@NotNull Stream<Change> changes) {
    return changes
      .flatMap(change -> Stream.of(getBeforePath(change), getAfterPath(change)))
      .filter(Objects::nonNull);
  }

  /**
   * @deprecated Use {@link ChangesUtil#getAfterRevisionsFiles(Stream)}.
   */
  @SuppressWarnings("unused") // Required for compatibility with external plugins.
  @Deprecated
  @NotNull
  public static VirtualFile[] getFilesFromChanges(@NotNull Collection<Change> changes) {
    return getAfterRevisionsFiles(changes.stream()).toArray(VirtualFile[]::new);
  }

  @NotNull
  public static Stream<VirtualFile> getAfterRevisionsFiles(@NotNull Stream<Change> changes) {
    return getAfterRevisionsFiles(changes, false);
  }

  /**
   * Streams the valid virtual files of the changes' after-revisions.
   *
   * @param refresh when true, refresh the local file system before resolving each path
   *                (picks up files created outside the IDE)
   */
  @NotNull
  public static Stream<VirtualFile> getAfterRevisionsFiles(@NotNull Stream<Change> changes, boolean refresh) {
    LocalFileSystem fileSystem = LocalFileSystem.getInstance();

    return changes
      .map(Change::getAfterRevision)
      .filter(Objects::nonNull)
      .map(ContentRevision::getFile)
      .map(path -> refresh ? fileSystem.refreshAndFindFileByPath(path.getPath()) : path.getVirtualFile())
      .filter(Objects::nonNull)
      .filter(VirtualFile::isValid);
  }

  @NotNull
  public static Navigatable[] getNavigatableArray(@NotNull Project project, @NotNull VirtualFile[] files) {
    return getNavigatableArray(project, Stream.of(files));
  }

  /** Builds open-file descriptors for the non-directory files in the stream. */
  @NotNull
  public static Navigatable[] getNavigatableArray(@NotNull Project project, @NotNull Stream<VirtualFile> files) {
    return files
      .filter(file -> !file.isDirectory())
      .map(file -> new OpenFileDescriptor(project, file))
      .toArray(Navigatable[]::new);
  }

  /** Returns the single change list containing all the changes, or null if there is more than one. */
  @Nullable
  public static ChangeList getChangeListIfOnlyOne(@NotNull Project project, @Nullable Change[] changes) {
    ChangeListManager manager = ChangeListManager.getInstance(project);
    String changeListName = manager.getChangeListNameIfOnlyOne(changes);

    return changeListName == null ? null : manager.findChangeList(changeListName);
  }

  /**
   * Maps a local path back to its committed (pre-rename) path.
   * Checks if the file has just been renamed (IDEADEV-15494).
   */
  public static FilePath getCommittedPath(@NotNull Project project, FilePath filePath) {
    Change change = ChangeListManager.getInstance(project).getChange(filePath);
    if (change != null) {
      ContentRevision beforeRevision = change.getBeforeRevision();
      ContentRevision afterRevision = change.getAfterRevision();
      if (beforeRevision != null && afterRevision != null && !beforeRevision.getFile().equals(afterRevision.getFile()) &&
          afterRevision.getFile().equals(filePath)) {
        filePath = beforeRevision.getFile();
      }
    }
    return filePath;
  }

  /**
   * Maps a committed path to its current local (post-rename) path.
   * Checks if the file has just been renamed (IDEADEV-15494).
   */
  public static FilePath getLocalPath(@NotNull Project project, FilePath filePath) {
    // The change lookup must run in a read action; bail out if the project is disposed.
    Change change = ApplicationManager.getApplication().runReadAction(new Computable<Change>() {
      @Override
      @Nullable
      public Change compute() {
        if (project.isDisposed()) throw new ProcessCanceledException();
        return ChangeListManager.getInstance(project).getChange(filePath);
      }
    });

    if (change != null) {
      ContentRevision beforeRevision = change.getBeforeRevision();
      ContentRevision afterRevision = change.getAfterRevision();
      if (beforeRevision != null && afterRevision != null && !beforeRevision.getFile().equals(afterRevision.getFile()) &&
          beforeRevision.getFile().equals(filePath)) {
        return afterRevision.getFile();
      }
    }
    return filePath;
  }

  /** Returns the path's virtual file, or its closest existing ancestor (resolved in a read action). */
  @Nullable
  public static VirtualFile findValidParentUnderReadAction(@NotNull FilePath path) {
    VirtualFile file = path.getVirtualFile();
    return file != null ? file : getValidParentUnderReadAction(path);
  }

  /**
   * Like {@link #findValidParentUnderReadAction}, but additionally refreshes the local
   * file system first when called outside a read action.
   */
  @Nullable
  public static VirtualFile findValidParentAccurately(@NotNull FilePath filePath) {
    VirtualFile result = filePath.getVirtualFile();

    if (result == null && !ApplicationManager.getApplication().isReadAccessAllowed()) {
      result = LocalFileSystem.getInstance().refreshAndFindFileByPath(filePath.getPath());
    }
    if (result == null) {
      result = getValidParentUnderReadAction(filePath);
    }

    return result;
  }

  // Walks up the path hierarchy until an existing virtual file is found.
  @Nullable
  private static VirtualFile getValidParentUnderReadAction(@NotNull FilePath filePath) {
    return ApplicationManager.getApplication().runReadAction(new Computable<VirtualFile>() {
      @Override
      public VirtualFile compute() {
        VirtualFile result = null;
        FilePath parent = filePath;
        LocalFileSystem lfs = LocalFileSystem.getInstance();

        while (result == null && parent != null) {
          result = lfs.findFileByPath(parent.getPath());
          parent = parent.getParentPath();
        }

        return result;
      }
    });
  }

  /** Returns the file's path relative to the project base dir, or its absolute path as fallback. */
  @Nullable
  public static String getProjectRelativePath(@NotNull Project project, @Nullable File fileName) {
    if (fileName == null) return null;

    VirtualFile baseDir = project.getBaseDir();
    if (baseDir == null) return fileName.toString();

    String relativePath = FileUtil.getRelativePath(VfsUtilCore.virtualToIoFile(baseDir), fileName);
    if (relativePath != null) return relativePath;

    return fileName.toString();
  }

  public static boolean isBinaryContentRevision(@Nullable ContentRevision revision) {
    return revision instanceof BinaryContentRevision && !revision.getFile().isDirectory();
  }

  public static boolean isBinaryChange(@NotNull Change change) {
    return isBinaryContentRevision(change.getBeforeRevision()) || isBinaryContentRevision(change.getAfterRevision());
  }

  public static boolean isTextConflictingChange(@NotNull Change change) {
    FileStatus status = change.getFileStatus();
    return FileStatus.MERGED_WITH_CONFLICTS.equals(status) || FileStatus.MERGED_WITH_BOTH_CONFLICTS.equals(status);
  }

  @FunctionalInterface
  public interface PerVcsProcessor<T> {
    void process(@NotNull AbstractVcs vcs, @NotNull List<T> items);
  }

  @FunctionalInterface
  public interface VcsSeparator<T> {
    @Nullable
    AbstractVcs getVcsFor(@NotNull T item);
  }

  /**
   * Groups {@code items} by the VCS returned by {@code separator} (computed inside a read
   * action) and invokes {@code processor} once per VCS. Items for which no VCS can be
   * determined are skipped.
   */
  public static <T> void processItemsByVcs(@NotNull Collection<T> items,
                                           @NotNull VcsSeparator<T> separator,
                                           @NotNull PerVcsProcessor<T> processor) {
    Map<AbstractVcs, List<T>> changesByVcs = ApplicationManager.getApplication().runReadAction(
      new NotNullComputable<Map<AbstractVcs, List<T>>>() {
        @NotNull
        @Override
        public Map<AbstractVcs, List<T>> compute() {
          // Collectors.groupingBy() throws NPE when the classifier returns null, but
          // VcsSeparator.getVcsFor() is @Nullable and unmapped items are expected here
          // (they are filtered out below) -- group manually into a HashMap, which
          // permits a null key.
          Map<AbstractVcs, List<T>> result = new HashMap<>();
          for (T item : items) {
            result.computeIfAbsent(separator.getVcsFor(item), vcs -> new ArrayList<>()).add(item);
          }
          return result;
        }
      });

    changesByVcs.forEach((vcs, vcsItems) -> {
      if (vcs != null) {
        processor.process(vcs, vcsItems);
      }
    });
  }

  public static void processChangesByVcs(@NotNull Project project,
                                         @NotNull Collection<Change> changes,
                                         @NotNull PerVcsProcessor<Change> processor) {
    processItemsByVcs(changes, change -> getVcsForChange(change, project), processor);
  }

  public static void processVirtualFilesByVcs(@NotNull Project project,
                                              @NotNull Collection<VirtualFile> files,
                                              @NotNull PerVcsProcessor<VirtualFile> processor) {
    processItemsByVcs(files, file -> getVcsForFile(file, project), processor);
  }

  public static void processFilePathsByVcs(@NotNull Project project,
                                           @NotNull Collection<FilePath> files,
                                           @NotNull PerVcsProcessor<FilePath> processor) {
    processItemsByVcs(files, filePath -> getVcsForFile(filePath.getIOFile(), project), processor);
  }

  @NotNull
  public static List<File> filePathsToFiles(@NotNull Collection<FilePath> filePaths) {
    return filePaths.stream()
      .map(FilePath::getIOFile)
      .collect(toList());
  }

  /** Returns true if at least one change targets a file (not a directory). */
  public static boolean hasFileChanges(@NotNull Collection<Change> changes) {
    return changes.stream()
      .map(ChangesUtil::getFilePath)
      .anyMatch(path -> !path.isDirectory());
  }

  public static void markInternalOperation(@NotNull Iterable<Change> changes, boolean set) {
    for (Change change : changes) {
      VirtualFile file = change.getVirtualFile();
      if (file != null) {
        markInternalOperation(file, set);
      }
    }
  }

  public static void markInternalOperation(@NotNull VirtualFile file, boolean set) {
    file.putUserData(INTERNAL_OPERATION_KEY, set);
  }

  public static boolean isInternalOperation(@NotNull VirtualFile file) {
    return Boolean.TRUE.equals(file.getUserData(INTERNAL_OPERATION_KEY));
  }

  /**
   * Find common ancestor for changes (included both before and after files)
   */
  @Nullable
  public static File findCommonAncestor(@NotNull Collection<Change> changes) {
    File ancestor = null;
    for (Change change : changes) {
      File currentChangeAncestor = getCommonBeforeAfterAncestor(change);
      if (currentChangeAncestor == null) return null;
      if (ancestor == null) {
        ancestor = currentChangeAncestor;
      }
      else {
        ancestor = FileUtil.findAncestor(ancestor, currentChangeAncestor);
        if (ancestor == null) return null;
      }
    }
    return ancestor;
  }

  // Common ancestor of one change's before and after paths; the surviving path when one side is absent.
  @Nullable
  private static File getCommonBeforeAfterAncestor(@NotNull Change change) {
    FilePath before = getBeforePath(change);
    FilePath after = getAfterPath(change);
    return before == null
           ? ObjectUtils.assertNotNull(after).getIOFile()
           : after == null ? before.getIOFile() : FileUtil.findAncestor(before.getIOFile(), after.getIOFile());
  }
}