gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.asterix.api.http.server;

import static org.apache.asterix.api.http.servlet.ServletConstants.HYRACKS_CONNECTION_ATTR;
import static org.apache.asterix.api.http.servlet.ServletConstants.HYRACKS_DATASET_ATTR;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.asterix.app.result.ResultReader;
import org.apache.asterix.app.result.ResultUtil;
import org.apache.asterix.app.translator.QueryTranslator;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.lang.aql.parser.TokenMgrError;
import org.apache.asterix.lang.common.base.IParser;
import org.apache.asterix.lang.common.base.IParserFactory;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.metadata.MetadataManager;
import org.apache.asterix.translator.IStatementExecutor;
import org.apache.asterix.translator.IStatementExecutor.ResultDelivery;
import org.apache.asterix.translator.IStatementExecutorFactory;
import org.apache.asterix.translator.SessionConfig;
import org.apache.asterix.translator.SessionConfig.OutputFormat;
import org.apache.hyracks.algebricks.core.algebra.prettyprint.AlgebricksAppendable;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.dataset.IHyracksDataset;
import org.apache.hyracks.client.dataset.HyracksDataset;
import org.apache.hyracks.http.api.IServletRequest;
import org.apache.hyracks.http.api.IServletResponse;
import org.apache.hyracks.http.server.AbstractServlet;
import org.apache.hyracks.http.server.utils.HttpUtil;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpResponseStatus;

/**
 * Base servlet for the REST query APIs. Subclasses supply the query-parameter
 * name, the set of statement categories they accept, and the error message for
 * rejected statements; this class handles content negotiation, parsing,
 * validation, and execution of the submitted statements.
 */
public abstract class RestApiServlet extends AbstractServlet {
    private static final Logger LOGGER = Logger.getLogger(RestApiServlet.class.getName());

    private final ILangCompilationProvider compilationProvider;
    private final IParserFactory parserFactory;
    private final IStatementExecutorFactory statementExecutorFactory;
    private final IStorageComponentProvider componentProvider;

    public RestApiServlet(ConcurrentMap<String, Object> ctx, String[] paths,
            ILangCompilationProvider compilationProvider, IStatementExecutorFactory statementExecutorFactory,
            IStorageComponentProvider componentProvider) {
        super(ctx, paths);
        this.compilationProvider = compilationProvider;
        this.parserFactory = compilationProvider.getParserFactory();
        this.statementExecutorFactory = statementExecutorFactory;
        this.componentProvider = componentProvider;
    }

    /**
     * Initialize the Content-Type of the response, and construct a
     * SessionConfig with the appropriate output writer and output-format
     * based on the Accept: header and other servlet parameters.
     *
     * @param request  the incoming HTTP request ("output", "lossless",
     *                 "wrapper-array" and "header" parameters are consulted)
     * @param response the response whose Content-Type is set here
     * @return a SessionConfig wired to the response writer
     * @throws IOException if the format cannot be resolved
     */
    static SessionConfig initResponse(IServletRequest request, IServletResponse response) throws IOException {
        HttpUtil.setContentType(response, HttpUtil.ContentType.TEXT_PLAIN, HttpUtil.Encoding.UTF8);
        // CLEAN_JSON output is the default; most generally useful for a
        // programmatic HTTP API
        OutputFormat format = OutputFormat.CLEAN_JSON;
        // First check the "output" servlet parameter.
        String output = request.getParameter("output");
        String accept = request.getHeader("Accept", "");
        if (output != null) {
            if ("CSV".equals(output)) {
                format = OutputFormat.CSV;
            } else if ("ADM".equals(output)) {
                format = OutputFormat.ADM;
            }
        } else {
            // Second check the Accept: HTTP header.
            if (accept.contains("application/x-adm")) {
                format = OutputFormat.ADM;
            } else if (accept.contains("text/csv")) {
                format = OutputFormat.CSV;
            }
        }
        // If it's JSON, check for the "lossless" flag
        if (format == OutputFormat.CLEAN_JSON
                && ("true".equals(request.getParameter("lossless")) || accept.contains("lossless=true"))) {
            format = OutputFormat.LOSSLESS_JSON;
        }
        SessionConfig.ResultDecorator handlePrefix =
                (AlgebricksAppendable app) -> app.append("{ \"").append("handle").append("\": ");
        SessionConfig.ResultDecorator handlePostfix = (AlgebricksAppendable app) -> app.append(" }");
        SessionConfig sessionConfig =
                new SessionConfig(response.writer(), format, null, null, handlePrefix, handlePostfix);
        // If it's JSON or ADM, check for the "wrapper-array" flag. Default is
        // "true" for JSON and "false" for ADM. (Not applicable for CSV.)
        boolean wrapperArray = format == OutputFormat.CLEAN_JSON || format == OutputFormat.LOSSLESS_JSON;
        String wrapperParam = request.getParameter("wrapper-array");
        if (wrapperParam != null) {
            // parseBoolean avoids the needless Boolean boxing of Boolean.valueOf
            wrapperArray = Boolean.parseBoolean(wrapperParam);
        } else if (accept.contains("wrap-array=true")) {
            wrapperArray = true;
        } else if (accept.contains("wrap-array=false")) {
            wrapperArray = false;
        }
        sessionConfig.set(SessionConfig.FORMAT_WRAPPER_ARRAY, wrapperArray);
        // Now that format is set, output the content-type
        switch (format) {
            case ADM:
                HttpUtil.setContentType(response, "application/x-adm");
                break;
            case CLEAN_JSON:
                // No need to reflect "clean-ness" in output type; fall through
            case LOSSLESS_JSON:
                HttpUtil.setContentType(response, "application/json");
                break;
            case CSV:
                // Check for header parameter or in Accept:.
                if ("present".equals(request.getParameter("header")) || accept.contains("header=present")) {
                    HttpUtil.setContentType(response, "text/csv; header=present");
                    sessionConfig.set(SessionConfig.FORMAT_CSV_HEADER, true);
                } else {
                    HttpUtil.setContentType(response, "text/csv; header=absent");
                }
                break;
            default:
                throw new IOException("Unknown format " + format);
        }
        return sessionConfig;
    }

    @Override
    protected void get(IServletRequest request, IServletResponse response) {
        getOrPost(request, response);
    }

    @Override
    protected void post(IServletRequest request, IServletResponse response) {
        getOrPost(request, response);
    }

    /** Shared GET/POST path: extract the query, negotiate output, execute. */
    private void getOrPost(IServletRequest request, IServletResponse response) {
        try {
            String query = query(request);
            // enable cross-origin resource sharing
            response.setHeader("Access-Control-Allow-Origin", "*");
            response.setHeader("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
            SessionConfig sessionConfig = initResponse(request, response);
            QueryTranslator.ResultDelivery resultDelivery = whichResultDelivery(request);
            doHandle(response, query, sessionConfig, resultDelivery);
        } catch (Exception e) {
            // Boundary catch: any failure before doHandle() owns the response
            // is reported as a 500 and logged.
            response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
            LOGGER.log(Level.WARNING, "Failure handling request", e);
            return;
        }
    }

    /**
     * Parse, validate and execute the given query, writing results (or a JSON
     * error document) to the session output.
     */
    private void doHandle(IServletResponse response, String query, SessionConfig sessionConfig,
            ResultDelivery resultDelivery) throws JsonProcessingException {
        try {
            response.setStatus(HttpResponseStatus.OK);
            IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
            IHyracksDataset hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
            if (hds == null) {
                // Double-checked creation of the shared dataset client: re-read
                // under the lock so only one instance is ever published to ctx.
                synchronized (ctx) {
                    hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                    if (hds == null) {
                        hds = new HyracksDataset(hcc, ResultReader.FRAME_SIZE, ResultReader.NUM_READERS);
                        ctx.put(HYRACKS_DATASET_ATTR, hds);
                    }
                }
            }
            IParser parser = parserFactory.createParser(query);
            List<Statement> aqlStatements = parser.parse();
            validate(aqlStatements);
            MetadataManager.INSTANCE.init();
            IStatementExecutor translator = statementExecutorFactory.create(aqlStatements, sessionConfig,
                    compilationProvider, componentProvider);
            translator.compileAndExecute(hcc, hds, resultDelivery);
        } catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
            // Parse-level failures: report a structured JSON error document.
            response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, pe.getMessage(), pe);
            String errorMessage = ResultUtil.buildParseExceptionMessage(pe, query);
            ObjectNode errorResp =
                    ResultUtil.getErrorResponse(2, errorMessage, "", ResultUtil.extractFullStackTrace(pe));
            sessionConfig.out().write(new ObjectMapper().writeValueAsString(errorResp));
        } catch (Exception e) {
            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
            response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
            ResultUtil.apiErrorHandler(sessionConfig.out(), e);
        }
    }

    //TODO: Both Get and Post of this API must use the same parameter names
    /** Extract the query text: POST body (UTF-8) or the subclass's GET parameter. */
    private String query(IServletRequest request) {
        if (request.getHttpRequest().method() == HttpMethod.POST) {
            return request.getHttpRequest().content().toString(StandardCharsets.UTF_8);
        } else {
            return getQueryParameter(request);
        }
    }

    /**
     * Reject any statement whose category bit is not in the subclass's allowed
     * set.
     *
     * @throws AsterixException for the first disallowed statement
     */
    private void validate(List<Statement> aqlStatements) throws AsterixException {
        for (Statement st : aqlStatements) {
            if ((st.getCategory() & getAllowedCategories()) == 0) {
                throw new AsterixException(String.format(getErrorMessage(), st.getKind()));
            }
        }
    }

    /**
     * Map the "mode" servlet parameter onto a result-delivery strategy;
     * defaults to IMMEDIATE when absent or unrecognized.
     */
    protected QueryTranslator.ResultDelivery whichResultDelivery(IServletRequest request) {
        String mode = request.getParameter("mode");
        if (mode != null) {
            if ("asynchronous".equals(mode) || "async".equals(mode)) {
                return QueryTranslator.ResultDelivery.ASYNC;
            } else if ("asynchronous-deferred".equals(mode) || "deferred".equals(mode)) {
                return QueryTranslator.ResultDelivery.DEFERRED;
            }
        }
        return QueryTranslator.ResultDelivery.IMMEDIATE;
    }

    /** @return the request parameter carrying the query text for GET requests */
    protected abstract String getQueryParameter(IServletRequest request);

    /** @return bitmask of statement categories this endpoint accepts */
    protected abstract byte getAllowedCategories();

    /** @return format string (one %s: statement kind) for rejected statements */
    protected abstract String getErrorMessage();
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * $Id: Util.java 468649 2006-10-28 07:00:55Z minchau $
 */

package org.apache.xalan.xsltc.compiler.util;

import java.util.StringTokenizer;

import org.apache.bcel.generic.Type;
import org.apache.xalan.xsltc.compiler.Constants;
import org.apache.xml.utils.XML11Char;

/**
 * Assorted string/name helpers used by the XSLTC compiler.
 *
 * @author Jacek Ambroziak
 * @author Santiago Pericas-Geertsen
 */
public final class Util {
    // Platform file separator character; never reassigned, so final.
    private static final char filesep;

    static {
        String temp = System.getProperty("file.separator", "/");
        filesep = temp.charAt(0);
    }

    /**
     * Returns the name with its last '.'-extension removed; the name is
     * returned unchanged when it has no '.'.
     */
    public static String noExtName(String name) {
        final int index = name.lastIndexOf('.');
        return name.substring(0, index >= 0 ? index : name.length());
    }

    /**
     * Returns the last path segment of the name. Search for both slashes
     * in order to support URLs and files; falls back to the text after the
     * last ':' (e.g. a URL scheme or drive letter) when no slash is present.
     */
    public static String baseName(String name) {
        int index = name.lastIndexOf('\\');
        if (index < 0) {
            index = name.lastIndexOf('/');
        }
        if (index >= 0) {
            return name.substring(index + 1);
        } else {
            int lastColonIndex = name.lastIndexOf(':');
            if (lastColonIndex > 0) {
                return name.substring(lastColonIndex + 1);
            } else {
                return name;
            }
        }
    }

    /**
     * Returns the directory portion of the name, including the trailing
     * separator ("" when there is none). Search for both slashes in order
     * to support URLs and files.
     */
    public static String pathName(String name) {
        int index = name.lastIndexOf('/');
        if (index < 0) {
            index = name.lastIndexOf('\\');
        }
        return name.substring(0, index + 1);
    }

    /**
     * Replace all illegal Java identifier chars by '_'.
     */
    public static String toJavaName(String name) {
        if (name.length() > 0) {
            // StringBuilder: local, single-threaded - no need for the
            // synchronized StringBuffer.
            final StringBuilder result = new StringBuilder();
            char ch = name.charAt(0);
            result.append(Character.isJavaIdentifierStart(ch) ? ch : '_');
            final int n = name.length();
            for (int i = 1; i < n; i++) {
                ch = name.charAt(i);
                result.append(Character.isJavaIdentifierPart(ch) ? ch : '_');
            }
            return result.toString();
        }
        return name;
    }

    /** Returns the BCEL type for a JVM type signature. */
    public static Type getJCRefType(String signature) {
        return Type.getType(signature);
    }

    /**
     * Converts a dotted class name into a path using the platform file
     * separator.
     */
    public static String internalName(String cname) {
        return cname.replace('.', filesep);
    }

    public static void println(String s) {
        System.out.println(s);
    }

    public static void println(char ch) {
        System.out.println(ch);
    }

    public static void TRACE1() {
        System.out.println("TRACE1");
    }

    public static void TRACE2() {
        System.out.println("TRACE2");
    }

    public static void TRACE3() {
        System.out.println("TRACE3");
    }

    /**
     * Replace a certain character in a string with a new substring.
     */
    public static String replace(String base, char ch, String str) {
        return (base.indexOf(ch) < 0) ? base : replace(base, String.valueOf(ch), new String[] { str });
    }

    /**
     * Replace every character of {@code delim} found in {@code base} with the
     * replacement string at the same index in {@code str}.
     */
    public static String replace(String base, String delim, String[] str) {
        final int len = base.length();
        final StringBuilder result = new StringBuilder();
        for (int i = 0; i < len; i++) {
            final char ch = base.charAt(i);
            final int k = delim.indexOf(ch);
            if (k >= 0) {
                result.append(str[k]);
            } else {
                result.append(ch);
            }
        }
        return result.toString();
    }

    /**
     * Replace occurrences of '.', '-', '/' and ':'
     */
    public static String escape(String input) {
        return replace(input, ".-/:", new String[] { "$dot$", "$dash$", "$slash$", "$colon$" });
    }

    /** Returns the local part of a qname ("a:b" -> "b"). */
    public static String getLocalName(String qname) {
        // char overload avoids a String scan
        final int index = qname.lastIndexOf(':');
        return (index > 0) ? qname.substring(index + 1) : qname;
    }

    /** Returns the prefix of a qname ("a:b" -> "a"), or "" when unprefixed. */
    public static String getPrefix(String qname) {
        final int index = qname.lastIndexOf(':');
        return (index > 0) ? qname.substring(0, index) : Constants.EMPTYSTRING;
    }

    /**
     * Checks if the string is a literal (i.e. not an AVT) or not.
     * A '{' not followed by a second '{' marks the start of an AVT.
     */
    public static boolean isLiteral(String str) {
        final int length = str.length();
        for (int i = 0; i < length - 1; i++) {
            if (str.charAt(i) == '{' && str.charAt(i + 1) != '{') {
                return false;
            }
        }
        return true;
    }

    /**
     * Checks if the string is a valid whitespace-separated list of qnames.
     * null or "" is considered valid.
     */
    public static boolean isValidQNames(String str) {
        if ((str != null) && (!str.equals(Constants.EMPTYSTRING))) {
            final StringTokenizer tokens = new StringTokenizer(str);
            while (tokens.hasMoreTokens()) {
                if (!XML11Char.isXML11ValidQName(tokens.nextToken())) {
                    return false;
                }
            }
        }
        return true;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.spi.commons.batch; import java.util.Arrays; import javax.jcr.RepositoryException; import org.apache.jackrabbit.spi.Batch; import org.apache.jackrabbit.spi.ItemId; import org.apache.jackrabbit.spi.Name; import org.apache.jackrabbit.spi.NodeId; import org.apache.jackrabbit.spi.PropertyId; import org.apache.jackrabbit.spi.QValue; import org.apache.jackrabbit.spi.Tree; /** * Factory for creating {@link Operation}s. The inner classes of this class * all implement the <code>Operation</code> interface. They are representatives * for the method calls on a {@link Batch}. In addition {@link Empty} represents * the empty operation which does nothing. */ public final class Operations { private Operations() { super(); } // -------------------------------------------------------------< Empty >--- /** * Representative of the empty {@link Operation} which does nothing when * applied to a {@link Batch}. */ public static class Empty implements Operation { private static final Empty INSTANCE = new Empty(); protected Empty() { super(); } /** * This method has no effect. 
* {@inheritDoc} */ public void apply(Batch batch) throws RepositoryException { /* nothing to do */ } @Override public String toString() { return "Empty[]"; } @Override public boolean equals(Object other) { if (null == other) { return false; } return other instanceof Empty; } @Override public int hashCode() { return Empty.class.hashCode(); } } /** * Factory method for creating an {@link Empty} operation. * @return */ public static Operation empty() { return Empty.INSTANCE; } // -----------------------------------------------------------< AddNode >--- /** * Representative of an add-node {@link Operation} which calls * {@link Batch#addNode(NodeId, Name, Name, String)} when applied to a {@link Batch}. */ public static class AddNode implements Operation { protected final NodeId parentId; protected final Name nodeName; protected final Name nodetypeName; protected final String uuid; /** * Create a new add-node {@link Operation} for the given arguments. * @see Batch#addNode(NodeId, Name, Name, String) * * @param parentId * @param nodeName * @param nodetypeName * @param uuid */ public AddNode(NodeId parentId, Name nodeName, Name nodetypeName, String uuid) { super(); this.parentId = parentId; this.nodeName = nodeName; this.nodetypeName = nodetypeName; this.uuid = uuid; } /** * {@inheritDoc} */ public void apply(Batch batch) throws RepositoryException { batch.addNode(parentId, nodeName, nodetypeName, uuid); } @Override public String toString() { return "AddNode[" + parentId + ", " + nodeName + ", " + nodetypeName + ", " + uuid + "]"; } @Override public boolean equals(Object other) { if (null == other) { return false; } if (this == other) { return true; } if (other instanceof AddNode) { return equals((AddNode) other); } return false; } public boolean equals(AddNode other) { return Operations.equals(parentId, other.parentId) && Operations.equals(nodeName, other.nodeName) && Operations.equals(nodetypeName, other.nodetypeName) && Operations.equals(uuid, other.uuid); } @Override 
public int hashCode() { return 41 * ( 41 * ( 41 * ( 41 + Operations.hashCode(parentId)) + Operations.hashCode(nodeName)) + Operations.hashCode(nodetypeName)) + Operations.hashCode(uuid); } } /** * Factory method for creating an {@link AddNode} operation. * @see Batch#addNode(NodeId, Name, Name, String) * * @param parentId * @param nodeName * @param nodetypeName * @param uuid * @return */ public static Operation addNode(NodeId parentId, Name nodeName, Name nodetypeName, String uuid) { return new AddNode(parentId, nodeName, nodetypeName, uuid); } // -------------------------------------------------------< AddProperty >--- /** * Representative of an add-property {@link Operation} which calls * {@link Batch#addProperty(NodeId, Name, QValue)} or {@link Batch#addProperty(NodeId, Name, QValue[])} * depending on whether the property is multi valued or not when applied to a {@link Batch}. */ public static class AddProperty implements Operation { protected final NodeId parentId; protected final Name propertyName; protected final QValue[] values; protected final boolean isMultivalued; private AddProperty(NodeId parentId, Name propertyName, QValue[] values, boolean isMultivalued) { super(); this.parentId = parentId; this.propertyName = propertyName; this.values = values; this.isMultivalued = isMultivalued; } /** * Create a new add-property {@link Operation} for the given arguments. * @see Batch#addProperty(NodeId, Name, QValue) * * @param parentId * @param propertyName * @param value */ public AddProperty(NodeId parentId, Name propertyName, QValue value) { this(parentId, propertyName, new QValue[] { value }, false); } /** * Create a new add-property {@link Operation} for the given arguments. 
* @see Batch#addProperty(NodeId, Name, QValue[]) * * @param parentId * @param propertyName * @param values */ public AddProperty(NodeId parentId, Name propertyName, QValue[] values) { this(parentId, propertyName, values, true); } /** * {@inheritDoc} */ public void apply(Batch batch) throws RepositoryException { if (isMultivalued) { batch.addProperty(parentId, propertyName, values); } else { batch.addProperty(parentId, propertyName, values[0]); } } @Override public String toString() { return "AddProperty[" + parentId + ", " + propertyName + ", " + Arrays.toString(values) + "]"; } @Override public boolean equals(Object other) { if (null == other) { return false; } if (this == other) { return true; } if (other instanceof AddProperty) { return equals((AddProperty) other); } return false; } public boolean equals(AddProperty other) { return Operations.equals(parentId, other.parentId) && Operations.equals(propertyName, other.propertyName) && isMultivalued == other.isMultivalued && Arrays.equals(values, other.values); } @Override public int hashCode() { return 41 * ( 41 * ( 41 + Operations.hashCode(parentId)) + Operations.hashCode(propertyName)) + Operations.hashCode(values); } } /** * Factory method for creating an {@link AddProperty} operation. * * @see Batch#addProperty(NodeId, Name, QValue) * @param parentId * @param propertyName * @param value * @return */ public static Operation addProperty(NodeId parentId, Name propertyName, QValue value) { return new AddProperty(parentId, propertyName, value); } /** * Factory method for creating an {@link AddProperty} operation. 
* * @see Batch#addProperty(NodeId, Name, QValue[]) * @param parentId * @param propertyName * @param values * @return */ public static Operation addProperty(NodeId parentId, Name propertyName, QValue[] values) { return new AddProperty(parentId, propertyName, values); } // --------------------------------------------------------------< Move >--- /** * Representative of a move {@link Operation} which calls * {@link Batch#move(NodeId, NodeId, Name)} when applied to a {@link Batch}. */ public static class Move implements Operation { protected final NodeId srcNodeId; protected final NodeId destParentNodeId; protected final Name destName; /** * Create a new move {@link Operation} for the given arguments. * * @see Batch#move(NodeId, NodeId, Name) * @param srcNodeId * @param destParentNodeId * @param destName */ public Move(NodeId srcNodeId, NodeId destParentNodeId, Name destName) { super(); this.srcNodeId = srcNodeId; this.destParentNodeId = destParentNodeId; this.destName = destName; } /** * {@inheritDoc} */ public void apply(Batch batch) throws RepositoryException { batch.move(srcNodeId, destParentNodeId, destName); } @Override public String toString() { return "Move[" + srcNodeId + ", " + destParentNodeId + ", " + destName + "]"; } @Override public boolean equals(Object other) { if (null == other) { return false; } if (this == other) { return true; } if (other instanceof Move) { return equals((Move) other); } return false; } public boolean equals(Move other) { return Operations.equals(srcNodeId, other.srcNodeId) && Operations.equals(destParentNodeId, other.destParentNodeId) && Operations.equals(destName, other.destName); } @Override public int hashCode() { return 41 * ( 41 * ( 41 + Operations.hashCode(srcNodeId)) + Operations.hashCode(destParentNodeId)) + Operations.hashCode(destName); } } /** * Factory method for creating a {@link Move} operation. 
* * @see Batch#move(NodeId, NodeId, Name) * @param srcNodeId * @param destParentNodeId * @param destName * @return */ public static Operation move(NodeId srcNodeId, NodeId destParentNodeId, Name destName) { return new Move(srcNodeId, destParentNodeId, destName); } // ------------------------------------------------------------< Remove >--- /** * Representative of a remove {@link Operation} which calls {@link Batch#remove(ItemId)} when * applied to a {@link Batch}. */ public static class Remove implements Operation { protected final ItemId itemId; /** * Create a new remove {@link Operation} for the given arguments. * * @see Batch#move(NodeId, NodeId, Name) * @param itemId */ public Remove(ItemId itemId) { super(); this.itemId = itemId; } /** * {@inheritDoc} */ public void apply(Batch batch) throws RepositoryException { batch.remove(itemId); } @Override public String toString() { return "Remove[" + itemId + "]"; } @Override public boolean equals(Object other) { if (null == other) { return false; } if (this == other) { return true; } if (other instanceof Remove) { return equals((Remove) other); } return false; } public boolean equals(Remove other) { return Operations.equals(itemId, other.itemId); } @Override public int hashCode() { return 41 + Operations.hashCode(itemId); } } /** * Factory method for creating a {@link Remove} operation. * * @see Batch#move(NodeId, NodeId, Name) * @param itemId * @return */ public static Operation remove(ItemId itemId) { return new Remove(itemId); } // ------------------------------------------------------< ReorderNodes >--- /** * Representative of a reorder-nodes {@link Operation} which calls * {@link Batch#reorderNodes(NodeId, NodeId, NodeId)} when applied to a {@link Batch}. */ public static class ReorderNodes implements Operation { protected final NodeId parentId; protected final NodeId srcNodeId; protected final NodeId beforeNodeId; /** * Create a new reorder-nodes {@link Operation} for the given arguments. 
* * @see Batch#reorderNodes(NodeId, NodeId, NodeId) * @param parentId * @param srcNodeId * @param beforeNodeId */ public ReorderNodes(NodeId parentId, NodeId srcNodeId, NodeId beforeNodeId) { super(); this.parentId = parentId; this.srcNodeId = srcNodeId; this.beforeNodeId = beforeNodeId; } /** * {@inheritDoc} */ public void apply(Batch batch) throws RepositoryException { batch.reorderNodes(parentId, srcNodeId, beforeNodeId); } @Override public String toString() { return "ReorderNodes[" + parentId + ", " + srcNodeId + ", " + beforeNodeId + "]"; } @Override public boolean equals(Object other) { if (null == other) { return false; } if (this == other) { return true; } if (other instanceof ReorderNodes) { return equals((ReorderNodes) other); } return false; } public boolean equals(ReorderNodes other) { return Operations.equals(parentId, other.parentId) && Operations.equals(srcNodeId, other.srcNodeId) && Operations.equals(beforeNodeId, other.beforeNodeId); } @Override public int hashCode() { return 41 * ( 41 * ( 41 + Operations.hashCode(parentId)) + Operations.hashCode(srcNodeId)) + Operations.hashCode(beforeNodeId); } } /** * Factory method for creating a reorder-nodes {@link Operation} for the given arguments. * * @see Batch#reorderNodes(NodeId, NodeId, NodeId) * @param parentId * @param srcNodeId * @param beforeNodeId * @return */ public static Operation reorderNodes(NodeId parentId, NodeId srcNodeId, NodeId beforeNodeId) { return new ReorderNodes(parentId, srcNodeId, beforeNodeId); } // ---------------------------------------------------------< SetMixins >--- /** * Representative of a set-mixin {@link Operation} which calls * {@link Batch#setMixins(NodeId, Name[])} when applied to a {@link Batch}. */ public static class SetMixins implements Operation { protected final NodeId nodeId; protected final Name[] mixinNodeTypeNames; /** * Create a new set-mixin {@link Operation} for the given arguments. 
* * @see Batch#setMixins(NodeId, Name[]) * @param nodeId * @param mixinNodeTypeNames */ public SetMixins(NodeId nodeId, Name[] mixinNodeTypeNames) { super(); this.nodeId = nodeId; this.mixinNodeTypeNames = mixinNodeTypeNames; } /** * {@inheritDoc} */ public void apply(Batch batch) throws RepositoryException { batch.setMixins(nodeId, mixinNodeTypeNames); } @Override public String toString() { return "SetMixins[" + nodeId + ", " + Arrays.toString(mixinNodeTypeNames) + "]"; } @Override public boolean equals(Object other) { if (null == other) { return false; } if (this == other) { return true; } if (other instanceof SetMixins) { return equals((SetMixins) other); } return false; } public boolean equals(SetMixins other) { return Operations.equals(nodeId, other.nodeId) && Arrays.equals(mixinNodeTypeNames, other.mixinNodeTypeNames); } @Override public int hashCode() { return 41 * ( 41 + Operations.hashCode(nodeId)) + Operations.hashCode(mixinNodeTypeNames); } } /** * Factory method for creating a set-mixin {@link Operation} for the given arguments. * * @see Batch#setMixins(NodeId, Name[]) * @param nodeId * @param mixinNodeTypeNames * @return */ public static Operation setMixins(NodeId nodeId, Name[] mixinNodeTypeNames) { return new SetMixins(nodeId, mixinNodeTypeNames); } // ----------------------------------------------------< SetPrimaryType >--- /** * Representative of a set-mixin {@link Operation} which calls * {@link Batch#setMixins(NodeId, Name[])} when applied to a {@link Batch}. */ public static class SetPrimaryType implements Operation { protected final NodeId nodeId; protected final Name primaryTypeName; /** * Create a new set-mixin {@link Operation} for the given arguments. 
* * @see Batch#setMixins(NodeId, Name[]) * @param nodeId * @param primaryTypeName */ public SetPrimaryType(NodeId nodeId, Name primaryTypeName) { super(); this.nodeId = nodeId; this.primaryTypeName = primaryTypeName; } /** * {@inheritDoc} */ public void apply(Batch batch) throws RepositoryException { batch.setPrimaryType(nodeId, primaryTypeName); } @Override public String toString() { return "SetPrimaryType[" + nodeId + ", " + primaryTypeName + "]"; } @Override public boolean equals(Object other) { if (null == other) { return false; } if (this == other) { return true; } if (other instanceof SetPrimaryType) { return equals((SetPrimaryType) other); } return false; } public boolean equals(SetPrimaryType other) { return Operations.equals(nodeId, other.nodeId) && primaryTypeName.equals(other.primaryTypeName); } @Override public int hashCode() { return 41 * ( 41 + Operations.hashCode(nodeId)) + Operations.hashCode(primaryTypeName); } } /** * Factory method for creating a set-primaryType {@link Operation} for the given arguments. * * @see Batch#setPrimaryType(NodeId, Name) * @param nodeId * @param primaryTypeName * @return */ public static Operation setPrimaryType(NodeId nodeId, Name primaryTypeName) { return new SetPrimaryType(nodeId, primaryTypeName); } // ----------------------------------------------------------< SetValue >--- /** * Representative of a set-value {@link Operation} which calls * {@link Batch#setValue(PropertyId, QValue)} or {@link Batch#setValue(PropertyId, QValue[])} * depending on whether the property is multi valued or not when applied to a {@link Batch}. 
*/
public static class SetValue implements Operation {
    protected final PropertyId propertyId;
    protected final QValue[] values;
    protected final boolean isMultivalued;

    private SetValue(PropertyId propertyId, QValue[] values, boolean isMultivalued) {
        super();
        this.propertyId = propertyId;
        this.values = values;
        this.isMultivalued = isMultivalued;
    }

    /**
     * Create a new set-value {@link Operation} for a single-valued property.
     *
     * @see Batch#setValue(PropertyId, QValue)
     * @param propertyId
     * @param value
     */
    public SetValue(PropertyId propertyId, QValue value) {
        this(propertyId, new QValue[]{ value }, false);
    }

    /**
     * Create a new set-value {@link Operation} for a multi-valued property.
     *
     * @see Batch#setValue(PropertyId, QValue[])
     * @param propertyId
     * @param values
     */
    public SetValue(PropertyId propertyId, QValue[] values) {
        this(propertyId, values, true);
    }

    /**
     * {@inheritDoc}
     */
    public void apply(Batch batch) throws RepositoryException {
        // Dispatch to the single- or multi-valued Batch API depending on how
        // this operation was constructed.
        if (isMultivalued) {
            batch.setValue(propertyId, values);
        }
        else {
            batch.setValue(propertyId, values[0]);
        }
    }

    @Override
    public String toString() {
        return "SetValue[" + propertyId + ", " + Arrays.toString(values) + "]";
    }

    @Override
    public boolean equals(Object other) {
        if (null == other) {
            return false;
        }
        if (this == other) {
            return true;
        }
        if (other instanceof SetValue) {
            return equals((SetValue) other);
        }
        return false;
    }

    public boolean equals(SetValue other) {
        return Operations.equals(propertyId, other.propertyId)
            && isMultivalued == other.isMultivalued
            && Arrays.equals(values, other.values);
    }

    @Override
    public int hashCode() {
        // Use Arrays.hashCode for the values: equals() compares the array
        // *contents* via Arrays.equals, so hashing the array reference
        // (which is what Operations.hashCode(values) would do) would break
        // the equals/hashCode contract for equal instances backed by
        // different array objects.
        return 41 * ( 41 + Operations.hashCode(propertyId)) + Arrays.hashCode(values);
    }
}

/**
 * Factory method for creating set-value {@link Operation} for the given arguments.
 *
 * @see Batch#setValue(PropertyId, QValue)
 * @param propertyId
 * @param value
 * @return
 */
public static Operation setValue(PropertyId propertyId, QValue value) {
    return new SetValue(propertyId, value);
}

/**
 * Factory method for creating a set-value {@link Operation} for the given arguments.
 *
 * @see Batch#setValue(PropertyId, QValue[])
 * @param propertyId
 * @param values
 * @return
 */
public static Operation setValue(final PropertyId propertyId, final QValue[] values) {
    return new SetValue(propertyId, values);
}

// -----------------------------------------------------------< private >---

/**
 * Null-safe equality helper used by the nested operation classes.
 */
protected static boolean equals(Object o1, Object o2) {
    return o1 == null ? o2 == null : o1.equals(o2);
}

/**
 * Null-safe hash code helper used by the nested operation classes.
 */
protected static int hashCode(Object o) {
    return o == null ? 0 : o.hashCode();
}

//--------------------------------------------------------------< SetTree >---

/**
 * Representative of a set-tree {@link Operation} which calls
 * {@link Batch#setTree(NodeId, Tree)} when applied to a {@link Batch}.
 */
public static class SetTree implements Operation {
    protected final NodeId parentId;
    protected final Tree tree;

    public SetTree(NodeId parentId, Tree tree) {
        super();
        this.parentId = parentId;
        this.tree = tree;
    }

    /**
     * {@inheritDoc}
     */
    public void apply(Batch batch) throws RepositoryException {
        batch.setTree(parentId, tree);
    }

    //----------------------------< Object >---
    @Override
    public String toString() {
        return "SetTree[" + parentId + ", " + tree+"]";
    }

    @Override
    public boolean equals(Object other) {
        if (null == other) {
            return false;
        }
        if (this == other) {
            return true;
        }
        if (other instanceof SetTree) {
            return equals((SetTree) other);
        }
        return false;
    }

    public boolean equals(SetTree other) {
        return Operations.equals(parentId, other.parentId)
            && Operations.equals(tree, other.tree);
    }

    @Override
    public int hashCode() {
        return 41 * ( 41 + Operations.hashCode(parentId)) + Operations.hashCode(tree);
    }
}

/**
 * Factory method for creating an {@link SetTree} operation.
 *
 * @see Batch#setTree(NodeId, Tree)
 */
public static Operation setTree(NodeId parentId, Tree contentTree) {
    return new SetTree(parentId, contentTree);
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db; import java.io.IOException; import java.util.function.LongPredicate; import java.util.function.Predicate; import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.config.*; import org.apache.cassandra.db.filter.*; import org.apache.cassandra.db.monitoring.ApproximateTime; import org.apache.cassandra.db.monitoring.MonitorableImpl; import org.apache.cassandra.db.partitions.*; import org.apache.cassandra.db.rows.*; import org.apache.cassandra.db.transform.StoppingTransformation; import org.apache.cassandra.db.transform.Transformation; import org.apache.cassandra.exceptions.UnknownIndexException; import org.apache.cassandra.index.Index; import org.apache.cassandra.index.IndexNotAvailableException; import org.apache.cassandra.io.IVersionedSerializer; import org.apache.cassandra.io.util.DataInputPlus; import org.apache.cassandra.io.util.DataOutputPlus; import org.apache.cassandra.metrics.TableMetrics; import org.apache.cassandra.net.MessageOut; import org.apache.cassandra.schema.IndexMetadata; import org.apache.cassandra.schema.Schema; import org.apache.cassandra.schema.SchemaConstants; import 
org.apache.cassandra.schema.TableId;
import org.apache.cassandra.schema.TableMetadata;
import org.apache.cassandra.service.ClientWarn;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.utils.FBUtilities;

/**
 * General interface for storage-engine read commands (common to both range and
 * single partition commands).
 * <p>
 * This contains all the information needed to do a local read.
 */
public abstract class ReadCommand extends MonitorableImpl implements ReadQuery
{
    // Test-only knob: when > 0, CheckForAbort injects an artificial per-row delay
    // (see maybeDelayForTesting below). Defaults to 0 (disabled).
    private static final int TEST_ITERATION_DELAY_MILLIS = Integer.parseInt(System.getProperty("cassandra.test.read_iteration_delay_ms", "0"));
    protected static final Logger logger = LoggerFactory.getLogger(ReadCommand.class);
    public static final IVersionedSerializer<ReadCommand> serializer = new Serializer();

    private final Kind kind;
    private final TableMetadata metadata;
    private final int nowInSec;
    private final ColumnFilter columnFilter;
    private final RowFilter rowFilter;
    private final DataLimits limits;

    private final boolean isDigestQuery;
    // if a digest query, the version for which the digest is expected. Ignored if not a digest.
    // Intentionally mutable: see setDigestVersion().
    private int digestVersion;

    @Nullable
    private final IndexMetadata index;

    // Per-Kind hook used by Serializer.deserialize to rebuild the concrete subclass
    // once the common fields have been read off the wire.
    protected static abstract class SelectionDeserializer
    {
        public abstract ReadCommand deserialize(DataInputPlus in, int version, boolean isDigest, int digestVersion, TableMetadata metadata, int nowInSec, ColumnFilter columnFilter, RowFilter rowFilter, DataLimits limits, IndexMetadata index) throws IOException;
    }

    // NOTE: the enum ordinal is written on the wire (see Serializer.serialize),
    // so the declaration order below must not change.
    protected enum Kind
    {
        SINGLE_PARTITION (SinglePartitionReadCommand.selectionDeserializer),
        PARTITION_RANGE (PartitionRangeReadCommand.selectionDeserializer);

        private final SelectionDeserializer selectionDeserializer;

        Kind(SelectionDeserializer selectionDeserializer)
        {
            this.selectionDeserializer = selectionDeserializer;
        }
    }

    protected ReadCommand(Kind kind,
                          boolean isDigestQuery,
                          int digestVersion,
                          TableMetadata metadata,
                          int nowInSec,
                          ColumnFilter columnFilter,
                          RowFilter rowFilter,
                          DataLimits limits,
                          IndexMetadata index)
    {
        this.kind = kind;
        this.isDigestQuery = isDigestQuery;
        this.digestVersion = digestVersion;
        this.metadata = metadata;
        this.nowInSec = nowInSec;
        this.columnFilter = columnFilter;
        this.rowFilter = rowFilter;
        this.limits = limits;
        this.index = index;
    }

    protected abstract void serializeSelection(DataOutputPlus out, int version) throws IOException;
    protected abstract long selectionSerializedSize(int version);

    public abstract boolean isLimitedToOnePartition();

    /**
     * Creates a new <code>ReadCommand</code> instance with new limits.
     *
     * @param newLimits the new limits
     * @return a new <code>ReadCommand</code> with the updated limits
     */
    public abstract ReadCommand withUpdatedLimit(DataLimits newLimits);

    /**
     * The metadata for the table queried.
     *
     * @return the metadata for the table queried.
     */
    public TableMetadata metadata()
    {
        return metadata;
    }

    /**
     * The time in seconds to use as "now" for this query.
     * <p>
     * We use the same time as "now" for the whole query to avoid considering different
     * values as expired during the query, which would be buggy (would throw off counting amongst other
     * things).
     *
     * @return the time (in seconds) to use as "now".
     */
    public int nowInSec()
    {
        return nowInSec;
    }

    /**
     * The configured timeout for this command.
     *
     * @return the configured timeout for this command.
     */
    public abstract long getTimeout();

    /**
     * A filter on which (non-PK) columns must be returned by the query.
     *
     * @return which columns must be fetched by this query.
     */
    public ColumnFilter columnFilter()
    {
        return columnFilter;
    }

    /**
     * Filters/Restrictions on CQL rows.
     * <p>
     * This contains the restrictions that are not directly handled by the
     * {@code ClusteringIndexFilter}. More specifically, this includes any non-PK column
     * restrictions and can include some PK columns restrictions when those can't be
     * satisfied entirely by the clustering index filter (because not all clustering columns
     * have been restricted for instance). If there is 2ndary indexes on the table,
     * one of this restriction might be handled by a 2ndary index.
     *
     * @return the filter holding the expression that rows must satisfy.
     */
    public RowFilter rowFilter()
    {
        return rowFilter;
    }

    /**
     * The limits set on this query.
     *
     * @return the limits set on this query.
     */
    public DataLimits limits()
    {
        return limits;
    }

    /**
     * Whether this query is a digest one or not.
     *
     * @return Whether this query is a digest query.
     */
    public boolean isDigestQuery()
    {
        return isDigestQuery;
    }

    /**
     * If the query is a digest one, the requested digest version.
     *
     * @return the requested digest version if the query is a digest. Otherwise, this can return
     * anything.
     */
    public int digestVersion()
    {
        return digestVersion;
    }

    /**
     * Sets the digest version, for when digest for that command is requested.
     * <p>
     * Note that we allow setting this independently of setting the command as a digest query as
     * this allows us to use the command as a carrier of the digest version even if we only call
     * setIsDigestQuery on some copy of it.
     *
     * @param digestVersion the version for the digest is this command is used for digest query..
     * @return this read command.
     */
    public ReadCommand setDigestVersion(int digestVersion)
    {
        this.digestVersion = digestVersion;
        return this;
    }

    /**
     * Index (metadata) chosen for this query. Can be null.
     *
     * @return index (metadata) chosen for this query
     */
    @Nullable
    public IndexMetadata indexMetadata()
    {
        return index;
    }

    /**
     * The clustering index filter this command to use for the provided key.
     * <p>
     * Note that that method should only be called on a key actually queried by this command
     * and in practice, this will almost always return the same filter, but for the sake of
     * paging, the filter on the first key of a range command might be slightly different.
     *
     * @param key a partition key queried by this command.
     *
     * @return the {@code ClusteringIndexFilter} to use for the partition of key {@code key}.
     */
    public abstract ClusteringIndexFilter clusteringIndexFilter(DecoratedKey key);

    /**
     * Returns a copy of this command.
     *
     * @return a copy of this command.
     */
    public abstract ReadCommand copy();

    /**
     * Returns a copy of this command with isDigestQuery set to true.
     */
    public abstract ReadCommand copyAsDigestQuery();

    protected abstract UnfilteredPartitionIterator queryStorage(ColumnFamilyStore cfs, ReadExecutionController executionController);

    protected abstract int oldestUnrepairedTombstone();

    // Builds either a digest or full-data response depending on isDigestQuery().
    public ReadResponse createResponse(UnfilteredPartitionIterator iterator)
    {
        return isDigestQuery()
             ? ReadResponse.createDigestResponse(iterator, this)
             : ReadResponse.createDataResponse(iterator, this);
    }

    long indexSerializedSize(int version)
    {
        return null != index
             ? IndexMetadata.serializer.serializedSize(index, version)
             : 0;
    }

    public Index getIndex(ColumnFamilyStore cfs)
    {
        return null != index
             ? cfs.indexManager.getIndex(index)
             : null;
    }

    static IndexMetadata findIndex(TableMetadata table, RowFilter rowFilter)
    {
        if (table.indexes.isEmpty() || rowFilter.isEmpty())
            return null;

        ColumnFamilyStore cfs = Keyspace.openAndGetStore(table);

        Index index = cfs.indexManager.getBestIndexFor(rowFilter);

        return null != index
             ? index.getIndexMetadata()
             : null;
    }

    /**
     * If the index manager for the CFS determines that there's an applicable
     * 2i that can be used to execute this command, call its (optional)
     * validation method to check that nothing in this command's parameters
     * violates the implementation specific validation rules.
     */
    public void maybeValidateIndex()
    {
        Index index = getIndex(Keyspace.openAndGetStore(metadata));
        if (null != index)
            index.validate(this);
    }

    /**
     * Executes this command on the local host.
     *
     * @param executionController the execution controller spanning this command
     *
     * @return an iterator over the result of executing this command locally.
     */
    @SuppressWarnings("resource") // The result iterator is closed upon exceptions (we know it's fine to potentially not close the intermediary
                                  // iterators created inside the try as long as we do close the original resultIterator), or by closing the result.
    public UnfilteredPartitionIterator executeLocally(ReadExecutionController executionController)
    {
        long startTimeNanos = System.nanoTime();

        ColumnFamilyStore cfs = Keyspace.openAndGetStore(metadata());
        Index index = getIndex(cfs);

        Index.Searcher searcher = null;
        if (index != null)
        {
            if (!cfs.indexManager.isIndexQueryable(index))
                throw new IndexNotAvailableException(index);

            searcher = index.searcherFor(this);
            Tracing.trace("Executing read on {}.{} using index {}", cfs.metadata.keyspace, cfs.metadata.name, index.getIndexMetadata().name);
        }

        UnfilteredPartitionIterator resultIterator = searcher == null
                                                   ? queryStorage(cfs, executionController)
                                                   : searcher.search(executionController);

        try
        {
            // Wrap the raw iterator with abort checking, metrics recording and
            // purgeable-tombstone filtering before applying the row filter and limits.
            resultIterator = withStateTracking(resultIterator);
            resultIterator = withMetricsRecording(withoutPurgeableTombstones(resultIterator, cfs), cfs.metric, startTimeNanos);

            // If we've used a 2ndary index, we know the result already satisfy the primary expression used, so
            // no point in checking it again.
            RowFilter updatedFilter = searcher == null
                                    ? rowFilter()
                                    : index.getPostIndexQueryFilter(rowFilter());

            // TODO: We'll currently do filtering by the rowFilter here because it's convenient. However,
            // we'll probably want to optimize by pushing it down the layer (like for dropped columns) as it
            // would be more efficient (the sooner we discard stuff we know we don't care, the less useless
            // processing we do on it).
            return limits().filter(updatedFilter.filter(resultIterator, nowInSec()), nowInSec(), selectsFullPartition());
        }
        catch (RuntimeException | Error e)
        {
            resultIterator.close();
            throw e;
        }
    }

    protected abstract void recordLatency(TableMetrics metric, long latencyNanos);

    public PartitionIterator executeInternal(ReadExecutionController controller)
    {
        return UnfilteredPartitionIterators.filter(executeLocally(controller), nowInSec());
    }

    public ReadExecutionController executionController()
    {
        return ReadExecutionController.forCommand(this);
    }

    /**
     * Wraps the provided iterator so that metrics on what is scanned by the command are recorded.
     * This also log warning/trow TombstoneOverwhelmingException if appropriate.
     */
    private UnfilteredPartitionIterator withMetricsRecording(UnfilteredPartitionIterator iter, final TableMetrics metric, final long startTimeNanos)
    {
        class MetricRecording extends Transformation<UnfilteredRowIterator>
        {
            private final int failureThreshold = DatabaseDescriptor.getTombstoneFailureThreshold();
            private final int warningThreshold = DatabaseDescriptor.getTombstoneWarnThreshold();

            // Local system keyspaces are exempt from the tombstone warn/failure thresholds.
            private final boolean respectTombstoneThresholds = !SchemaConstants.isLocalSystemKeyspace(ReadCommand.this.metadata().keyspace);
            private final boolean enforceStrictLiveness = metadata.enforceStrictLiveness();

            private int liveRows = 0;
            private int tombstones = 0;

            private DecoratedKey currentKey;

            @Override
            public UnfilteredRowIterator applyToPartition(UnfilteredRowIterator iter)
            {
                currentKey = iter.partitionKey();
                return Transformation.apply(iter, this);
            }

            @Override
            public Row applyToStatic(Row row)
            {
                return applyToRow(row);
            }

            @Override
            public Row applyToRow(Row row)
            {
                boolean hasTombstones = false;
                for (Cell cell : row.cells())
                {
                    if (!cell.isLive(ReadCommand.this.nowInSec()))
                    {
                        countTombstone(row.clustering());
                        hasTombstones = true; // allows to avoid counting an extra tombstone if the whole row expired
                    }
                }

                if (row.hasLiveData(ReadCommand.this.nowInSec(), enforceStrictLiveness))
                    ++liveRows;
                else if (!row.primaryKeyLivenessInfo().isLive(ReadCommand.this.nowInSec())
                        && row.hasDeletion(ReadCommand.this.nowInSec())
                        && !hasTombstones)
                {
                    // We're counting primary key deletions only here.
                    countTombstone(row.clustering());
                }

                return row;
            }

            @Override
            public RangeTombstoneMarker applyToMarker(RangeTombstoneMarker marker)
            {
                countTombstone(marker.clustering());
                return marker;
            }

            // Increments the tombstone count and aborts the whole query once the
            // failure threshold is crossed (unless thresholds are not respected
            // for this keyspace).
            private void countTombstone(ClusteringPrefix clustering)
            {
                ++tombstones;
                if (tombstones > failureThreshold && respectTombstoneThresholds)
                {
                    String query = ReadCommand.this.toCQLString();
                    Tracing.trace("Scanned over {} tombstones for query {}; query aborted (see tombstone_failure_threshold)", failureThreshold, query);
                    metric.tombstoneFailures.inc();
                    throw new TombstoneOverwhelmingException(tombstones, query, ReadCommand.this.metadata(), currentKey, clustering);
                }
            }

            @Override
            public void onClose()
            {
                recordLatency(metric, System.nanoTime() - startTimeNanos);

                metric.tombstoneScannedHistogram.update(tombstones);
                metric.liveScannedHistogram.update(liveRows);

                boolean warnTombstones = tombstones > warningThreshold && respectTombstoneThresholds;
                if (warnTombstones)
                {
                    String msg = String.format(
                            "Read %d live rows and %d tombstone cells for query %1.512s (see tombstone_warn_threshold)",
                            liveRows, tombstones, ReadCommand.this.toCQLString());
                    ClientWarn.instance.warn(msg);
                    if (tombstones < failureThreshold)
                    {
                        metric.tombstoneWarnings.inc();
                    }

                    logger.warn(msg);
                }

                Tracing.trace("Read {} live rows and {} tombstone cells{}",
                        liveRows, tombstones, (warnTombstones ? " (see tombstone_warn_threshold)" : ""));
            }
        };

        return Transformation.apply(iter, new MetricRecording());
    }

    protected class CheckForAbort extends StoppingTransformation<UnfilteredRowIterator>
    {
        long lastChecked = 0;

        protected UnfilteredRowIterator applyToPartition(UnfilteredRowIterator partition)
        {
            if (maybeAbort())
            {
                partition.close();
                return null;
            }

            return Transformation.apply(partition, this);
        }

        protected Row applyToRow(Row row)
        {
            if (TEST_ITERATION_DELAY_MILLIS > 0)
                maybeDelayForTesting();

            return maybeAbort() ? null : row;
        }

        private boolean maybeAbort()
        {
            /**
             * The value returned by ApproximateTime.currentTimeMillis() is updated only every
             * {@link ApproximateTime.CHECK_INTERVAL_MS}, by default 10 millis. Since MonitorableImpl
             * relies on ApproximateTime, we don't need to check unless the approximate time has elapsed.
             */
            if (lastChecked == ApproximateTime.currentTimeMillis())
                return false;

            lastChecked = ApproximateTime.currentTimeMillis();

            if (isAborted())
            {
                stop();
                return true;
            }

            return false;
        }

        private void maybeDelayForTesting()
        {
            // Never slow down reads of system keyspaces, only user tables.
            if (!metadata.keyspace.startsWith("system"))
                FBUtilities.sleepQuietly(TEST_ITERATION_DELAY_MILLIS);
        }
    }

    protected UnfilteredPartitionIterator withStateTracking(UnfilteredPartitionIterator iter)
    {
        return Transformation.apply(iter, new CheckForAbort());
    }

    /**
     * Creates a message for this command.
     */
    public abstract MessageOut<ReadCommand> createMessage();

    protected abstract void appendCQLWhereClause(StringBuilder sb);

    // Skip purgeable tombstones. We do this because it's safe to do (post-merge of the memtable and sstable at least), it
    // can save us some bandwith, and avoid making us throw a TombstoneOverwhelmingException for purgeable tombstones (which
    // are to some extend an artefact of compaction lagging behind and hence counting them is somewhat unintuitive).
    protected UnfilteredPartitionIterator withoutPurgeableTombstones(UnfilteredPartitionIterator iterator, ColumnFamilyStore cfs)
    {
        class WithoutPurgeableTombstones extends PurgeFunction
        {
            public WithoutPurgeableTombstones()
            {
                super(nowInSec(), cfs.gcBefore(nowInSec()), oldestUnrepairedTombstone(),
                      cfs.getCompactionStrategyManager().onlyPurgeRepairedTombstones(),
                      iterator.metadata().enforceStrictLiveness());
            }

            protected LongPredicate getPurgeEvaluator()
            {
                return time -> true;
            }
        }
        return Transformation.apply(iterator, new WithoutPurgeableTombstones());
    }

    /**
     * Recreate the CQL string corresponding to this query.
     * <p>
     * Note that in general the returned string will not be exactly the original user string, first
     * because there isn't always a single syntax for a given query, but also because we don't have
     * all the information needed (we know the non-PK columns queried but not the PK ones as internally
     * we query them all). So this shouldn't be relied too strongly, but this should be good enough for
     * debugging purpose which is what this is for.
     */
    public String toCQLString()
    {
        StringBuilder sb = new StringBuilder();
        sb.append("SELECT ").append(columnFilter());
        sb.append(" FROM ").append(metadata().keyspace).append('.').append(metadata.name);
        appendCQLWhereClause(sb);

        if (limits() != DataLimits.NONE)
            sb.append(' ').append(limits());
        return sb.toString();
    }

    // Monitorable interface
    public String name()
    {
        return toCQLString();
    }

    // Wire format: [kind ordinal: 1 byte][flags: 1 byte][digest version: vint, only
    // if the digest flag is set][table id][nowInSec][column filter][row filter]
    // [limits][index metadata, only if the index flag is set][kind-specific selection].
    // Flag bits: 0x01 = digest query, 0x02 = legacy thrift (rejected on read),
    // 0x04 = index metadata present.
    private static class Serializer implements IVersionedSerializer<ReadCommand>
    {
        private static int digestFlag(boolean isDigest)
        {
            return isDigest ? 0x01 : 0;
        }

        private static boolean isDigest(int flags)
        {
            return (flags & 0x01) != 0;
        }

        // We don't set this flag anymore, but still look if we receive a
        // command with it set in case someone is using thrift a mixed 3.0/4.0+
        // cluster (which is unsupported). This is also a reminder for not
        // re-using this flag until we drop 3.0/3.X compatibility (since it's
        // used by these release for thrift and would thus confuse things)
        private static boolean isForThrift(int flags)
        {
            return (flags & 0x02) != 0;
        }

        private static int indexFlag(boolean hasIndex)
        {
            return hasIndex ? 0x04 : 0;
        }

        private static boolean hasIndex(int flags)
        {
            return (flags & 0x04) != 0;
        }

        public void serialize(ReadCommand command, DataOutputPlus out, int version) throws IOException
        {
            out.writeByte(command.kind.ordinal());
            out.writeByte(digestFlag(command.isDigestQuery()) | indexFlag(null != command.indexMetadata()));
            if (command.isDigestQuery())
                out.writeUnsignedVInt(command.digestVersion());
            command.metadata.id.serialize(out);
            out.writeInt(command.nowInSec());
            ColumnFilter.serializer.serialize(command.columnFilter(), out, version);
            RowFilter.serializer.serialize(command.rowFilter(), out, version);
            DataLimits.serializer.serialize(command.limits(), out, version, command.metadata.comparator);
            if (null != command.index)
                IndexMetadata.serializer.serialize(command.index, out, version);

            command.serializeSelection(out, version);
        }

        public ReadCommand deserialize(DataInputPlus in, int version) throws IOException
        {
            Kind kind = Kind.values()[in.readByte()];
            int flags = in.readByte();
            boolean isDigest = isDigest(flags);

            // Shouldn't happen or it's a user error (see comment above) but
            // better complain loudly than doing the wrong thing.
            if (isForThrift(flags))
                throw new IllegalStateException("Received a command with the thrift flag set. "
                                              + "This means thrift is in use in a mixed 3.0/3.X and 4.0+ cluster, "
                                              + "which is unsupported. Make sure to stop using thrift before "
                                              + "upgrading to 4.0");

            boolean hasIndex = hasIndex(flags);
            int digestVersion = isDigest ? (int)in.readUnsignedVInt() : 0;
            TableMetadata metadata = Schema.instance.getExistingTableMetadata(TableId.deserialize(in));
            int nowInSec = in.readInt();
            ColumnFilter columnFilter = ColumnFilter.serializer.deserialize(in, version, metadata);
            RowFilter rowFilter = RowFilter.serializer.deserialize(in, version, metadata);
            DataLimits limits = DataLimits.serializer.deserialize(in, version,  metadata.comparator);
            IndexMetadata index = hasIndex
                                ? deserializeIndexMetadata(in, version, metadata)
                                : null;

            return kind.selectionDeserializer.deserialize(in, version, isDigest, digestVersion, metadata, nowInSec, columnFilter, rowFilter, limits, index);
        }

        private IndexMetadata deserializeIndexMetadata(DataInputPlus in, int version, TableMetadata metadata) throws IOException
        {
            try
            {
                return IndexMetadata.serializer.deserialize(in, version, metadata);
            }
            catch (UnknownIndexException e)
            {
                // Degrade gracefully: the read proceeds without the index rather than failing.
                logger.info("Couldn't find a defined index on {}.{} with the id {}. "
                          + "If an index was just created, this is likely due to the schema not "
                          + "being fully propagated. Local read will proceed without using the "
                          + "index. Please wait for schema agreement after index creation.",
                          metadata.keyspace, metadata.name, e.indexId);
                return null;
            }
        }

        public long serializedSize(ReadCommand command, int version)
        {
            return 2 // kind + flags
                 + (command.isDigestQuery() ? TypeSizes.sizeofUnsignedVInt(command.digestVersion()) : 0)
                 + command.metadata.id.serializedSize()
                 + TypeSizes.sizeof(command.nowInSec())
                 + ColumnFilter.serializer.serializedSize(command.columnFilter(), version)
                 + RowFilter.serializer.serializedSize(command.rowFilter(), version)
                 + DataLimits.serializer.serializedSize(command.limits(), version, command.metadata.comparator)
                 + command.selectionSerializedSize(version)
                 + command.indexSerializedSize(version);
        }
    }
}
// Copyright 2012 Jason Petersen and Timothy Macdonald
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * Tune.java
 *
 * This class represents the musical portion of a simple 4-part hymn.
 * It does not include the corresponding text.
 *
 * @author Spring 2010
 */

package model;

import io.BadInputException;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Objects;

public final class Tune implements HymnComponent {

    /** The name of this Tune */
    private String name;

    /** The meter for this Tune */
    private Meter meter;

    /** The KeySignature for this Tune */
    private KeySignature key;

    /** The TimeSignature for this Tune */
    private TimeSignature time;

    /** The author of this Tune */
    private String author = "Unknown";

    /** The year the Tune was written */
    private int year = 0;

    /** The starting beat in the measure (1-based; validated by parseStartingBeat) */
    private double startingBeat = 1;

    /** The Music for this Tune */
    private Music music;

    /** The melody for this Tune, typically the soprano Voice */
    private Part melody = Part.SOPRANO;

    /** The Refrain for this Tune */
    private Refrain chorus;

    /** Maps line number to a Refrain which should be added at the end of that line */
    private HashMap<Integer, Refrain> alleluias = new HashMap<Integer, Refrain>();

    /** Determine if the Tune is valid */
    private boolean valid;

    /**
     * Constructor.
     * @param name The name of this Tune.
     * @param meter The meter for this Tune.
     * @param key The key signature for this Tune.
     * @param time The time signature for this Tune.
     * @throws BadInputException The input was null or the name length was 0.
     * @author Jason Petersen
     */
    public Tune(String name, Meter meter, KeySignature key, TimeSignature time) throws BadInputException {
        if(name == null || name.length() == 0 || meter == null || key == null || time == null)
            throw new BadInputException("Input for Tune cannot be null");

        this.name = name;
        this.meter = meter;
        this.key = key;
        this.time = time;
        // A Tune only becomes valid once setMusic is given Music matching the meter.
        this.valid = false;
    }

    @Override
    public String toString() {
        return this.name;
    }

    @Override
    public Meter getMeter() {
        return this.meter;
    }

    @Override
    public boolean isValid() {
        return valid;
    }

    @Override
    public String getFilename() {
        // Keep only filesystem-safe characters (letters, digits, underscore).
        String alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789";
        StringBuilder filename = new StringBuilder();
        for(char c : getName().toCharArray()){
            if(alphabet.indexOf(c) >= 0)
                filename.append(c);
        }
        return filename.toString();
    }

    @Override
    public boolean equals(Object other) {
        if(other == null)
            return false;

        if(other instanceof Tune){
            Tune tune = (Tune) other;
            return (tune.name == null ? this.name == null : tune.name.equals(this.name)) &&
                   (tune.author == null ? this.author == null : tune.author.equals(this.author)) &&
                   tune.year == this.year &&
                   tune.startingBeat == this.startingBeat &&
                   (tune.meter == null ? this.meter == null : tune.meter.equals(this.meter)) &&
                   (tune.key == null ? this.key == null : tune.key.equals(this.key)) &&
                   (tune.time == null ? this.time == null : tune.time.equals(this.time)) &&
                   (tune.melody == null ? this.melody == null : tune.melody.equals(this.melody)) &&
                   (tune.chorus == null ? this.chorus == null : tune.chorus.equals(this.chorus)) &&
                   (tune.music == null ? this.music == null : tune.music.equals(this.music)) &&
                   (equalAlleluias(tune));
        }
        return false;
    }

    /**
     * Hash code consistent with {@link #equals(Object)}: uses a (null-safe)
     * subset of the fields compared there, which is sufficient to guarantee
     * that equal Tunes have equal hash codes. Previously this class overrode
     * equals without hashCode, breaking use in hash-based collections.
     * @author Jason Petersen
     */
    @Override
    public int hashCode() {
        return Objects.hash(name, author, year, startingBeat, meter, key, time, melody, chorus, music);
    }

    /* ---------- Setter methods ---------- */

    /**
     * Set the Music for this Tune
     * Will not be set if Music is null.
     * @param music The Music for this Tune.
     * @author Jason Petersen
     */
    public void setMusic(Music music) {
        if(music != null) {
            this.music = music;
            // The Tune is valid only when its Music matches the meter's syllable count.
            this.valid = meter.numberOfSyllables() == music.numberOfSyllables() && music.isValid();
        }
    }

    /**
     * Set the melody for this Tune.
     * @param part The Part that is the melody for this Tune.
     * @author Jason Petersen
     */
    public void setMelody(Part part) {
        if(part != null)
            this.melody = part;
    }

    /**
     * Set the chorus for this Tune.
     * Will not be updated if input is null.
     * @param chorus The Refrain that is the chorus for this Tune.
     * @author Jason Petersen
     */
    public void setChorus(Refrain chorus) {
        if(chorus != null)
            this.chorus = chorus;
    }

    /**
     * Set the author of this Tune.
     * Will not be updated if input is null.
     * @param author The author of this Tune.
     * @author Jason Petersen
     */
    public void setAuthor(String author) {
        if(author != null && author.length() > 0)
            this.author = author;
    }

    /**
     * Set the year this Tune was written.
     * @param year The year this Tune was written.
     * @author Jason Petersen
     */
    public void setYear(int year) {
        this.year = year;
    }

    /**
     * Set the starting beat for this Tune.
     * Will not be set if input is null or of length 0.
     * @param input The starting beat double as a String.
     * @throws BadInputException The input didn't contain
     * a valid starting beat.
     * @author Jason Petersen
     */
    public void setStartingBeat(String input) throws BadInputException {
        if(input != null && input.length() > 0) {
            startingBeat = parseStartingBeat(input);
        }
    }

    /**
     * Add a Refrain at the end of the given line.
     * @param line The line after which the provided Refrain
     * should be added. Valid line numbers start at 1 and cannot
     * exceed the number of lines in the meter for this Tune.
     * @param alleluia The Refrain to add to the given line.
     * @throws BadInputException The line number is invalid or
     * an alleluia has already been added for the given line.
     * @author Jason Petersen
     */
    public void addAlleluia(int line, Refrain alleluia) throws BadInputException {
        if(alleluia == null)
            throw new BadInputException("Alleluia cannot be null");

        if(line < 1 || line > meter.numberOfLines())
            throw new BadInputException("Alleluia cannot be added to line " + line);

        if(!alleluias.containsKey(line))
            alleluias.put(line, alleluia);
        else
            throw new BadInputException("An alleluia already exists for line " + line);
    }

    /* ---------- Getter methods ---------- */

    /**
     * Get the name for this Tune.
     * @return The name for this Tune.
     * @author Spring 2010
     */
    public String getName() {
        return this.name;
    }

    /**
     * Get the Music for this Tune.
     * May be null.
     * @return The Music for this Tune.
     * @author Spring 2010
     */
    public Music getMusic() {
        return this.music;
    }

    /**
     * Get the KeySignature for this Tune.
     * @return The KeySignature for this Tune.
     * @author Spring 2010
     */
    public KeySignature getKeySignature() {
        return this.key;
    }

    /**
     * Get the TimeSignature for this Tune.
     * @return The TimeSignature for this Tune.
     * @author Spring 2010
     */
    public TimeSignature getTimeSignature() {
        return this.time;
    }

    /**
     * Get the author of this Tune.
     * Default value is "Unknown".
     * @return The author of this Tune.
     * @author Spring 2010
     */
    public String getAuthor() {
        return this.author;
    }

    /**
     * Get the year this Tune was written.
     * Default value is 0.
     * @return The year the Tune was written.
     * @author Spring 2010
     */
    public int getYear() {
        return this.year;
    }

    /**
     * Get the starting beat for this Tune.
     * Default value is 1.
     * @return The starting beat for this Tune.
     * @author Jason Petersen
     */
    public double getStartingBeat() {
        return this.startingBeat;
    }

    /**
     * Get the name of melody for this Tune.
     * @return The name of the melody for this Tune.
     * @author Jason Petersen
     */
    public Part getMelody() {
        return this.melody;
    }

    /**
     * Test if this Tune has a chorus.
     * @return True if the Tune has a chorus,
     * otherwise false.
     * @author Jason Petersen
     */
    public boolean hasChorus() {
        return this.chorus != null;
    }

    /**
     * Get the chorus for this Tune.
     * May be null.
     * @return The chorus for this Tune.
     * @author Jason Petersen
     */
    public Refrain getChorus() {
        return this.chorus;
    }

    /**
     * Get the alleluia Refrain attached to the given line.
     * May be null.
     * @param line The line for which to get the attached Refrain.
     * @return The Refrain attached to the given line,
     * null if no Refrain has been added for the line.
     * @author Jason Petersen
     */
    public Refrain getAlleluiaForLine(int line) {
        if(alleluias.containsKey(line)) {
            return alleluias.get(line);
        }
        return null;
    }

    /* ---------- Helper methods --------- */

    /**
     * Helper method to test if alleluias for another
     * Tune matches this Tune's alleluias.
     * @return True if all the alleluias match, otherwise false.
     * @author Jason Petersen
     */
    private boolean equalAlleluias(Tune other) {
        boolean toReturn = true;
        int line = 1;
        // Iterate once per line in the meter, comparing the Refrain (if any) on each line.
        for(Iterator<Integer> m = meter.iterator(); m.hasNext(); m.next()) {
            Refrain oa = other.getAlleluiaForLine(line);
            Refrain ta = this.getAlleluiaForLine(line);
            toReturn = toReturn && (oa == null ? ta == null : oa.equals(ta));
            line++;
        }
        return toReturn;
    }

    /**
     * Helper method to parse starting beat String input
     * to a double.
     * @param input The starting beat double as a String.
     * @return The String input as a double.
     * @throws BadInputException The input was not able to be expressed
     * as [Integer]/[Note.SMALLESTNOTE] or it did not fit the Tune's time signature.
     * @author Jason Petersen
     */
    private double parseStartingBeat(String input) throws BadInputException {
        double toReturn = makeStringDouble(input);

        if(Math.floor(toReturn) != toReturn) {
            double decimal = toReturn - Math.floor(toReturn);

            // Compute log2(Note.SMALLESTNOTE): the fractional part must become
            // integral within that many doublings to be representable as
            // [Integer]/[Note.SMALLESTNOTE].
            int powTwo = 0;
            int note = Note.SMALLESTNOTE*2;
            while(note/2 != 1){
                powTwo++;
                note = note/2;
            }

            boolean validBeat = false;
            while(powTwo > 0) {
                decimal = decimal*2.0;
                if(Math.floor(decimal) == decimal) {
                    validBeat = true;
                    break;
                }
                powTwo--;
            }

            if(!validBeat)
                throw new BadInputException("The double must be able to be expressed as [Integer]/[" + Note.SMALLESTNOTE + "]");
        }

        if (toReturn < 1 || toReturn > time.getNumBeats())
            throw new BadInputException("Invalid Starting Beat - does not fit Time Signature");

        return toReturn;
    }

    /**
     * Helper method to create a double from a String.
     * @param s A String containing the double representation to be parsed.
     * @return The double value represented by the argument in decimal.
     * @throws BadInputException The string does not contain a parsable double.
     * @author Jason Petersen
     */
    private double makeStringDouble(String s) throws BadInputException {
        try{
            return Double.parseDouble(s);
        }
        catch (NumberFormatException nfe){
            throw new BadInputException(s + " is not a double.");
        }
    }
}
/* * Copyright 2015 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spinnaker.clouddriver.aws.security.config; import com.amazonaws.auth.AWSCredentialsProvider; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.spinnaker.clouddriver.aws.security.AWSAccountInfoLookup; import com.netflix.spinnaker.clouddriver.aws.security.AmazonClientProvider; import com.netflix.spinnaker.clouddriver.aws.security.AmazonCredentials; import com.netflix.spinnaker.clouddriver.aws.security.DefaultAWSAccountInfoLookup; import com.netflix.spinnaker.clouddriver.aws.security.config.CredentialsConfig.Account; import com.netflix.spinnaker.clouddriver.aws.security.config.CredentialsConfig.Region; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.regex.Pattern; import java.util.stream.Collectors; public class CredentialsLoader<T extends AmazonCredentials> { private final AWSCredentialsProvider credentialsProvider; private final AWSAccountInfoLookup awsAccountInfoLookup; private final Map<String, String> templateValues; private final 
CredentialTranslator<T> credentialTranslator; private final ObjectMapper objectMapper; public CredentialsLoader(AWSCredentialsProvider credentialsProvider, AmazonClientProvider amazonClientProvider, Class<T> credentialsType) { this(credentialsProvider, amazonClientProvider, credentialsType, Collections.<String, String>emptyMap()); } public CredentialsLoader(AWSCredentialsProvider credentialsProvider, AmazonClientProvider amazonClientProvider,Class<T> credentialsType, Map<String, String> templateValues) { this(credentialsProvider, new DefaultAWSAccountInfoLookup(credentialsProvider, amazonClientProvider), credentialsType, templateValues); } public CredentialsLoader(AWSCredentialsProvider credentialsProvider, AWSAccountInfoLookup awsAccountInfoLookup, Class<T> credentialsType) { this(credentialsProvider, awsAccountInfoLookup, credentialsType, Collections.<String, String>emptyMap()); } public CredentialsLoader(AWSCredentialsProvider credentialsProvider, AWSAccountInfoLookup awsAccountInfoLookup, Class<T> credentialsType, Map<String, String> templateValues) { this.credentialsProvider = Objects.requireNonNull(credentialsProvider, "credentialsProvider"); this.awsAccountInfoLookup = awsAccountInfoLookup; this.templateValues = templateValues; this.objectMapper = new ObjectMapper(); this.credentialTranslator = findTranslator(credentialsType, this.objectMapper); } private Lazy<List<Region>> createDefaults(final List<Region> defaults) { return new Lazy<>(new Lazy.Loader<List<Region>>() { @Override public List<Region> get() { if (defaults == null) { return toRegion(awsAccountInfoLookup.listRegions()); } else { List<Region> result = new ArrayList<>(defaults.size()); List<String> toLookup = new ArrayList<>(); for (Region def : defaults) { if (def.getAvailabilityZones() == null || def.getAvailabilityZones().isEmpty()) { toLookup.add(def.getName()); } else { result.add(def); } } if (!toLookup.isEmpty()) { List<Region> resolved = 
toRegion(awsAccountInfoLookup.listRegions(toLookup)); for (Region region : resolved) { Region fromDefault = find(defaults, region.getName()); if (fromDefault != null) { region.setPreferredZones(fromDefault.getPreferredZones()); region.setDeprecated(fromDefault.getDeprecated()); } } result.addAll(resolved); } return result; } } }); } private List<Region> initRegions(Lazy<List<Region>> defaults, List<Region> toInit) { if (toInit == null) { return defaults.get(); } Map<String, Region> toInitByName = toInit.stream().collect( Collectors.toMap(Region::getName, Function.identity()) ); List<Region> result = new ArrayList<>(toInit.size()); List<String> toLookup = new ArrayList<>(); for (Region r : toInit) { if (r.getAvailabilityZones() == null || r.getAvailabilityZones().isEmpty()) { toLookup.add(r.getName()); } else { result.add(r); } } for (Iterator<String> lookups = toLookup.iterator(); lookups.hasNext(); ) { Region fromDefault = find(defaults.get(), lookups.next()); if (fromDefault != null) { lookups.remove(); result.add(fromDefault); } } if (!toLookup.isEmpty()) { List<Region> resolved = toRegion(awsAccountInfoLookup.listRegions(toLookup)); for (Region region : resolved) { Region src = find(toInit, region.getName()); if (src == null || src.getPreferredZones() == null) { src = find(defaults.get(), region.getName()); } if (src != null) { region.setPreferredZones(src.getPreferredZones()); } } result.addAll(resolved); } // make a clone of all regions such that modifications apply only to this specific instance (and not global defaults) result = result.stream().map(Region::copyOf).collect(Collectors.toList()); for (Region r : result) { Region toInitRegion = toInitByName.get(r.getName()); if (toInitRegion != null && toInitRegion.getDeprecated() != null) { r.setDeprecated(toInitRegion.getDeprecated()); } } return result; } private static Region find(List<Region> src, String name) { if (src != null) { for (Region r : src) { if (r.getName().equals(name)) { return r; } } } 
return null; } private static List<Region> toRegion(List<AmazonCredentials.AWSRegion> src) { List<Region> result = new ArrayList<>(src.size()); for (AmazonCredentials.AWSRegion r : src) { Region region = new Region(); region.setName(r.getName()); region.setAvailabilityZones(new ArrayList<>(r.getAvailabilityZones())); region.setPreferredZones(new ArrayList<>(r.getPreferredZones())); result.add(region); } return result; } public T load(String accountName) throws Throwable { CredentialsConfig config = new CredentialsConfig(); Account account = new Account(); account.setName(accountName); config.setAccounts(Arrays.asList(account)); List<T> result = load(config); if (result.size() != 1) { throw new IllegalStateException("failed to create account"); } return result.get(0); } public List<T> load(CredentialsConfig source) throws Throwable { final CredentialsConfig config = objectMapper.convertValue(source, CredentialsConfig.class); if (config.getAccounts() == null || config.getAccounts().isEmpty()) { return Collections.emptyList(); } Lazy<List<Region>> defaultRegions = createDefaults(config.getDefaultRegions()); List<T> initializedAccounts = new ArrayList<>(config.getAccounts().size()); for (Account account : config.getAccounts()) { if (account.getAccountId() == null) { if (!credentialTranslator.resolveAccountId()) { throw new IllegalArgumentException("accountId is required and not resolvable for this credentials type"); } account.setAccountId(awsAccountInfoLookup.findAccountId()); } if (account.getEnvironment() == null) { account.setEnvironment(account.getName()); } if (account.getAccountType() == null) { account.setAccountType(account.getName()); } account.setRegions(initRegions(defaultRegions, account.getRegions())); account.setDefaultSecurityGroups(account.getDefaultSecurityGroups() != null ? 
account.getDefaultSecurityGroups() : config.getDefaultSecurityGroups()); Map<String, String> templateContext = new HashMap<>(templateValues); templateContext.put("name", account.getName()); templateContext.put("accountId", account.getAccountId()); templateContext.put("environment", account.getEnvironment()); templateContext.put("accountType", account.getAccountType()); account.setDefaultKeyPair(templateFirstNonNull(templateContext, account.getDefaultKeyPair(), config.getDefaultKeyPairTemplate())); account.setEdda(templateFirstNonNull(templateContext, account.getEdda(), config.getDefaultEddaTemplate())); account.setFront50(templateFirstNonNull(templateContext, account.getFront50(), config.getDefaultFront50Template())); account.setDiscovery(templateFirstNonNull(templateContext, account.getDiscovery(), config.getDefaultDiscoveryTemplate())); account.setAssumeRole(templateFirstNonNull(templateContext, account.getAssumeRole(), config.getDefaultAssumeRole())); account.setSessionName(templateFirstNonNull(templateContext, account.getSessionName(), config.getDefaultSessionName())); account.setBastionHost(templateFirstNonNull(templateContext, account.getBastionHost(), config.getDefaultBastionHostTemplate())); initializedAccounts.add(credentialTranslator.translate(credentialsProvider, account)); } return initializedAccounts; } private static class Lazy<T> { public static interface Loader<T> { T get(); } private final Loader<T> loader; private final AtomicReference<T> ref = new AtomicReference<>(); public Lazy(Loader<T> loader) { this.loader = loader; } public T get() { if (ref.get() == null) { ref.set(loader.get()); } return ref.get(); } } private static String templateFirstNonNull(Map<String, String> substitutions, String... 
values) { for (String value : values) { if (value != null) { return StringTemplater.render(value, substitutions); } } return null; } static <T extends AmazonCredentials> CredentialTranslator<T> findTranslator(Class<T> credentialsType, ObjectMapper objectMapper) { return new CopyConstructorTranslator<>(objectMapper, credentialsType); } static interface CredentialTranslator<T extends AmazonCredentials> { Class<T> getCredentialType(); boolean resolveAccountId(); T translate(AWSCredentialsProvider credentialsProvider, Account account) throws Throwable; } static class CopyConstructorTranslator<T extends AmazonCredentials> implements CredentialTranslator<T> { private final ObjectMapper objectMapper; private final Class<T> credentialType; private final Constructor<T> copyConstructor; public CopyConstructorTranslator(ObjectMapper objectMapper, Class<T> credentialType) { this.objectMapper = objectMapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); this.credentialType = credentialType; try { copyConstructor = credentialType.getConstructor(credentialType, AWSCredentialsProvider.class); } catch (NoSuchMethodException nsme) { throw new IllegalArgumentException("Class " + credentialType + " must supply a constructor with " + credentialType + ", " + AWSCredentialsProvider.class + " args."); } } @Override public Class<T> getCredentialType() { return credentialType; } @Override public boolean resolveAccountId() { try { credentialType.getMethod("getAssumeRole"); return false; } catch (NoSuchMethodException nsme) { return true; } } @Override public T translate(AWSCredentialsProvider credentialsProvider, Account account) throws Throwable { T immutableInstance = objectMapper.convertValue(account, credentialType); try { return copyConstructor.newInstance(immutableInstance, credentialsProvider); } catch (InvocationTargetException ite) { throw ite.getTargetException(); } } } static class StringTemplater { public static String render(String template, Map<String, String> 
substitutions) { String base = template; int iterations = 0; boolean changed = true; while (changed && iterations < 10) { iterations++; String previous = base; for (Map.Entry<String, String> substitution : substitutions.entrySet()) { base = base.replaceAll(Pattern.quote("{{" + substitution.getKey() + "}}"), substitution.getValue()); } changed = !previous.equals(base); } if (changed) { throw new RuntimeException("too many levels of templatery"); } return base; } } }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.search.aggregations.bucket.terms; import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ParseField; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds; import org.elasticsearch.search.aggregations.bucket.terms.heuristic.JLHScore; import org.elasticsearch.search.aggregations.bucket.terms.heuristic.SignificanceHeuristic; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import java.io.IOException; import java.util.Map; import java.util.Objects; import static 
org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationBuilder<SignificantTermsAggregationBuilder> { public static final String NAME = "significant_terms"; public static final ValuesSourceRegistry.RegistryKey<SignificantTermsAggregatorSupplier> REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>(NAME, SignificantTermsAggregatorSupplier.class); static final ParseField BACKGROUND_FILTER = new ParseField("background_filter"); static final TermsAggregator.BucketCountThresholds DEFAULT_BUCKET_COUNT_THRESHOLDS = new TermsAggregator.BucketCountThresholds( 3, 0, 10, -1 ); static final SignificanceHeuristic DEFAULT_SIGNIFICANCE_HEURISTIC = new JLHScore(); private static final ObjectParser<SignificantTermsAggregationBuilder, Void> PARSER = new ObjectParser<>( SignificantTermsAggregationBuilder.NAME, SignificanceHeuristic.class, SignificantTermsAggregationBuilder::significanceHeuristic, null ); static { ValuesSourceAggregationBuilder.declareFields(PARSER, true, true, false); PARSER.declareInt(SignificantTermsAggregationBuilder::shardSize, TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME); PARSER.declareLong(SignificantTermsAggregationBuilder::minDocCount, TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME); PARSER.declareLong(SignificantTermsAggregationBuilder::shardMinDocCount, TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME); PARSER.declareInt(SignificantTermsAggregationBuilder::size, TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME); PARSER.declareString(SignificantTermsAggregationBuilder::executionHint, TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME); PARSER.declareObject( SignificantTermsAggregationBuilder::backgroundFilter, (p, context) -> parseInnerQueryBuilder(p), SignificantTermsAggregationBuilder.BACKGROUND_FILTER ); PARSER.declareField( (b, v) -> b.includeExclude(IncludeExclude.merge(v, b.includeExclude())), IncludeExclude::parseInclude, 
IncludeExclude.INCLUDE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING ); PARSER.declareField( (b, v) -> b.includeExclude(IncludeExclude.merge(b.includeExclude(), v)), IncludeExclude::parseExclude, IncludeExclude.EXCLUDE_FIELD, ObjectParser.ValueType.STRING_ARRAY ); } public static SignificantTermsAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { return PARSER.parse(parser, new SignificantTermsAggregationBuilder(aggregationName), null); } public static void registerAggregators(ValuesSourceRegistry.Builder builder) { SignificantTermsAggregatorFactory.registerAggregators(builder); } private IncludeExclude includeExclude = null; private String executionHint = null; private QueryBuilder filterBuilder = null; private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(DEFAULT_BUCKET_COUNT_THRESHOLDS); private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC; public SignificantTermsAggregationBuilder(String name) { super(name); } /** * Read from a Stream. 
*/ public SignificantTermsAggregationBuilder(StreamInput in) throws IOException { super(in); bucketCountThresholds = new BucketCountThresholds(in); executionHint = in.readOptionalString(); filterBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); includeExclude = in.readOptionalWriteable(IncludeExclude::new); significanceHeuristic = in.readNamedWriteable(SignificanceHeuristic.class); } protected SignificantTermsAggregationBuilder( SignificantTermsAggregationBuilder clone, AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metadata ) { super(clone, factoriesBuilder, metadata); this.bucketCountThresholds = new BucketCountThresholds(clone.bucketCountThresholds); this.executionHint = clone.executionHint; this.filterBuilder = clone.filterBuilder; this.includeExclude = clone.includeExclude; this.significanceHeuristic = clone.significanceHeuristic; } @Override protected ValuesSourceType defaultValueSourceType() { return CoreValuesSourceType.KEYWORD; } @Override protected SignificantTermsAggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metadata) { return new SignificantTermsAggregationBuilder(this, factoriesBuilder, metadata); } protected AggregationBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { if (filterBuilder != null) { QueryBuilder rewrittenFilter = filterBuilder.rewrite(queryRewriteContext); if (rewrittenFilter != filterBuilder) { SignificantTermsAggregationBuilder rewritten = shallowCopy(factoriesBuilder, metadata); rewritten.backgroundFilter(rewrittenFilter); return rewritten; } } return super.doRewrite(queryRewriteContext); } @Override protected void innerWriteTo(StreamOutput out) throws IOException { bucketCountThresholds.writeTo(out); out.writeOptionalString(executionHint); out.writeOptionalNamedWriteable(filterBuilder); out.writeOptionalWriteable(includeExclude); out.writeNamedWriteable(significanceHeuristic); } @Override protected boolean 
serializeTargetValueType(Version version) { return true; } public TermsAggregator.BucketCountThresholds getBucketCountThresholds() { return new TermsAggregator.BucketCountThresholds(bucketCountThresholds); } public TermsAggregator.BucketCountThresholds bucketCountThresholds() { return bucketCountThresholds; } public SignificantTermsAggregationBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) { if (bucketCountThresholds == null) { throw new IllegalArgumentException("[bucketCountThresholds] must not be null: [" + name + "]"); } this.bucketCountThresholds = bucketCountThresholds; return this; } /** * Sets the size - indicating how many term buckets should be returned * (defaults to 10) */ public SignificantTermsAggregationBuilder size(int size) { if (size <= 0) { throw new IllegalArgumentException("[size] must be greater than 0. Found [" + size + "] in [" + name + "]"); } bucketCountThresholds.setRequiredSize(size); return this; } /** * Sets the shard_size - indicating the number of term buckets each shard * will return to the coordinating node (the node that coordinates the * search execution). The higher the shard size is, the more accurate the * results are. */ public SignificantTermsAggregationBuilder shardSize(int shardSize) { if (shardSize <= 0) { throw new IllegalArgumentException("[shardSize] must be greater than 0. Found [" + shardSize + "] in [" + name + "]"); } bucketCountThresholds.setShardSize(shardSize); return this; } /** * Set the minimum document count terms should have in order to appear in * the response. */ public SignificantTermsAggregationBuilder minDocCount(long minDocCount) { if (minDocCount < 0) { throw new IllegalArgumentException( "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]" ); } bucketCountThresholds.setMinDocCount(minDocCount); return this; } /** * Set the minimum document count terms should have on the shard in order to * appear in the response. 
*/ public SignificantTermsAggregationBuilder shardMinDocCount(long shardMinDocCount) { if (shardMinDocCount < 0) { throw new IllegalArgumentException( "[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]" ); } bucketCountThresholds.setShardMinDocCount(shardMinDocCount); return this; } /** * Expert: sets an execution hint to the aggregation. */ public SignificantTermsAggregationBuilder executionHint(String executionHint) { this.executionHint = executionHint; return this; } /** * Expert: gets an execution hint to the aggregation. */ public String executionHint() { return executionHint; } public SignificantTermsAggregationBuilder backgroundFilter(QueryBuilder backgroundFilter) { if (backgroundFilter == null) { throw new IllegalArgumentException("[backgroundFilter] must not be null: [" + name + "]"); } this.filterBuilder = backgroundFilter; return this; } public QueryBuilder backgroundFilter() { return filterBuilder; } /** * Set terms to include and exclude from the aggregation results */ public SignificantTermsAggregationBuilder includeExclude(IncludeExclude includeExclude) { this.includeExclude = includeExclude; return this; } /** * Get terms to include and exclude from the aggregation results */ public IncludeExclude includeExclude() { return includeExclude; } public SignificantTermsAggregationBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) { if (significanceHeuristic == null) { throw new IllegalArgumentException("[significanceHeuristic] must not be null: [" + name + "]"); } this.significanceHeuristic = significanceHeuristic; return this; } public SignificanceHeuristic significanceHeuristic() { return significanceHeuristic; } @Override public BucketCardinality bucketCardinality() { return BucketCardinality.MANY; } @Override protected ValuesSourceAggregatorFactory innerBuild( AggregationContext context, ValuesSourceConfig config, AggregatorFactory parent, AggregatorFactories.Builder 
subFactoriesBuilder ) throws IOException { SignificanceHeuristic executionHeuristic = significanceHeuristic.rewrite(context); SignificantTermsAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config); return new SignificantTermsAggregatorFactory( name, config, includeExclude, executionHint, filterBuilder, bucketCountThresholds, executionHeuristic, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier ); } @Override protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { bucketCountThresholds.toXContent(builder, params); if (executionHint != null) { builder.field(TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint); } if (filterBuilder != null) { builder.field(BACKGROUND_FILTER.getPreferredName(), filterBuilder); } if (includeExclude != null) { includeExclude.toXContent(builder, params); } significanceHeuristic.toXContent(builder, params); return builder; } @Override public int hashCode() { return Objects.hash(super.hashCode(), bucketCountThresholds, executionHint, filterBuilder, includeExclude, significanceHeuristic); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; SignificantTermsAggregationBuilder other = (SignificantTermsAggregationBuilder) obj; return Objects.equals(bucketCountThresholds, other.bucketCountThresholds) && Objects.equals(executionHint, other.executionHint) && Objects.equals(filterBuilder, other.filterBuilder) && Objects.equals(includeExclude, other.includeExclude) && Objects.equals(significanceHeuristic, other.significanceHeuristic); } @Override public String getType() { return NAME; } @Override protected ValuesSourceRegistry.RegistryKey<?> getRegistryKey() { return REGISTRY_KEY; } }
package com.patys.zs; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.URL; import java.util.ArrayList; import java.util.List; import jxl.Cell; import jxl.Sheet; import jxl.Workbook; import jxl.read.biff.BiffException; import android.app.Activity; import android.app.Dialog; import android.app.ProgressDialog; import android.os.AsyncTask; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.os.StrictMode; import android.view.Menu; import android.view.MenuItem; import android.widget.ArrayAdapter; import android.widget.ListView; import android.widget.TextView; public class Zastepstwa extends Activity { private static String file_url = "http://zs.ketrzyn.pl/plan/zast.xls"; private String file_path; public static Boolean downloaded; private ListView excel_view; private ProgressDialog p_dialog; public static final int progress_bar_type = 0; private Handler handler; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_zastepstwa); StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build(); StrictMode.setThreadPolicy(policy); file_path = this.getFilesDir().toString() + "/zast.xls"; downloaded = false; new DownloadFileFromURL().execute(file_url); final File file = new File(file_path); /* handler = new Handler(); handler.post(new Runnable(){ @Override public void run() { if(downloaded == true) System.out.println("Yep dziala"); else System.out.println("nope, shhit"); if(file.exists() == true) System.out.println("Yep dziala plik"); else System.out.println("nope, shhit plik"); // upadte textView here handler.postDelayed(this,500); // set time here to refresh textView } });*/ String excel_full = ""; if(file.exists()) { List<String> excel_dataList; try { excel_dataList = read(); for (int 
i = 0; i < excel_dataList.size(); i++) { String item = excel_dataList.get(i); if(item.contains("#NEXT")) { excel_full += "\n"; }else { excel_full += item; } } } catch (IOException e) { e.printStackTrace(); } } TextView textView = (TextView) findViewById(R.id.excel_text); textView.setText(excel_full); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.zastepstwa, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } /** * Showing Dialog * */ @Override protected Dialog onCreateDialog(int id) { switch (id) { case progress_bar_type: // we set this to 0 p_dialog = new ProgressDialog(this); p_dialog.setMessage("Syncing with school server ..."); p_dialog.setIndeterminate(true); p_dialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL); p_dialog.setCancelable(true); p_dialog.show(); return p_dialog; default: return null; } } /** * Background Async Task to download file * */ class DownloadFileFromURL extends AsyncTask<String, String, String> { /** * Before starting background thread Show Progress Bar Dialog * */ @Override protected void onPreExecute() { super.onPreExecute(); showDialog(progress_bar_type); } /** * Downloading file in background thread * */ @Override protected String doInBackground(String... 
sUrl) { InputStream input = null; OutputStream output = null; HttpURLConnection connection = null; try { URL url = new URL(sUrl[0]); connection = (HttpURLConnection) url.openConnection(); connection.connect(); // expect HTTP 200 OK, so we don't mistakenly save error report // instead of the file if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) { return "Server returned HTTP " + connection.getResponseCode() + " " + connection.getResponseMessage(); } // this will be useful to display download percentage // might be -1: server did not report the length int fileLength = connection.getContentLength(); // download the file input = connection.getInputStream(); output = new FileOutputStream("zast.xls"); byte data[] = new byte[4096]; long total = 0; int count; while ((count = input.read(data)) != -1) { // allow canceling with back button if (isCancelled()) { input.close(); return null; } total += count; // publishing the progress.... if (fileLength > 0) // only if total length is known publishProgress(""+(int) (total * 100 / fileLength)); output.write(data, 0, count); } } catch (Exception e) { return e.toString(); } finally { try { if (output != null) output.close(); if (input != null) input.close(); } catch (IOException ignored) { } if (connection != null) connection.disconnect(); } return null; } /** * Updating progress bar * */ protected void onProgressUpdate(String... 
progress) { // setting progress percentage p_dialog.setProgress(Integer.parseInt(progress[0])); } /** * After completing background task Dismiss the progress dialog * **/ @Override protected void onPostExecute(String file_url) { // dismiss the dialog after the file was downloaded dismissDialog(progress_bar_type); downloaded = true; } } public List<String> read() throws IOException { List<String> resultSet = new ArrayList<String>(); File inputWorkbook = new File(file_path); if(inputWorkbook.exists()){ Workbook w; try { w = Workbook.getWorkbook(inputWorkbook); // Get the first sheet Sheet sheet = w.getSheet(0); // Loop over column and lines for (int j = 0; j < sheet.getRows(); j++) { for (int i = 0; i < sheet.getColumns(); i++) { Cell cel = sheet.getCell(i, j); String contents = cel.getContents(); // ustawianie tekstu if(i == 0) { if(contents == null || contents.length() == 0) resultSet.add(" "); else if(contents.length() < 10) { while(contents.length() != 10) { contents += " "; } } resultSet.add(contents); } else if(i == 1) { if(contents == null || contents.length() == 0) resultSet.add(" "); else if(contents.length() < 15) { while(contents.length() != 15) { contents += " "; } } resultSet.add(contents); } else if(i == 2) { if(contents == null || contents.length() == 0) resultSet.add(" "); else if(contents.length() < 20) { while(contents.length() != 20) { contents += " "; } } resultSet.add(contents); } else if(i == 3) { if(contents == null || contents.length() == 0) resultSet.add(" "); else if(contents.length() < 20) { while(contents.length() != 20) { contents += " "; } } resultSet.add(contents); } else resultSet.add(contents); } resultSet.add("#NEXT"); continue; } } catch (BiffException e) { e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); } } else { resultSet.add("File not found..!"); } if(resultSet.size()==0){ resultSet.add("Data not found..!"); } return resultSet; } }
package info.novatec.testit.webtester.browser;

import static info.novatec.testit.webtester.eventsystem.EventSystem.fireEvent;

import java.io.File;
import java.io.IOException;
import java.net.URL;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.openqa.selenium.Alert;
import org.openqa.selenium.By;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.Keys;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.TimeoutException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import info.novatec.testit.webtester.api.browser.Browser;
import info.novatec.testit.webtester.api.browser.BrowserBuilder;
import info.novatec.testit.webtester.api.browser.BrowserFactory;
import info.novatec.testit.webtester.api.browser.BrowserIdentification;
import info.novatec.testit.webtester.api.callbacks.BrowserCallback;
import info.novatec.testit.webtester.api.callbacks.BrowserCallbackWithReturnValue;
import info.novatec.testit.webtester.api.config.Configuration;
import info.novatec.testit.webtester.api.pageobjects.Identification;
import info.novatec.testit.webtester.api.pageobjects.PageObjectFactory;
import info.novatec.testit.webtester.api.pageobjects.PageObjectList;
import info.novatec.testit.webtester.eventsystem.events.browser.AcceptedAlertEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.ClosedBrowserEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.ClosedWindowEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.DeclinedAlertEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.ExceptionEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.MaximizedWindowEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.NavigatedBackwardsEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.NavigatedForwardsEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.OpenedUrlEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.RefreshedPageEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.SavedSourceCodeEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.SwitchedToDefaultContentEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.SwitchedToFrameEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.SwitchedToWindowEvent;
import info.novatec.testit.webtester.eventsystem.events.browser.TookScreenshotEvent;
import info.novatec.testit.webtester.internal.pageobjects.PageObjectModel;
import info.novatec.testit.webtester.pageobjects.GenericElement;
import info.novatec.testit.webtester.pageobjects.PageObject;
import info.novatec.testit.webtester.utils.PageObjectFinder;
import info.novatec.testit.webtester.utils.PageObjectFinder.IdentificationFinder;
import info.novatec.testit.webtester.utils.PageObjectFinder.TypedFinder;


/**
 * Implements {@link Browser browser} by using a wrapped Selenium
 * {@link WebDriver web driver}.
 * <p>
 * This class can be initialized using the static factory methods:
 * <ul>
 * <li><code>WebDriverBrowser.buildForWebDriver(webDriver)</code></li>
 * <li><code>WebDriverBrowser.forWebDriver(webDriver).withConfiguration(config).
 * build()</code></li>
 * </ul>
 *
 * @since 0.9.6
 */
public class WebDriverBrowser implements Browser {

    private static final Logger logger = LoggerFactory.getLogger(WebDriverBrowser.class);

    // injected via setters by the builder/factory after construction
    private PageObjectFactory pageObjectFactory;
    private Configuration configuration;

    private final WebDriver webDriver;
    private final BrowserIdentification identification;

    /**
     * Flag to mark if this {@link WebDriverBrowser browser} is closed.
     */
    private boolean closed;

    /**
     * Creates a new {@link WebDriverBrowser} instance wrapping the given
     * {@link WebDriver}. {@link WebDriverBrowser} instantiation is usually done
     * by a {@link BrowserFactory}. Each new {@link WebDriverBrowser} will be
     * registered at the {@link BrowserRegistry}.
     *
     * @param webDriver the {@link WebDriver} to be wrapped by this
     * {@link WebDriverBrowser}.
     * @since 0.9.6
     */
    protected WebDriverBrowser(WebDriver webDriver) {
        this.webDriver = webDriver;
        this.identification = new BrowserIdentification();
        // register eagerly so the registry knows about this browser for its
        // whole lifetime; close() deregisters it again
        BrowserRegistry.registerBrowser(this);
    }

    @Override
    public WebDriverBrowser openDefaultEntryPoint() {
        // entry-point URL comes from the injected configuration
        return open(configuration.getDefaultApplicationEntryPoint());
    }

    @Override
    public <T extends PageObject> T openDefaultEntryPoint(Class<T> pageObjectClass) {
        return open(configuration.getDefaultApplicationEntryPoint(), pageObjectClass);
    }

    @Override
    public WebDriverBrowser open(URL url) {
        return open(url.toString());
    }

    @Override
    public <T extends PageObject> T open(URL url, Class<T> pageObjectClass) {
        return open(url.toString(), pageObjectClass);
    }

    @Override
    public WebDriverBrowser open(final String url) {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                getWebDriver().get(url);
                fireEvent(new OpenedUrlEvent(browser, url));
            }

        });
        return this;
    }

    @Override
    public <T extends PageObject> T open(final String url, final Class<T> pageObjectClass) {
        return executeAction(new BrowserCallbackWithReturnValue<T>() {

            @Override
            public T execute(Browser browser) {
                getWebDriver().get(url);
                fireEvent(new OpenedUrlEvent(browser, url));
                // create the page object only after the URL was opened
                return browser.create(pageObjectClass);
            }

        });
    }

    @Override
    public <T extends PageObject> T create(Class<T> pageObjectClass) {
        return pageObjectFactory.create(pageObjectClass, PageObjectModel.forPage(this));
    }

    @Override
    public void close() {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                // idempotent: closing an already-closed browser is a no-op
                if (!closed) {
                    BrowserRegistry.deregisterBrowser(browser);
                    try {
                        // fire the event BEFORE quitting so listeners can still
                        // interact with the live driver
                        fireEvent(new ClosedBrowserEvent(browser));
                    } finally {
                        // quit and mark closed even if a listener threw
                        getWebDriver().quit();
                        closed = true;
                    }
                }
            }

        });
    }

    @Override
    public void closeCurrentWindow() {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                // create the event up front — after close() the window's state
                // may no longer be accessible
                ClosedWindowEvent event = new ClosedWindowEvent(browser);
                getWebDriver().close();
                getWebDriver().switchTo().defaultContent();
                fireEvent(event);
            }

        });
    }

    @Override
    public WebDriverBrowser maximizeCurrentWindow() {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                getWebDriver().manage().window().maximize();
                fireEvent(new MaximizedWindowEvent(browser));
            }

        });
        return this;
    }

    @Override
    public String getPageTitle() {
        return executeAction(new BrowserCallbackWithReturnValue<String>() {

            @Override
            public String execute(Browser browser) {
                // never return null — fall back to the empty string
                return StringUtils.defaultString(getWebDriver().getTitle());
            }

        });
    }

    @Override
    public String getUrl() {
        return executeAction(new BrowserCallbackWithReturnValue<String>() {

            @Override
            public String execute(Browser browser) {
                return getWebDriver().getCurrentUrl();
            }

        });
    }

    @Override
    public WebDriverBrowser navigateBackwards() {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                getWebDriver().navigate().back();
                fireEvent(new NavigatedBackwardsEvent(browser));
            }

        });
        return this;
    }

    @Override
    public WebDriverBrowser navigateForwards() {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                getWebDriver().navigate().forward();
                fireEvent(new NavigatedForwardsEvent(browser));
            }

        });
        return this;
    }

    @Override
    public WebDriverBrowser refresh() {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                getWebDriver().navigate().refresh();
                fireEvent(new RefreshedPageEvent(browser));
            }

        });
        return this;
    }

    @Override
    public WebDriverBrowser switchToFullScreen() {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                // simulate pressing F11 on the page; no event is fired for this
                // action (unlike the other navigation methods)
                getWebDriver().findElement(By.tagName("html")).sendKeys(Keys.F11);
            }

        });
        return this;
    }

    @Override
    public WebDriverBrowser acceptAlertIfVisible() {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                if (isAlertVisible()) {
                    Alert alert = getWebDriver().switchTo().alert();
                    // capture the message before accepting — afterwards it is gone
                    String text = alert.getText();
                    alert.accept();
                    fireEvent(new AcceptedAlertEvent(browser, text));
                }
            }

        });
        return this;
    }

    @Override
    public WebDriverBrowser declineAlertIfVisible() {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                if (isAlertVisible()) {
                    Alert alert = getWebDriver().switchTo().alert();
                    // capture the message before dismissing — afterwards it is gone
                    String text = alert.getText();
                    alert.dismiss();
                    fireEvent(new DeclinedAlertEvent(browser, text));
                }
            }

        });
        return this;
    }

    /* Waits up to the configured timeout for an alert to appear; a timeout is
     * interpreted as "no alert visible" rather than an error. */
    private boolean isAlertVisible() {
        try {
            long timeout = configuration.getWaitTimeout();
            long interval = configuration.getWaitInterval();
            new WebDriverWait(getWebDriver(), timeout, interval).until(ExpectedConditions.alertIsPresent());
            return true;
        } catch (TimeoutException e) {
            return false;
        }
    }

    @Override
    public File takeScreenshot() {
        File defaultFolder = configuration.getScreenshotFolder();
        return takeScreenshot(defaultFolder.getAbsolutePath());
    }

    @Override
    public File takeScreenshot(final String targetFolder) {
        // default file name: current time in millis (unique enough per run)
        return takeScreenshot(targetFolder, String.valueOf(System.currentTimeMillis()));
    }

    @Override
    public File takeScreenshot(final String targetFolder, final String fileNameWithoutSuffix) {
        return executeAction(new BrowserCallbackWithReturnValue<File>() {

            @Override
            public File execute(Browser browser) {

                // not every WebDriver implementation supports screenshots
                if (!(getWebDriver() instanceof TakesScreenshot)) {
                    return null;
                }

                TakesScreenshot takesScreenshot = (TakesScreenshot) getWebDriver();
                File tempScreenshot = takesScreenshot.getScreenshotAs(OutputType.FILE);

                String fileName = fileNameWithoutSuffix + ".png";
                File screenshot = new File(targetFolder, fileName);
                try {
                    // move the driver's temp file to the target location
                    FileUtils.moveFile(tempScreenshot, screenshot);
                } catch (IOException e) {
                    logger.warn("Exception while creating screenshot, returning null.", e);
                    return null;
                }

                fireEvent(new TookScreenshotEvent(browser, screenshot));
                return screenshot;

            }

        });
    }

    @Override
    public File saveSourceCode() {
        File defaultFolder = configuration.getSourceCodeFolder();
        return saveSourceCode(defaultFolder.getAbsolutePath());
    }

    @Override
    public File saveSourceCode(final String targetFolder) {
        // default file name: current time in millis (unique enough per run)
        return saveSourceCode(targetFolder, String.valueOf(System.currentTimeMillis()));
    }

    @Override
    public File saveSourceCode(final String targetFolder, final String fileNameWithoutSuffix) {
        return executeAction(new BrowserCallbackWithReturnValue<File>() {

            @Override
            public File execute(Browser browser) {

                String fileName = fileNameWithoutSuffix + ".html";
                File pageSource = new File(targetFolder, fileName);
                try {
                    // NOTE(review): uses the platform default charset — confirm
                    // this is intended for saved page sources
                    FileUtils.write(pageSource, getPageSource());
                } catch (IOException e) {
                    logger.warn("Exception while saving page source, returning null.", e);
                    return null;
                }

                fireEvent(new SavedSourceCodeEvent(browser, pageSource));
                return pageSource;

            }

        });
    }

    @Override
    public String getPageSource() {
        return getWebDriver().getPageSource();
    }

    @Override
    public WebDriverBrowser executeJavaScript(String script, PageObject pageObject, Object... parameters) {
        // prepend the page object's web element so the script can refer to it
        // as arguments[0]; the remaining parameters shift by one
        Object[] parameterArray = new Object[parameters.length + 1];
        parameterArray[0] = pageObject.getWebElement();
        System.arraycopy(parameters, 0, parameterArray, 1, parameters.length);
        return executeJavaScript(script, parameterArray);
    }

    @Override
    public WebDriverBrowser executeJavaScript(final String script, final Object... parameters) {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                // not every WebDriver implementation can execute JavaScript
                if (!(getWebDriver() instanceof JavascriptExecutor)) {
                    throw new UnsupportedOperationException("WebDriver does not support JavaScript execution!");
                }
                ((JavascriptExecutor) getWebDriver()).executeScript(script, parameters);
            }

        });
        return this;
    }

    @Override
    public WebDriverBrowser setFocusOnFrame(final int index) {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                browser.getWebDriver().switchTo().frame(index);
                fireEvent(new SwitchedToFrameEvent(browser, index));
            }

        });
        return this;
    }

    @Override
    public WebDriverBrowser setFocusOnFrame(final String nameOrId) {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                browser.getWebDriver().switchTo().frame(nameOrId);
                fireEvent(new SwitchedToFrameEvent(browser, nameOrId));
            }

        });
        return this;
    }

    @Override
    public WebDriverBrowser setFocusOnWindow(final String nameOrHandle) {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                browser.getWebDriver().switchTo().window(nameOrHandle);
                fireEvent(new SwitchedToWindowEvent(browser, nameOrHandle));
            }

        });
        return this;
    }

    @Override
    public WebDriverBrowser setFocusOnDefaultContent() {
        executeAction(new BrowserCallback() {

            @Override
            public void execute(Browser browser) {
                browser.getWebDriver().switchTo().defaultContent();
                fireEvent(new SwitchedToDefaultContentEvent(browser));
            }

        });
        return this;
    }

    @Override
    public PageObjectFinder finder() {
        // a fresh finder per call; finders are lightweight wrappers of this browser
        return new PageObjectFinder(this);
    }

    @Override
    public GenericElement find(String cssSelector) {
        return finder().findGeneric().by(cssSelector);
    }

    @Override
    public PageObjectList<GenericElement> findMany(String cssSelector) {
        return finder().findGeneric().manyBy(cssSelector);
    }

    @Override
    public IdentificationFinder findBy(Identification identifier) {
        return finder().findBy(identifier);
    }

    @Override
    public <T extends PageObject> TypedFinder<T> find(Class<T> pageObjectClass) {
        return finder().find(pageObjectClass);
    }

    @Override
    public WebDriver getWebDriver() {
        return webDriver;
    }

    @Override
    public BrowserIdentification getIdentification() {
        return identification;
    }

    /* Runs the callback against this browser; any RuntimeException is published
     * as an ExceptionEvent and then rethrown unchanged. */
    @SuppressWarnings("PMD.AvoidCatchingGenericException")
    protected final void executeAction(BrowserCallback callback) {
        try {
            callback.execute(this);
        } catch (RuntimeException e) {
            fireEvent(new ExceptionEvent(this, e));
            throw e;
        }
    }

    /* Same as executeAction(BrowserCallback), but returns the callback's value. */
    @SuppressWarnings("PMD.AvoidCatchingGenericException")
    protected final <B> B executeAction(BrowserCallbackWithReturnValue<B> callback) {
        B value;
        try {
            value = callback.execute(this);
        } catch (RuntimeException e) {
            fireEvent(new ExceptionEvent(this, e));
            throw e;
        }
        return value;
    }

    @Override
    public PageObjectFactory getPageObjectFactory() {
        return pageObjectFactory;
    }

    public void setPageObjectFactory(PageObjectFactory pageObjectFactory) {
        this.pageObjectFactory = pageObjectFactory;
    }

    @Override
    public Configuration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(Configuration configuration) {
        this.configuration = configuration;
    }

    /* factories */

    /**
     * Starts the creation of a new {@link WebDriverBrowser browser} by creating
     * a {@link WebDriverBrowserBuilder builder} using the given
     * {@link WebDriver web driver} as a starting point.
     *
     * @param webDriver the web driver to use
     * @return the created builder
     * @since 0.9.6
     */
    public static BrowserBuilder forWebDriver(WebDriver webDriver) {
        return new WebDriverBrowserBuilder(webDriver);
    }

    /**
     * Creates a new {@link WebDriverBrowser browser} using the given
     * {@link WebDriver web driver}.
     *
     * @param webDriver the web driver to use
     * @return the created browser
     * @since 0.9.6
     */
    public static Browser buildForWebDriver(WebDriver webDriver) {
        return forWebDriver(webDriver).build();
    }

}
package com.palantir.atlasdb.blob.generated; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Set; import java.util.SortedMap; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.function.Supplier; import java.util.stream.Stream; import javax.annotation.Generated; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.MoreObjects; import com.google.common.base.Optional; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Collections2; import com.google.common.collect.ComparisonChain; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import com.google.common.collect.Sets; import com.google.common.hash.Hashing; import com.google.common.primitives.Bytes; import com.google.common.primitives.UnsignedBytes; import com.google.protobuf.InvalidProtocolBufferException; import com.palantir.atlasdb.compress.CompressionUtils; import com.palantir.atlasdb.encoding.PtBytes; import com.palantir.atlasdb.keyvalue.api.BatchColumnRangeSelection; import com.palantir.atlasdb.keyvalue.api.Cell; import com.palantir.atlasdb.keyvalue.api.ColumnRangeSelection; import com.palantir.atlasdb.keyvalue.api.ColumnRangeSelections; import 
com.palantir.atlasdb.keyvalue.api.ColumnSelection; import com.palantir.atlasdb.keyvalue.api.Namespace; import com.palantir.atlasdb.keyvalue.api.Prefix; import com.palantir.atlasdb.keyvalue.api.RangeRequest; import com.palantir.atlasdb.keyvalue.api.RowResult; import com.palantir.atlasdb.keyvalue.api.TableReference; import com.palantir.atlasdb.keyvalue.impl.Cells; import com.palantir.atlasdb.ptobject.EncodingUtils; import com.palantir.atlasdb.table.api.AtlasDbDynamicMutablePersistentTable; import com.palantir.atlasdb.table.api.AtlasDbMutablePersistentTable; import com.palantir.atlasdb.table.api.AtlasDbNamedMutableTable; import com.palantir.atlasdb.table.api.AtlasDbNamedPersistentSet; import com.palantir.atlasdb.table.api.ColumnValue; import com.palantir.atlasdb.table.api.TypedRowResult; import com.palantir.atlasdb.table.description.ColumnValueDescription.Compression; import com.palantir.atlasdb.table.description.ValueType; import com.palantir.atlasdb.table.generation.ColumnValues; import com.palantir.atlasdb.table.generation.Descending; import com.palantir.atlasdb.table.generation.NamedColumnValue; import com.palantir.atlasdb.transaction.api.AtlasDbConstraintCheckingMode; import com.palantir.atlasdb.transaction.api.ConstraintCheckingTransaction; import com.palantir.atlasdb.transaction.api.ImmutableGetRangesQuery; import com.palantir.atlasdb.transaction.api.Transaction; import com.palantir.common.base.AbortingVisitor; import com.palantir.common.base.AbortingVisitors; import com.palantir.common.base.BatchingVisitable; import com.palantir.common.base.BatchingVisitableView; import com.palantir.common.base.BatchingVisitables; import com.palantir.common.base.Throwables; import com.palantir.common.collect.IterableView; import com.palantir.common.persist.Persistable; import com.palantir.common.persist.Persistable.Hydrator; import com.palantir.common.persist.Persistables; import com.palantir.util.AssertUtils; import com.palantir.util.crypto.Sha256Hash; 
@Generated("com.palantir.atlasdb.table.description.render.TableRenderer") @SuppressWarnings({"all", "deprecation"}) public final class HotspottyDataStreamMetadataTable implements AtlasDbMutablePersistentTable<HotspottyDataStreamMetadataTable.HotspottyDataStreamMetadataRow, HotspottyDataStreamMetadataTable.HotspottyDataStreamMetadataNamedColumnValue<?>, HotspottyDataStreamMetadataTable.HotspottyDataStreamMetadataRowResult>, AtlasDbNamedMutableTable<HotspottyDataStreamMetadataTable.HotspottyDataStreamMetadataRow, HotspottyDataStreamMetadataTable.HotspottyDataStreamMetadataNamedColumnValue<?>, HotspottyDataStreamMetadataTable.HotspottyDataStreamMetadataRowResult> { private final Transaction t; private final List<HotspottyDataStreamMetadataTrigger> triggers; private final static String rawTableName = "hotspottyData_stream_metadata"; private final TableReference tableRef; private final static ColumnSelection allColumns = getColumnSelection(HotspottyDataStreamMetadataNamedColumn.values()); static HotspottyDataStreamMetadataTable of(Transaction t, Namespace namespace) { return new HotspottyDataStreamMetadataTable(t, namespace, ImmutableList.<HotspottyDataStreamMetadataTrigger>of()); } static HotspottyDataStreamMetadataTable of(Transaction t, Namespace namespace, HotspottyDataStreamMetadataTrigger trigger, HotspottyDataStreamMetadataTrigger... 
triggers) { return new HotspottyDataStreamMetadataTable(t, namespace, ImmutableList.<HotspottyDataStreamMetadataTrigger>builder().add(trigger).add(triggers).build()); } static HotspottyDataStreamMetadataTable of(Transaction t, Namespace namespace, List<HotspottyDataStreamMetadataTrigger> triggers) { return new HotspottyDataStreamMetadataTable(t, namespace, triggers); } private HotspottyDataStreamMetadataTable(Transaction t, Namespace namespace, List<HotspottyDataStreamMetadataTrigger> triggers) { this.t = t; this.tableRef = TableReference.create(namespace, rawTableName); this.triggers = triggers; } public static String getRawTableName() { return rawTableName; } public TableReference getTableRef() { return tableRef; } public String getTableName() { return tableRef.getQualifiedName(); } public Namespace getNamespace() { return tableRef.getNamespace(); } /** * <pre> * HotspottyDataStreamMetadataRow { * {@literal Long id}; * } * </pre> */ public static final class HotspottyDataStreamMetadataRow implements Persistable, Comparable<HotspottyDataStreamMetadataRow> { private final long id; public static HotspottyDataStreamMetadataRow of(long id) { return new HotspottyDataStreamMetadataRow(id); } private HotspottyDataStreamMetadataRow(long id) { this.id = id; } public long getId() { return id; } public static Function<HotspottyDataStreamMetadataRow, Long> getIdFun() { return new Function<HotspottyDataStreamMetadataRow, Long>() { @Override public Long apply(HotspottyDataStreamMetadataRow row) { return row.id; } }; } public static Function<Long, HotspottyDataStreamMetadataRow> fromIdFun() { return new Function<Long, HotspottyDataStreamMetadataRow>() { @Override public HotspottyDataStreamMetadataRow apply(Long row) { return HotspottyDataStreamMetadataRow.of(row); } }; } @Override public byte[] persistToBytes() { byte[] idBytes = EncodingUtils.encodeSignedVarLong(id); return EncodingUtils.add(idBytes); } public static final Hydrator<HotspottyDataStreamMetadataRow> BYTES_HYDRATOR 
= new Hydrator<HotspottyDataStreamMetadataRow>() { @Override public HotspottyDataStreamMetadataRow hydrateFromBytes(byte[] __input) { int __index = 0; Long id = EncodingUtils.decodeSignedVarLong(__input, __index); __index += EncodingUtils.sizeOfSignedVarLong(id); return new HotspottyDataStreamMetadataRow(id); } }; @Override public String toString() { return MoreObjects.toStringHelper(getClass().getSimpleName()) .add("id", id) .toString(); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } HotspottyDataStreamMetadataRow other = (HotspottyDataStreamMetadataRow) obj; return Objects.equals(id, other.id); } @SuppressWarnings("ArrayHashCode") @Override public int hashCode() { return Objects.hashCode(id); } @Override public int compareTo(HotspottyDataStreamMetadataRow o) { return ComparisonChain.start() .compare(this.id, o.id) .result(); } } public interface HotspottyDataStreamMetadataNamedColumnValue<T> extends NamedColumnValue<T> { /* */ } /** * <pre> * Column value description { * type: com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata; * name: "StreamMetadata" * field { * name: "status" * number: 1 * label: LABEL_REQUIRED * type: TYPE_ENUM * type_name: ".com.palantir.atlasdb.protos.generated.Status" * } * field { * name: "length" * number: 2 * label: LABEL_REQUIRED * type: TYPE_INT64 * } * field { * name: "hash" * number: 3 * label: LABEL_REQUIRED * type: TYPE_BYTES * } * } * </pre> */ public static final class Metadata implements HotspottyDataStreamMetadataNamedColumnValue<com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata> { private final com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata value; public static Metadata of(com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata value) { return new Metadata(value); } private 
Metadata(com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata value) { this.value = value; } @Override public String getColumnName() { return "metadata"; } @Override public String getShortColumnName() { return "md"; } @Override public com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata getValue() { return value; } @Override public byte[] persistValue() { byte[] bytes = value.toByteArray(); return CompressionUtils.compress(bytes, Compression.NONE); } @Override public byte[] persistColumnName() { return PtBytes.toCachedBytes("md"); } public static final Hydrator<Metadata> BYTES_HYDRATOR = new Hydrator<Metadata>() { @Override public Metadata hydrateFromBytes(byte[] bytes) { bytes = CompressionUtils.decompress(bytes, Compression.NONE); try { return of(com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata.parseFrom(bytes)); } catch (InvalidProtocolBufferException e) { throw Throwables.throwUncheckedException(e); } } }; @Override public String toString() { return MoreObjects.toStringHelper(getClass().getSimpleName()) .add("Value", this.value) .toString(); } } public interface HotspottyDataStreamMetadataTrigger { public void putHotspottyDataStreamMetadata(Multimap<HotspottyDataStreamMetadataRow, ? 
extends HotspottyDataStreamMetadataNamedColumnValue<?>> newRows); } public static final class HotspottyDataStreamMetadataRowResult implements TypedRowResult { private final RowResult<byte[]> row; public static HotspottyDataStreamMetadataRowResult of(RowResult<byte[]> row) { return new HotspottyDataStreamMetadataRowResult(row); } private HotspottyDataStreamMetadataRowResult(RowResult<byte[]> row) { this.row = row; } @Override public HotspottyDataStreamMetadataRow getRowName() { return HotspottyDataStreamMetadataRow.BYTES_HYDRATOR.hydrateFromBytes(row.getRowName()); } public static Function<HotspottyDataStreamMetadataRowResult, HotspottyDataStreamMetadataRow> getRowNameFun() { return new Function<HotspottyDataStreamMetadataRowResult, HotspottyDataStreamMetadataRow>() { @Override public HotspottyDataStreamMetadataRow apply(HotspottyDataStreamMetadataRowResult rowResult) { return rowResult.getRowName(); } }; } public static Function<RowResult<byte[]>, HotspottyDataStreamMetadataRowResult> fromRawRowResultFun() { return new Function<RowResult<byte[]>, HotspottyDataStreamMetadataRowResult>() { @Override public HotspottyDataStreamMetadataRowResult apply(RowResult<byte[]> rowResult) { return new HotspottyDataStreamMetadataRowResult(rowResult); } }; } public boolean hasMetadata() { return row.getColumns().containsKey(PtBytes.toCachedBytes("md")); } public com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata getMetadata() { byte[] bytes = row.getColumns().get(PtBytes.toCachedBytes("md")); if (bytes == null) { return null; } Metadata value = Metadata.BYTES_HYDRATOR.hydrateFromBytes(bytes); return value.getValue(); } public static Function<HotspottyDataStreamMetadataRowResult, com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata> getMetadataFun() { return new Function<HotspottyDataStreamMetadataRowResult, com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata>() { @Override public 
com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata apply(HotspottyDataStreamMetadataRowResult rowResult) { return rowResult.getMetadata(); } }; } @Override public String toString() { return MoreObjects.toStringHelper(getClass().getSimpleName()) .add("RowName", getRowName()) .add("Metadata", getMetadata()) .toString(); } } public enum HotspottyDataStreamMetadataNamedColumn { METADATA { @Override public byte[] getShortName() { return PtBytes.toCachedBytes("md"); } }; public abstract byte[] getShortName(); public static Function<HotspottyDataStreamMetadataNamedColumn, byte[]> toShortName() { return new Function<HotspottyDataStreamMetadataNamedColumn, byte[]>() { @Override public byte[] apply(HotspottyDataStreamMetadataNamedColumn namedColumn) { return namedColumn.getShortName(); } }; } } public static ColumnSelection getColumnSelection(Collection<HotspottyDataStreamMetadataNamedColumn> cols) { return ColumnSelection.create(Collections2.transform(cols, HotspottyDataStreamMetadataNamedColumn.toShortName())); } public static ColumnSelection getColumnSelection(HotspottyDataStreamMetadataNamedColumn... cols) { return getColumnSelection(Arrays.asList(cols)); } private static final Map<String, Hydrator<? extends HotspottyDataStreamMetadataNamedColumnValue<?>>> shortNameToHydrator = ImmutableMap.<String, Hydrator<? 
// NOTE(review): tail of an AtlasDB code-GENERATED table class ("HotspottyDataStreamMetadata").
// The class header and the fields referenced below (t = Transaction, tableRef, triggers,
// allColumns, shortNameToHydrator) are declared above this view. Do not hand-edit logic:
// the __CLASS_HASH at the bottom is presumably the generator's fingerprint for detecting
// drift — regenerate from the schema instead. TODO confirm against the generator.
extends HotspottyDataStreamMetadataNamedColumnValue<?>>>builder()
        // single named column: short name "md" -> protobuf StreamMetadata hydrator
        .put("md", Metadata.BYTES_HYDRATOR)
        .build();

    /**
     * Bulk-fetches the "md" (StreamMetadata) column for the given rows in one
     * transactional read. Rows that have no stored metadata cell are simply
     * absent from the returned map.
     */
    public Map<HotspottyDataStreamMetadataRow, com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata> getMetadatas(Collection<HotspottyDataStreamMetadataRow> rows) {
        // Build cell -> row index so results can be mapped back to their rows.
        Map<Cell, HotspottyDataStreamMetadataRow> cells = Maps.newHashMapWithExpectedSize(rows.size());
        for (HotspottyDataStreamMetadataRow row : rows) {
            cells.put(Cell.create(row.persistToBytes(), PtBytes.toCachedBytes("md")), row);
        }
        Map<Cell, byte[]> results = t.get(tableRef, cells.keySet());
        Map<HotspottyDataStreamMetadataRow, com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata> ret = Maps.newHashMapWithExpectedSize(results.size());
        for (Entry<Cell, byte[]> e : results.entrySet()) {
            // hydrateFromBytes wraps the raw cell bytes; getValue() unwraps the proto.
            com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata val = Metadata.BYTES_HYDRATOR.hydrateFromBytes(e.getValue()).getValue();
            ret.put(cells.get(e.getKey()), val);
        }
        return ret;
    }

    /** Stores the "md" column for a single row (delegates to {@link #put}). */
    public void putMetadata(HotspottyDataStreamMetadataRow row, com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata value) {
        put(ImmutableMultimap.of(row, Metadata.of(value)));
    }

    /** Stores the "md" column for many rows in one {@link #put} call. */
    public void putMetadata(Map<HotspottyDataStreamMetadataRow, com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata> map) {
        Map<HotspottyDataStreamMetadataRow, HotspottyDataStreamMetadataNamedColumnValue<?>> toPut = Maps.newHashMapWithExpectedSize(map.size());
        for (Entry<HotspottyDataStreamMetadataRow, com.palantir.atlasdb.protos.generated.StreamPersistence.StreamMetadata> e : map.entrySet()) {
            toPut.put(e.getKey(), Metadata.of(e.getValue()));
        }
        put(Multimaps.forMap(toPut));
    }

    /**
     * Writes the given column values inside the current transaction and then
     * notifies every registered trigger with the same multimap.
     */
    @Override
    public void put(Multimap<HotspottyDataStreamMetadataRow, ? extends HotspottyDataStreamMetadataNamedColumnValue<?>> rows) {
        t.useTable(tableRef, this);
        t.put(tableRef, ColumnValues.toCellValues(rows));
        for (HotspottyDataStreamMetadataTrigger trigger : triggers) {
            trigger.putHotspottyDataStreamMetadata(rows);
        }
    }

    /** Deletes the "md" cell for a single row. */
    public void deleteMetadata(HotspottyDataStreamMetadataRow row) {
        deleteMetadata(ImmutableSet.of(row));
    }

    /** Deletes the "md" cell for each of the given rows. */
    public void deleteMetadata(Iterable<HotspottyDataStreamMetadataRow> rows) {
        byte[] col = PtBytes.toCachedBytes("md");
        Set<Cell> cells = Cells.cellsWithConstantColumn(Persistables.persistAll(rows), col);
        t.delete(tableRef, cells);
    }

    /** Deletes every named column of a single row ("md" is the only one). */
    @Override
    public void delete(HotspottyDataStreamMetadataRow row) {
        delete(ImmutableSet.of(row));
    }

    /** Deletes every named column of each given row ("md" is the only one). */
    @Override
    public void delete(Iterable<HotspottyDataStreamMetadataRow> rows) {
        List<byte[]> rowBytes = Persistables.persistAll(rows);
        Set<Cell> cells = Sets.newHashSetWithExpectedSize(rowBytes.size());
        cells.addAll(Cells.cellsWithConstantColumn(rowBytes, PtBytes.toCachedBytes("md")));
        t.delete(tableRef, cells);
    }

    /** Reads one row with all columns; absent if the row does not exist. */
    public Optional<HotspottyDataStreamMetadataRowResult> getRow(HotspottyDataStreamMetadataRow row) {
        return getRow(row, allColumns);
    }

    /** Reads one row restricted to {@code columns}; absent if the row does not exist. */
    public Optional<HotspottyDataStreamMetadataRowResult> getRow(HotspottyDataStreamMetadataRow row, ColumnSelection columns) {
        byte[] bytes = row.persistToBytes();
        RowResult<byte[]> rowResult = t.getRows(tableRef, ImmutableSet.of(bytes), columns).get(bytes);
        if (rowResult == null) {
            return Optional.absent();
        } else {
            return Optional.of(HotspottyDataStreamMetadataRowResult.of(rowResult));
        }
    }

    /** Reads many rows with all columns; missing rows are omitted. */
    @Override
    public List<HotspottyDataStreamMetadataRowResult> getRows(Iterable<HotspottyDataStreamMetadataRow> rows) {
        return getRows(rows, allColumns);
    }

    /** Reads many rows restricted to {@code columns}; missing rows are omitted. */
    @Override
    public List<HotspottyDataStreamMetadataRowResult> getRows(Iterable<HotspottyDataStreamMetadataRow> rows, ColumnSelection columns) {
        SortedMap<byte[], RowResult<byte[]>> results = t.getRows(tableRef, Persistables.persistAll(rows), columns);
        List<HotspottyDataStreamMetadataRowResult> rowResults = Lists.newArrayListWithCapacity(results.size());
        for (RowResult<byte[]> row : results.values()) {
            rowResults.add(HotspottyDataStreamMetadataRowResult.of(row));
        }
        return rowResults;
    }

    /** Returns all hydrated column values present on one row. */
    @Override
    public List<HotspottyDataStreamMetadataNamedColumnValue<?>> getRowColumns(HotspottyDataStreamMetadataRow row) {
        return getRowColumns(row, allColumns);
    }

    /** Returns the hydrated column values present on one row, restricted to {@code columns}. */
    @Override
    public List<HotspottyDataStreamMetadataNamedColumnValue<?>> getRowColumns(HotspottyDataStreamMetadataRow row, ColumnSelection columns) {
        byte[] bytes = row.persistToBytes();
        RowResult<byte[]> rowResult = t.getRows(tableRef, ImmutableSet.of(bytes), columns).get(bytes);
        if (rowResult == null) {
            return ImmutableList.of();
        } else {
            List<HotspottyDataStreamMetadataNamedColumnValue<?>> ret = Lists.newArrayListWithCapacity(rowResult.getColumns().size());
            for (Entry<byte[], byte[]> e : rowResult.getColumns().entrySet()) {
                // Look up the hydrator by the column's short name (e.g. "md").
                ret.add(shortNameToHydrator.get(PtBytes.toString(e.getKey())).hydrateFromBytes(e.getValue()));
            }
            return ret;
        }
    }

    /** Row -> hydrated column values for many rows, all columns. */
    @Override
    public Multimap<HotspottyDataStreamMetadataRow, HotspottyDataStreamMetadataNamedColumnValue<?>> getRowsMultimap(Iterable<HotspottyDataStreamMetadataRow> rows) {
        return getRowsMultimapInternal(rows, allColumns);
    }

    /** Row -> hydrated column values for many rows, restricted to {@code columns}. */
    @Override
    public Multimap<HotspottyDataStreamMetadataRow, HotspottyDataStreamMetadataNamedColumnValue<?>> getRowsMultimap(Iterable<HotspottyDataStreamMetadataRow> rows, ColumnSelection columns) {
        return getRowsMultimapInternal(rows, columns);
    }

    // Shared implementation for both getRowsMultimap overloads.
    private Multimap<HotspottyDataStreamMetadataRow, HotspottyDataStreamMetadataNamedColumnValue<?>> getRowsMultimapInternal(Iterable<HotspottyDataStreamMetadataRow> rows, ColumnSelection columns) {
        SortedMap<byte[], RowResult<byte[]>> results = t.getRows(tableRef, Persistables.persistAll(rows), columns);
        return getRowMapFromRowResults(results.values());
    }

    // Hydrates raw RowResults into a row -> column-value multimap.
    private static Multimap<HotspottyDataStreamMetadataRow, HotspottyDataStreamMetadataNamedColumnValue<?>> getRowMapFromRowResults(Collection<RowResult<byte[]>> rowResults) {
        Multimap<HotspottyDataStreamMetadataRow, HotspottyDataStreamMetadataNamedColumnValue<?>> rowMap = HashMultimap.create();
        for (RowResult<byte[]> result : rowResults) {
            HotspottyDataStreamMetadataRow row = HotspottyDataStreamMetadataRow.BYTES_HYDRATOR.hydrateFromBytes(result.getRowName());
            for (Entry<byte[], byte[]> e : result.getColumns().entrySet()) {
                rowMap.put(row, shortNameToHydrator.get(PtBytes.toString(e.getKey())).hydrateFromBytes(e.getValue()));
            }
        }
        return rowMap;
    }

    /**
     * Batched column-range read: one lazily-hydrated BatchingVisitable of
     * column values per requested row.
     */
    @Override
    public Map<HotspottyDataStreamMetadataRow, BatchingVisitable<HotspottyDataStreamMetadataNamedColumnValue<?>>> getRowsColumnRange(Iterable<HotspottyDataStreamMetadataRow> rows, BatchColumnRangeSelection columnRangeSelection) {
        Map<byte[], BatchingVisitable<Map.Entry<Cell, byte[]>>> results = t.getRowsColumnRange(tableRef, Persistables.persistAll(rows), columnRangeSelection);
        Map<HotspottyDataStreamMetadataRow, BatchingVisitable<HotspottyDataStreamMetadataNamedColumnValue<?>>> transformed = Maps.newHashMapWithExpectedSize(results.size());
        for (Entry<byte[], BatchingVisitable<Map.Entry<Cell, byte[]>>> e : results.entrySet()) {
            HotspottyDataStreamMetadataRow row = HotspottyDataStreamMetadataRow.BYTES_HYDRATOR.hydrateFromBytes(e.getKey());
            // Hydration happens lazily, as the visitable is consumed.
            BatchingVisitable<HotspottyDataStreamMetadataNamedColumnValue<?>> bv = BatchingVisitables.transform(e.getValue(), result -> {
                return shortNameToHydrator.get(PtBytes.toString(result.getKey().getColumnName())).hydrateFromBytes(result.getValue());
            });
            transformed.put(row, bv);
        }
        return transformed;
    }

    /**
     * Flat column-range read: a single lazy iterator of (row, column value)
     * entries across all requested rows, hydrated on consumption.
     */
    @Override
    public Iterator<Map.Entry<HotspottyDataStreamMetadataRow, HotspottyDataStreamMetadataNamedColumnValue<?>>> getRowsColumnRange(Iterable<HotspottyDataStreamMetadataRow> rows, ColumnRangeSelection columnRangeSelection, int batchHint) {
        Iterator<Map.Entry<Cell, byte[]>> results = t.getRowsColumnRange(getTableRef(), Persistables.persistAll(rows), columnRangeSelection, batchHint);
        return Iterators.transform(results, e -> {
            HotspottyDataStreamMetadataRow row = HotspottyDataStreamMetadataRow.BYTES_HYDRATOR.hydrateFromBytes(e.getKey().getRowName());
            HotspottyDataStreamMetadataNamedColumnValue<?> colValue = shortNameToHydrator.get(PtBytes.toString(e.getKey().getColumnName())).hydrateFromBytes(e.getValue());
            return Maps.immutableEntry(row, colValue);
        });
    }

    /**
     * Like the BatchingVisitable overload, but returns one lazy Iterator of
     * hydrated column values per requested row.
     */
    @Override
    public Map<HotspottyDataStreamMetadataRow, Iterator<HotspottyDataStreamMetadataNamedColumnValue<?>>> getRowsColumnRangeIterator(Iterable<HotspottyDataStreamMetadataRow> rows, BatchColumnRangeSelection columnRangeSelection) {
        Map<byte[], Iterator<Map.Entry<Cell, byte[]>>> results = t.getRowsColumnRangeIterator(tableRef, Persistables.persistAll(rows), columnRangeSelection);
        Map<HotspottyDataStreamMetadataRow, Iterator<HotspottyDataStreamMetadataNamedColumnValue<?>>> transformed = Maps.newHashMapWithExpectedSize(results.size());
        for (Entry<byte[], Iterator<Map.Entry<Cell, byte[]>>> e : results.entrySet()) {
            HotspottyDataStreamMetadataRow row = HotspottyDataStreamMetadataRow.BYTES_HYDRATOR.hydrateFromBytes(e.getKey());
            Iterator<HotspottyDataStreamMetadataNamedColumnValue<?>> bv = Iterators.transform(e.getValue(), result -> {
                return shortNameToHydrator.get(PtBytes.toString(result.getKey().getColumnName())).hydrateFromBytes(result.getValue());
            });
            transformed.put(row, bv);
        }
        return transformed;
    }

    // Canonicalizes "all columns selected" to the shared allColumns instance.
    private ColumnSelection optimizeColumnSelection(ColumnSelection columns) {
        if (columns.allColumnsSelected()) {
            return allColumns;
        }
        return columns;
    }

    /** Full-table scan (row order of the underlying range), all columns. */
    public BatchingVisitableView<HotspottyDataStreamMetadataRowResult> getAllRowsUnordered() {
        return getAllRowsUnordered(allColumns);
    }

    /** Full-table scan restricted to {@code columns}. */
    public BatchingVisitableView<HotspottyDataStreamMetadataRowResult> getAllRowsUnordered(ColumnSelection columns) {
        return BatchingVisitables.transform(t.getRange(tableRef, RangeRequest.builder()
                .retainColumns(optimizeColumnSelection(columns)).build()),
                new Function<RowResult<byte[]>, HotspottyDataStreamMetadataRowResult>() {
            @Override
            public HotspottyDataStreamMetadataRowResult apply(RowResult<byte[]> input) {
                return HotspottyDataStreamMetadataRowResult.of(input);
            }
        });
    }

    /** No constraints are defined for this table; always empty. */
    @Override
    public List<String> findConstraintFailures(Map<Cell, byte[]> writes, ConstraintCheckingTransaction transaction, AtlasDbConstraintCheckingMode constraintCheckingMode) {
        return ImmutableList.of();
    }

    /** No constraints are defined for this table; always empty. */
    @Override
    public List<String> findConstraintFailuresNoRead(Map<Cell, byte[]> writes, AtlasDbConstraintCheckingMode constraintCheckingMode) {
        return ImmutableList.of();
    }

    /**
     * This exists to avoid unused import warnings
     * {@link AbortingVisitor}
     * {@link AbortingVisitors}
     * {@link ArrayListMultimap}
     * {@link Arrays}
     * {@link AssertUtils}
     * {@link AtlasDbConstraintCheckingMode}
     * {@link AtlasDbDynamicMutablePersistentTable}
     * {@link AtlasDbMutablePersistentTable}
     * {@link AtlasDbNamedMutableTable}
     * {@link AtlasDbNamedPersistentSet}
     * {@link BatchColumnRangeSelection}
     * {@link BatchingVisitable}
     * {@link BatchingVisitableView}
     * {@link BatchingVisitables}
     * {@link BiFunction}
     * {@link Bytes}
     * {@link Callable}
     * {@link Cell}
     * {@link Cells}
     * {@link Collection}
     * {@link Collections2}
     * {@link ColumnRangeSelection}
     * {@link ColumnRangeSelections}
     * {@link ColumnSelection}
     * {@link ColumnValue}
     * {@link ColumnValues}
     * {@link ComparisonChain}
     * {@link Compression}
     * {@link CompressionUtils}
     * {@link ConstraintCheckingTransaction}
     * {@link Descending}
     * {@link EncodingUtils}
     * {@link Entry}
     * {@link EnumSet}
     * {@link Function}
     * {@link Generated}
     * {@link HashMultimap}
     * {@link HashSet}
     * {@link Hashing}
     * {@link Hydrator}
     * {@link ImmutableGetRangesQuery}
     * {@link ImmutableList}
     * {@link ImmutableMap}
     * {@link ImmutableMultimap}
     * {@link ImmutableSet}
     * {@link InvalidProtocolBufferException}
     * {@link IterableView}
     * {@link Iterables}
     * {@link Iterator}
     * {@link Iterators}
     * {@link Joiner}
     * {@link List}
     * {@link Lists}
     * {@link Map}
     * {@link Maps}
     * {@link MoreObjects}
     * {@link Multimap}
     * {@link Multimaps}
     * {@link NamedColumnValue}
     * {@link Namespace}
     * {@link Objects}
     * {@link Optional}
     * {@link Persistable}
     * {@link Persistables}
     * {@link Prefix}
     * {@link PtBytes}
     * {@link RangeRequest}
     * {@link RowResult}
     * {@link Set}
     * {@link Sets}
     * {@link Sha256Hash}
     * {@link SortedMap}
     * {@link Stream}
     * {@link Supplier}
     * {@link TableReference}
     * {@link Throwables}
     * {@link TimeUnit}
     * {@link Transaction}
     * {@link TypedRowResult}
     * {@link UUID}
     * {@link UnsignedBytes}
     * {@link ValueType}
     */
    // Generator-written fingerprint of this class — presumably compared on
    // regeneration to detect manual edits; do not change. TODO confirm.
    static String __CLASS_HASH = "ZAmuaTg7KVhrSM8jgiLdFA==";
}
// ======================================================================== // Copyright (c) 2004-2009 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // All rights reserved. This program and the accompanying materials // are made available under the terms of the Eclipse Public License v1.0 // and Apache License v2.0 which accompanies this distribution. // The Eclipse Public License is available at // http://www.eclipse.org/legal/epl-v10.html // The Apache License v2.0 is available at // http://www.opensource.org/licenses/apache2.0.php // You may elect to redistribute this code under either of these licenses. // ======================================================================== package org.eclipse.jetty.servlet; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.GenericServlet; import javax.servlet.RequestDispatcher; import javax.servlet.Servlet; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletRequestWrapper; import javax.servlet.ServletResponse; import javax.servlet.ServletResponseWrapper; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponseWrapper; import junit.framework.Assert; import org.eclipse.jetty.server.Dispatcher; import org.eclipse.jetty.server.LocalConnector; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.ContextHandler; import org.eclipse.jetty.server.handler.ContextHandlerCollection; import 
org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.toolchain.test.MavenTestingUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests RequestDispatcher forward/include semantics against an embedded Jetty
 * server with a LocalConnector: the tests assert on the exact raw HTTP
 * response bytes and on the servlet-spec FORWARD_*/INCLUDE_* request
 * attributes set by {@link Dispatcher}.
 */
public class DispatcherTest
{
    private Server _server;
    private LocalConnector _connector;
    private ContextHandlerCollection _contextCollection;
    private ServletContextHandler _contextHandler;   // mounted at /context
    private ResourceHandler _resourceHandler;        // mounted at /resource

    @Before
    public void init() throws Exception
    {
        _server = new Server();
        // Keep responses free of a Server: header so expected strings match exactly.
        _server.setSendServerVersion(false);
        _connector = new LocalConnector();

        _contextCollection = new ContextHandlerCollection();
        _contextHandler = new ServletContextHandler();
        _contextHandler.setContextPath("/context");
        _contextCollection.addHandler(_contextHandler);
        // Static resources served from src/test/resources/dispatchResourceTest.
        _resourceHandler = new ResourceHandler();
        _resourceHandler.setResourceBase(MavenTestingUtils.getTestResourceDir("dispatchResourceTest").getAbsolutePath());
        ContextHandler resourceContextHandler = new ContextHandler("/resource");
        resourceContextHandler.setHandler(_resourceHandler);
        _contextCollection.addHandler(resourceContextHandler);
        _server.setHandler(_contextCollection);
        _server.addConnector( _connector );

        _server.start();
    }

    @After
    public void destroy() throws Exception
    {
        _server.stop();
        _server.join();
    }

    /** Plain forward: AssertForwardServlet checks the FORWARD_* attributes. */
    @Test
    public void testForward() throws Exception
    {
        _contextHandler.addServlet(ForwardServlet.class, "/ForwardServlet/*");
        _contextHandler.addServlet(AssertForwardServlet.class, "/AssertForwardServlet/*");

        String expected=
            "HTTP/1.1 200 OK\r\n"+
            "Content-Type: text/html\r\n"+
            "Content-Length: 0\r\n"+
            "\r\n";

        String responses = _connector.getResponses("GET /context/ForwardServlet?do=assertforward&do=more&test=1 HTTP/1.1\n" + "Host: localhost\n\n");

        assertEquals(expected, responses);
    }

    /** Forward to a URI with encoded path, matrix and query parameters. */
    @Test
    public void testForwardWithParam() throws Exception
    {
        _contextHandler.addServlet(ForwardServlet.class, "/ForwardServlet/*");
        _contextHandler.addServlet(EchoURIServlet.class, "/EchoURI/*");

        String expected=
            "HTTP/1.1 200 OK\r\n"+
            "Content-Type: text/plain\r\n"+
            "Content-Length: 54\r\n"+
            "\r\n"+
            "/context\r\n"+
            "/EchoURI\r\n"+
            "/x x\r\n"+
            "/context/EchoURI/x%20x;a=1\r\n";

        String responses = _connector.getResponses("GET /context/ForwardServlet;ignore=true?do=req.echo&uri=EchoURI%2Fx%2520x%3Ba=1%3Fb=2 HTTP/1.1\n" + "Host: localhost\n\n");

        assertEquals(expected, responses);
    }

    /** Plain include: AssertIncludeServlet checks the INCLUDE_* attributes. */
    @Test
    public void testInclude() throws Exception
    {
        _contextHandler.addServlet(IncludeServlet.class, "/IncludeServlet/*");
        _contextHandler.addServlet(AssertIncludeServlet.class, "/AssertIncludeServlet/*");

        String expected=
            "HTTP/1.1 200 OK\r\n"+
            "Content-Length: 0\r\n"+
            "\r\n";

        String responses = _connector.getResponses("GET /context/IncludeServlet?do=assertinclude&do=more&test=1 HTTP/1.1\n" + "Host: localhost\n\n");

        assertEquals(expected, responses);
    }

    /** Forward then include: include must NOT hide the FORWARD_* attributes. */
    @Test
    public void testForwardThenInclude() throws Exception
    {
        _contextHandler.addServlet(ForwardServlet.class, "/ForwardServlet/*");
        _contextHandler.addServlet(IncludeServlet.class, "/IncludeServlet/*");
        _contextHandler.addServlet(AssertForwardIncludeServlet.class, "/AssertForwardIncludeServlet/*");

        String expected=
            "HTTP/1.1 200 OK\r\n"+
            "Content-Length: 0\r\n"+
            "\r\n";

        String responses = _connector.getResponses("GET /context/ForwardServlet/forwardpath?do=include HTTP/1.1\n" + "Host: localhost\n\n");

        assertEquals(expected, responses);
    }

    /** Include then forward: forward hides the INCLUDE_* attributes (chunked reply). */
    @Test
    public void testIncludeThenForward() throws Exception
    {
        _contextHandler.addServlet(IncludeServlet.class, "/IncludeServlet/*");
        _contextHandler.addServlet(ForwardServlet.class, "/ForwardServlet/*");
        _contextHandler.addServlet(AssertIncludeForwardServlet.class, "/AssertIncludeForwardServlet/*");

        String expected=
            "HTTP/1.1 200 OK\r\n"+
            "Transfer-Encoding: chunked\r\n"+
            "\r\n"+
            "0\r\n"+
            "\r\n";

        String responses = _connector.getResponses("GET /context/IncludeServlet/includepath?do=forward HTTP/1.1\n" + "Host: localhost\n\n");

        assertEquals(expected, responses);
    }

    /** Forward through wrapped request/response objects still reaches the target. */
    @Test
    public void testServletForward() throws Exception
    {
        _contextHandler.addServlet(DispatchServletServlet.class, "/dispatch/*");
        _contextHandler.addServlet(RogerThatServlet.class, "/roger/*");

        String expected=
            "HTTP/1.1 200 OK\r\n"+
            "Content-Length: 11\r\n"+
            "\r\n"+
            "Roger That!";

        String responses = _connector.getResponses("GET /context/dispatch/test?forward=/roger/that HTTP/1.0\n" + "Host: localhost\n\n");

        assertEquals(expected, responses);
    }

    /** Include through wrapped request/response objects still reaches the target. */
    @Test
    public void testServletInclude() throws Exception
    {
        _contextHandler.addServlet(DispatchServletServlet.class, "/dispatch/*");
        _contextHandler.addServlet(RogerThatServlet.class, "/roger/*");

        String expected=
            "HTTP/1.1 200 OK\r\n"+
            "Content-Length: 11\r\n"+
            "\r\n"+
            "Roger That!";

        String responses = _connector.getResponses("GET /context/dispatch/test?include=/roger/that HTTP/1.0\n" + "Host: localhost\n\n");

        assertEquals(expected, responses);
    }

    /** Sanity check: the /resource ResourceHandler serves files directly. */
    @Test
    public void testWorkingResourceHandler() throws Exception
    {
        String responses = _connector.getResponses("GET /resource/content.txt HTTP/1.0\n" + "Host: localhost\n\n");

        assertTrue(responses.contains("content goes here")); // from inside the context.txt file
    }

    /** Cross-context include into the ResourceHandler context. */
    @Test
    public void testIncludeToResourceHandler() throws Exception
    {
        _contextHandler.addServlet(DispatchToResourceServlet.class, "/resourceServlet/*");

        String responses = _connector.getResponses("GET /context/resourceServlet/content.txt?do=include HTTP/1.0\n" + "Host: localhost\n\n");

        // from inside the context.txt file
        Assert.assertNotNull(responses);

        assertTrue(responses.contains("content goes here"));
    }

    /** Cross-context forward into the ResourceHandler context. */
    @Test
    public void testForwardToResourceHandler() throws Exception
    {
        _contextHandler.addServlet(DispatchToResourceServlet.class, "/resourceServlet/*");

        String responses = _connector.getResponses("GET /context/resourceServlet/content.txt?do=forward HTTP/1.0\n" + "Host: localhost\n\n");

        // from inside the context.txt file
        assertTrue(responses.contains("content goes here"));
    }

    /** Cross-context include with wrapped request/response objects. */
    @Test
    public void testWrappedIncludeToResourceHandler() throws Exception
    {
        _contextHandler.addServlet(DispatchToResourceServlet.class, "/resourceServlet/*");

        String responses = _connector.getResponses("GET /context/resourceServlet/content.txt?do=include&wrapped=true HTTP/1.0\n" + "Host: localhost\n\n");

        // from inside the context.txt file
        assertTrue(responses.contains("content goes here"));
    }

    /** Cross-context forward with wrapped request/response objects. */
    @Test
    public void testWrappedForwardToResourceHandler() throws Exception
    {
        _contextHandler.addServlet(DispatchToResourceServlet.class, "/resourceServlet/*");

        String responses = _connector.getResponses("GET /context/resourceServlet/content.txt?do=forward&wrapped=true HTTP/1.0\n" + "Host: localhost\n\n");

        // from inside the context.txt file
        assertTrue(responses.contains("content goes here"));
    }

    /** A filter forwards to the echo / reverse-echo servlets based on params. */
    @Test
    public void testForwardFilterToRogerServlet() throws Exception
    {
        _contextHandler.addServlet(RogerThatServlet.class, "/*");
        _contextHandler.addServlet(ReserveEchoServlet.class,"/recho/*");
        _contextHandler.addServlet(EchoServlet.class, "/echo/*");
        _contextHandler.addFilter(ForwardFilter.class, "/*", FilterMapping.REQUEST);

        String rogerResponse = _connector.getResponses("GET /context/ HTTP/1.0\n" + "Host: localhost\n\n");

        String echoResponse = _connector.getResponses("GET /context/foo?echo=echoText HTTP/1.0\n" + "Host: localhost\n\n");

        String rechoResponse = _connector.getResponses("GET /context/?echo=echoText HTTP/1.0\n" + "Host: localhost\n\n");

        assertTrue(rogerResponse.contains("Roger That!"));
        assertTrue(echoResponse.contains("echoText"));
        assertTrue(rechoResponse.contains("txeTohce"));
    }

    /** Dispatches to another servlet according to the "do" query parameter. */
    public static class ForwardServlet extends HttpServlet implements Servlet
    {
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            RequestDispatcher dispatcher = null;

            if(request.getParameter("do").equals("include"))
                dispatcher = getServletContext().getRequestDispatcher("/IncludeServlet/includepath?do=assertforwardinclude");
            else if(request.getParameter("do").equals("assertincludeforward"))
                dispatcher = getServletContext().getRequestDispatcher("/AssertIncludeForwardServlet/assertpath?do=end");
            else if(request.getParameter("do").equals("assertforward"))
                dispatcher = getServletContext().getRequestDispatcher("/AssertForwardServlet?do=end&do=the");
            else if(request.getParameter("do").equals("ctx.echo"))
                dispatcher = getServletContext().getRequestDispatcher(request.getParameter("uri"));
            else if(request.getParameter("do").equals("req.echo"))
                dispatcher = request.getRequestDispatcher(request.getParameter("uri"));
            // NOTE(review): an unrecognized "do" value leaves dispatcher null and NPEs here.
            dispatcher.forward(request, response);
        }
    }

    /*
     * Forward filter works with roger, echo and reverse echo servlets to test various
     * forwarding bits using filters.
     *
     * when there is an echo parameter and the path info is / it forwards to the reverse echo
     * anything else in the pathInfo and it sends straight to the echo servlet...otherwise its
     * all roger servlet
     */
    public static class ForwardFilter implements Filter
    {
        ServletContext servletContext;

        public void init(FilterConfig filterConfig) throws ServletException
        {
            servletContext = filterConfig.getServletContext().getContext("/context");
        }

        public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException
        {
            // Only intercept HTTP traffic when the /context ServletContext is resolvable.
            if ( servletContext == null || !(request instanceof HttpServletRequest) || !(response instanceof HttpServletResponse))
            {
                chain.doFilter(request,response);
                return;
            }

            HttpServletRequest req = (HttpServletRequest)request;
            HttpServletResponse resp = (HttpServletResponse)response;

            if ( req.getParameter("echo") != null && "/".equals(req.getPathInfo()))
            {
                RequestDispatcher dispatcher = servletContext.getRequestDispatcher("/recho");
                dispatcher.forward(request,response);
            }
            else if ( req.getParameter("echo") != null )
            {
                RequestDispatcher dispatcher = servletContext.getRequestDispatcher("/echo");
                dispatcher.forward(request,response);
            }
            else
            {
                chain.doFilter(request,response);
                return;
            }
        }

        public void destroy()
        {
        }
    }

    /** Forwards or includes via WRAPPED request/response, per query parameter. */
    public static class DispatchServletServlet extends HttpServlet implements Servlet
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            RequestDispatcher dispatcher = null;

            if(request.getParameter("include")!=null)
            {
                dispatcher = getServletContext().getRequestDispatcher(request.getParameter("include"));
                dispatcher.include(new ServletRequestWrapper(request), new ServletResponseWrapper(response));
            }
            else if(request.getParameter("forward")!=null)
            {
                dispatcher = getServletContext().getRequestDispatcher(request.getParameter("forward"));
                dispatcher.forward(new ServletRequestWrapper(request), new ServletResponseWrapper(response));
            }
        }
    }

    /** Includes another servlet according to the "do" query parameter. */
    public static class IncludeServlet extends HttpServlet implements Servlet
    {
        @Override
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            RequestDispatcher dispatcher = null;

            if(request.getParameter("do").equals("forward"))
                dispatcher = getServletContext().getRequestDispatcher("/ForwardServlet/forwardpath?do=assertincludeforward");
            else if(request.getParameter("do").equals("assertforwardinclude"))
                dispatcher = getServletContext().getRequestDispatcher("/AssertForwardIncludeServlet/assertpath?do=end");
            else if(request.getParameter("do").equals("assertinclude"))
                dispatcher = getServletContext().getRequestDispatcher("/AssertIncludeServlet?do=end&do=the");
            // NOTE(review): an unrecognized "do" value leaves dispatcher null and NPEs here.
            dispatcher.include(request, response);
        }
    }

    /** Writes a fixed body so dispatch targets are easy to assert on. */
    public static class RogerThatServlet extends GenericServlet
    {
        @Override
        public void service(ServletRequest req, ServletResponse res) throws ServletException, IOException
        {
            res.getWriter().print("Roger That!");
        }
    }

    /** Echoes the required "echo" parameter back to the client. */
    public static class EchoServlet extends GenericServlet
    {
        @Override
        public void service(ServletRequest req, ServletResponse res) throws ServletException, IOException
        {
            String echoText = req.getParameter("echo");

            if ( echoText == null )
            {
                throw new ServletException("echo is a required parameter");
            }
            else
            {
                res.getWriter().print(echoText);
            }
        }
    }

    /** Echoes the required "echo" parameter REVERSED ("Reserve" is the original name). */
    public static class ReserveEchoServlet extends GenericServlet
    {
        @Override
        public void service(ServletRequest req, ServletResponse res) throws ServletException, IOException
        {
            String echoText = req.getParameter("echo");

            if ( echoText == null )
            {
                throw new ServletException("echo is a required parameter");
            }
            else
            {
                res.getWriter().print(new StringBuffer(echoText).reverse().toString());
            }
        }
    }

    /** Cross-context dispatch into /resource, optionally through wrappers. */
    public static class DispatchToResourceServlet extends HttpServlet implements Servlet
    {
        @Override
        public void service(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException
        {
            ServletContext targetContext = getServletConfig().getServletContext().getContext("/resource");

            RequestDispatcher dispatcher = targetContext.getRequestDispatcher(req.getPathInfo());

            if ( "true".equals(req.getParameter("wrapped")))
            {
                if (req.getParameter("do").equals("forward"))
                {
                    dispatcher.forward(new HttpServletRequestWrapper(req),new HttpServletResponseWrapper(res));
                }
                else if (req.getParameter("do").equals("include"))
                {
                    dispatcher.include(new HttpServletRequestWrapper(req),new HttpServletResponseWrapper(res));
                }
                else
                {
                    throw new ServletException("type of forward or include is required");
                }
            }
            else
            {
                if (req.getParameter("do").equals("forward"))
                {
                    dispatcher.forward(req,res);
                }
                else if (req.getParameter("do").equals("include"))
                {
                    dispatcher.include(req,res);
                }
                else
                {
                    throw new ServletException("type of forward or include is required");
                }
            }
        }
    }

    /** Prints context path, servlet path, path info and request URI, one per line. */
    public static class EchoURIServlet extends HttpServlet implements Servlet
    {
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            response.setContentType("text/plain");
            response.setStatus(HttpServletResponse.SC_OK);
            response.getOutputStream().println(request.getContextPath());
            response.getOutputStream().println(request.getServletPath());
            response.getOutputStream().println(request.getPathInfo());
            response.getOutputStream().println(request.getRequestURI());
        }
    }

    /** Forward target: asserts the servlet-spec FORWARD_* attributes are set. */
    public static class AssertForwardServlet extends HttpServlet implements Servlet
    {
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            assertEquals( "/context/ForwardServlet", request.getAttribute(Dispatcher.FORWARD_REQUEST_URI));
            assertEquals( "/context", request.getAttribute(Dispatcher.FORWARD_CONTEXT_PATH) );
            assertEquals( "/ForwardServlet", request.getAttribute(Dispatcher.FORWARD_SERVLET_PATH));
            assertEquals( null, request.getAttribute(Dispatcher.FORWARD_PATH_INFO));
            assertEquals( "do=assertforward&do=more&test=1", request.getAttribute(Dispatcher.FORWARD_QUERY_STRING) );

            List<String> expectedAttributeNames = Arrays.asList(Dispatcher.FORWARD_REQUEST_URI, Dispatcher.FORWARD_CONTEXT_PATH,
                    Dispatcher.FORWARD_SERVLET_PATH, Dispatcher.FORWARD_QUERY_STRING);
            List<String> requestAttributeNames = Collections.list(request.getAttributeNames());
            assertTrue(requestAttributeNames.containsAll(expectedAttributeNames));

            // The request itself must reflect the forward target, not the original URI.
            assertEquals(null, request.getPathInfo());
            assertEquals(null, request.getPathTranslated());
            assertEquals("do=end&do=the&test=1", request.getQueryString());
            assertEquals("/context/AssertForwardServlet", request.getRequestURI());
            assertEquals("/context", request.getContextPath());
            assertEquals("/AssertForwardServlet", request.getServletPath());

            response.setContentType("text/html");
            response.setStatus(HttpServletResponse.SC_OK);
        }
    }

    /** Include target: asserts the servlet-spec INCLUDE_* attributes are set. */
    public static class AssertIncludeServlet extends HttpServlet implements Servlet
    {
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            assertEquals( "/context/AssertIncludeServlet", request.getAttribute(Dispatcher.INCLUDE_REQUEST_URI));
            assertEquals( "/context", request.getAttribute(Dispatcher.INCLUDE_CONTEXT_PATH) );
            assertEquals( "/AssertIncludeServlet", request.getAttribute(Dispatcher.INCLUDE_SERVLET_PATH));
            assertEquals( null, request.getAttribute(Dispatcher.INCLUDE_PATH_INFO));
            assertEquals( "do=end&do=the", request.getAttribute(Dispatcher.INCLUDE_QUERY_STRING));

            List expectedAttributeNames = Arrays.asList(Dispatcher.INCLUDE_REQUEST_URI, Dispatcher.INCLUDE_CONTEXT_PATH,
                    Dispatcher.INCLUDE_SERVLET_PATH, Dispatcher.INCLUDE_QUERY_STRING);
            List requestAttributeNames = Collections.list(request.getAttributeNames());
            assertTrue(requestAttributeNames.containsAll(expectedAttributeNames));

            // With include, the request keeps the INCLUDING servlet's view of the URI.
            assertEquals(null, request.getPathInfo());
            assertEquals(null, request.getPathTranslated());
            assertEquals("do=assertinclude&do=more&test=1", request.getQueryString());
            assertEquals("/context/IncludeServlet", request.getRequestURI());
            assertEquals("/context", request.getContextPath());
            assertEquals("/IncludeServlet", request.getServletPath());

            response.setContentType("text/html");
            response.setStatus(HttpServletResponse.SC_OK);
        }
    }

    /** Target of forward-then-include: both FORWARD_* and INCLUDE_* must be visible. */
    public static class AssertForwardIncludeServlet extends HttpServlet implements Servlet
    {
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            // include doesn't hide forward
            assertEquals( "/context/ForwardServlet/forwardpath", request.getAttribute(Dispatcher.FORWARD_REQUEST_URI));
            assertEquals( "/context", request.getAttribute(Dispatcher.FORWARD_CONTEXT_PATH) );
            assertEquals( "/ForwardServlet", request.getAttribute(Dispatcher.FORWARD_SERVLET_PATH));
            assertEquals( "/forwardpath", request.getAttribute(Dispatcher.FORWARD_PATH_INFO));
            assertEquals( "do=include", request.getAttribute(Dispatcher.FORWARD_QUERY_STRING) );

            assertEquals( "/context/AssertForwardIncludeServlet/assertpath", request.getAttribute(Dispatcher.INCLUDE_REQUEST_URI));
            assertEquals( "/context", request.getAttribute(Dispatcher.INCLUDE_CONTEXT_PATH) );
            assertEquals( "/AssertForwardIncludeServlet", request.getAttribute(Dispatcher.INCLUDE_SERVLET_PATH));
            assertEquals( "/assertpath", request.getAttribute(Dispatcher.INCLUDE_PATH_INFO));
            assertEquals( "do=end", request.getAttribute(Dispatcher.INCLUDE_QUERY_STRING));

            List expectedAttributeNames = Arrays.asList(Dispatcher.FORWARD_REQUEST_URI, Dispatcher.FORWARD_CONTEXT_PATH, Dispatcher.FORWARD_SERVLET_PATH,
                    Dispatcher.FORWARD_PATH_INFO, Dispatcher.FORWARD_QUERY_STRING,
                    Dispatcher.INCLUDE_REQUEST_URI, Dispatcher.INCLUDE_CONTEXT_PATH, Dispatcher.INCLUDE_SERVLET_PATH,
                    Dispatcher.INCLUDE_PATH_INFO, Dispatcher.INCLUDE_QUERY_STRING);
            List requestAttributeNames = Collections.list(request.getAttributeNames());
            assertTrue(requestAttributeNames.containsAll(expectedAttributeNames));

            assertEquals("/includepath", request.getPathInfo());
            assertEquals(null, request.getPathTranslated());
            assertEquals("do=assertforwardinclude", request.getQueryString());
            assertEquals("/context/IncludeServlet/includepath", request.getRequestURI());
            assertEquals("/context", request.getContextPath());
            assertEquals("/IncludeServlet", request.getServletPath());

            response.setContentType("text/html");
            response.setStatus(HttpServletResponse.SC_OK);
        }
    }

    /** Target of include-then-forward: forward must hide all INCLUDE_* attributes. */
    public static class AssertIncludeForwardServlet extends HttpServlet implements Servlet
    {
        protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
        {
            // forward hides include
            assertEquals( null, request.getAttribute(Dispatcher.INCLUDE_REQUEST_URI));
            assertEquals( null, request.getAttribute(Dispatcher.INCLUDE_CONTEXT_PATH) );
            assertEquals( null, request.getAttribute(Dispatcher.INCLUDE_SERVLET_PATH));
            assertEquals( null, request.getAttribute(Dispatcher.INCLUDE_PATH_INFO));
            assertEquals( null, request.getAttribute(Dispatcher.INCLUDE_QUERY_STRING));

            assertEquals( "/context/IncludeServlet/includepath", request.getAttribute(Dispatcher.FORWARD_REQUEST_URI));
            assertEquals( "/context", request.getAttribute(Dispatcher.FORWARD_CONTEXT_PATH) );
            assertEquals( "/IncludeServlet", request.getAttribute(Dispatcher.FORWARD_SERVLET_PATH));
            assertEquals( "/includepath", request.getAttribute(Dispatcher.FORWARD_PATH_INFO));
            assertEquals( "do=forward", request.getAttribute(Dispatcher.FORWARD_QUERY_STRING) );

            List expectedAttributeNames = Arrays.asList(Dispatcher.FORWARD_REQUEST_URI, Dispatcher.FORWARD_CONTEXT_PATH, Dispatcher.FORWARD_SERVLET_PATH,
                    Dispatcher.FORWARD_PATH_INFO, Dispatcher.FORWARD_QUERY_STRING);
            List requestAttributeNames = Collections.list(request.getAttributeNames());
            assertTrue(requestAttributeNames.containsAll(expectedAttributeNames));

            assertEquals("/assertpath", request.getPathInfo());
            assertEquals(null, request.getPathTranslated());
            assertEquals("do=end", request.getQueryString());
            assertEquals("/context/AssertIncludeForwardServlet/assertpath", request.getRequestURI());
            assertEquals("/context", request.getContextPath());
            assertEquals("/AssertIncludeForwardServlet", request.getServletPath());

            response.setContentType("text/html");
            response.setStatus(HttpServletResponse.SC_OK);
        }
    }
}
/* Copyright (C) 2005-2014, by the President and Fellows of Harvard College. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Dataverse Network - A web application to share, preserve and analyze research data. Developed at the Institute for Quantitative Social Science, Harvard University. Version 3.0. */ package edu.harvard.iq.dataverse.dataaccess; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.logging.Logger; import java.util.List; import java.util.Map; import java.util.HashMap; import java.util.Arrays; import java.util.ArrayList; import java.util.LinkedHashMap; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.datavariable.DataVariable; import edu.harvard.iq.dataverse.datavariable.VariableCategory; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.rserve.*; /** * 4.0 implementation of the Data Access "optional service" that offers * access to "subsettable" (tabular) data files in alternative formats. * In reality, there will only be 1 alternative format supported in 4.0: * R data. The module will still provide functionality for supporting * multiple alternative formats. 
* * @author Leonid Andreev */ public class DataFileConverter { private static Logger dbgLog = Logger.getLogger(DataFileConverter.class.getPackage().getName()); public DataFileConverter() { } public static String FILE_TYPE_TAB = "tab"; public static String FILE_TYPE_RDATA = "RData"; public static String SERVICE_REQUEST_CONVERT = "convert"; public static FileAccessObject performFormatConversion (DataFile file, FileAccessObject fileDownload, String formatRequested, String formatType) { if (!file.isTabularData()) { return null; } File tabFile = null; File formatConvertedFile = null; String cachedFileSystemLocation = null; // initialize the data variables list: List<DataVariable> dataVariables = file.getDataTable().getDataVariables(); // if the format requested is "D00", and it's already a TAB file, // we don't need to do anything: if (formatRequested.equals(FILE_TYPE_TAB) && file.getContentType().equals("text/tab-separated-values")) { return fileDownload; } /* No support for "remote" files in 4.0 as of yet. * -- L.A. 4.0 alpha 1 if (file.isRemote()) { //TODO://tabFile = saveRemoteFile (file, fileDownload); } else { */ // If it's a local file we may already have a cached copy of this // format. cachedFileSystemLocation = file.getFileSystemLocation() + "." + formatRequested; if (new File(cachedFileSystemLocation).exists()) { formatConvertedFile = new File(cachedFileSystemLocation); } else { // OK, we don't have a cached copy. So we'll have to run // conversion again (below). 
Let's have the // tab-delimited file handy: tabFile = file.getFileSystemLocation().toFile(); } /*}*/ // Check if the tab file is present and run the conversion: if (tabFile != null && (tabFile.length() > 0)) { formatConvertedFile = runFormatConversion (file, tabFile, formatRequested); // for local files, cache the result: if (formatConvertedFile != null && formatConvertedFile.exists()) { try { File cachedConvertedFile = new File (cachedFileSystemLocation); FileUtil.copyFile(formatConvertedFile,cachedConvertedFile); formatConvertedFile.delete(); formatConvertedFile = cachedConvertedFile; } catch (IOException ex) { // Whatever. For whatever reason we have failed to cache // the format-converted copy of the file we just produced. // But it's not fatal. So we just carry on. } } } // Now check the converted file: if (formatConvertedFile != null && formatConvertedFile.exists()) { fileDownload.closeInputStream(); fileDownload.setSize(formatConvertedFile.length()); try { fileDownload.setInputStream(new FileInputStream(formatConvertedFile)); } catch (IOException ex) { return null; } fileDownload.releaseConnection(); fileDownload.setHTTPMethod(null); fileDownload.setIsLocalFile(true); fileDownload.setMimeType(formatType); String dbFileName = fileDownload.getFileName(); if (dbFileName == null || dbFileName.equals("")) { dbFileName = "f" + file.getId().toString(); } fileDownload.setFileName(generateAltFileName(formatRequested, dbFileName)); if (formatRequested.equals(FILE_TYPE_TAB) && (!fileDownload.noVarHeader())) { String varHeaderLine = null; List dataVariablesList = file.getDataTable().getDataVariables(); //TODO://varHeaderLine = generateVariableHeader(dataVariablesList); fileDownload.setVarHeader(varHeaderLine); } else { fileDownload.setNoVarHeader(true); fileDownload.setVarHeader(null); // (otherwise, since this is a subsettable file, the variable header // will be added to this R/Stata/etc. file -- which would // totally screw things up!) 
} //TODO://setDownloadContentHeaders (fileDownload); return fileDownload; } return null; } // end of performformatconversion(); // Method for (subsettable) file format conversion. // The method needs the subsettable file saved on disk as in the // TAB-delimited format. // Meaning, if this is a remote subsettable file, it needs to be downloaded // and stored locally as a temporary file; and if it's a fixed-field file, it // needs to be converted to TAB-delimited, before you can feed the file // to this method. (See performFormatConversion() method) // The method below takes the tab file and sends it to the R server // (possibly running on a remote host) and gets back the transformed copy, // providing error-checking and diagnostics in the process. // This is mostly Akio Sone's code. private static File runFormatConversion (DataFile file, File tabFile, String formatRequested) { if ( formatRequested.equals (FILE_TYPE_TAB) ) { // if the *requested* format is TAB-delimited, we don't // need to call R to do any conversions, we can just // send back the TAB file we have just produced. // (OK, so that the assumption is, if this is a fixed-field file -- // from ICPSR or otherwise -- the Access service has already // converted it to tab-delimited... TODO: review this logic; // perhaps fixed-field to tabular should also be handled here? // -- L.A. 
4.0 alpha 1) return tabFile; } File formatConvertedFile = null; // create the service instance RemoteDataFrameService dfs = new RemoteDataFrameService(); if ("RData".equals(formatRequested)) { List<DataVariable> dataVariables = file.getDataTable().getDataVariables(); Map<String, Map<String, String>> vls = null; vls = getValueTableForRequestedVariables(dataVariables); dbgLog.fine("format conversion: variables(getDataVariableForRequest())=" + dataVariables + "\n"); dbgLog.fine("format conversion: variables(dataVariables)=" + dataVariables + "\n"); dbgLog.fine("format conversion: value table(vls)=" + vls + "\n"); RJobRequest sro = new RJobRequest(dataVariables, vls); sro.setTabularDataFileName(tabFile.getAbsolutePath()); sro.setRequestType(SERVICE_REQUEST_CONVERT); sro.setFormatRequested(FILE_TYPE_RDATA); // execute the service Map<String, String> resultInfo = dfs.execute(sro); //resultInfo.put("offlineCitation", citation); dbgLog.fine("resultInfo="+resultInfo+"\n"); // check whether a requested file is actually created if ("true".equals(resultInfo.get("RexecError"))){ dbgLog.fine("R-runtime error trying to convert a file."); return null; } else { String dataFrameFileName = resultInfo.get("dataFrameFileName"); dbgLog.fine("data frame file name: "+dataFrameFileName); formatConvertedFile = new File(dataFrameFileName); } } else if ("prep".equals(formatRequested)) { formatConvertedFile = dfs.runDataPreprocessing(file); } else { dbgLog.warning("Unsupported file format requested: "+formatRequested); return null; } if (formatConvertedFile.exists()) { dbgLog.fine("frmtCnvrtdFile:length=" + formatConvertedFile.length()); } else { dbgLog.warning("Format-converted file was not properly created."); return null; } return formatConvertedFile; } private static Map<String, Map<String, String>> getValueTableForRequestedVariables(List<DataVariable> dvs){ Map<String, Map<String, String>> vls = new LinkedHashMap<String, Map<String, String>>(); for (DataVariable dv : dvs){ 
List<VariableCategory> varCat = new ArrayList<VariableCategory>(); varCat.addAll(dv.getCategories()); Map<String, String> vl = new HashMap<String, String>(); for (VariableCategory vc : varCat){ if (vc.getLabel() != null){ vl.put(vc.getValue(), vc.getLabel()); } } if (vl.size() > 0){ vls.put("v"+dv.getId(), vl); } } return vls; } private static String generateAltFileName(String formatRequested, String xfileId) { String altFileName = xfileId; if ( altFileName == null || altFileName.equals("")) { altFileName = "Converted"; } if ( formatRequested != null ) { altFileName = FileUtil.replaceExtension(altFileName, formatRequested); } return altFileName; } }
package org.deeplearning4j.jp.rnn.general.timeseries; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; import java.util.Random; import org.deeplearning4j.examples.rnn.shakespeare.CharacterIterator; import org.deeplearning4j.nn.api.Layer; import org.deeplearning4j.nn.api.OptimizationAlgorithm; import org.deeplearning4j.nn.conf.MultiLayerConfiguration; import org.deeplearning4j.nn.conf.NeuralNetConfiguration; import org.deeplearning4j.nn.conf.Updater; import org.deeplearning4j.nn.conf.distribution.UniformDistribution; import org.deeplearning4j.nn.conf.layers.GravesLSTM; import org.deeplearning4j.nn.conf.layers.RnnOutputLayer; import org.deeplearning4j.nn.multilayer.MultiLayerNetwork; import org.deeplearning4j.nn.weights.WeightInit; import org.deeplearning4j.optimize.listeners.ScoreIterationListener; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction; /** * TODO * 1. generate a few fake timeseries w labels (fraud, no-fraud) * 2. setup the network * 3. set the data load of the timeseries into the network .fit() method * 4. 
output a report on how accurate the model is * * @author josh * */ public class LSTM_GenericTimeseriesClassification { public static void main( String[] args ) throws Exception { int lstmLayerSize = 200; //Number of units in each GravesLSTM layer int miniBatchSize = 32; //Size of mini batch to use when training int examplesPerEpoch = 50 * miniBatchSize; //i.e., how many examples to learn on between generating samples int exampleLength = 100; //Length of each training example int numEpochs = 30; //Total number of training + sample generation epochs int nSamplesToGenerate = 4; //Number of samples to generate after each training epoch int nCharactersToSample = 130; //Length of each sample to generate String generationInitialization = null; //Optional character initialization; a random character is used if null // Above is Used to 'prime' the LSTM with a character sequence to continue/complete. // Initialization characters must all be in CharacterIterator.getMinimalCharacterSet() by default Random rng = new Random(12345); //Get a DataSetIterator that handles vectorization of text into something we can use to train // our GravesLSTM network. 
CharacterIterator iter = getShakespeareIterator(miniBatchSize,exampleLength,examplesPerEpoch); int nOut = iter.totalOutcomes(); //Set up network configuration: MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder() .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1) .learningRate(0.1) .rmsDecay(0.95) .seed(12345) .regularization(true) .l2(0.001) .list(3) .layer(0, new GravesLSTM.Builder().nIn(iter.inputColumns()).nOut(lstmLayerSize) .updater(Updater.RMSPROP) .activation("tanh").weightInit(WeightInit.DISTRIBUTION) .dist(new UniformDistribution(-0.08, 0.08)).build()) .layer(1, new GravesLSTM.Builder().nIn(lstmLayerSize).nOut(lstmLayerSize) .updater(Updater.RMSPROP) .activation("tanh").weightInit(WeightInit.DISTRIBUTION) .dist(new UniformDistribution(-0.08, 0.08)).build()) .layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation("softmax") //MCXENT + softmax for classification .updater(Updater.RMSPROP) .nIn(lstmLayerSize).nOut(nOut).weightInit(WeightInit.DISTRIBUTION) .dist(new UniformDistribution(-0.08, 0.08)).build()) .pretrain(false).backprop(true) .build(); MultiLayerNetwork net = new MultiLayerNetwork(conf); net.init(); net.setListeners(new ScoreIterationListener(1)); //Print the number of parameters in the network (and for each layer) Layer[] layers = net.getLayers(); int totalNumParams = 0; for( int i=0; i<layers.length; i++ ){ int nParams = layers[i].numParams(); System.out.println("Number of parameters in layer " + i + ": " + nParams); totalNumParams += nParams; } System.out.println("Total number of network parameters: " + totalNumParams); String[] initStrings = { "diary", "gozer", "are", "I", "dear" }; //Do training, and then generate and print samples from network for( int i=0; i<numEpochs; i++ ){ net.fit(iter); System.out.println("--------------------"); System.out.println("Completed epoch " + i ); System.out.println("Sampling characters from network given initialization \""+ (generationInitialization 
== null ? "" : generationInitialization) +"\""); String[] samples = sampleCharactersFromNetwork( initStrings[ i % initStrings.length ] ,net,iter,rng,nCharactersToSample,nSamplesToGenerate); for( int j=0; j<samples.length; j++ ){ System.out.println("----- Sample " + j + " -----"); System.out.println( "Init String: " + initStrings[ i % initStrings.length ] ); System.out.println(samples[j]); System.out.println(); } iter.reset(); //Reset iterator for another epoch } System.out.println("\n\nExample complete"); } /** Downloads Shakespeare training data and stores it locally (temp directory). Then set up and return a simple * DataSetIterator that does vectorization based on the text. * @param miniBatchSize Number of text segments in each training mini-batch * @param exampleLength Number of characters in each text segment. * @param examplesPerEpoch Number of examples we want in an 'epoch'. */ private static CharacterIterator getShakespeareIterator(int miniBatchSize, int exampleLength, int examplesPerEpoch) throws Exception{ //The Complete Works of William Shakespeare //5.3MB file in UTF-8 Encoding, ~5.4 million characters //https://www.gutenberg.org/ebooks/100 //String url = "https://s3.amazonaws.com/dl4j-distribution/pg100.txt"; //String tempDir = System.getProperty("java.io.tmpdir"); String fileLocation = "src/main/resources/rnn_sammer.txt"; //tempDir + "/Shakespeare.txt"; //Storage location from downloaded file File f = new File(fileLocation); //URL resource = LSTM_SammerBot.class.getResource( fileLocation ); //System.out.println( resource ); //File f = Paths.get(resource.toURI()).toFile(); /* if( !f.exists() ){ FileUtils.copyURLToFile(new URL(url), f); System.out.println("File downloaded to " + f.getAbsolutePath()); } else {*/ System.out.println("Using existing text file at " + f.getAbsolutePath()); //} if(!f.exists()) throw new IOException("File does not exist: " + fileLocation); //Download problem? 
char[] validCharacters = CharacterIterator.getMinimalCharacterSet(); //Which characters are allowed? Others will be removed return new CharacterIterator(fileLocation, Charset.forName("UTF-8"), miniBatchSize, exampleLength, examplesPerEpoch, validCharacters, new Random(12345),true); } /** Generate a sample from the network, given an (optional, possibly null) initialization. Initialization * can be used to 'prime' the RNN with a sequence you want to extend/continue.<br> * Note that the initalization is used for all samples * @param initialization String, may be null. If null, select a random character as initialization for all samples * @param charactersToSample Number of characters to sample from network (excluding initialization) * @param net MultiLayerNetwork with one or more GravesLSTM/RNN layers and a softmax output layer * @param iter CharacterIterator. Used for going from indexes back to characters */ private static String[] sampleCharactersFromNetwork( String initialization, MultiLayerNetwork net, CharacterIterator iter, Random rng, int charactersToSample, int numSamples ){ //Set up initialization. 
If no initialization: use a random character if( initialization == null ){ initialization = String.valueOf(iter.getRandomCharacter()); } //Create input for initialization INDArray initializationInput = Nd4j.zeros(numSamples, iter.inputColumns(), initialization.length()); char[] init = initialization.toCharArray(); for( int i=0; i<init.length; i++ ){ int idx = iter.convertCharacterToIndex(init[i]); for( int j=0; j<numSamples; j++ ){ initializationInput.putScalar(new int[]{j,idx,i}, 1.0f); } } StringBuilder[] sb = new StringBuilder[numSamples]; for( int i=0; i<numSamples; i++ ) sb[i] = new StringBuilder(initialization); //Sample from network (and feed samples back into input) one character at a time (for all samples) //Sampling is done in parallel here net.rnnClearPreviousState(); INDArray output = net.rnnTimeStep(initializationInput); output = output.tensorAlongDimension(output.size(2)-1,1,0); //Gets the last time step output for( int i=0; i<charactersToSample; i++ ){ //Set up next input (single time step) by sampling from previous output INDArray nextInput = Nd4j.zeros(numSamples,iter.inputColumns()); //Output is a probability distribution. 
Sample from this for each example we want to generate, and add it to the new input for( int s=0; s<numSamples; s++ ){ double[] outputProbDistribution = new double[iter.totalOutcomes()]; for( int j=0; j<outputProbDistribution.length; j++ ) outputProbDistribution[j] = output.getDouble(s,j); int sampledCharacterIdx = sampleFromDistribution(outputProbDistribution,rng); nextInput.putScalar(new int[]{s,sampledCharacterIdx}, 1.0f); //Prepare next time step input sb[s].append(iter.convertIndexToCharacter(sampledCharacterIdx)); //Add sampled character to StringBuilder (human readable output) } output = net.rnnTimeStep(nextInput); //Do one time step of forward pass } String[] out = new String[numSamples]; for( int i=0; i<numSamples; i++ ) out[i] = sb[i].toString(); return out; } /** Given a probability distribution over discrete classes, sample from the distribution * and return the generated class index. * @param distribution Probability distribution over classes. Must sum to 1.0 */ private static int sampleFromDistribution( double[] distribution, Random rng ){ double d = rng.nextDouble(); double sum = 0.0; for( int i=0; i<distribution.length; i++ ){ sum += distribution[i]; if( d <= sum ) return i; } //Should never happen if distribution is a valid probability distribution throw new IllegalArgumentException("Distribution is invalid? d="+d+", sum="+sum); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.jgeppert.struts2.jquery.tree.views.jsp.ui;

import com.jgeppert.struts2.jquery.tree.components.Tree;
import com.jgeppert.struts2.jquery.views.jsp.ui.AbstractContainerTag;
import com.opensymphony.xwork2.util.ValueStack;
import org.apache.struts2.components.Component;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * JSP tag handler for the jsTree-based tree component. Holds the tag's
 * attributes as plain String fields (taglib/JSP binding populates them via
 * the public setters below) and copies them onto the {@link Tree} component
 * in {@link #populateParams()}.
 *
 * @author <a href="http://www.jgeppert.com">Johannes Geppert</a>
 */
public class TreeTag extends AbstractContainerTag {

    private static final long serialVersionUID = -6297851020849153739L;

    // Theme / appearance attributes
    protected String jstreetheme;
    protected String jstreethemeVariant;
    protected String jstreethemeResponsive;
    protected String htmlTitles;
    protected String animation;
    protected String initiallyOpen;
    protected String rtl;
    // AJAX data source and click handling
    protected String href;
    protected String onClickTopics;
    // Server-side node model mapping
    protected String rootNode;
    protected String childCollectionProperty;
    protected String nodeTitleProperty;
    protected String nodeTypeProperty;
    protected String nodeIdProperty;
    protected String nodeHref;
    protected String nodeHrefParamName;
    protected String nodeTargets;
    protected String openAllOnLoad;
    protected String openAllOnRefresh;
    // jsTree plugin configuration
    protected String contextmenu;
    protected String plugins;
    protected String types;
    protected String showThemeDots;
    protected String showThemeIcons;
    // Checkbox plugin attributes (topic names for the pub/sub events)
    protected String checkbox;
    protected String checkboxTwoState;
    protected String checkboxToogleAllTopics;
    protected String checkboxHideTopics;
    protected String checkboxShowTopics;
    protected String checkboxCheckAllTopics;
    protected String checkboxUncheckAllTopics;
    // Search plugin attributes
    protected String searchTopic;
    protected String searchClearTopic;
    protected String searchElementId;
    protected String onSearchCompleteTopics;
    protected String onSearchClearTopics;

    /**
     * Creates the {@link Tree} component instance this tag renders.
     */
    @Override
    public Component getBean(ValueStack stack, HttpServletRequest req, HttpServletResponse res) {
        return new Tree(stack, req, res);
    }

    /**
     * Transfers every tag attribute onto the underlying {@link Tree}
     * component after the superclass has populated the common ones.
     */
    @Override
    protected void populateParams() {
        super.populateParams();

        Tree tree = (Tree) component;
        tree.setJstreetheme(jstreetheme);
        tree.setJstreethemeVariant(jstreethemeVariant);
        tree.setJstreethemeResponsive(jstreethemeResponsive);
        tree.setAnimation(animation);
        tree.setHtmlTitles(htmlTitles);
        tree.setInitiallyOpen(initiallyOpen);
        tree.setRtl(rtl);
        tree.setHref(href);
        tree.setOnClickTopics(onClickTopics);
        tree.setRootNode(rootNode);
        tree.setChildCollectionProperty(childCollectionProperty);
        tree.setNodeIdProperty(nodeIdProperty);
        tree.setNodeTitleProperty(nodeTitleProperty);
        tree.setNodeTypeProperty(nodeTypeProperty);
        tree.setNodeHref(nodeHref);
        tree.setNodeHrefParamName(nodeHrefParamName);
        tree.setNodeTargets(nodeTargets);
        tree.setOpenAllOnLoad(openAllOnLoad);
        tree.setOpenAllOnRefresh(openAllOnRefresh);
        tree.setContextmenu(contextmenu);
        tree.setPlugins(plugins);
        tree.setTypes(types);
        tree.setShowThemeDots(showThemeDots);
        tree.setShowThemeIcons(showThemeIcons);
        tree.setCheckbox(checkbox);
        tree.setCheckboxTwoState(checkboxTwoState);
        tree.setCheckboxToogleAllTopics(checkboxToogleAllTopics);
        tree.setCheckboxShowTopics(checkboxShowTopics);
        tree.setCheckboxHideTopics(checkboxHideTopics);
        tree.setCheckboxCheckAllTopics(checkboxCheckAllTopics);
        tree.setCheckboxUncheckAllTopics(checkboxUncheckAllTopics);
        tree.setSearchElementId(searchElementId);
        tree.setSearchTopic(searchTopic);
        tree.setOnSearchCompleteTopics(onSearchCompleteTopics);
        tree.setSearchClearTopic(searchClearTopic);
        tree.setOnSearchClearTopics(onSearchClearTopics);
    }

    // ------------------------------------------------------------------
    // Plain attribute setters, invoked by the JSP/taglib machinery.
    // Signatures are a reflection contract and must not change.
    // (Note: "Toogle" in setCheckboxToogleAllTopics is a long-standing typo
    // in the published tag attribute name, kept for compatibility.)
    // ------------------------------------------------------------------

    public void setJstreetheme(String jstreetheme) {
        this.jstreetheme = jstreetheme;
    }

    public void setJstreethemeVariant(String jstreethemeVariant) {
        this.jstreethemeVariant = jstreethemeVariant;
    }

    public void setJstreethemeResponsive(String jstreethemeResponsive) {
        this.jstreethemeResponsive = jstreethemeResponsive;
    }

    public void setHtmlTitles(String htmlTitles) {
        this.htmlTitles = htmlTitles;
    }

    public void setAnimation(String animation) {
        this.animation = animation;
    }

    public void setInitiallyOpen(String initiallyOpen) {
        this.initiallyOpen = initiallyOpen;
    }

    public void setRtl(String rtl) {
        this.rtl = rtl;
    }

    @Override
    public void setHref(String href) {
        this.href = href;
    }

    public void setOnClickTopics(String onClickTopics) {
        this.onClickTopics = onClickTopics;
    }

    public void setRootNode(String rootNode) {
        this.rootNode = rootNode;
    }

    public void setChildCollectionProperty(String childCollectionProperty) {
        this.childCollectionProperty = childCollectionProperty;
    }

    public void setNodeTitleProperty(String nodeTitleProperty) {
        this.nodeTitleProperty = nodeTitleProperty;
    }

    public void setNodeIdProperty(String nodeIdProperty) {
        this.nodeIdProperty = nodeIdProperty;
    }

    public void setNodeHref(String nodeHref) {
        this.nodeHref = nodeHref;
    }

    public void setNodeHrefParamName(String nodeHrefParamName) {
        this.nodeHrefParamName = nodeHrefParamName;
    }

    public void setNodeTargets(String nodeTargets) {
        this.nodeTargets = nodeTargets;
    }

    public void setOpenAllOnLoad(String openAllOnLoad) {
        this.openAllOnLoad = openAllOnLoad;
    }

    public void setOpenAllOnRefresh(String openAllOnRefresh) {
        this.openAllOnRefresh = openAllOnRefresh;
    }

    public void setContextmenu(String contextmenu) {
        this.contextmenu = contextmenu;
    }

    public void setPlugins(String plugins) {
        this.plugins = plugins;
    }

    public void setTypes(String types) {
        this.types = types;
    }

    public void setShowThemeIcons(String showThemeIcons) {
        this.showThemeIcons = showThemeIcons;
    }

    public void setShowThemeDots(String showThemeDots) {
        this.showThemeDots = showThemeDots;
    }

    public void setNodeTypeProperty(String nodeTypeProperty) {
        this.nodeTypeProperty = nodeTypeProperty;
    }

    public void setCheckbox(String checkbox) {
        this.checkbox = checkbox;
    }

    public void setCheckboxTwoState(String checkboxTwoState) {
        this.checkboxTwoState = checkboxTwoState;
    }

    public void setCheckboxToogleAllTopics(String checkboxToogleAllTopics) {
        this.checkboxToogleAllTopics = checkboxToogleAllTopics;
    }

    public void setCheckboxHideTopics(String checkboxHideTopics) {
        this.checkboxHideTopics = checkboxHideTopics;
    }

    public void setCheckboxShowTopics(String checkboxShowTopics) {
        this.checkboxShowTopics = checkboxShowTopics;
    }

    public void setCheckboxUncheckAllTopics(String checkboxUncheckAllTopics) {
        this.checkboxUncheckAllTopics = checkboxUncheckAllTopics;
    }

    public void setCheckboxCheckAllTopics(String checkboxCheckAllTopics) {
        this.checkboxCheckAllTopics = checkboxCheckAllTopics;
    }

    public void setSearchTopic(String searchTopic) {
        this.searchTopic = searchTopic;
    }

    public void setSearchElementId(String searchElementId) {
        this.searchElementId = searchElementId;
    }

    public void setOnSearchCompleteTopics(String onSearchCompleteTopics) {
        this.onSearchCompleteTopics = onSearchCompleteTopics;
    }

    public void setSearchClearTopic(String searchClearTopic) {
        this.searchClearTopic = searchClearTopic;
    }

    public void setOnSearchClearTopics(String onSearchClearTopics) {
        this.onSearchClearTopics = onSearchClearTopics;
    }
}
/* * Copyright (c) 2007-2012, Stephen Colebourne & Michael Nascimento Santos * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of JSR-310 nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package java.time; import java.time.calendrical.DateTimeAccessor; import java.time.calendrical.DateTimeAccessor.Query; import java.time.format.DateTimeParseException; import java.time.format.TextStyle; import java.time.jdk8.Jdk7Methods; import java.time.zone.ZoneRules; import java.util.Collections; import java.util.HashMap; import java.util.Locale; import java.util.Map; import java.util.TimeZone; /** * A time-zone ID, such as {@code Europe/Paris}. * <p> * A {@code ZoneId} is used to identify the rules used to convert between an {@link Instant} and a * {@link LocalDateTime}. There are two distinct types of ID: * <p> * <ul> * <li>Fixed offsets - a fully resolved offset from UTC/Greenwich, that uses the same offset for all local * date-times * <li>Geographical regions - an area where a specific set of rules for finding the offset from UTC/Greenwich * apply * </ul> * <p> * Most fixed offsets are represented by {@link ZoneOffset}. * <p> * The actual rules, describing when and how the offset changes, are defined by {@link ZoneRules}. This class * is simply an ID used to obtain the underlying rules. This approach is taken because rules are defined by * governments and change frequently, whereas the ID is stable. * <p> * The distinction has other effects. Serializing the {@code ZoneId} will only send the ID, whereas * serializing the rules sends the entire data set. Similarly, a comparison of two IDs only examines the ID, * whereas a comparison of two rules examines the entire data set. * <p> * The code supports loading a {@code ZoneId} on a JVM which does not have available rules for that ID. This * allows the date-time object, such as {@link ZonedDateTime}, to still be queried. * * <h4>Time-zone IDs</h4> The ID is unique within the system. The formats for offset and region IDs differ. * <p> * An ID is parsed as an offset ID if it starts with 'UTC', 'GMT', '+' or '-', or is a single letter. 
For
 * example, 'Z', '+02:00', '-05:00', 'UTC+05' and 'GMT-6' are all valid offset IDs. Note that some IDs, such
 * as 'D' or '+ABC' meet the criteria, but are invalid.
 * <p>
 * All other IDs are considered to be region IDs.
 * <p>
 * Region IDs are defined by configuration, which can be thought of as a {@code Map} from region ID to
 * {@code ZoneRules}, see {@link ZoneRulesProvider}.
 * <p>
 * Time-zones are defined by governments and change frequently. There are a number of organizations, known
 * here as groups, that monitor time-zone changes and collate them. The default group is the IANA Time Zone
 * Database (TZDB). Other organizations include IATA (the airline industry body) and Microsoft.
 * <p>
 * Each group defines its own format for region ID. The TZDB group defines IDs such as 'Europe/London' or
 * 'America/New_York'. TZDB IDs take precedence over other groups.
 * <p>
 * It is strongly recommended that the group name is included in all Ids supplied by groups other than TZDB to
 * avoid conflicts. For example, IATA airline time-zone region IDs are typically the same as the three letter
 * airport code. However, the airport of Utrecht has the code 'UTC', which is obviously a conflict. The
 * recommended format for region IDs from groups other than TZDB is 'group~region'. Thus if IATA data were
 * defined, Utrecht airport would be 'IATA~UTC'.
 *
 * <h4>Implementation notes</h4> This class is immutable and thread-safe.
 */
public abstract class ZoneId {

  // GWT/JSNI bridge: the /*-{ ... }-*/ body is JavaScript executed by the GWT
  // compiler output, asking the browser's Intl API for the default time zone.
  // It may return null/undefined on browsers without Intl support.
  protected native static String getJavascriptDefaultTimeZoneId() /*-{ return Intl.DateTimeFormat().resolvedOptions().timeZone; }-*/;

  // -----------------------------------------------------------------------
  /**
   * Gets the system default time-zone.
   * <p>
   * This queries the browser (via {@link #getJavascriptDefaultTimeZoneId()}) to find the default
   * time-zone and converts it to a {@code ZoneId}. If the system default time-zone is changed, then
   * the result of this method will also change.
   *
   * @return the zone ID, not null
   * @throws DateTimeException if the converted zone ID has an invalid format
   * @throws ZoneRulesException if the converted zone region ID cannot be found
   */
  public static ZoneId systemDefault() {

    // This does not work on all browsers, see https://stackoverflow.com/a/34602679/220627
    // But I figure it working on some browsers is better than always returning UTC which was
    // the previous implementation
    String z = getJavascriptDefaultTimeZoneId();
    // Fall back to UTC when the browser cannot report a zone.
    return z == null ? ZoneOffset.UTC : ZoneId.of(z);
  }

  // -----------------------------------------------------------------------
  /**
   * Obtains an instance of {@code ZoneId} using its ID using a map of aliases to supplement the standard zone
   * IDs.
   * <p>
   * Many users of time-zones use short abbreviations, such as PST for 'Pacific Standard Time' and PDT for
   * 'Pacific Daylight Time'. These abbreviations are not unique, and so cannot be used as IDs. This method
   * allows a map of string to time-zone to be setup and reused within an application.
   *
   * @param zoneId the time-zone ID, not null
   * @param aliasMap a map of alias zone IDs (typically abbreviations) to real zone IDs, not null
   * @return the zone ID, not null
   * @throws DateTimeException if the zone ID has an invalid format
   * @throws ZoneRulesException if the zone region ID cannot be found
   */
  public static ZoneId of(String zoneId, Map<String, String> aliasMap) {

    Jdk7Methods.Objects_requireNonNull(zoneId, "zoneId");
    Jdk7Methods.Objects_requireNonNull(aliasMap, "aliasMap");
    // Resolve through the alias map first; unmapped IDs pass through unchanged.
    String id = aliasMap.get(zoneId);
    id = (id != null ? id : zoneId);
    return of(id);
  }

  /**
   * Obtains an instance of {@code ZoneId} from an ID ensuring that the ID is valid and available for use.
   * <p>
   * This method parses the ID, applies any appropriate normalization, and validates it against the known set
   * of IDs for which rules are available.
   * <p>
   * An ID is parsed as though it is an offset ID if it starts with 'UTC', 'GMT', '+' or '-', or if it has
   * less than two letters. The offset of {@link ZoneOffset#UTC zero} may be represented in multiple ways,
   * including 'Z', 'UTC', 'GMT', 'UTC0' 'GMT0', '+00:00', '-00:00' and 'UTC+00:00'.
   * <p>
   * Eight forms of ID are recognized, where '{offset}' means to parse using {@link ZoneOffset#of(String)}:
   * <p>
   * <ul>
   * <li><code>{offset}</code> - a {@link ZoneOffset} ID, such as 'Z' or '+02:00'
   * <li><code>UTC</code> - alternate form of a {@code ZoneOffset} ID equal to 'Z'
   * <li><code>UTC0</code> - alternate form of a {@code ZoneOffset} ID equal to 'Z'
   * <li><code>UTC{offset}</code> - alternate form of a {@code ZoneOffset} ID equal to '{offset}'
   * <li><code>GMT</code> - alternate form of a {@code ZoneOffset} ID equal to 'Z'
   * <li><code>GMT0</code> - alternate form of a {@code ZoneOffset} ID equal to 'Z'
   * <li><code>GMT{offset}</code> - alternate form of a {@code ZoneOffset} ID equal to '{offset}'
   * <li><code>{regionID}</code> - full region ID, loaded from configuration
   * </ul>
   * <p>
   * Region IDs must match the regular expression <code>[A-Za-z][A-Za-z0-9~/._+-]+</code>.
   * <p>
   * The detailed format of the region ID depends on the group supplying the data. The default set of data is
   * supplied by the IANA Time Zone Database (TZDB) This has region IDs of the form '{area}/{city}', such as
   * 'Europe/Paris' or 'America/New_York'. This is compatible with most IDs from {@link java.util.TimeZone}.
   * <p>
   * NOTE(review): this GWT port only accepts region IDs that contain a '/'; single-word TZDB IDs
   * (e.g. 'CET', 'EST5EDT') are rejected with a DateTimeParseException -- confirm this limitation
   * is intended for the GWT environment.
   *
   * @param zoneId the time-zone ID, not null
   * @return the zone ID, not null
   * @throws DateTimeException if the zone ID has an invalid format
   * @throws ZoneRulesException if the zone region ID cannot be found
   */
  public static ZoneId of(String zoneId) {

    Jdk7Methods.Objects_requireNonNull(zoneId, "zoneId");
    // Single characters and explicit offsets ('+...'/'-...') parse as offset IDs.
    if (zoneId.length() <= 1 || zoneId.startsWith("+") || zoneId.startsWith("-")) {
      return ZoneOffset.of(zoneId);
    } else if (zoneId.startsWith("UTC") || zoneId.startsWith("GMT")) {
      // 'UTC', 'GMT', 'UTC0' and 'GMT0' all normalize to the zero offset.
      if (zoneId.length() == 3 || (zoneId.length() == 4 && zoneId.charAt(3) == '0')) {
        return ZoneOffset.UTC;
      }
      // 'UTC{offset}' / 'GMT{offset}': parse the remainder as an offset.
      return ZoneOffset.of(zoneId.substring(3));
    } else if (zoneId.contains("/")) {
      // '{area}/{city}' style region ID.
      return new ZoneRegion(zoneId);
    }
    throw new DateTimeParseException("Illegal zoneId (GWT) " + zoneId, zoneId, 0);
  }

  // -----------------------------------------------------------------------
  /**
   * Obtains an instance of {@code ZoneId} from a date-time object.
   * <p>
   * A {@code DateTimeAccessor} represents some form of date and time information. This factory converts the
   * arbitrary date-time object to an instance of {@code ZoneId}.
   *
   * @param dateTime the date-time object to convert, not null
   * @return the zone ID, not null
   * @throws DateTimeException if unable to convert to a {@code ZoneId}
   */
  public static ZoneId from(DateTimeAccessor dateTime) {

    ZoneId obj = dateTime.query(Query.ZONE_ID);
    if (obj == null) {
      throw new DateTimeException("Unable to convert DateTimeAccessor to ZoneId: " + dateTime.getClass());
    }
    return obj;
  }

  // -----------------------------------------------------------------------
  /**
   * Constructor only accessible within the package.
   */
  ZoneId() {

  }

  // -----------------------------------------------------------------------
  /**
   * Gets the unique time-zone ID.
   * <p>
   * This ID uniquely defines this object. The format of an offset based ID is defined by
   * {@link ZoneOffset#getId()}.
* * @return the time-zone unique ID, not null */ public abstract String getId(); // ----------------------------------------------------------------------- /** * Gets the time-zone rules for this ID allowing calculations to be performed. * <p> * The rules provide the functionality associated with a time-zone, such as finding the offset for a given * instant or local date-time. * <p> * A time-zone can be invalid if it is deserialized in a JVM which does not have the same rules loaded as * the JVM that stored it. In this case, calling this method will throw an exception. * <p> * The rules are supplied by {@link ZoneRulesProvider}. An advanced provider may support dynamic updates to * the rules without restarting the JVM. If so, then the result of this method may change over time. Each * individual call will be still remain thread-safe. * <p> * {@link ZoneOffset} will always return a set of rules where the offset never changes. * * @return the rules, not null * @throws DateTimeException if no rules are available for this ID */ public abstract ZoneRules getRules(); // ----------------------------------------------------------------------- /** * Gets the textual representation of the zone, such as 'British Time' or '+02:00'. * <p> * This returns a textual description for the time-zone ID. * <p> * If no textual mapping is found then the {@link #getId() full ID} is returned. * * @param style the length of the text required, not null * @param locale the locale to use, not null * @return the text value of the zone, not null */ public String getText(TextStyle style, Locale locale) { return getId(); } // ----------------------------------------------------------------------- /** * Checks if this time-zone ID is equal to another time-zone ID. * <p> * The comparison is based on the ID. 
* * @param obj the object to check, null returns false * @return true if this is equal to the other time-zone ID */ @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj instanceof ZoneId) { ZoneId other = (ZoneId) obj; return getId().equals(other.getId()); } return false; } /** * A hash code for this time-zone ID. * * @return a suitable hash code */ @Override public int hashCode() { return getId().hashCode(); } // ----------------------------------------------------------------------- /** * Outputs this zone as a {@code String}, using the ID. * * @return a string representation of this time-zone ID, not null */ @Override public String toString() { return getId(); } }
/** * Copyright (c) 2011 Perforce Software. All rights reserved. */ package com.perforce.p4java.tests.dev.unit.features112; import com.perforce.p4java.client.IClient; import com.perforce.p4java.core.file.FileSpecOpStatus; import com.perforce.p4java.core.file.IFileSpec; import com.perforce.p4java.exception.P4JavaException; import com.perforce.p4java.impl.generic.core.file.FileSpec; import com.perforce.p4java.impl.mapbased.server.Server; import com.perforce.p4java.server.IOptionsServer; import com.perforce.p4java.server.IServerMessage; import com.perforce.p4java.server.callback.IStreamingCallback; import com.perforce.p4java.tests.SimpleServerRule; import com.perforce.p4java.tests.dev.annotations.Jobs; import com.perforce.p4java.tests.dev.annotations.TestId; import com.perforce.p4java.tests.dev.unit.P4JavaRshTestCase; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import java.util.ArrayList; import java.util.List; import java.util.Map; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.fail; /** * Test 'p4 dirs' streaming command. */ @Jobs({ "job046233" }) @TestId("Dev112_StreamingDirsTest") public class StreamingDirsTest extends P4JavaRshTestCase { IOptionsServer server = null; IClient client = null; @ClassRule public static SimpleServerRule p4d = new SimpleServerRule("r16.1", StreamingDirsTest.class.getSimpleName()); /** * @Before annotation to a method to be run before each test in a class. */ @Before public void setUp() { // initialization code (before each test). try { server = getSuperConnection(p4d.getRSHURL()); client = server.getClient("p4TestUserWS"); assertNotNull(client); server.setCurrentClient(client); } catch (Exception e) { fail("Unexpected exception: " + e.getLocalizedMessage()); } } /** * @After annotation to a method to be run after each test in a class. */ @After public void tearDown() { // cleanup code (after each test). 
if (server != null) { this.endServerSession(server); } } /** * Test 'p4 dirs' streaming command. */ @Test public void testDirs() { try { server.execStreamingMapCommand("dirs", new String[] { "//p4TestUserWS/basic/readonly/*" }, null, new IStreamingCallback() { List<IFileSpec> specList = new ArrayList<IFileSpec>(); public boolean startResults(int key) throws P4JavaException { return true; } public boolean handleResult( Map<String, Object> resultMap, int key) throws P4JavaException { IFileSpec file = null; if (resultMap != null) { IServerMessage errStr = ((Server) server) .handleFileErrorStr(resultMap); if (errStr == null) { file = new FileSpec((String) resultMap .get("dir")); specList.add(file); } else { if (((Server) server) .isInfoMessage(resultMap)) { if (resultMap.get("dirName") != null) { file = new FileSpec( (String) resultMap .get("dirName")); specList.add(file); } else { file = dummyFileSpec( FileSpecOpStatus.INFO, errStr, resultMap); specList.add(file); } } else { file = dummyFileSpec( FileSpecOpStatus.ERROR, errStr, resultMap); specList.add(file); } } } assertNotNull(file); if (file.getOpStatus() == FileSpecOpStatus.VALID) { System.out.println(file.toString()); } else { assertNotNull(file.getStatusMessage()); System.out.println(file.getStatusMessage()); } return true; } public boolean endResults(int key) throws P4JavaException { return true; } }, 0); } catch (P4JavaException e) { fail("Unexpected exception: " + e.getLocalizedMessage()); } } /** * Test 'p4 dirs' streaming command. 
*/ @Test public void testDirsBadPath() { try { server.execStreamingMapCommand("dirs", new String[] { "//depot/baz/*" }, null, new IStreamingCallback() { List<IFileSpec> specList = new ArrayList<IFileSpec>(); public boolean startResults(int key) throws P4JavaException { return true; } public boolean handleResult( Map<String, Object> resultMap, int key) throws P4JavaException { IFileSpec file = null; if (resultMap != null) { IServerMessage errStr = ((Server) server) .handleFileErrorStr(resultMap); if (errStr == null) { file = new FileSpec((String) resultMap .get("dir")); specList.add(file); } else { if (((Server) server) .isInfoMessage(resultMap)) { if (resultMap.get("dirName") != null) { file = new FileSpec( (String) resultMap .get("dirName")); specList.add(file); } else { file = dummyFileSpec( FileSpecOpStatus.INFO, errStr, resultMap); specList.add(file); } } else { file = dummyFileSpec( FileSpecOpStatus.ERROR, errStr, resultMap); specList.add(file); } } } assertNotNull(file); if (file.getOpStatus() == FileSpecOpStatus.VALID) { System.out.println(file.toString()); } else { assertNotNull(file.getStatusMessage()); System.out.println(file.getStatusMessage()); } return true; } public boolean endResults(int key) throws P4JavaException { return true; } }, 0); } catch (P4JavaException e) { fail("Unexpected exception: " + e.getLocalizedMessage()); } } }
/******************************************************************************* * Copyright (c) 2015 - 2018 * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. *******************************************************************************/ package jsettlers.graphics.image; import go.graphics.GLDrawContext; import go.graphics.IllegalBufferException; import jsettlers.common.Color; import jsettlers.graphics.map.draw.DrawBuffer; /** * This is an image that is stored in the image index file. 
* * @author Michael Zangl */ public class ImageIndexImage extends Image { private static final float IMAGE_DRAW_OFFSET = .5f; private static final float[] tempBuffer = new float[5 * 6]; private final short width; private final short height; private final float[] geometry; private final ImageIndexTexture texture; private final int offsetX; private final int offsetY; private final float umin; private final float vmin; private final float umax; private final float vmax; private final boolean isTorso; private ImageIndexImage torso; /** * Constructs a new image in an image index. * * @param texture * The texture this image is part of. * @param offsetX * The x-offset to the center of the image. * @param offsetY * The y-offset to the center of the image. * @param width * The width of the image * @param height * The height of the image. * @param umin * The bounds of the image on the texture (0..1). * @param vmin * The bounds of the image on the texture (0..1). * @param umax * The bounds of the image on the texture (0..1). * @param vmax * The bounds of the image on the texture (0..1). 
*/ ImageIndexImage(ImageIndexTexture texture, int offsetX, int offsetY, short width, short height, float umin, float vmin, float umax, float vmax, boolean isTorso) { this.texture = texture; this.offsetX = offsetX; this.offsetY = offsetY; this.width = width; this.height = height; this.umin = umin; this.vmin = vmin; this.umax = umax; this.vmax = vmax; this.isTorso = isTorso; geometry = createGeometry(offsetX, offsetY, width, height, umin, vmin, umax, vmax); } @Override public int getWidth() { return width; } @Override public int getHeight() { return height; } @Override public void draw(GLDrawContext gl, Color color) { draw(gl, color, 1); } @Override public void draw(GLDrawContext gl, Color color, float multiply) { if (color == null || !isTorso) { gl.color(multiply, multiply, multiply, 1); } else { gl.color(color.getRed() * multiply, color.getGreen() * multiply, color.getBlue() * multiply, color.getAlpha()); } draw(gl, geometry); if (torso != null) { torso.draw(gl, color, multiply); } } private void draw(GLDrawContext gl, float[] geometryBuffer) { try { gl.drawTrianglesWithTexture(texture.getTextureIndex(gl), geometryBuffer); } catch (IllegalBufferException e) { try { texture.recreateTexture(); gl.drawTrianglesWithTexture(texture.getTextureIndex(gl), geometryBuffer); } catch (IllegalBufferException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } } } @Override public void drawAt(GLDrawContext gl, float x, float y) { drawAt(gl, x, y, null); } @Override public void drawAt(GLDrawContext gl, float x, float y, Color color) { gl.glPushMatrix(); gl.glTranslatef(x, y, 0); draw(gl, color); gl.glPopMatrix(); } @Override public void drawAt(GLDrawContext gl, DrawBuffer buffer, float viewX, float viewY, int iColor) { try { buffer.addImage(texture.getTextureIndex(gl), viewX - offsetX, viewY - offsetY, viewX - offsetX + width, viewY - offsetY + height, umin, vmin, umax, vmax, isTorso ? 
iColor : 0xffffffff); if (torso != null) { torso.drawAt(gl, buffer, viewX, viewY, iColor); } } catch (IllegalBufferException e) { handleIllegalBufferException(e); } } @Override public void drawOnlyImageAt(GLDrawContext gl, DrawBuffer buffer, float viewX, float viewY, int iColor) {} @Override public void drawOnlyShadowAt(GLDrawContext gl, DrawBuffer buffer, float viewX, float viewY, int iColor) {} private static float[] createGeometry(int offsetX, int offsetY, int width, int height, float umin, float vmin, float umax, float vmax) { return new float[] { // top left -offsetX + IMAGE_DRAW_OFFSET, -offsetY + height + IMAGE_DRAW_OFFSET, 0, umin, vmin, // bottom left -offsetX + IMAGE_DRAW_OFFSET, -offsetY + IMAGE_DRAW_OFFSET, 0, umin, vmax, // bottom right -offsetX + width + IMAGE_DRAW_OFFSET, -offsetY + IMAGE_DRAW_OFFSET, 0, umax, vmax, // top right -offsetX + width + IMAGE_DRAW_OFFSET, -offsetY + height + IMAGE_DRAW_OFFSET, 0, umax, vmin, // top left -offsetX + IMAGE_DRAW_OFFSET, -offsetY + height + IMAGE_DRAW_OFFSET, 0, umin, vmin, // bottom right -offsetX + width + IMAGE_DRAW_OFFSET, -offsetY + IMAGE_DRAW_OFFSET, 0, umax, vmax, }; } @Override public void drawImageAtRect(GLDrawContext gl, float minX, float minY, float maxX, float maxY) { System.arraycopy(geometry, 0, tempBuffer, 0, 4 * 5); tempBuffer[0] = minX + IMAGE_DRAW_OFFSET; tempBuffer[1] = maxY + IMAGE_DRAW_OFFSET; tempBuffer[5] = minX + IMAGE_DRAW_OFFSET; tempBuffer[6] = minY + IMAGE_DRAW_OFFSET; tempBuffer[10] = maxX + IMAGE_DRAW_OFFSET; tempBuffer[11] = minY + IMAGE_DRAW_OFFSET; tempBuffer[15] = maxX + IMAGE_DRAW_OFFSET; tempBuffer[16] = maxY + IMAGE_DRAW_OFFSET; tempBuffer[20] = minX + IMAGE_DRAW_OFFSET; tempBuffer[21] = maxY + IMAGE_DRAW_OFFSET; tempBuffer[25] = maxX + IMAGE_DRAW_OFFSET; tempBuffer[26] = minY + IMAGE_DRAW_OFFSET; draw(gl, tempBuffer); if (torso != null) { torso.drawImageAtRect(gl, minX, minY, maxX, maxY); } } public void setTorso(ImageIndexImage torso) { this.torso = torso; } }
/*
 * Copyright 2011 Goldman Sachs.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.gs.collections.impl.set.sorted.immutable;

import java.util.Comparator;
import java.util.Iterator;
import java.util.Set;
import java.util.SortedSet;

import com.gs.collections.api.LazyIterable;
import com.gs.collections.api.block.function.Function;
import com.gs.collections.api.block.predicate.Predicate;
import com.gs.collections.api.collection.MutableCollection;
import com.gs.collections.api.list.ImmutableList;
import com.gs.collections.api.list.MutableList;
import com.gs.collections.api.multimap.MutableMultimap;
import com.gs.collections.api.multimap.sortedset.ImmutableSortedSetMultimap;
import com.gs.collections.api.partition.set.sorted.PartitionImmutableSortedSet;
import com.gs.collections.api.set.SetIterable;
import com.gs.collections.api.set.sorted.ImmutableSortedSet;
import com.gs.collections.api.set.sorted.SortedSetIterable;
import com.gs.collections.api.tuple.Pair;
import com.gs.collections.impl.block.factory.Comparators;
import com.gs.collections.impl.block.factory.Functions;
import com.gs.collections.impl.block.procedure.CollectIfProcedure;
import com.gs.collections.impl.block.procedure.CollectProcedure;
import com.gs.collections.impl.block.procedure.FlatCollectProcedure;
import com.gs.collections.impl.block.procedure.MultimapEachPutProcedure;
import com.gs.collections.impl.block.procedure.MultimapPutProcedure;
import com.gs.collections.impl.block.procedure.RejectProcedure;
import com.gs.collections.impl.block.procedure.SelectProcedure;
import com.gs.collections.impl.collection.immutable.AbstractImmutableCollection;
import com.gs.collections.impl.factory.Lists;
import com.gs.collections.impl.multimap.set.sorted.TreeSortedSetMultimap;
import com.gs.collections.impl.partition.set.sorted.PartitionTreeSortedSet;
import com.gs.collections.impl.set.sorted.mutable.TreeSortedSet;
import com.gs.collections.impl.utility.Iterate;
import com.gs.collections.impl.utility.internal.SetIterables;
import com.gs.collections.impl.utility.internal.SortedSetIterables;
import net.jcip.annotations.Immutable;

/**
 * This class is the parent class for all ImmutableSortedSets. All implementations of ImmutableSortedSet must implement the SortedSet
 * interface so that TreeSet.equals(anImmutableSortedSet) can return true when the contents are the same.
 * <p>
 * The general pattern throughout: "mutating" operations build a mutable {@link TreeSortedSet}
 * (preserving this set's comparator) and return its immutable snapshot via {@code toImmutable()}.
 */
@Immutable
abstract class AbstractImmutableSortedSet<T> extends AbstractImmutableCollection<T>
        implements ImmutableSortedSet<T>, SortedSet<T>
{
    // View this immutable set through the JDK SortedSet interface (same instance).
    public SortedSet<T> castToSortedSet()
    {
        return this;
    }

    // Returns this set plus the element; returns this unchanged set if already present.
    public ImmutableSortedSet<T> newWith(T element)
    {
        if (!this.contains(element))
        {
            return TreeSortedSet.newSet(this).with(element).toImmutable();
        }
        return this;
    }

    // Returns this set minus the element; returns this unchanged set if absent.
    public ImmutableSortedSet<T> newWithout(T element)
    {
        if (this.contains(element))
        {
            TreeSortedSet<T> result = TreeSortedSet.newSet(this);
            result.remove(element);
            return result.toImmutable();
        }
        return this;
    }

    // Returns this set plus all given elements.
    public ImmutableSortedSet<T> newWithAll(Iterable<? extends T> elements)
    {
        TreeSortedSet<T> result = TreeSortedSet.newSet(this);
        result.addAllIterable(elements);
        return result.toImmutable();
    }

    // Returns this set minus all given elements (removeAllFrom inherited from the superclass).
    public ImmutableSortedSet<T> newWithoutAll(Iterable<? extends T> elements)
    {
        TreeSortedSet<T> result = TreeSortedSet.newSet(this);
        this.removeAllFrom(elements, result);
        return result.toImmutable();
    }

    public T getFirst()
    {
        return this.first();
    }

    public T getLast()
    {
        return this.last();
    }

    public abstract Iterator<T> iterator();

    // Factory used by inherited bulk operations; keeps this set's ordering.
    @Override
    protected MutableCollection<T> newMutable(int size)
    {
        return TreeSortedSet.newSet(this.comparator());
    }

    public ImmutableSortedSet<T> select(Predicate<? super T> predicate)
    {
        TreeSortedSet<T> result = TreeSortedSet.newSet(this.comparator());
        this.forEach(new SelectProcedure<T>(predicate, result));
        return result.toImmutable();
    }

    public ImmutableSortedSet<T> reject(Predicate<? super T> predicate)
    {
        TreeSortedSet<T> result = TreeSortedSet.newSet(this.comparator());
        this.forEach(new RejectProcedure<T>(predicate, result));
        return result.toImmutable();
    }

    public PartitionImmutableSortedSet<T> partition(Predicate<? super T> predicate)
    {
        return PartitionTreeSortedSet.of(this, predicate).toImmutable();
    }

    // collect/collectIf/flatCollect return lists, not sets: the transformed values
    // may not be unique or comparable, so sortedness cannot be preserved.
    public <V> ImmutableList<V> collect(Function<? super T, ? extends V> function)
    {
        MutableList<V> result = Lists.mutable.of();
        this.forEach(new CollectProcedure<T, V>(function, result));
        return result.toImmutable();
    }

    public <V> ImmutableList<V> collectIf(Predicate<? super T> predicate, Function<? super T, ? extends V> function)
    {
        MutableList<V> result = Lists.mutable.of();
        this.forEach(new CollectIfProcedure<T, V>(result, function, predicate));
        return result.toImmutable();
    }

    public <V> ImmutableList<V> flatCollect(Function<? super T, ? extends Iterable<V>> function)
    {
        MutableList<V> result = Lists.mutable.of();
        this.forEach(new FlatCollectProcedure<T, V>(function, result));
        return result.toImmutable();
    }

    public <V> ImmutableSortedSetMultimap<V, T> groupBy(Function<? super T, ? extends V> function)
    {
        return this.groupBy(function, TreeSortedSetMultimap.<V, T>newMultimap(this.comparator())).toImmutable();
    }

    public <V, R extends MutableMultimap<V, T>> R groupBy(Function<? super T, ? extends V> function, R target)
    {
        this.forEach(MultimapPutProcedure.on(target, function));
        return target;
    }

    public <V> ImmutableSortedSetMultimap<V, T> groupByEach(Function<? super T, ? extends Iterable<V>> function)
    {
        return this.groupByEach(function, TreeSortedSetMultimap.<V, T>newMultimap(this.comparator())).toImmutable();
    }

    public <V, R extends MutableMultimap<V, T>> R groupByEach(Function<? super T, ? extends Iterable<V>> function, R target)
    {
        this.forEach(MultimapEachPutProcedure.on(target, function));
        return target;
    }

    // zip/zipWithIndex order pairs by the first element, falling back to natural
    // order when this set has no explicit comparator.
    public <S> ImmutableSortedSet<Pair<T, S>> zip(Iterable<S> that)
    {
        Comparator<? super T> comparator = this.comparator();
        if (comparator == null)
        {
            TreeSortedSet<Pair<T, S>> pairs = TreeSortedSet.newSet(Comparators.<Pair<T, S>, T>byFunction(Functions.<T>firstOfPair(), Comparators.<T>naturalOrder()));
            return Iterate.zip(this, that, pairs).toImmutable();
        }
        return Iterate.zip(this, that, TreeSortedSet.<Pair<T, S>>newSet(Comparators.<T>byFirstOfPair(comparator))).toImmutable();
    }

    public ImmutableSortedSet<Pair<T, Integer>> zipWithIndex()
    {
        Comparator<? super T> comparator = this.comparator();
        if (comparator == null)
        {
            TreeSortedSet<Pair<T, Integer>> pairs = TreeSortedSet.newSet(Comparators.<Pair<T, Integer>, T>byFunction(Functions.<T>firstOfPair(), Comparators.<T>naturalOrder()));
            return Iterate.zipWithIndex(this, pairs).toImmutable();
        }
        return Iterate.zipWithIndex(this, TreeSortedSet.<Pair<T, Integer>>newSet(Comparators.<T>byFirstOfPair(comparator))).toImmutable();
    }

    // Set algebra: delegate to SetIterables, collecting into a TreeSortedSet that
    // uses this set's comparator, then snapshot.
    public ImmutableSortedSet<T> union(SetIterable<? extends T> set)
    {
        return SetIterables.unionInto(this, set, TreeSortedSet.<T>newSet(this.comparator())).toImmutable();
    }

    public <R extends Set<T>> R unionInto(SetIterable<? extends T> set, R targetSet)
    {
        return SetIterables.unionInto(this, set, targetSet);
    }

    public ImmutableSortedSet<T> intersect(SetIterable<? extends T> set)
    {
        return SetIterables.intersectInto(this, set, TreeSortedSet.<T>newSet(this.comparator())).toImmutable();
    }

    public <R extends Set<T>> R intersectInto(SetIterable<? extends T> set, R targetSet)
    {
        return SetIterables.intersectInto(this, set, targetSet);
    }

    public ImmutableSortedSet<T> difference(SetIterable<? extends T> subtrahendSet)
    {
        return SetIterables.differenceInto(this, subtrahendSet, TreeSortedSet.<T>newSet(this.comparator())).toImmutable();
    }

    public <R extends Set<T>> R differenceInto(SetIterable<? extends T> subtrahendSet, R targetSet)
    {
        return SetIterables.differenceInto(this, subtrahendSet, targetSet);
    }

    public ImmutableSortedSet<T> symmetricDifference(SetIterable<? extends T> setB)
    {
        return SetIterables.symmetricDifferenceInto(this, setB, TreeSortedSet.<T>newSet(this.comparator())).toImmutable();
    }

    public <R extends Set<T>> R symmetricDifferenceInto(SetIterable<? extends T> set, R targetSet)
    {
        return SetIterables.symmetricDifferenceInto(this, set, targetSet);
    }

    public boolean isSubsetOf(SetIterable<? extends T> candidateSuperset)
    {
        return SetIterables.isSubsetOf(this, candidateSuperset);
    }

    public boolean isProperSubsetOf(SetIterable<? extends T> candidateSuperset)
    {
        return SetIterables.isProperSubsetOf(this, candidateSuperset);
    }

    public ImmutableSortedSet<SortedSetIterable<T>> powerSet()
    {
        // Double cast bridges the wildcard returned by immutablePowerSet to the declared type.
        return (ImmutableSortedSet<SortedSetIterable<T>>) (ImmutableSortedSet<?>) SortedSetIterables.immutablePowerSet(this);
    }

    public <B> LazyIterable<Pair<T, B>> cartesianProduct(SetIterable<B> set)
    {
        return SetIterables.cartesianProduct(this, set);
    }

    // The JDK SortedSet range views are not supported on this immutable implementation.
    public SortedSet<T> subSet(T fromElement, T toElement)
    {
        throw new UnsupportedOperationException();
    }

    public SortedSet<T> headSet(T toElement)
    {
        throw new UnsupportedOperationException();
    }

    public SortedSet<T> tailSet(T fromElement)
    {
        throw new UnsupportedOperationException();
    }
}
/* * Copyright 2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.publish.ivy.internal.publication; import org.gradle.api.Action; import org.gradle.api.InvalidUserDataException; import org.gradle.api.artifacts.ModuleDependency; import org.gradle.api.artifacts.ModuleVersionIdentifier; import org.gradle.api.artifacts.ProjectDependency; import org.gradle.api.artifacts.PublishArtifact; import org.gradle.api.artifacts.DependencyArtifact; import org.gradle.api.component.SoftwareComponent; import org.gradle.api.file.FileCollection; import org.gradle.api.internal.artifacts.DefaultModuleVersionIdentifier; import org.gradle.api.internal.component.SoftwareComponentInternal; import org.gradle.api.internal.component.Usage; import org.gradle.api.internal.file.FileCollectionFactory; import org.gradle.api.internal.file.UnionFileCollection; import org.gradle.internal.typeconversion.NotationParser; import org.gradle.api.publish.internal.ProjectDependencyPublicationResolver; import org.gradle.api.publish.ivy.IvyArtifact; import org.gradle.api.publish.ivy.IvyConfigurationContainer; import org.gradle.api.publish.ivy.IvyModuleDescriptorSpec; import org.gradle.api.publish.ivy.internal.artifact.DefaultIvyArtifactSet; import org.gradle.api.publish.ivy.internal.dependency.DefaultIvyDependency; import org.gradle.api.publish.ivy.internal.dependency.DefaultIvyDependencySet; import org.gradle.api.publish.ivy.internal.dependency.IvyDependencyInternal; 
import org.gradle.api.publish.ivy.internal.publisher.IvyNormalizedPublication; import org.gradle.api.publish.ivy.internal.publisher.IvyPublicationIdentity; import org.gradle.internal.reflect.Instantiator; import java.io.File; import java.util.Collections; import java.util.Set; public class DefaultIvyPublication implements IvyPublicationInternal { private final String name; private final IvyModuleDescriptorSpecInternal descriptor; private final IvyPublicationIdentity publicationIdentity; private final IvyConfigurationContainer configurations; private final DefaultIvyArtifactSet ivyArtifacts; private final DefaultIvyDependencySet ivyDependencies; private final ProjectDependencyPublicationResolver projectDependencyResolver; private FileCollection descriptorFile; private SoftwareComponentInternal component; public DefaultIvyPublication( String name, Instantiator instantiator, IvyPublicationIdentity publicationIdentity, NotationParser<Object, IvyArtifact> ivyArtifactNotationParser, ProjectDependencyPublicationResolver projectDependencyResolver, FileCollectionFactory fileCollectionFactory ) { this.name = name; this.publicationIdentity = publicationIdentity; this.projectDependencyResolver = projectDependencyResolver; configurations = instantiator.newInstance(DefaultIvyConfigurationContainer.class, instantiator); ivyArtifacts = instantiator.newInstance(DefaultIvyArtifactSet.class, name, ivyArtifactNotationParser, fileCollectionFactory); ivyDependencies = instantiator.newInstance(DefaultIvyDependencySet.class); descriptor = instantiator.newInstance(DefaultIvyModuleDescriptorSpec.class, this); } public String getName() { return name; } public IvyModuleDescriptorSpecInternal getDescriptor() { return descriptor; } public void setDescriptorFile(FileCollection descriptorFile) { this.descriptorFile = descriptorFile; } public void descriptor(Action<? 
super IvyModuleDescriptorSpec> configure) { configure.execute(descriptor); } public void from(SoftwareComponent component) { if (this.component != null) { throw new InvalidUserDataException(String.format("Ivy publication '%s' cannot include multiple components", name)); } this.component = (SoftwareComponentInternal) component; configurations.maybeCreate("default"); for (Usage usage : this.component.getUsages()) { String conf = usage.getName(); configurations.maybeCreate(conf); configurations.getByName("default").extend(conf); for (PublishArtifact publishArtifact : usage.getArtifacts()) { artifact(publishArtifact).setConf(conf); } for (ModuleDependency dependency : usage.getDependencies()) { // TODO: When we support multiple components or configurable dependencies, we'll need to merge the confs of multiple dependencies with same id. String confMapping = String.format("%s->%s", conf, dependency.getConfiguration()); if (dependency instanceof ProjectDependency) { addProjectDependency((ProjectDependency) dependency, confMapping); } else { addModuleDependency(dependency, confMapping); } } } } private void addProjectDependency(ProjectDependency dependency, String confMapping) { ModuleVersionIdentifier identifier = projectDependencyResolver.resolve(dependency); ivyDependencies.add(new DefaultIvyDependency( identifier.getGroup(), identifier.getName(), identifier.getVersion(), confMapping, Collections.<DependencyArtifact>emptyList(), dependency.getExcludeRules())); } private void addModuleDependency(ModuleDependency dependency, String confMapping) { ivyDependencies.add(new DefaultIvyDependency(dependency.getGroup(), dependency.getName(), dependency.getVersion(), confMapping, dependency.getArtifacts(), dependency.getExcludeRules())); } public void configurations(Action<? 
super IvyConfigurationContainer> config) { config.execute(configurations); } public IvyConfigurationContainer getConfigurations() { return configurations; } public IvyArtifact artifact(Object source) { return ivyArtifacts.artifact(source); } public IvyArtifact artifact(Object source, Action<? super IvyArtifact> config) { return ivyArtifacts.artifact(source, config); } public void setArtifacts(Iterable<?> sources) { ivyArtifacts.clear(); for (Object source : sources) { artifact(source); } } public DefaultIvyArtifactSet getArtifacts() { return ivyArtifacts; } public String getOrganisation() { return publicationIdentity.getOrganisation(); } public void setOrganisation(String organisation) { publicationIdentity.setOrganisation(organisation); } public String getModule() { return publicationIdentity.getModule(); } public void setModule(String module) { publicationIdentity.setModule(module); } public String getRevision() { return publicationIdentity.getRevision(); } public void setRevision(String revision) { publicationIdentity.setRevision(revision); } public FileCollection getPublishableFiles() { return new UnionFileCollection(ivyArtifacts.getFiles(), descriptorFile); } public IvyPublicationIdentity getIdentity() { return publicationIdentity; } public Set<IvyDependencyInternal> getDependencies() { return ivyDependencies; } public IvyNormalizedPublication asNormalisedPublication() { return new IvyNormalizedPublication(name, getIdentity(), getDescriptorFile(), ivyArtifacts); } private File getDescriptorFile() { if (descriptorFile == null) { throw new IllegalStateException("descriptorFile not set for publication"); } return descriptorFile.getSingleFile(); } public ModuleVersionIdentifier getCoordinates() { return new DefaultModuleVersionIdentifier(getOrganisation(), getModule(), getRevision()); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.harmony.tests.java.lang;

import java.io.UnsupportedEncodingException;
import java.util.Locale;

/**
 * Harmony conformance tests for {@link java.lang.String}: constructors,
 * comparison, searching, case conversion, encoding round-trips and the
 * static {@code valueOf}/{@code format} factories.
 *
 * <p>JUnit 3 style: each {@code test_*} method is discovered by name via
 * {@code junit.framework.TestCase}; no annotations are used.</p>
 *
 * <p>NOTE(review): a few tests (compareToIgnoreCase, toLowerCase/toUpperCase
 * with Turkish locale) temporarily change the default {@link Locale}; they
 * restore it in {@code finally}, but are not safe to run concurrently with
 * other locale-sensitive tests in the same JVM.</p>
 */
public class String2Test extends junit.framework.TestCase {

    // Shared fixtures. hw1 and hw2 are distinct compile-time constants with
    // equal content ("HelloWorld"); the intern() test relies on that.
    String hw1 = "HelloWorld";

    String hw2 = "HelloWorld";

    String hwlc = "helloworld";

    String hwuc = "HELLOWORLD";

    String hello1 = "Hello";

    String world1 = "World";

    String comp11 = "Test String";

    Object obj = new Object();

    char[] buf = { 'W', 'o', 'r', 'l', 'd' };

    char[] rbuf = new char[5];

    /**
     * java.lang.String#String()
     */
    public void test_Constructor() {
        // Test for method java.lang.String()
        assertTrue("Created incorrect string", new String().equals(""));
    }

    /**
     * java.lang.String#String(byte[])
     */
    public void test_Constructor$B() {
        // Test for method java.lang.String(byte [])
        assertTrue("Failed to create string", new String(hw1.getBytes())
                .equals(hw1));
    }

    /**
     * java.lang.String#String(byte[], int)
     */
    @SuppressWarnings("deprecation")
    public void test_Constructor$BI() {
        // Test for method java.lang.String(byte [], int)
        String s = new String(new byte[] { 65, 66, 67, 68, 69 }, 0);
        assertTrue("Incorrect string returned: " + s, s.equals("ABCDE"));
        s = new String(new byte[] { 65, 66, 67, 68, 69 }, 1);
        assertTrue("Did not use nonzero hibyte", !s.equals("ABCDE"));
    }

    /**
     * java.lang.String#String(byte[], int, int)
     */
    public void test_Constructor$BII() {
        // Test for method java.lang.String(byte [], int, int)
        assertTrue("Failed to create string", new String(hw1.getBytes(), 0, hw1
                .getBytes().length).equals(hw1));

        boolean exception = false;
        try {
            new String(new byte[0], 0, Integer.MAX_VALUE);
        } catch (IndexOutOfBoundsException e) {
            exception = true;
        }
        assertTrue("Did not throw exception", exception);
    }

    /**
     * java.lang.String#String(byte[], int, int, int)
     */
    @SuppressWarnings("deprecation")
    public void test_Constructor$BIII() {
        // Test for method java.lang.String(byte [], int, int, int)
        String s = new String(new byte[] { 65, 66, 67, 68, 69 }, 0, 1, 3);
        assertTrue("Incorrect string returned: " + s, s.equals("BCD"));
        s = new String(new byte[] { 65, 66, 67, 68, 69 }, 1, 0, 5);
        assertTrue("Did not use nonzero hibyte", !s.equals("ABCDE"));
    }

    /**
     * java.lang.String#String(byte[], int, int, java.lang.String)
     */
    public void test_Constructor$BIILjava_lang_String() throws Exception {
        // Test for method java.lang.String(byte [], int, int,
        // java.lang.String)
        String s = null;
        s = new String(new byte[] { 65, 66, 67, 68, 69 }, 0, 5, "8859_1");
        assertTrue("Incorrect string returned: " + s, s.equals("ABCDE"));
        // Regression for HARMONY-1111
        assertNotNull(new String(new byte[] { (byte) 0xC0 }, 0, 1, "UTF-8"));
    }

    /**
     * java.lang.String#String(byte[], java.lang.String)
     */
    public void test_Constructor$BLjava_lang_String() throws Exception {
        // Test for method java.lang.String(byte [], java.lang.String)
        String s = null;
        s = new String(new byte[] { 65, 66, 67, 68, 69 }, "8859_1");
        assertTrue("Incorrect string returned: " + s, s.equals("ABCDE"));
    }

    /**
     * java.lang.String#String(char[])
     */
    public void test_Constructor$C() {
        // Test for method java.lang.String(char [])
        assertEquals("Failed Constructor test", "World", new String(buf));
    }

    /**
     * java.lang.String#String(char[], int, int)
     */
    public void test_Constructor$CII() {
        // Test for method java.lang.String(char [], int, int)
        char[] buf = { 'H', 'e', 'l', 'l', 'o', 'W', 'o', 'r', 'l', 'd' };
        String s = new String(buf, 0, buf.length);
        assertTrue("Incorrect string created", hw1.equals(s));

        boolean exception = false;
        try {
            new String(new char[0], 0, Integer.MAX_VALUE);
        } catch (IndexOutOfBoundsException e) {
            exception = true;
        }
        assertTrue("Did not throw exception", exception);
    }

    /**
     * java.lang.String#String(int[], int, int)
     */
    public void test_Constructor$III() {
        // Test for method java.lang.String(int [], int, int)
        try {
            new String(new int[0], 2, Integer.MAX_VALUE);
            fail("Did not throw exception");
        } catch (IndexOutOfBoundsException e) {
            // expected
        }
    }

    /**
     * java.lang.String#String(java.lang.String)
     */
    public void test_ConstructorLjava_lang_String() {
        // Test for method java.lang.String(java.lang.String)
        String s = new String("Hello World");
        assertEquals("Failed to construct correct string", "Hello World", s);
    }

    /**
     * java.lang.String#String(java.lang.StringBuffer)
     */
    public void test_ConstructorLjava_lang_StringBuffer() {
        // Test for method java.lang.String(java.lang.StringBuffer)
        StringBuffer sb = new StringBuffer();
        sb.append("HelloWorld");
        assertEquals("Created incorrect string", "HelloWorld", new String(sb));
    }

    /**
     * java.lang.String#charAt(int)
     */
    public void test_charAtI() {
        // Test for method char java.lang.String.charAt(int)
        assertTrue("Incorrect character returned", hw1.charAt(5) == 'W'
                && (hw1.charAt(1) != 'Z'));
    }

    /**
     * java.lang.String#compareTo(java.lang.String)
     */
    public void test_compareToLjava_lang_String() {
        // Test for method int java.lang.String.compareTo(java.lang.String)
        assertTrue("Returned incorrect value for first < second", "aaaaab"
                .compareTo("aaaaac") < 0);
        assertEquals("Returned incorrect value for first = second", 0, "aaaaac"
                .compareTo("aaaaac"));
        assertTrue("Returned incorrect value for first > second", "aaaaac"
                .compareTo("aaaaab") > 0);
        assertTrue("Considered case to not be of importance", !("A"
                .compareTo("a") == 0));

        try {
            "fixture".compareTo(null);
            fail("No NPE");
        } catch (NullPointerException e) {
        }
    }

    /**
     * java.lang.String#compareToIgnoreCase(java.lang.String)
     */
    public void test_compareToIgnoreCaseLjava_lang_String() {
        // Test for method int
        // java.lang.String.compareToIgnoreCase(java.lang.String)
        assertTrue("Returned incorrect value for first < second", "aaaaab"
                .compareToIgnoreCase("aaaaac") < 0);
        assertEquals("Returned incorrect value for first = second", 0, "aaaaac"
                .compareToIgnoreCase("aaaaac"));
        assertTrue("Returned incorrect value for first > second", "aaaaac"
                .compareToIgnoreCase("aaaaab") > 0);
        assertEquals("Considered case to not be of importance", 0, "A"
                .compareToIgnoreCase("a"));

        assertTrue("0xbf should not compare = to 'ss'", "\u00df"
                .compareToIgnoreCase("ss") != 0);
        assertEquals("0x130 should compare = to 'i'", 0, "\u0130"
                .compareToIgnoreCase("i"));
        assertEquals("0x131 should compare = to 'i'", 0, "\u0131"
                .compareToIgnoreCase("i"));

        // compareToIgnoreCase must be locale-independent: same results with the
        // Turkish default locale (dotted/dotless i) as with the original one.
        Locale defLocale = Locale.getDefault();
        try {
            Locale.setDefault(new Locale("tr", ""));
            assertEquals("Locale tr: 0x130 should compare = to 'i'", 0,
                    "\u0130".compareToIgnoreCase("i"));
            assertEquals("Locale tr: 0x131 should compare = to 'i'", 0,
                    "\u0131".compareToIgnoreCase("i"));
        } finally {
            Locale.setDefault(defLocale);
        }

        try {
            "fixture".compareToIgnoreCase(null);
            fail("No NPE");
        } catch (NullPointerException e) {
        }
    }

    /**
     * java.lang.String#concat(java.lang.String)
     */
    public void test_concatLjava_lang_String() {
        // Test for method java.lang.String
        // java.lang.String.concat(java.lang.String)
        assertTrue("Concatenation failed to produce correct string", hello1
                .concat(world1).equals(hw1));
        boolean exception = false;
        try {
            String a = new String("test");
            String b = null;
            a.concat(b);
        } catch (NullPointerException e) {
            exception = true;
        }
        assertTrue("Concatenation failed to throw NP exception (1)", exception);
        exception = false;
        try {
            String a = new String("");
            String b = null;
            a.concat(b);
        } catch (NullPointerException e) {
            exception = true;
        }
        assertTrue("Concatenation failed to throw NP exception (2)", exception);

        String s1 = "";
        String s2 = "s2";
        String s3 = s1.concat(s2);
        assertEquals(s2, s3);
        // The RI returns a new string even when it's the same as the argument string.
        // assertNotSame(s2, s3);
        s3 = s2.concat(s1);
        assertEquals(s2, s3);
        // Neither Android nor the RI returns a new string when it's the same as *this*.
        // assertNotSame(s2, s3);
        s3 = s2.concat(s1);
        assertSame(s2, s3);
    }

    /**
     * java.lang.String#copyValueOf(char[])
     */
    public void test_copyValueOf$C() {
        // Test for method java.lang.String java.lang.String.copyValueOf(char
        // [])
        char[] t = { 'H', 'e', 'l', 'l', 'o', 'W', 'o', 'r', 'l', 'd' };
        assertEquals("copyValueOf returned incorrect String", "HelloWorld",
                String.copyValueOf(t));
    }

    /**
     * java.lang.String#copyValueOf(char[], int, int)
     */
    public void test_copyValueOf$CII() {
        // Test for method java.lang.String java.lang.String.copyValueOf(char
        // [], int, int)
        char[] t = { 'H', 'e', 'l', 'l', 'o', 'W', 'o', 'r', 'l', 'd' };
        assertEquals("copyValueOf returned incorrect String", "World", String
                .copyValueOf(t, 5, 5));
    }

    /**
     * java.lang.String#endsWith(java.lang.String)
     */
    public void test_endsWithLjava_lang_String() {
        // Test for method boolean java.lang.String.endsWith(java.lang.String)
        assertTrue("Failed to fine ending string", hw1.endsWith("ld"));
    }

    /**
     * java.lang.String#equals(java.lang.Object)
     */
    public void test_equalsLjava_lang_Object() {
        assertEquals("String not equal", hw1, hw2);
        assertEquals("Empty string equals check", "", "");
        assertEquals("Null string equals check", (String) null, (String) null);
        assertFalse("Unequal strings reports as equal", hw1.equals(comp11));
        assertFalse("Null string comparison failed", hw1.equals((String) null));
    }

    /**
     * java.lang.String#equalsIgnoreCase(java.lang.String)
     */
    public void test_equalsIgnoreCaseLjava_lang_String() {
        // Test for method boolean
        // java.lang.String.equalsIgnoreCase(java.lang.String)
        assertTrue("lc version returned unequal to uc", hwlc
                .equalsIgnoreCase(hwuc));
    }

    /**
     * java.lang.String#getBytes()
     */
    public void test_getBytes() {
        // Test for method byte [] java.lang.String.getBytes()
        byte[] sbytes = hw1.getBytes();
        for (int i = 0; i < hw1.length(); i++) {
            assertTrue("Returned incorrect bytes", sbytes[i] == (byte) hw1.charAt(i));
        }

        // Exhaustive round-trip of every non-surrogate BMP char through
        // ISO-8859-1 and UTF-8; encoding failures are deliberately ignored.
        char[] chars = new char[1];
        for (int i = 0; i < 65536; i++) {
            // skip surrogates
            if (i == 0xd800)
                i = 0xe000;
            byte[] result = null;
            chars[0] = (char) i;
            String string = new String(chars);
            try {
                result = string.getBytes("8859_1");
                if (i < 256) {
                    assertEquals((byte) i, result[0]);
                } else {
                    /*
                     * Substitute character should be 0x1A [1], but may be '?'
                     * character. [1]
                     * http://en.wikipedia.org/wiki/Substitute_character
                     */
                    assertTrue(result[0] == '?' || result[0] == 0x1a);
                }
            } catch (java.io.UnsupportedEncodingException e) {
            }
            try {
                result = string.getBytes("UTF8");
                int length = i < 0x80 ? 1 : (i < 0x800 ? 2 : 3);
                assertTrue("Wrong length UTF8: " + Integer.toHexString(i),
                        result.length == length);
                assertTrue(
                        "Wrong bytes UTF8: " + Integer.toHexString(i),
                        (i < 0x80 && result[0] == i)
                                || (i >= 0x80
                                        && i < 0x800
                                        && result[0] == (byte) (0xc0 | ((i & 0x7c0) >> 6))
                                        && result[1] == (byte) (0x80 | (i & 0x3f)))
                                || (i >= 0x800
                                        && result[0] == (byte) (0xe0 | (i >> 12))
                                        && result[1] == (byte) (0x80 | ((i & 0xfc0) >> 6))
                                        && result[2] == (byte) (0x80 | (i & 0x3f))));
            } catch (java.io.UnsupportedEncodingException e) {
            }

            String bytes = null;
            try {
                bytes = new String(result, "UTF8");
                assertTrue("Wrong UTF8 byte length: " + bytes.length() + "("
                        + i + ")", bytes.length() == 1);
                assertTrue(
                        "Wrong char UTF8: "
                                + Integer.toHexString(bytes.charAt(0)) + " ("
                                + i + ")", bytes.charAt(0) == i);
            } catch (java.io.UnsupportedEncodingException e) {
            }
        }

        // Every single byte decodes to the identical char under ISO-8859-1.
        byte[] bytes = new byte[1];
        for (int i = 0; i < 256; i++) {
            bytes[0] = (byte) i;
            String result = null;
            try {
                result = new String(bytes, "8859_1");
                assertEquals("Wrong char length", 1, result.length());
                assertTrue("Wrong char value", result.charAt(0) == (char) i);
            } catch (java.io.UnsupportedEncodingException e) {
            }
        }
    }

    /**
     * java.lang.String#getBytes(int, int, byte[], int)
     */
    @SuppressWarnings("deprecation")
    public void test_getBytesII$BI() {
        // Test for method void java.lang.String.getBytes(int, int, byte [],
        // int)
        byte[] buf = new byte[5];
        "Hello World".getBytes(6, 11, buf, 0);
        assertEquals("Returned incorrect bytes", "World", new String(buf));

        // Bounds must be validated before the destination array is touched.
        try {
            "Hello World".getBytes(-1, 1, null, 0);
            fail("Expected StringIndexOutOfBoundsException");
        } catch (StringIndexOutOfBoundsException e) {
        } catch (NullPointerException e) {
            fail("Threw wrong exception");
        }
    }

    /**
     * java.lang.String#getBytes(java.lang.String)
     */
    public void test_getBytesLjava_lang_String() throws Exception {
        // Test for method byte [] java.lang.String.getBytes(java.lang.String)
        byte[] buf = "Hello World".getBytes();
        assertEquals("Returned incorrect bytes", "Hello World", new String(buf));

        try {
            "string".getBytes("8849_1");
            fail("No UnsupportedEncodingException");
        } catch (UnsupportedEncodingException e) {
        }

        byte[] bytes = "\u3048".getBytes("UTF-8");
        byte[] expected = new byte[] { (byte) 0xE3, (byte) 0x81, (byte) 0x88 };
        assertEquals(expected[0], bytes[0]);
        assertEquals(expected[1], bytes[1]);
        assertEquals(expected[2], bytes[2]);

        // Regression for HARMONY-663
        try {
            "string".getBytes("?Q?D??_??_6ffa?+vG?_??\u951f\ufffd??");
            fail("No UnsupportedEncodingException");
        } catch (UnsupportedEncodingException e) {
            // expected
        }

        // UTF-16 output starts with a byte-order mark, then the LE/BE payload.
        bytes = "-".getBytes("UTF-16");
        expected = new byte[] { (byte) 0xff, (byte) 0xfe };
        assertEquals(expected[0], bytes[0]);
        assertEquals(expected[1], bytes[1]);
        byte[] bytes2 = "-".getBytes("UTF-16LE");
        assertEquals(bytes2[0], bytes[2]);
        assertEquals(bytes2[1], bytes[3]);
    }

    /*
     * java.lang.String#getBytes()
     */
    public void test_getBytes_NPE() throws Exception {
        // The RI documents UnsupportedEncodingException for a null charset
        // name but actually throws NPE; accept either.
        try {
            "abc".getBytes((String) null);
            fail("Should throw NullPointerException");
        } catch (UnsupportedEncodingException whatTheRiDocumentsAndWeThrow) {
        } catch (NullPointerException whatTheRiActuallyThrows) {
        }

        try {
            "Hello World".getBytes(1, 2, null, 1);
            fail("Should throw NullPointerException");
        } catch (NullPointerException e) {
            // Expected
        }
    }

    /**
     * java.lang.String#getChars(int, int, char[], int)
     */
    public void test_getCharsII$CI() {
        // Test for method void java.lang.String.getChars(int, int, char [],
        // int)
        hw1.getChars(5, hw1.length(), rbuf, 0);
        for (int i = 0; i < rbuf.length; i++)
            assertTrue("getChars returned incorrect char(s)", rbuf[i] == buf[i]);
    }

    /**
     * java.lang.String#hashCode()
     */
    public void test_hashCode() {
        // Test for method int java.lang.String.hashCode()
        // Recompute the documented polynomial hash (s[0]*31^(n-1) + ...)
        // from the last char backwards and compare.
        int hwHashCode = 0;
        final int hwLength = hw1.length();
        int powerOfThirtyOne = 1;
        for (int counter = hwLength - 1; counter >= 0; counter--) {
            hwHashCode += hw1.charAt(counter) * powerOfThirtyOne;
            powerOfThirtyOne *= 31;
        }
        assertEquals("String did not hash to correct value", hwHashCode, hw1.hashCode());
        assertEquals("The empty string \"\" did not hash to zero", 0, "".hashCode());
        assertEquals("Calculated wrong string hashcode", -1933545242, "Harmony".hashCode());
    }

    /**
     * java.lang.String#indexOf(int)
     */
    public void test_indexOfI() {
        // Test for method int java.lang.String.indexOf(int)
        assertEquals("Invalid index returned", 1, hw1.indexOf('e'));
        // Supplementary code point: matches the surrogate pair as one unit.
        assertEquals("Invalid index returned", 1, "a\ud800\udc00".indexOf(0x10000));
    }

    /**
     * java.lang.String#indexOf(int, int)
     */
    public void test_indexOfII() {
        // Test for method int java.lang.String.indexOf(int, int)
        assertEquals("Invalid character index returned", 5, hw1.indexOf('W', 2));
        assertEquals("Invalid index returned", 2, "ab\ud800\udc00".indexOf(0x10000, 1));
    }

    /**
     * java.lang.String#indexOf(java.lang.String)
     */
    public void test_indexOfLjava_lang_String() {
        // Test for method int java.lang.String.indexOf(java.lang.String)
        assertTrue("Failed to find string", hw1.indexOf("World") > 0);
        assertTrue("Failed to find string", !(hw1.indexOf("ZZ") > 0));
    }

    /**
     * java.lang.String#indexOf(java.lang.String, int)
     */
    public void test_indexOfLjava_lang_StringI() {
        // Test for method int java.lang.String.indexOf(java.lang.String, int)
        assertTrue("Failed to find string", hw1.indexOf("World", 0) > 0);
        assertTrue("Found string outside index", !(hw1.indexOf("Hello", 6) > 0));
        assertEquals("Did not accept valid negative starting position", 0,
                hello1.indexOf("", -5));
        assertEquals("Reported wrong error code", 5, hello1.indexOf("", 5));
        assertEquals("Wrong for empty in empty", 0, "".indexOf("", 0));
    }

    /**
     * java.lang.String#intern()
     */
    public void test_intern() {
        // Test for method java.lang.String java.lang.String.intern()
        assertTrue("Intern returned incorrect result", hw1.intern() == hw2
                .intern());
    }

    /**
     * java.lang.String#lastIndexOf(int)
     */
    public void test_lastIndexOfI() {
        // Test for method int java.lang.String.lastIndexOf(int)
        assertEquals("Failed to return correct index", 5, hw1.lastIndexOf('W'));
        assertEquals("Returned index for non-existent char", -1, hw1
                .lastIndexOf('Z'));
        assertEquals("Failed to return correct index", 1, "a\ud800\udc00"
                .lastIndexOf(0x10000));
    }

    /**
     * java.lang.String#lastIndexOf(int, int)
     */
    public void test_lastIndexOfII() {
        // Test for method int java.lang.String.lastIndexOf(int, int)
        assertEquals("Failed to return correct index", 5, hw1.lastIndexOf('W',
                6));
        assertEquals("Returned index for char out of specified range", -1, hw1
                .lastIndexOf('W', 4));
        assertEquals("Returned index for non-existent char", -1, hw1
                .lastIndexOf('Z', 9));
    }

    /**
     * java.lang.String#lastIndexOf(java.lang.String)
     */
    public void test_lastIndexOfLjava_lang_String() {
        // Test for method int java.lang.String.lastIndexOf(java.lang.String)
        assertEquals("Returned incorrect index", 5, hw1.lastIndexOf("World"));
        assertEquals("Found String outside of index", -1, hw1
                .lastIndexOf("HeKKKKKKKK"));
    }

    /**
     * java.lang.String#lastIndexOf(java.lang.String, int)
     */
    public void test_lastIndexOfLjava_lang_StringI() {
        // Test for method int java.lang.String.lastIndexOf(java.lang.String,
        // int)
        assertEquals("Returned incorrect index", 5, hw1.lastIndexOf("World", 9));
        int result = hw1.lastIndexOf("Hello", 2);
        assertTrue("Found String outside of index: " + result, result == 0);
        assertEquals("Reported wrong error code", -1, hello1
                .lastIndexOf("", -5));
        assertEquals("Did not accept valid large starting position", 5, hello1
                .lastIndexOf("", 5));
    }

    /**
     * java.lang.String#length()
     */
    public void test_length() {
        // Test for method int java.lang.String.length()
        assertEquals("Invalid length returned", 11, comp11.length());
    }

    /**
     * java.lang.String#regionMatches(int, java.lang.String, int, int)
     */
    public void test_regionMatchesILjava_lang_StringII() {
        // Test for method boolean java.lang.String.regionMatches(int,
        // java.lang.String, int, int)
        String bogusString = "xxcedkedkleiorem lvvwr e''' 3r3r 23r";

        assertTrue("identical regions failed comparison", hw1.regionMatches(2,
                hw2, 2, 5));
        assertTrue("Different regions returned true", !hw1.regionMatches(2,
                bogusString, 2, 5));
    }

    /**
     * java.lang.String#regionMatches(boolean, int, java.lang.String,
     *int, int)
     */
    public void test_regionMatchesZILjava_lang_StringII() {
        // Test for method boolean java.lang.String.regionMatches(boolean, int,
        // java.lang.String, int, int)

        String bogusString = "xxcedkedkleiorem lvvwr e''' 3r3r 23r";

        assertTrue("identical regions failed comparison", hw1.regionMatches(
                false, 2, hw2, 2, 5));
        assertTrue("identical regions failed comparison with different cases",
                hw1.regionMatches(true, 2, hw2, 2, 5));
        assertTrue("Different regions returned true", !hw1.regionMatches(true,
                2, bogusString, 2, 5));
        assertTrue("identical regions failed comparison with different cases",
                hw1.regionMatches(false, 2, hw2, 2, 5));
    }

    /**
     * java.lang.String#replace(char, char)
     */
    public void test_replaceCC() {
        // Test for method java.lang.String java.lang.String.replace(char, char)
        assertEquals("Failed replace", "HezzoWorzd", hw1.replace('l', 'z'));
    }

    /**
     * java.lang.String#replace(CharSequence, CharSequence)
     */
    public void test_replaceLjava_langCharSequenceLjava_langCharSequence() {
        assertEquals("Failed replace", "aaccdd", "aabbdd".replace(
                new StringBuffer("bb"), "cc"));
        assertEquals("Failed replace by bigger seq", "cccbccc", "aba".replace(
                "a", "ccc"));
        assertEquals("Failed replace by smaller seq", "$bba^", "$aaaaa^"
                .replace(new StringBuilder("aa"), "b"));
        // An empty target matches before and after every char.
        assertEquals("Failed to replace empty string", "%%a%%b%%c%%",
                "abc".replace("", "%%"));
        assertEquals("Failed to replace with empty string", "aacc",
                "aabbcc".replace("b", ""));
        assertEquals("Failed to replace in empty string", "abc",
                "".replace("", "abc"));
    }

    /**
     * java.lang.String#startsWith(java.lang.String)
     */
    public void test_startsWithLjava_lang_String() {
        // Test for method boolean
        // java.lang.String.startsWith(java.lang.String)
        assertTrue("Failed to find string", hw1.startsWith("Hello"));
        assertTrue("Found incorrect string", !hw1.startsWith("T"));
    }

    /**
     * java.lang.String#startsWith(java.lang.String, int)
     */
    public void test_startsWithLjava_lang_StringI() {
        // Test for method boolean java.lang.String.startsWith(java.lang.String,
        // int)
        assertTrue("Failed to find string", hw1.startsWith("World", 5));
        assertTrue("Found incorrect string", !hw1.startsWith("Hello", 5));
    }

    /**
     * java.lang.String#substring(int)
     */
    public void test_substringI() {
        // Test for method java.lang.String java.lang.String.substring(int)
        assertEquals("Incorrect substring returned", "World", hw1.substring(5));
        // substring(0) must return this very instance, not a copy.
        assertTrue("not identical", hw1.substring(0) == hw1);
    }

    /**
     * java.lang.String#substring(int, int)
     */
    public void test_substringII() {
        // Test for method java.lang.String java.lang.String.substring(int,
        // int)
        assertTrue("Incorrect substring returned", hw1.substring(0, 5).equals(
                "Hello")
                && (hw1.substring(5, 10).equals("World")));
        assertTrue("not identical", hw1.substring(0, hw1.length()) == hw1);
    }

    /**
     * java.lang.String#substring(int, int)
     */
    public void test_substringErrorMessage() {
        // Exception messages must carry the offending index/length so that
        // failures are diagnosable.
        try {
            hw1.substring(-1, 1);
        } catch (StringIndexOutOfBoundsException ex) {
            String msg = ex.getMessage();
            assertTrue("Expected message to contain -1: " + msg, msg
                    .indexOf("-1") != -1);
        }
        try {
            hw1.substring(4, 1);
        } catch (StringIndexOutOfBoundsException ex) {
            String msg = ex.getMessage();
            assertTrue("Expected message to contain -3: " + msg, msg
                    .indexOf("-3") != -1);
        }
        try {
            hw1.substring(0, 100);
        } catch (StringIndexOutOfBoundsException ex) {
            String msg = ex.getMessage();
            assertTrue("Expected message to contain 100: " + msg, msg
                    .indexOf("100") != -1);
        }
    }

    /**
     * java.lang.String#toCharArray()
     */
    public void test_toCharArray() {
        // Test for method char [] java.lang.String.toCharArray()
        String s = new String(buf, 0, buf.length);
        char[] schars = s.toCharArray();
        for (int i = 0; i < s.length(); i++)
            assertTrue("Returned incorrect char aray", buf[i] == schars[i]);
    }

    /**
     * java.lang.String#toLowerCase()
     */
    public void test_toLowerCase() {
        // Test for method java.lang.String java.lang.String.toLowerCase()
        assertTrue("toLowerCase case conversion did not succeed", hwuc
                .toLowerCase().equals(hwlc));

        assertEquals(
                "a) Sigma has ordinary lower case value when isolated with Unicode 4.0",
                "\u03c3", "\u03a3".toLowerCase());
        assertEquals(
                "b) Sigma has final form lower case value at end of word with Unicode 4.0",
                "a\u03c2", "a\u03a3".toLowerCase());
        assertEquals("toLowerCase case conversion did not succeed",
                "\uD801\uDC44", "\uD801\uDC1C".toLowerCase());
    }

    /**
     * java.lang.String#toLowerCase(java.util.Locale)
     */
    public void test_toLowerCaseLjava_util_Locale() {
        // Test for method java.lang.String
        // java.lang.String.toLowerCase(java.util.Locale)
        assertTrue("toLowerCase case conversion did not succeed", hwuc
                .toLowerCase(java.util.Locale.getDefault()).equals(hwlc));
        assertEquals("Invalid \\u0049 for English", "\u0069", "\u0049"
                .toLowerCase(Locale.ENGLISH));
        // Turkish maps uppercase I to dotless lowercase i (U+0131).
        assertEquals("Invalid \\u0049 for Turkish", "\u0131", "\u0049"
                .toLowerCase(new Locale("tr", "")));
    }

    /**
     * java.lang.String#toString()
     */
    public void test_toString() {
        // Test for method java.lang.String java.lang.String.toString()
        assertTrue("Incorrect string returned", hw1.toString().equals(hw1));
    }

    /**
     * java.lang.String#toUpperCase()
     */
    public void test_toUpperCase() {
        // Test for method java.lang.String java.lang.String.toUpperCase()
        assertTrue("Returned string is not UpperCase", hwlc.toUpperCase()
                .equals(hwuc));

        // Sharp s expands to two chars, so the result must be a new string.
        assertEquals("Wrong conversion", "SS", "\u00df".toUpperCase());

        String s = "a\u00df\u1f56";
        assertTrue("Invalid conversion", !s.toUpperCase().equals(s));

        assertEquals("toUpperCase case conversion did not succeed",
                "\uD801\uDC1C", "\uD801\uDC44".toUpperCase());
    }

    /**
     * java.lang.String#toUpperCase(java.util.Locale)
     */
    public void test_toUpperCaseLjava_util_Locale() {
        // Test for method java.lang.String
        // java.lang.String.toUpperCase(java.util.Locale)
        assertTrue("Returned string is not UpperCase", hwlc.toUpperCase()
                .equals(hwuc));
        assertEquals("Invalid \\u0069 for English", "\u0049", "\u0069"
                .toUpperCase(Locale.ENGLISH));
        // Turkish maps lowercase i to dotted uppercase I (U+0130).
        assertEquals("Invalid \\u0069 for Turkish", "\u0130", "\u0069"
                .toUpperCase(new Locale("tr", "")));
    }

    /**
     * java.lang.String#toUpperCase(java.util.Locale)
     */
    public void test_toUpperCaseLjava_util_Locale_subtest0() {
        // Test for method java.lang.String
        // java.lang.String.toUpperCase(java.util.Locale)
    }

    /**
     * java.lang.String#trim()
     */
    public void test_trim() {
        // Test for method java.lang.String java.lang.String.trim()
        assertTrue("Incorrect string returned", " HelloWorld ".trim().equals(
                hw1));
    }

    /**
     * java.lang.String#valueOf(char[])
     */
    public void test_valueOf$C() {
        // Test for method java.lang.String java.lang.String.valueOf(char [])
        assertEquals("Returned incorrect String", "World", String.valueOf(buf));
    }

    /**
     * java.lang.String#valueOf(char[], int, int)
     */
    public void test_valueOf$CII() {
        // Test for method java.lang.String java.lang.String.valueOf(char [],
        // int, int)
        char[] t = { 'H', 'e', 'l', 'l', 'o', 'W', 'o', 'r', 'l', 'd' };
        assertEquals("copyValueOf returned incorrect String", "World", String
                .valueOf(t, 5, 5));
    }

    /**
     * java.lang.String#valueOf(char)
     */
    public void test_valueOfC() {
        // Test for method java.lang.String java.lang.String.valueOf(char)
        for (int i = 0; i < 65536; i++)
            assertTrue("Incorrect valueOf(char) returned: " + i, String
                    .valueOf((char) i).charAt(0) == (char) i);
    }

    /**
     * java.lang.String#valueOf(double)
     */
    public void test_valueOfD() {
        // Test for method java.lang.String java.lang.String.valueOf(double)
        assertEquals("Incorrect double string returned",
                "1.7976931348623157E308", String.valueOf(Double.MAX_VALUE));
    }

    /**
     * java.lang.String#valueOf(float)
     */
    public void test_valueOfF() {
        // Test for method java.lang.String java.lang.String.valueOf(float)
        assertTrue("incorrect float string returned--got: "
                + String.valueOf(1.0F) + " wanted: 1.0", String.valueOf(1.0F)
                .equals("1.0"));
        assertTrue("incorrect float string returned--got: "
                + String.valueOf(0.9F) + " wanted: 0.9", String.valueOf(0.9F)
                .equals("0.9"));
        assertTrue("incorrect float string returned--got: "
                + String.valueOf(109.567F) + " wanted: 109.567", String
                .valueOf(109.567F).equals("109.567"));
    }

    /**
     * java.lang.String#valueOf(int)
     */
    public void test_valueOfI() {
        // Test for method java.lang.String java.lang.String.valueOf(int)
        assertEquals("returned invalid int string", "1", String.valueOf(1));
    }

    /**
     * java.lang.String#valueOf(long)
     */
    public void test_valueOfJ() {
        // Test for method java.lang.String java.lang.String.valueOf(long)
        assertEquals("returned incorrect long string", "927654321098", String
                .valueOf(927654321098L));
    }

    /**
     * java.lang.String#valueOf(java.lang.Object)
     */
    public void test_valueOfLjava_lang_Object() {
        // Test for method java.lang.String
        // java.lang.String.valueOf(java.lang.Object)
        assertTrue("Incorrect Object string returned", obj.toString().equals(
                String.valueOf(obj)));
    }

    /**
     * java.lang.String#valueOf(boolean)
     */
    public void test_valueOfZ() {
        // Test for method java.lang.String java.lang.String.valueOf(boolean)
        assertTrue("Incorrect boolean string returned", String.valueOf(false)
                .equals("false")
                && (String.valueOf(true).equals("true")));
    }

    /**
     * java.lang.String#contentEquals(CharSequence cs)
     */
    public void test_contentEqualsLjava_lang_CharSequence() {
        // Test for method java.lang.String
        // java.lang.String.contentEquals(CharSequence cs)
        assertFalse("Incorrect result of compare", "qwerty".contentEquals(""));
    }

    /**
     * java.lang.String#format(Locale, String, Object[])
     */
    @SuppressWarnings("boxing")
    public void test_format() {
        assertEquals("13% of sum is 0x11", String.format("%d%% of %s is 0x%x",
                13, "sum", 17));
        assertEquals("empty format", "", String.format("", 123, this));
        try {
            String.format(null);
            fail("NPE is expected on null format");
        } catch (NullPointerException ok) {
        }
    }
}
/*
 * Copyright 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.transformer;

import static com.google.android.exoplayer2.DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS;
import static com.google.android.exoplayer2.DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_MS;
import static com.google.android.exoplayer2.DefaultLoadControl.DEFAULT_MAX_BUFFER_MS;
import static com.google.android.exoplayer2.DefaultLoadControl.DEFAULT_MIN_BUFFER_MS;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.android.exoplayer2.util.Assertions.checkState;
import static com.google.android.exoplayer2.util.Assertions.checkStateNotNull;
import static java.lang.Math.min;

import android.content.Context;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Handler;
import android.os.Looper;
import android.os.ParcelFileDescriptor;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.DefaultLoadControl;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Renderer;
import com.google.android.exoplayer2.RenderersFactory;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.AnalyticsListener;
import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory;
import com.google.android.exoplayer2.extractor.mp4.Mp4Extractor;
import com.google.android.exoplayer2.metadata.MetadataOutput;
import com.google.android.exoplayer2.source.DefaultMediaSourceFactory;
import com.google.android.exoplayer2.source.MediaSourceFactory;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.text.TextOutput;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.VideoRendererEventListener;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;

/**
 * A transformer to transform media inputs.
 *
 * <p>The same Transformer instance can be used to transform multiple inputs (sequentially, not
 * concurrently).
 *
 * <p>Transformer instances must be accessed from a single application thread. For the vast majority
 * of cases this should be the application's main thread. The thread on which a Transformer instance
 * must be accessed can be explicitly specified by passing a {@link Looper} when creating the
 * transformer. If no Looper is specified, then the Looper of the thread that the {@link
 * Transformer.Builder} is created on is used, or if that thread does not have a Looper, the Looper
 * of the application's main thread is used. In all cases the Looper of the thread from which the
 * transformer must be accessed can be queried using {@link #getApplicationLooper()}.
 */
@RequiresApi(18)
public final class Transformer {

  /** A builder for {@link Transformer} instances. */
  public static final class Builder {

    // Mandatory field (checked in build()); all others have defaults set in the constructor.
    private @MonotonicNonNull Context context;
    private @MonotonicNonNull MediaSourceFactory mediaSourceFactory;
    private Muxer.Factory muxerFactory;
    private boolean removeAudio;
    private boolean removeVideo;
    private boolean flattenForSlowMotion;
    private String outputMimeType;
    private Transformer.Listener listener;
    private Looper looper;
    private Clock clock;

    /** Creates a builder with default values. */
    public Builder() {
      muxerFactory = new FrameworkMuxer.Factory();
      outputMimeType = MimeTypes.VIDEO_MP4;
      listener = new Listener() {};
      looper = Util.getCurrentOrMainLooper();
      clock = Clock.DEFAULT;
    }

    /** Creates a builder with the values of the provided {@link Transformer}. */
    private Builder(Transformer transformer) {
      this.context = transformer.context;
      this.mediaSourceFactory = transformer.mediaSourceFactory;
      this.muxerFactory = transformer.muxerFactory;
      this.removeAudio = transformer.transformation.removeAudio;
      this.removeVideo = transformer.transformation.removeVideo;
      this.flattenForSlowMotion = transformer.transformation.flattenForSlowMotion;
      this.outputMimeType = transformer.transformation.outputMimeType;
      this.listener = transformer.listener;
      this.looper = transformer.looper;
      this.clock = transformer.clock;
    }

    /**
     * Sets the {@link Context}.
     *
     * <p>This parameter is mandatory.
     *
     * @param context The {@link Context}.
     * @return This builder.
     */
    public Builder setContext(Context context) {
      // The application context is retained to avoid leaking an Activity context.
      this.context = context.getApplicationContext();
      return this;
    }

    /**
     * Sets the {@link MediaSourceFactory} to be used to retrieve the inputs to transform. The
     * default value is a {@link DefaultMediaSourceFactory} built with the context provided in
     * {@link #setContext(Context)}.
     *
     * @param mediaSourceFactory A {@link MediaSourceFactory}.
     * @return This builder.
     */
    public Builder setMediaSourceFactory(MediaSourceFactory mediaSourceFactory) {
      this.mediaSourceFactory = mediaSourceFactory;
      return this;
    }

    /**
     * Sets whether to remove the audio from the output. The default value is {@code false}.
     *
     * <p>The audio and video cannot both be removed because the output would not contain any
     * samples.
     *
     * @param removeAudio Whether to remove the audio.
     * @return This builder.
     */
    public Builder setRemoveAudio(boolean removeAudio) {
      this.removeAudio = removeAudio;
      return this;
    }

    /**
     * Sets whether to remove the video from the output. The default value is {@code false}.
     *
     * <p>The audio and video cannot both be removed because the output would not contain any
     * samples.
     *
     * @param removeVideo Whether to remove the video.
     * @return This builder.
     */
    public Builder setRemoveVideo(boolean removeVideo) {
      this.removeVideo = removeVideo;
      return this;
    }

    /**
     * Sets whether the input should be flattened for media containing slow motion markers. The
     * transformed output is obtained by removing the slow motion metadata and by actually slowing
     * down the parts of the video and audio streams defined in this metadata. The default value for
     * {@code flattenForSlowMotion} is {@code false}.
     *
     * <p>Only Samsung Extension Format (SEF) slow motion metadata type is supported. The
     * transformation has no effect if the input does not contain this metadata type.
     *
     * <p>For SEF slow motion media, the following assumptions are made on the input:
     *
     * <ul>
     *   <li>The input container format is (unfragmented) MP4.
     *   <li>The input contains an AVC video elementary stream with temporal SVC.
     *   <li>The recording frame rate of the video is 120 or 240 fps.
     * </ul>
     *
     * <p>If specifying a {@link MediaSourceFactory} using {@link
     * #setMediaSourceFactory(MediaSourceFactory)}, make sure that {@link
     * Mp4Extractor#FLAG_READ_SEF_DATA} is set on the {@link Mp4Extractor} used. Otherwise, the slow
     * motion metadata will be ignored and the input won't be flattened.
     *
     * @param flattenForSlowMotion Whether to flatten for slow motion.
     * @return This builder.
     */
    public Builder setFlattenForSlowMotion(boolean flattenForSlowMotion) {
      this.flattenForSlowMotion = flattenForSlowMotion;
      return this;
    }

    /**
     * Sets the MIME type of the output. The default value is {@link MimeTypes#VIDEO_MP4}. Supported
     * values are:
     *
     * <ul>
     *   <li>{@link MimeTypes#VIDEO_MP4}
     *   <li>{@link MimeTypes#VIDEO_WEBM} from API level 21
     * </ul>
     *
     * @param outputMimeType The MIME type of the output.
     * @return This builder.
     */
    public Builder setOutputMimeType(String outputMimeType) {
      this.outputMimeType = outputMimeType;
      return this;
    }

    /**
     * Sets the {@link Transformer.Listener} to listen to the transformation events.
     *
     * <p>This is equivalent to {@link Transformer#setListener(Listener)}.
     *
     * @param listener A {@link Transformer.Listener}.
     * @return This builder.
     */
    public Builder setListener(Transformer.Listener listener) {
      this.listener = listener;
      return this;
    }

    /**
     * Sets the {@link Looper} that must be used for all calls to the transformer and that is used
     * to call listeners on. The default value is the Looper of the thread that this builder was
     * created on, or if that thread does not have a Looper, the Looper of the application's main
     * thread.
     *
     * @param looper A {@link Looper}.
     * @return This builder.
     */
    public Builder setLooper(Looper looper) {
      this.looper = looper;
      return this;
    }

    /**
     * Sets the {@link Clock} that will be used by the transformer. The default value is {@link
     * Clock#DEFAULT}.
     *
     * @param clock The {@link Clock} instance.
     * @return This builder.
     */
    @VisibleForTesting
    /* package */ Builder setClock(Clock clock) {
      this.clock = clock;
      return this;
    }

    /**
     * Sets the factory for muxers that write the media container.
     *
     * @param muxerFactory A {@link Muxer.Factory}.
     * @return This builder.
     */
    @VisibleForTesting
    /* package */ Builder setMuxerFactory(Muxer.Factory muxerFactory) {
      this.muxerFactory = muxerFactory;
      return this;
    }

    /**
     * Builds a {@link Transformer} instance.
     *
     * @throws IllegalStateException If the {@link Context} has not been provided.
     * @throws IllegalStateException If both audio and video have been removed (otherwise the output
     *     would not contain any samples).
     * @throws IllegalStateException If the muxer doesn't support the requested output MIME type.
     */
    public Transformer build() {
      checkStateNotNull(context);
      if (mediaSourceFactory == null) {
        DefaultExtractorsFactory defaultExtractorsFactory = new DefaultExtractorsFactory();
        if (flattenForSlowMotion) {
          // SEF metadata is only read by the extractor when this flag is set; without it the
          // flattening request would be silently ignored.
          defaultExtractorsFactory.setMp4ExtractorFlags(Mp4Extractor.FLAG_READ_SEF_DATA);
        }
        mediaSourceFactory = new DefaultMediaSourceFactory(context, defaultExtractorsFactory);
      }
      checkState(
          muxerFactory.supportsOutputMimeType(outputMimeType),
          "Unsupported output MIME type: " + outputMimeType);
      Transformation transformation =
          new Transformation(removeAudio, removeVideo, flattenForSlowMotion, outputMimeType);
      return new Transformer(
          context, mediaSourceFactory, muxerFactory, transformation, listener, looper, clock);
    }
  }

  /** A listener for the transformation events. */
  public interface Listener {

    /**
     * Called when the transformation is completed.
     *
     * @param inputMediaItem The {@link MediaItem} for which the transformation is completed.
     */
    default void onTransformationCompleted(MediaItem inputMediaItem) {}

    /**
     * Called if an error occurs during the transformation.
     *
     * @param inputMediaItem The {@link MediaItem} for which the error occurs.
     * @param exception The exception describing the error.
     */
    default void onTransformationError(MediaItem inputMediaItem, Exception exception) {}
  }

  /**
   * Progress state. One of {@link #PROGRESS_STATE_WAITING_FOR_AVAILABILITY}, {@link
   * #PROGRESS_STATE_AVAILABLE}, {@link #PROGRESS_STATE_UNAVAILABLE}, {@link
   * #PROGRESS_STATE_NO_TRANSFORMATION}
   */
  @Documented
  @Retention(RetentionPolicy.SOURCE)
  @IntDef({
    PROGRESS_STATE_WAITING_FOR_AVAILABILITY,
    PROGRESS_STATE_AVAILABLE,
    PROGRESS_STATE_UNAVAILABLE,
    PROGRESS_STATE_NO_TRANSFORMATION
  })
  public @interface ProgressState {}

  /**
   * Indicates that the progress is unavailable for the current transformation, but might become
   * available.
   */
  public static final int PROGRESS_STATE_WAITING_FOR_AVAILABILITY = 0;
  /** Indicates that the progress is available. */
  public static final int PROGRESS_STATE_AVAILABLE = 1;
  /** Indicates that the progress is permanently unavailable for the current transformation. */
  public static final int PROGRESS_STATE_UNAVAILABLE = 2;
  /** Indicates that there is no current transformation. */
  // NOTE(review): value 3 is intentionally skipped here in the original source — presumably
  // reserved; confirm against upstream before reusing it.
  public static final int PROGRESS_STATE_NO_TRANSFORMATION = 4;

  private final Context context;
  private final MediaSourceFactory mediaSourceFactory;
  private final Muxer.Factory muxerFactory;
  private final Transformation transformation;
  private final Looper looper;
  private final Clock clock;

  private Transformer.Listener listener;
  // Non-null only while a transformation is in progress; cleared in releaseResources().
  @Nullable private MuxerWrapper muxerWrapper;
  @Nullable private SimpleExoPlayer player;
  @ProgressState private int progressState;

  private Transformer(
      Context context,
      MediaSourceFactory mediaSourceFactory,
      Muxer.Factory muxerFactory,
      Transformation transformation,
      Transformer.Listener listener,
      Looper looper,
      Clock clock) {
    checkState(
        !transformation.removeAudio || !transformation.removeVideo,
        "Audio and video cannot both be removed.");
    this.context = context;
    this.mediaSourceFactory = mediaSourceFactory;
    this.muxerFactory = muxerFactory;
    this.transformation = transformation;
    this.listener = listener;
    this.looper = looper;
    this.clock = clock;
    progressState = PROGRESS_STATE_NO_TRANSFORMATION;
  }

  /** Returns a {@link Transformer.Builder} initialized with the values of this instance. */
  public Builder buildUpon() {
    return new Builder(this);
  }

  /**
   * Sets the {@link Transformer.Listener} to listen to the transformation events.
   *
   * @param listener A {@link Transformer.Listener}.
   * @throws IllegalStateException If this method is called from the wrong thread.
   */
  public void setListener(Transformer.Listener listener) {
    verifyApplicationThread();
    this.listener = listener;
  }

  /**
   * Starts an asynchronous operation to transform the given {@link MediaItem}.
   *
   * <p>The transformation state is notified through the {@link Builder#setListener(Listener)
   * listener}.
   *
   * <p>Concurrent transformations on the same Transformer object are not allowed.
   *
   * <p>The output can contain at most one video track and one audio track. Other track types are
   * ignored. For adaptive bitrate {@link com.google.android.exoplayer2.source.MediaSource media
   * sources}, the highest bitrate video and audio streams are selected.
   *
   * @param mediaItem The {@link MediaItem} to transform. The supported sample formats depend on the
   *     output container format and are described in {@link MediaMuxer#addTrack(MediaFormat)}.
   * @param path The path to the output file.
   * @throws IllegalArgumentException If the path is invalid.
   * @throws IllegalStateException If this method is called from the wrong thread.
   * @throws IllegalStateException If a transformation is already in progress.
   * @throws IOException If an error occurs opening the output file for writing.
   */
  public void startTransformation(MediaItem mediaItem, String path) throws IOException {
    startTransformation(mediaItem, muxerFactory.create(path, transformation.outputMimeType));
  }

  /**
   * Starts an asynchronous operation to transform the given {@link MediaItem}.
   *
   * <p>The transformation state is notified through the {@link Builder#setListener(Listener)
   * listener}.
   *
   * <p>Concurrent transformations on the same Transformer object are not allowed.
   *
   * <p>The output can contain at most one video track and one audio track. Other track types are
   * ignored. For adaptive bitrate {@link com.google.android.exoplayer2.source.MediaSource media
   * sources}, the highest bitrate video and audio streams are selected.
   *
   * @param mediaItem The {@link MediaItem} to transform. The supported sample formats depend on the
   *     output container format and are described in {@link MediaMuxer#addTrack(MediaFormat)}.
   * @param parcelFileDescriptor A readable and writable {@link ParcelFileDescriptor} of the output.
   *     The file referenced by this ParcelFileDescriptor should not be used before the
   *     transformation is completed. It is the responsibility of the caller to close the
   *     ParcelFileDescriptor. This can be done after this method returns.
   * @throws IllegalArgumentException If the file descriptor is invalid.
   * @throws IllegalStateException If this method is called from the wrong thread.
   * @throws IllegalStateException If a transformation is already in progress.
   * @throws IOException If an error occurs opening the output file for writing.
   */
  @RequiresApi(26)
  public void startTransformation(MediaItem mediaItem, ParcelFileDescriptor parcelFileDescriptor)
      throws IOException {
    startTransformation(
        mediaItem, muxerFactory.create(parcelFileDescriptor, transformation.outputMimeType));
  }

  // Shared implementation for both public startTransformation overloads: builds a muted
  // SimpleExoPlayer whose renderers feed the muxer instead of the output devices.
  private void startTransformation(MediaItem mediaItem, Muxer muxer) {
    verifyApplicationThread();
    if (player != null) {
      throw new IllegalStateException("There is already a transformation in progress.");
    }

    MuxerWrapper muxerWrapper = new MuxerWrapper(muxer);
    this.muxerWrapper = muxerWrapper;
    DefaultTrackSelector trackSelector = new DefaultTrackSelector(context);
    trackSelector.setParameters(
        new DefaultTrackSelector.ParametersBuilder(context)
            .setForceHighestSupportedBitrate(true)
            .build());
    // Arbitrarily decrease buffers for playback so that samples start being sent earlier to the
    // muxer (rebuffers are less problematic for the transformation use case).
    DefaultLoadControl loadControl =
        new DefaultLoadControl.Builder()
            .setBufferDurationsMs(
                DEFAULT_MIN_BUFFER_MS,
                DEFAULT_MAX_BUFFER_MS,
                DEFAULT_BUFFER_FOR_PLAYBACK_MS / 10,
                DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS / 10)
            .build();
    player =
        new SimpleExoPlayer.Builder(
                context, new TransformerRenderersFactory(muxerWrapper, transformation))
            .setMediaSourceFactory(mediaSourceFactory)
            .setTrackSelector(trackSelector)
            .setLoadControl(loadControl)
            .setLooper(looper)
            .setClock(clock)
            .build();
    player.setMediaItem(mediaItem);
    player.addAnalyticsListener(new TransformerAnalyticsListener(mediaItem, muxerWrapper));
    // Playback is not started here: TransformerAnalyticsListener.onTimelineChanged calls play()
    // once the media duration is known.
    player.prepare();

    progressState = PROGRESS_STATE_WAITING_FOR_AVAILABILITY;
  }

  /**
   * Returns the {@link Looper} associated with the application thread that's used to access the
   * transformer and on which transformer events are received.
   */
  public Looper getApplicationLooper() {
    return looper;
  }

  /**
   * Returns the current {@link ProgressState} and updates {@code progressHolder} with the current
   * progress if it is {@link #PROGRESS_STATE_AVAILABLE available}.
   *
   * <p>After a transformation {@link Listener#onTransformationCompleted(MediaItem) completes}, this
   * method returns {@link #PROGRESS_STATE_NO_TRANSFORMATION}.
   *
   * @param progressHolder A {@link ProgressHolder}, updated to hold the percentage progress if
   *     {@link #PROGRESS_STATE_AVAILABLE available}.
   * @return The {@link ProgressState}.
   * @throws IllegalStateException If this method is called from the wrong thread.
   */
  @ProgressState
  public int getProgress(ProgressHolder progressHolder) {
    verifyApplicationThread();
    if (progressState == PROGRESS_STATE_AVAILABLE) {
      Player player = checkNotNull(this.player);
      long durationMs = player.getDuration();
      long positionMs = player.getCurrentPosition();
      // durationMs is positive here: the state only becomes AVAILABLE when a positive duration
      // is known (see onTimelineChanged), so this division is safe. Progress is capped at 99
      // until completion is reported.
      progressHolder.progress = min((int) (positionMs * 100 / durationMs), 99);
    }
    return progressState;
  }

  /**
   * Cancels the transformation that is currently in progress, if any.
   *
   * @throws IllegalStateException If this method is called from the wrong thread.
   */
  public void cancel() {
    releaseResources(/* forCancellation= */ true);
  }

  /**
   * Releases the resources.
   *
   * @param forCancellation Whether the reason for releasing the resources is the transformation
   *     cancellation.
   * @throws IllegalStateException If this method is called from the wrong thread.
   * @throws IllegalStateException If the muxer is in the wrong state and {@code forCancellation} is
   *     false.
   */
  private void releaseResources(boolean forCancellation) {
    verifyApplicationThread();
    if (player != null) {
      player.release();
      player = null;
    }
    if (muxerWrapper != null) {
      muxerWrapper.release(forCancellation);
      muxerWrapper = null;
    }
    progressState = PROGRESS_STATE_NO_TRANSFORMATION;
  }

  private void verifyApplicationThread() {
    if (Looper.myLooper() != looper) {
      throw new IllegalStateException("Transformer is accessed on the wrong thread.");
    }
  }

  /** Creates only the renderers needed to feed the muxer: no output-device renderers. */
  private static final class TransformerRenderersFactory implements RenderersFactory {

    private final MuxerWrapper muxerWrapper;
    private final TransformerMediaClock mediaClock;
    private final Transformation transformation;

    public TransformerRenderersFactory(MuxerWrapper muxerWrapper, Transformation transformation) {
      this.muxerWrapper = muxerWrapper;
      this.transformation = transformation;
      mediaClock = new TransformerMediaClock();
    }

    @Override
    public Renderer[] createRenderers(
        Handler eventHandler,
        VideoRendererEventListener videoRendererEventListener,
        AudioRendererEventListener audioRendererEventListener,
        TextOutput textRendererOutput,
        MetadataOutput metadataRendererOutput) {
      int rendererCount = transformation.removeAudio || transformation.removeVideo ? 1 : 2;
      Renderer[] renderers = new Renderer[rendererCount];
      int index = 0;
      if (!transformation.removeAudio) {
        renderers[index] = new TransformerAudioRenderer(muxerWrapper, mediaClock, transformation);
        index++;
      }
      if (!transformation.removeVideo) {
        renderers[index] = new TransformerVideoRenderer(muxerWrapper, mediaClock, transformation);
        index++;
      }
      return renderers;
    }
  }

  /** Drives the transformation lifecycle from player events (end, error, timeline, tracks). */
  private final class TransformerAnalyticsListener implements AnalyticsListener {

    private final MediaItem mediaItem;
    private final MuxerWrapper muxerWrapper;

    public TransformerAnalyticsListener(MediaItem mediaItem, MuxerWrapper muxerWrapper) {
      this.mediaItem = mediaItem;
      this.muxerWrapper = muxerWrapper;
    }

    @Override
    public void onPlaybackStateChanged(EventTime eventTime, int state) {
      if (state == Player.STATE_ENDED) {
        handleTransformationEnded(/* exception= */ null);
      }
    }

    @Override
    public void onTimelineChanged(EventTime eventTime, int reason) {
      if (progressState != PROGRESS_STATE_WAITING_FOR_AVAILABILITY) {
        return;
      }
      Timeline.Window window = new Timeline.Window();
      eventTime.timeline.getWindow(/* windowIndex= */ 0, window);
      if (!window.isPlaceholder) {
        long durationUs = window.durationUs;
        // Make progress permanently unavailable if the duration is unknown, so that it doesn't jump
        // to a high value at the end of the transformation if the duration is set once the media is
        // entirely loaded.
        progressState =
            durationUs <= 0 || durationUs == C.TIME_UNSET
                ? PROGRESS_STATE_UNAVAILABLE
                : PROGRESS_STATE_AVAILABLE;
        checkNotNull(player).play();
      }
    }

    @Override
    public void onTracksChanged(
        EventTime eventTime, TrackGroupArray trackGroups, TrackSelectionArray trackSelections) {
      if (muxerWrapper.getTrackCount() == 0) {
        handleTransformationEnded(
            new IllegalStateException(
                "The output does not contain any tracks. Check that at least one of the input"
                    + " sample formats is supported."));
      }
    }

    @Override
    public void onPlayerError(EventTime eventTime, ExoPlaybackException error) {
      handleTransformationEnded(error);
    }

    private void handleTransformationEnded(@Nullable Exception exception) {
      try {
        releaseResources(/* forCancellation= */ false);
      } catch (IllegalStateException e) {
        // Prefer reporting the release failure only when there is no earlier, more specific cause.
        if (exception == null) {
          exception = e;
        }
      }
      if (exception == null) {
        listener.onTransformationCompleted(mediaItem);
      } else {
        listener.onTransformationError(mediaItem, exception);
      }
    }
  }
}
package net.sf.jabref.logic.importer.fileformat;

import java.io.BufferedReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import net.sf.jabref.logic.formatter.casechanger.TitleCaseFormatter;
import net.sf.jabref.logic.importer.Importer;
import net.sf.jabref.logic.importer.ParserResult;
import net.sf.jabref.logic.util.FileExtensions;
import net.sf.jabref.model.entry.BibEntry;
import net.sf.jabref.model.entry.FieldName;
import net.sf.jabref.model.entry.MonthUtil;

/**
 * Importer for the ISI Web of Science, INSPEC and Medline format.
 * <p>
 * Documentation about ISI WOS format:
 * <p>
 * <ul>
 * <li>http://wos.isitrial.com/help/helpprn.html</li>
 * </ul>
 * <p>
 * <ul>
 * <li>Check compatibility with other ISI2Bib tools like:
 * http://www-lab.imr.tohoku.ac.jp/~t-nissie/computer/software/isi/ or
 * http://www.tug.org/tex-archive/biblio/bibtex/utils/isi2bibtex/isi2bibtex or
 * http://web.mit.edu/emilio/www/utils.html</li>
 * <li>Deal with capitalization correctly</li>
 * </ul>
 */
public class IsiImporter extends Importer {

    /** Matches ISI "/sub x/" and "/sup x/" markup so it can be rewritten as TeX sub/superscripts. */
    private static final Pattern SUB_SUP_PATTERN = Pattern.compile("/(sub|sup)\\s+(.*?)\\s*/");

    // 2006.09.05: Modified pattern to avoid false positives for other files due to an
    // extra | at the end:
    private static final Pattern ISI_PATTERN = Pattern.compile("FN ISI Export Format|VR 1.|PY \\d{4}");

    @Override
    public String getName() {
        return "ISI";
    }

    @Override
    public FileExtensions getExtensions() {
        return FileExtensions.ISI;
    }

    @Override
    public String getId() {
        return "isi";
    }

    @Override
    public String getDescription() {
        return "Importer for the ISI Web of Science, INSPEC and Medline format.";
    }

    /**
     * Recognizes the format by scanning at most the first 50 lines for an ISI header marker.
     */
    @Override
    public boolean isRecognizedFormat(BufferedReader reader) throws IOException {
        String str;
        int i = 0;
        while (((str = reader.readLine()) != null) && (i < 50)) {
            /**
             * The following line gives false positives for RIS files, so it
             * should not be uncommented. The hyphen is a characteristic of the
             * RIS format.
             *
             * str = str.replace(" - ", "")
             */
            if (IsiImporter.ISI_PATTERN.matcher(str).find()) {
                return true;
            }
            i++;
        }
        return false;
    }

    /**
     * Rewrites ISI "/sub x/" and "/sup x/" markup in title/abstract/review/notes fields as
     * TeX math ({@code $_x$} / {@code $^x$}), modifying {@code map} in place.
     *
     * @param map field map to be rewritten in place
     */
    public static void processSubSup(Map<String, String> map) {
        String[] subsup = {FieldName.TITLE, FieldName.ABSTRACT, FieldName.REVIEW, "notes"};

        for (String aSubsup : subsup) {
            if (map.containsKey(aSubsup)) {
                Matcher m = IsiImporter.SUB_SUP_PATTERN.matcher(map.get(aSubsup));
                // StringBuffer (not StringBuilder) is required by Matcher.appendReplacement
                // on the Java versions this code targets.
                StringBuffer sb = new StringBuffer();
                while (m.find()) {
                    String group2 = m.group(2);
                    group2 = group2.replaceAll("\\$", "\\\\\\\\\\\\\\$"); // Escaping insanity! :-)
                    if (group2.length() > 1) {
                        group2 = "{" + group2 + "}";
                    }
                    if ("sub".equals(m.group(1))) {
                        m.appendReplacement(sb, "\\$_" + group2 + "\\$");
                    } else {
                        m.appendReplacement(sb, "\\$^" + group2 + "\\$");
                    }
                }
                m.appendTail(sb);
                map.put(aSubsup, sb.toString());
            }
        }
    }

    /**
     * Converts ALL-CAPS title/journal/publisher values to title case, in place.
     */
    private static void processCapitalization(Map<String, String> map) {
        String[] subsup = {FieldName.TITLE, FieldName.JOURNAL, FieldName.PUBLISHER};

        for (String aSubsup : subsup) {
            if (map.containsKey(aSubsup)) {
                String s = map.get(aSubsup);
                // NOTE(review): toUpperCase() uses the default locale; under e.g. a Turkish
                // locale this comparison can misbehave — consider Locale.ROOT. Left unchanged
                // to avoid altering existing behavior.
                if (s.toUpperCase().equals(s)) {
                    s = new TitleCaseFormatter().format(s);
                    map.put(aSubsup, s);
                }
            }
        }
    }

    /**
     * Parses a whole ISI export: lines are first joined into one string per record
     * (records separated by "::", fields by " ## ", continuation lines by "EOLEOL"),
     * then each record is converted into a {@link BibEntry}.
     *
     * @param reader source of the ISI data; must not be null
     * @return the parsed entries wrapped in a {@link ParserResult}
     * @throws IOException if reading fails
     */
    @Override
    public ParserResult importDatabase(BufferedReader reader) throws IOException {
        Objects.requireNonNull(reader);

        List<BibEntry> bibitems = new ArrayList<>();
        StringBuilder sb = new StringBuilder();

        // Pattern fieldPattern = Pattern.compile("^AU |^TI |^SO |^DT |^C1 |^AB
        // |^ID |^BP |^PY |^SE |^PY |^VL |^IS ");
        String str;
        while ((str = reader.readLine()) != null) {
            if (str.length() < 3) {
                continue;
            }

            // beginning of a new item
            if ("PT ".equals(str.substring(0, 3))) {
                sb.append("::").append(str);
            } else {
                String beg = str.substring(0, 3).trim();

                // I could have used the fieldPattern regular expression instead
                // however this seems to be quick and dirty and it works!
                if (beg.length() == 2) {
                    sb.append(" ## "); // mark the beginning of each field
                    sb.append(str);
                } else {
                    sb.append("EOLEOL"); // mark the end of each line
                    sb.append(str.trim()); // remove the initial spaces
                }
            }
        }

        String[] entries = sb.toString().split("::");

        Map<String, String> hm = new HashMap<>();

        // The first entry is either empty or the document header; it produces an empty
        // field map and is skipped by the hm.isEmpty() check below.
        for (String entry : entries) {
            String[] fields = entry.split(" ## ");

            if (fields.length == 0) {
                fields = entry.split("\n");
            }

            String type = "";
            String pt = "";
            String pages = "";
            hm.clear();

            for (String field : fields) {
                // empty field don't do anything
                if (field.length() <= 2) {
                    continue;
                }

                String beg = field.substring(0, 2);
                String value = field.substring(3);
                if (value.startsWith(" - ")) {
                    value = value.substring(3);
                }
                value = value.trim();

                if ("PT".equals(beg)) {
                    if (value.startsWith("J")) {
                        pt = "article";
                    } else {
                        pt = value;
                    }
                    type = "article"; // make all of them PT?
                } else if ("TY".equals(beg)) {
                    if ("JOUR".equals(value)) {
                        type = "article";
                    } else if ("CONF".equals(value)) {
                        type = "inproceedings";
                    }
                } else if ("JO".equals(beg)) {
                    hm.put(FieldName.BOOKTITLE, value);
                } else if ("AU".equals(beg)) {
                    String author = IsiImporter.isiAuthorsConvert(value.replace("EOLEOL", " and "));

                    // if there is already someone there then append with "and"
                    if (hm.get(FieldName.AUTHOR) != null) {
                        author = hm.get(FieldName.AUTHOR) + " and " + author;
                    }
                    hm.put(FieldName.AUTHOR, author);
                } else if ("TI".equals(beg)) {
                    hm.put(FieldName.TITLE, value.replace("EOLEOL", " "));
                } else if ("SO".equals(beg) || "JA".equals(beg)) {
                    hm.put(FieldName.JOURNAL, value.replace("EOLEOL", " "));
                } else if ("ID".equals(beg) || "KW".equals(beg)) {
                    value = value.replace("EOLEOL", " ");
                    String existingKeywords = hm.get(FieldName.KEYWORDS);
                    // FIX: previously, a duplicate value REPLACED the accumulated keywords
                    // (existingKeywords = value when contains() was true), losing data.
                    // Now duplicates are simply not appended.
                    if (existingKeywords == null) {
                        existingKeywords = value;
                    } else if (!existingKeywords.contains(value)) {
                        existingKeywords += ", " + value;
                    }
                    hm.put(FieldName.KEYWORDS, existingKeywords);
                } else if ("AB".equals(beg)) {
                    hm.put(FieldName.ABSTRACT, value.replace("EOLEOL", " "));
                } else if ("BP".equals(beg) || "BR".equals(beg) || "SP".equals(beg)) {
                    pages = value;
                } else if ("EP".equals(beg)) {
                    int detpos = value.indexOf(' ');

                    // tweak for IEEE Explore
                    if ((detpos != -1) && !value.substring(0, detpos).trim().isEmpty()) {
                        value = value.substring(0, detpos);
                    }

                    pages = pages + "--" + value;
                } else if ("PS".equals(beg)) {
                    pages = IsiImporter.parsePages(value);
                } else if ("AR".equals(beg)) {
                    pages = value;
                } else if ("IS".equals(beg)) {
                    hm.put(FieldName.NUMBER, value);
                } else if ("PY".equals(beg)) {
                    hm.put(FieldName.YEAR, value);
                } else if ("VL".equals(beg)) {
                    hm.put(FieldName.VOLUME, value);
                } else if ("PU".equals(beg)) {
                    hm.put(FieldName.PUBLISHER, value);
                } else if ("DI".equals(beg)) {
                    hm.put(FieldName.DOI, value);
                } else if ("PD".equals(beg)) {
                    String month = IsiImporter.parseMonth(value);
                    if (month != null) {
                        hm.put(FieldName.MONTH, month);
                    }
                } else if ("DT".equals(beg)) {
                    type = value;
                    if ("Review".equals(type)) {
                        type = "article"; // set "Review" in Note/Comment?
                    } else if (type.startsWith("Article") || type.startsWith("Journal")
                            || "article".equals(pt)) {
                        type = "article";
                    } else {
                        type = BibEntry.DEFAULT_TYPE;
                    }
                } else if ("CR".equals(beg)) {
                    hm.put("CitedReferences", value.replace("EOLEOL", " ; ").trim());
                } else {
                    // Preserve all other entries except
                    if ("ER".equals(beg) || "EF".equals(beg) || "VR".equals(beg)
                            || "FN".equals(beg)) {
                        continue;
                    }
                    hm.put(beg.toLowerCase(), value);
                }
            }

            if (!"".equals(pages)) {
                hm.put(FieldName.PAGES, pages);
            }

            // Skip empty entries
            if (hm.isEmpty()) {
                continue;
            }

            BibEntry b = new BibEntry(type);
            // id assumes an existing database so don't

            // Remove empty fields:
            List<String> toRemove = new ArrayList<>();
            for (Map.Entry<String, String> field : hm.entrySet()) {
                String content = field.getValue();
                if ((content == null) || content.trim().isEmpty()) {
                    toRemove.add(field.getKey());
                }
            }
            for (String key : toRemove) {
                hm.remove(key);
            }

            // Polish entries
            IsiImporter.processSubSup(hm);
            IsiImporter.processCapitalization(hm);

            b.setField(hm);
            bibitems.add(b);
        }
        return new ParserResult(bibitems);
    }

    /**
     * Normalizes an ISI page-range string "a-b" to BibTeX form "a--b".
     *
     * @param value the raw page value; returned unchanged if it contains no dash
     */
    private static String parsePages(String value) {
        int lastDash = value.lastIndexOf('-');
        if (lastDash < 0) {
            // FIX: previously substring(0, -1) threw StringIndexOutOfBoundsException for
            // values without a dash (e.g. a single page number).
            return value;
        }
        return value.substring(0, lastDash) + "--" + value.substring(lastDash + 1);
    }

    /**
     * Extracts a BibTeX month from an ISI "PD" value, trying short month names first
     * and then two-digit month numbers; returns null if none is found.
     */
    public static String parseMonth(String value) {
        String[] parts = value.split("\\s|\\-");
        for (String part1 : parts) {
            MonthUtil.Month month = MonthUtil.getMonthByShortName(part1.toLowerCase());
            if (month.isValid()) {
                return month.bibtexFormat;
            }
        }

        // Try two digit month
        for (String part : parts) {
            try {
                int number = Integer.parseInt(part);
                MonthUtil.Month month = MonthUtil.getMonthByNumber(number);
                if (month.isValid()) {
                    return month.bibtexFormat;
                }
            } catch (NumberFormatException ignored) {
                // Ignored
            }
        }
        return null;
    }

    /**
     * Will expand ISI first names.
     * <p>
     * Fixed bug from:
     * http://sourceforge.net/tracker/index.php?func=detail&aid=1542552&group_id=92314&atid=600306
     */
    public static String isiAuthorConvert(String author) {
        String[] s = author.split(",");
        if (s.length != 2) {
            // Not in "Last, First" form: leave untouched.
            return author;
        }
        StringBuilder sb = new StringBuilder();
        String last = s[0].trim();
        sb.append(last).append(", ");
        String first = s[1].trim();
        String[] firstParts = first.split("\\s+");
        for (int i = 0; i < firstParts.length; i++) {
            first = firstParts[i];

            // Do we have only uppercase chars?
            if (first.toUpperCase().equals(first)) {
                first = first.replace(".", "");
                // Expand e.g. "AB" to "A. B."
                for (int j = 0; j < first.length(); j++) {
                    sb.append(first.charAt(j)).append('.');
                    if (j < (first.length() - 1)) {
                        sb.append(' ');
                    }
                }
            } else {
                sb.append(first);
            }
            if (i < (firstParts.length - 1)) {
                sb.append(' ');
            }
        }
        return sb.toString();
    }

    /** Applies {@link #isiAuthorConvert(String)} to each author in the array. */
    private static String[] isiAuthorsConvert(String[] authors) {
        String[] result = new String[authors.length];
        for (int i = 0; i < result.length; i++) {
            result[i] = IsiImporter.isiAuthorConvert(authors[i]);
        }
        return result;
    }

    /**
     * Splits an ISI author list on " and " or ";", converts each name, and rejoins
     * them with " and ".
     */
    public static String isiAuthorsConvert(String authors) {
        String[] s = IsiImporter.isiAuthorsConvert(authors.split(" and |;"));
        return String.join(" and ", s);
    }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package king.flow.common;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;

import java.io.File;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import king.flow.action.business.ShowClockAction;
import king.flow.data.TLSResult;
import king.flow.view.Action;
import king.flow.view.Action.CleanAction;
import king.flow.view.Action.EjectCardAction;
import king.flow.view.Action.EncryptKeyboardAction;
import king.flow.view.Action.HideAction;
import king.flow.view.Action.InsertICardAction;
import king.flow.view.Action.LimitInputAction;
import king.flow.view.Action.MoveCursorAction;
import king.flow.view.Action.NumericPadAction;
import king.flow.view.Action.OpenBrowserAction;
import king.flow.view.Action.PlayMediaAction;
import king.flow.view.Action.PlayVideoAction;
import king.flow.view.Action.PrintPassbookAction;
import king.flow.view.Action.RunCommandAction;
import king.flow.view.Action.RwFingerPrintAction;
import king.flow.view.Action.SetFontAction;
import king.flow.view.Action.SetPrinterAction;
import king.flow.view.Action.ShowComboBoxAction;
import king.flow.view.Action.ShowGridAction;
import king.flow.view.Action.ShowTableAction;
import king.flow.view.Action.Swipe2In1CardAction;
import king.flow.view.Action.SwipeCardAction;
import king.flow.view.Action.UploadFileAction;
import king.flow.view.Action.UseTipAction;
import king.flow.view.Action.VirtualKeyboardAction;
import king.flow.view.Action.WithdrawCardAction;
import king.flow.view.Action.WriteICardAction;
import king.flow.view.ComponentEnum;
import king.flow.view.DefinedAction;
import king.flow.view.DeviceEnum;
import king.flow.view.JumpAction;
import king.flow.view.MsgSendAction;
/** * * @author LiuJin */ public class CommonConstants { public static final String APP_STARTUP_ENTRY = "bank.exe"; public static final Charset UTF8 = Charset.forName("UTF-8"); static final File[] SYS_ROOTS = File.listRoots(); public static final int DRIVER_COUNT = SYS_ROOTS.length; public static final String XML_NODE_PREFIX = "N_"; public static final String REVERT = "re_"; public static final String BID = "bid"; public static final String UID_PREFIX = "<" + TLSResult.UID + ">"; public static final String UID_AFFIX = "</" + TLSResult.UID + ">"; public static final String DEFAULT_DATE_FORMATE = "yyyy-MM-dd"; public static final String VALID_BANK_CARD = "validBankCard"; public static final String BALANCED_PAY_MAC = "balancedPayMAC"; public static final String CANCEL_ENCRYPTION_KEYBOARD = "[CANCEL]"; public static final String QUIT_ENCRYPTION_KEYBOARD = "[QUIT]"; public static final String INVALID_ENCRYPTION_LENGTH = "encryption.keyboard.type.length.prompt"; public static final String TIMEOUT_ENCRYPTION_TYPE = "encryption.keyboard.type.timeout.prompt"; public static final String ERROR_ENCRYPTION_TYPE = "encryption.keyboard.type.fail.prompt"; public static final int CONTAINER_KEY = Integer.MAX_VALUE; public static final int NORMAL = 0; public static final int ABNORMAL = 1; public static final int BALANCE = 12345; public static final int RESTART_SIGNAL = 1; public static final int DOWNLOAD_KEY_SIGNAL = 1; public static final int UPDATE_SIGNAL = 1; public static final int WATCHDOG_CHECK_INTERVAL = 5; public static final String VERSION; public static final long DEBUG_MODE_PROGRESS_TIME = TimeUnit.SECONDS.toMillis(3); public static final long RUN_MODE_PROGRESS_TIME = TimeUnit.SECONDS.toMillis(1); // public static final String VERSION = Paths.get(".").toAbsolutePath().normalize().toString(); static { String workingPath = System.getProperty("user.dir"); final int lastIndexOf = workingPath.lastIndexOf('_'); if (lastIndexOf != -1 && lastIndexOf < workingPath.length() - 1) { 
VERSION = workingPath.substring(lastIndexOf + 1); } else { VERSION = "Unknown"; } } /* JMX configuration */ private static String getJmxRmiUrl(int port) { return "service:jmx:rmi:///jndi/rmi://localhost:" + port + "/jmxrmi"; } public static final int APP_JMX_RMI_PORT = 9998; public static final String APP_JMX_RMI_URL = getJmxRmiUrl(APP_JMX_RMI_PORT); public static final int WATCHDOG_JMX_RMI_PORT = 9999; public static final String WATCHDOG_JMX_RMI_URL = getJmxRmiUrl(WATCHDOG_JMX_RMI_PORT); /* system variable pattern */ public static final String SYSTEM_VAR_PATTERN = "\\$\\{(_?\\p{Alpha}+_\\p{Alpha}+)+\\}"; public static final String TEXT_MINGLED_SYSTEM_VAR_PATTERN = ".*" + SYSTEM_VAR_PATTERN + ".*"; public static final String TERMINAL_ID_SYS_VAR = "TERMINAL_ID"; /* swing default config */ public static final int DEFAULT_TABLE_ROW_COUNT = 15; public static final int TABLE_ROW_HEIGHT = 25; public static final int DEFAULT_VIDEO_REPLAY_INTERVAL_SECOND = 20; /* packet header ID */ public static final int GENERAL_MSG_CODE = 0; //common message public static final int REGISTRY_MSG_CODE = 1; //terminal registration message public static final int KEY_DOWNLOAD_MSG_CODE = 2; //download secret key message public static final int MANAGER_MSG_CODE = 100; //management message /*MAX_MESSAGES_PER_READ refers to DefaultChannelConfig, AdaptiveRecvByteBufAllocator, FixedRecvByteBufAllocator */ public static final int MAX_MESSAGES_PER_READ = 64; //how many read actions in one message conversation public static final int MIN_RECEIVED_BUFFER_SIZE = 1024; //1024 bytes public static final int RECEIVED_BUFFER_SIZE = 32 * 1024; //32k bytes public static final int MAX_RECEIVED_BUFFER_SIZE = 64 * 1024; //64k bytes /* keyboard cipher key */ public static final String WORK_SECRET_KEY = "workSecretKey"; public static final String MA_KEY = "maKey"; public static final String MASTER_KEY = "masterKey"; /* packet result flag */ public static final int SUCCESSFUL_MSG_CODE = 0; /* xml jaxb context */ 
public static final String NET_CONF_PACKAGE_CONTEXT = "king.flow.net"; public static final String TLS_PACKAGE_CONTEXT = "king.flow.data"; public static final String UI_CONF_PACKAGE_CONTEXT = "king.flow.view"; public static final String KING_FLOW_BACKGROUND = "king.flow.background"; public static final String KING_FLOW_PROGRESS = "king.flow.progress"; public static final String TEXT_TYPE_TOOL_CONFIG = "chinese.text.type.config"; public static final String COMBOBOX_ITEMS_PROPERTY_PATTERN = "([^,/]*/[^,/]*,)*+([^,/]*/[^,/]*){1}+"; public static final String ADVANCED_TABLE_TOTAL_PAGES = "total"; public static final String ADVANCED_TABLE_VALUE = "value"; public static final String ADVANCED_TABLE_CURRENT_PAGE = "current"; /* card-reading state */ public static final int INVALID_CARD_STATE = -1; public static final int MAGNET_CARD_STATE = 2; public static final int IC_CARD_STATE = 3; /* union-pay transaction type */ public static final String UNION_PAY_REGISTRATION = "1"; public static final String UNION_PAY_TRANSACTION = "3"; public static final String UNION_PAY_TRANSACTION_BALANCE = "4"; /* card affiliation type */ public static final String CARD_AFFILIATION_INTERNAL = "1"; public static final String CARD_AFFILIATION_EXTERNAL = "2"; /* supported driver types */ static final ImmutableSet<DeviceEnum> SUPPORTED_DEVICES = new ImmutableSet.Builder<DeviceEnum>() .add(DeviceEnum.IC_CARD) .add(DeviceEnum.CASH_SAVER) .add(DeviceEnum.GZ_CARD) .add(DeviceEnum.HIS_CARD) .add(DeviceEnum.KEYBOARD) .add(DeviceEnum.MAGNET_CARD) .add(DeviceEnum.MEDICARE_CARD) .add(DeviceEnum.PATIENT_CARD) .add(DeviceEnum.PID_CARD) .add(DeviceEnum.PKG_8583) .add(DeviceEnum.PRINTER) .add(DeviceEnum.SENSOR_CARD) .add(DeviceEnum.TWO_IN_ONE_CARD) .build(); /* action-component relationship map */ public static final String JUMP_ACTION = JumpAction.class.getSimpleName(); public static final String SET_FONT_ACTION = SetFontAction.class.getSimpleName(); public static final String CLEAN_ACTION = 
CleanAction.class.getSimpleName(); public static final String HIDE_ACTION = HideAction.class.getSimpleName(); public static final String USE_TIP_ACTION = UseTipAction.class.getSimpleName(); public static final String PLAY_MEDIA_ACTION = PlayMediaAction.class.getSimpleName(); public static final String SEND_MSG_ACTION = MsgSendAction.class.getSimpleName(); public static final String MOVE_CURSOR_ACTION = MoveCursorAction.class.getSimpleName(); public static final String LIMIT_INPUT_ACTION = LimitInputAction.class.getSimpleName(); public static final String SHOW_COMBOBOX_ACTION = ShowComboBoxAction.class.getSimpleName(); public static final String SHOW_TABLE_ACTION = ShowTableAction.class.getSimpleName(); public static final String SHOW_CLOCK_ACTION = ShowClockAction.class.getSimpleName(); public static final String OPEN_BROWSER_ACTION = OpenBrowserAction.class.getSimpleName(); public static final String RUN_COMMAND_ACTION = RunCommandAction.class.getSimpleName(); public static final String OPEN_VIRTUAL_KEYBOARD_ACTION = VirtualKeyboardAction.class.getSimpleName(); public static final String PRINT_RECEIPT_ACTION = SetPrinterAction.class.getSimpleName(); public static final String INSERT_IC_ACTION = InsertICardAction.class.getSimpleName(); public static final String WRITE_IC_ACTION = WriteICardAction.class.getSimpleName(); public static final String BALANCE_TRANS_ACTION = "BalanceTransAction"; public static final String PRINT_PASSBOOK_ACTION = PrintPassbookAction.class.getSimpleName(); public static final String UPLOAD_FILE_ACTION = UploadFileAction.class.getSimpleName(); public static final String SWIPE_CARD_ACTION = SwipeCardAction.class.getSimpleName(); public static final String SWIPE_TWO_IN_ONE_CARD_ACTION = Swipe2In1CardAction.class.getSimpleName(); public static final String EJECT_CARD_ACTION = EjectCardAction.class.getSimpleName(); public static final String WITHDRAW_CARD_ACTION = WithdrawCardAction.class.getSimpleName(); public static final String 
READ_WRITE_FINGERPRINT_ACTION = RwFingerPrintAction.class.getSimpleName(); public static final String PLAY_VIDEO_ACTION = PlayVideoAction.class.getSimpleName(); public static final String CUSTOMIZED_ACTION = DefinedAction.class.getSimpleName(); public static final String ENCRYPT_KEYBORAD_ACTION = EncryptKeyboardAction.class.getSimpleName(); public static final String SHOW_GRID_ACTION = ShowGridAction.class.getSimpleName(); public static final String TYPE_NUMERIC_PAD_ACTION = NumericPadAction.class.getSimpleName(); public static final String WEB_LOAD_ACTION = Action.WebLoadAction.class.getSimpleName(); static final Map<ComponentEnum, List<String>> ACTION_COMPONENT_MAP = new ImmutableMap.Builder<ComponentEnum, List<String>>() .put(ComponentEnum.BUTTON, new ImmutableList.Builder<String>() .add(CUSTOMIZED_ACTION) .add(JUMP_ACTION) .add(SET_FONT_ACTION) .add(CLEAN_ACTION) .add(HIDE_ACTION) .add(USE_TIP_ACTION) .add(PLAY_MEDIA_ACTION) .add(OPEN_BROWSER_ACTION) .add(RUN_COMMAND_ACTION) .add(OPEN_VIRTUAL_KEYBOARD_ACTION) .add(PRINT_RECEIPT_ACTION) .add(SEND_MSG_ACTION) .add(INSERT_IC_ACTION) .add(WRITE_IC_ACTION) .add(MOVE_CURSOR_ACTION) .add(PRINT_PASSBOOK_ACTION) .add(UPLOAD_FILE_ACTION) .add(BALANCE_TRANS_ACTION) .add(EJECT_CARD_ACTION) .add(WITHDRAW_CARD_ACTION) .add(WEB_LOAD_ACTION) .build()) .put(ComponentEnum.COMBO_BOX, new ImmutableList.Builder<String>() .add(CUSTOMIZED_ACTION) .add(SET_FONT_ACTION) .add(USE_TIP_ACTION) .add(SHOW_COMBOBOX_ACTION) .add(SWIPE_CARD_ACTION) .add(SWIPE_TWO_IN_ONE_CARD_ACTION) .add(PLAY_MEDIA_ACTION) .add(MOVE_CURSOR_ACTION) .build()) .put(ComponentEnum.LABEL, new ImmutableList.Builder<String>() .add(CUSTOMIZED_ACTION) .add(SET_FONT_ACTION) .add(USE_TIP_ACTION) .add(SHOW_CLOCK_ACTION) .build()) .put(ComponentEnum.TEXT_FIELD, new ImmutableList.Builder<String>() .add(CUSTOMIZED_ACTION) .add(SET_FONT_ACTION) .add(LIMIT_INPUT_ACTION) .add(USE_TIP_ACTION) .add(PLAY_MEDIA_ACTION) .add(READ_WRITE_FINGERPRINT_ACTION) 
.add(OPEN_VIRTUAL_KEYBOARD_ACTION) .add(MOVE_CURSOR_ACTION) .build()) .put(ComponentEnum.PASSWORD_FIELD, new ImmutableList.Builder<String>() .add(CUSTOMIZED_ACTION) .add(SET_FONT_ACTION) .add(LIMIT_INPUT_ACTION) .add(USE_TIP_ACTION) .add(PLAY_MEDIA_ACTION) .add(READ_WRITE_FINGERPRINT_ACTION) .add(MOVE_CURSOR_ACTION) .add(ENCRYPT_KEYBORAD_ACTION) .build()) .put(ComponentEnum.TABLE, new ImmutableList.Builder<String>() .add(CUSTOMIZED_ACTION) .add(SET_FONT_ACTION) .add(USE_TIP_ACTION) .add(SHOW_TABLE_ACTION) .build()) .put(ComponentEnum.ADVANCED_TABLE, new ImmutableList.Builder<String>() .add(CUSTOMIZED_ACTION) .add(SET_FONT_ACTION) .add(SHOW_TABLE_ACTION) .add(SEND_MSG_ACTION) .build()) .put(ComponentEnum.VIDEO_PLAYER, new ImmutableList.Builder<String>() .add(CUSTOMIZED_ACTION) .add(PLAY_VIDEO_ACTION) .build()) .put(ComponentEnum.GRID, new ImmutableList.Builder<String>() .add(SHOW_GRID_ACTION) .build()) .put(ComponentEnum.NUMERIC_PAD, new ImmutableList.Builder<String>() .add(TYPE_NUMERIC_PAD_ACTION) .build()) .build(); }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.Time;
import org.junit.Before;
import org.junit.Test;

/** Unit tests for {@link LightWeightHashSet}. */
public class TestLightWeightHashSet{

  private static final Log LOG = LogFactory
      .getLog("org.apache.hadoop.hdfs.TestLightWeightHashSet");
  // Reference list of NUM random integers; the set under test is checked against it.
  private final ArrayList<Integer> list = new ArrayList<Integer>();
  private final int NUM = 100;
  private LightWeightHashSet<Integer> set;
  private Random rand;

  @Before
  public void setUp() {
    // Fresh set with the implementation's default load factors and minimum capacity.
    float maxF = LightWeightHashSet.DEFAULT_MAX_LOAD_FACTOR;
    float minF = LightWeightHashSet.DEFAUT_MIN_LOAD_FACTOR;
    int initCapacity = LightWeightHashSet.MINIMUM_CAPACITY;
    rand = new Random(Time.now());
    list.clear();
    for (int i = 0; i < NUM; i++) {
      list.add(rand.nextInt());
    }
    set = new
LightWeightHashSet<Integer>(initCapacity, maxF, minF);
  }

  @Test
  public void testEmptyBasic() {
    LOG.info("Test empty basic");
    Iterator<Integer> iter = set.iterator();
    // iterator should not have next
    assertFalse(iter.hasNext());
    assertEquals(0, set.size());
    assertTrue(set.isEmpty());
    LOG.info("Test empty - DONE");
  }

  @Test
  public void testOneElementBasic() {
    LOG.info("Test one element basic");
    set.add(list.get(0));
    // set should be non-empty
    assertEquals(1, set.size());
    assertFalse(set.isEmpty());
    // iterator should have next
    Iterator<Integer> iter = set.iterator();
    assertTrue(iter.hasNext());
    // iterator should not have next
    assertEquals(list.get(0), iter.next());
    assertFalse(iter.hasNext());
    LOG.info("Test one element basic - DONE");
  }

  @Test
  public void testMultiBasic() {
    LOG.info("Test multi element basic");
    // add once
    for (Integer i : list) {
      assertTrue(set.add(i));
    }
    assertEquals(list.size(), set.size());

    // check if the elements are in the set
    for (Integer i : list) {
      assertTrue(set.contains(i));
    }

    // add again - should return false each time
    for (Integer i : list) {
      assertFalse(set.add(i));
    }

    // check again if the elements are there
    for (Integer i : list) {
      assertTrue(set.contains(i));
    }

    Iterator<Integer> iter = set.iterator();
    int num = 0;
    while (iter.hasNext()) {
      Integer next = iter.next();
      assertNotNull(next);
      assertTrue(list.contains(next));
      num++;
    }
    // check the number of element from the iterator
    assertEquals(list.size(), num);
    LOG.info("Test multi element basic - DONE");
  }

  @Test
  public void testRemoveOne() {
    LOG.info("Test remove one");
    assertTrue(set.add(list.get(0)));
    assertEquals(1, set.size());

    // remove from the head/tail
    assertTrue(set.remove(list.get(0)));
    assertEquals(0, set.size());

    // check the iterator
    Iterator<Integer> iter = set.iterator();
    assertFalse(iter.hasNext());

    // add the element back to the set
    assertTrue(set.add(list.get(0)));
    assertEquals(1, set.size());

    iter = set.iterator();
    assertTrue(iter.hasNext());
    LOG.info("Test remove one - DONE");
  }

  @Test
  public void testRemoveMulti() {
    LOG.info("Test remove multi");
    for (Integer i : list) {
      assertTrue(set.add(i));
    }
    for (int i = 0; i < NUM / 2; i++) {
      assertTrue(set.remove(list.get(i)));
    }

    // the deleted elements should not be there
    for (int i = 0; i < NUM / 2; i++) {
      assertFalse(set.contains(list.get(i)));
    }

    // the rest should be there
    for (int i = NUM / 2; i < NUM; i++) {
      assertTrue(set.contains(list.get(i)));
    }
    LOG.info("Test remove multi - DONE");
  }

  @Test
  public void testRemoveAll() {
    LOG.info("Test remove all");
    for (Integer i : list) {
      assertTrue(set.add(i));
    }
    for (int i = 0; i < NUM; i++) {
      assertTrue(set.remove(list.get(i)));
    }
    // the deleted elements should not be there
    for (int i = 0; i < NUM; i++) {
      assertFalse(set.contains(list.get(i)));
    }

    // iterator should not have next
    Iterator<Integer> iter = set.iterator();
    assertFalse(iter.hasNext());
    assertTrue(set.isEmpty());
    LOG.info("Test remove all - DONE");
  }

  @Test
  public void testPollAll() {
    LOG.info("Test poll all");
    for (Integer i : list) {
      assertTrue(set.add(i));
    }
    // remove all elements by polling
    List<Integer> poll = set.pollAll();
    assertEquals(0, set.size());
    assertTrue(set.isEmpty());

    // the deleted elements should not be there
    for (int i = 0; i < NUM; i++) {
      assertFalse(set.contains(list.get(i)));
    }

    // we should get all original items
    for (Integer i : poll) {
      assertTrue(list.contains(i));
    }

    Iterator<Integer> iter = set.iterator();
    assertFalse(iter.hasNext());
    LOG.info("Test poll all - DONE");
  }

  @Test
  public void testPollNMulti() {
    LOG.info("Test pollN multi");

    // use addAll
    set.addAll(list);

    // poll zero
    List<Integer> poll = set.pollN(0);
    assertEquals(0, poll.size());
    for (Integer i : list) {
      assertTrue(set.contains(i));
    }

    // poll existing elements (less than size)
    poll = set.pollN(10);
    assertEquals(10, poll.size());

    for (Integer i : poll) {
      // should be in original items
      assertTrue(list.contains(i));
      // should not be in the set anymore
      assertFalse(set.contains(i));
    }

    // poll more elements than present
    poll = set.pollN(1000);
    assertEquals(NUM - 10, poll.size());

    for (Integer i : poll) {
      // should be in original items
      assertTrue(list.contains(i));
    }

    // set is empty
    assertTrue(set.isEmpty());
    assertEquals(0, set.size());

    LOG.info("Test pollN multi - DONE");
  }

  @Test
  public void testPollNMultiArray() {
    LOG.info("Test pollN multi array");

    // use addAll
    set.addAll(list);

    // poll existing elements (less than size)
    Integer[] poll = new Integer[10];
    poll = set.pollToArray(poll);
    assertEquals(10, poll.length);

    for (Integer i : poll) {
      // should be in original items
      assertTrue(list.contains(i));
      // should not be in the set anymore
      assertFalse(set.contains(i));
    }

    // poll other elements (more than size)
    poll = new Integer[NUM];
    poll = set.pollToArray(poll);
    assertEquals(NUM - 10, poll.length);

    for (int i = 0; i < NUM - 10; i++) {
      assertTrue(list.contains(poll[i]));
    }

    // set is empty
    assertTrue(set.isEmpty());
    assertEquals(0, set.size());

    // //////
    set.addAll(list);
    // poll existing elements (exactly the size)
    poll = new Integer[NUM];
    poll = set.pollToArray(poll);
    assertTrue(set.isEmpty());
    assertEquals(0, set.size());
    assertEquals(NUM, poll.length);
    for (int i = 0; i < NUM; i++) {
      assertTrue(list.contains(poll[i]));
    }
    // //////

    // //////
    set.addAll(list);
    // poll existing elements (exactly the size)
    poll = new Integer[0];
    poll = set.pollToArray(poll);
    for (int i = 0; i < NUM; i++) {
      assertTrue(set.contains(list.get(i)));
    }
    assertEquals(0, poll.length);
    // //////

    LOG.info("Test pollN multi array- DONE");
  }

  @Test
  public void testClear() {
    LOG.info("Test clear");
    // use addAll
    set.addAll(list);
    assertEquals(NUM, set.size());
    assertFalse(set.isEmpty());

    // clear the set
    set.clear();
    assertEquals(0, set.size());
    assertTrue(set.isEmpty());

    // iterator should be empty
    Iterator<Integer> iter = set.iterator();
    assertFalse(iter.hasNext());

    LOG.info("Test clear - DONE");
  }

  @Test
  public void testCapacity() {
    LOG.info("Test capacity");
    float maxF =
LightWeightHashSet.DEFAULT_MAX_LOAD_FACTOR;
    float minF = LightWeightHashSet.DEFAUT_MIN_LOAD_FACTOR;

    // capacity lower than min_capacity
    set = new LightWeightHashSet<Integer>(1, maxF, minF);
    assertEquals(LightWeightHashSet.MINIMUM_CAPACITY, set.getCapacity());

    // capacity not a power of two
    set = new LightWeightHashSet<Integer>(30, maxF, minF);
    assertEquals(Math.max(LightWeightHashSet.MINIMUM_CAPACITY, 32),
        set.getCapacity());

    // capacity valid
    set = new LightWeightHashSet<Integer>(64, maxF, minF);
    assertEquals(Math.max(LightWeightHashSet.MINIMUM_CAPACITY, 64),
        set.getCapacity());

    // add NUM elements
    set.addAll(list);
    // expected capacity: doubles until the max load factor is satisfied
    int expCap = LightWeightHashSet.MINIMUM_CAPACITY;
    while (expCap < NUM && maxF * expCap < NUM)
      expCap <<= 1;
    assertEquals(expCap, set.getCapacity());

    // see if the set shrinks if we remove elements by removing
    set.clear();
    set.addAll(list);
    // remove just enough elements to drop below the min load factor
    int toRemove = set.size() - (int) (set.getCapacity() * minF) + 1;
    for (int i = 0; i < toRemove; i++) {
      set.remove(list.get(i));
    }
    assertEquals(Math.max(LightWeightHashSet.MINIMUM_CAPACITY, expCap / 2),
        set.getCapacity());
    LOG.info("Test capacity - DONE");
  }

  @Test
  public void testOther() {
    LOG.info("Test other");

    // remove all
    assertTrue(set.addAll(list));
    assertTrue(set.removeAll(list));
    assertTrue(set.isEmpty());

    // remove sublist
    List<Integer> sub = new LinkedList<Integer>();
    for (int i = 0; i < 10; i++) {
      sub.add(list.get(i));
    }
    assertTrue(set.addAll(list));
    assertTrue(set.removeAll(sub));
    assertFalse(set.isEmpty());
    assertEquals(NUM - 10, set.size());

    for (Integer i : sub) {
      assertFalse(set.contains(i));
    }
    assertFalse(set.containsAll(sub));

    // the rest of the elements should be there
    List<Integer> sub2 = new LinkedList<Integer>();
    for (int i = 10; i < NUM; i++) {
      sub2.add(list.get(i));
    }
    assertTrue(set.containsAll(sub2));

    // to array
    Integer[] array = set.toArray(new Integer[0]);
    assertEquals(NUM - 10, array.length);
    for (int i = 0; i < array.length; i++) {
      assertTrue(sub2.contains(array[i]));
    }
    assertEquals(NUM
- 10, set.size());

    // to array
    Object[] array2 = set.toArray();
    assertEquals(NUM - 10, array2.length);

    for (int i = 0; i < array2.length; i++) {
      assertTrue(sub2.contains((Integer) array2[i]));
    }

    LOG.info("Test other - DONE");
  }

  @Test
  public void testGetElement() {
    // getElement must return the stored instance, not merely an equal one.
    LightWeightHashSet<TestObject> objSet = new LightWeightHashSet<TestObject>();
    TestObject objA = new TestObject("object A");
    TestObject equalToObjA = new TestObject("object A");
    TestObject objB = new TestObject("object B");
    objSet.add(objA);
    objSet.add(objB);

    assertSame(objA, objSet.getElement(objA));
    assertSame(objA, objSet.getElement(equalToObjA));
    assertSame(objB, objSet.getElement(objB));
    assertNull(objSet.getElement(new TestObject("not in set")));
  }

  /**
   * Wrapper class which is used in
   * {@link TestLightWeightHashSet#testGetElement()}
   */
  private static class TestObject {
    private final String value;

    public TestObject(String value) {
      super();
      this.value = value;
    }

    @Override
    public int hashCode() {
      return value.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) return true;
      if (obj == null) return false;
      if (getClass() != obj.getClass())
        return false;
      TestObject other = (TestObject) obj;
      return this.value.equals(other.value);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache;

import static org.apache.geode.distributed.ConfigurationProperties.ENABLE_TIME_STATISTICS;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.LOG_LEVEL;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_ARCHIVE_FILE;
import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_SAMPLING_ENABLED;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.awaitility.Awaitility;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.DiskWriteAttributesFactory;
import org.apache.geode.cache.EvictionAction;
import org.apache.geode.cache.EvictionAttributes;
import org.apache.geode.cache.Region;
import org.apache.geode.distributed.DistributedSystem;

/**
 * Tests the old disk apis to make sure they do the correct thing. Once we drop these old deprecated
 * disk apis then this unit test can be removed.
 */
public class DiskOldAPIsJUnitTest {

  // Shared fixture: created fresh in setUp(), closed in tearDown().
  protected static Cache cache = null;

  protected static DistributedSystem ds = null;

  @Before
  public void setUp() throws Exception {
    Properties props = new Properties();
    // standalone member: no multicast discovery, no locators
    props.setProperty(MCAST_PORT, "0");
    props.setProperty(LOCATORS, "");
    props.setProperty(LOG_LEVEL, "config"); // to keep diskPerf logs smaller
    props.setProperty(STATISTIC_SAMPLING_ENABLED, "true");
    props.setProperty(ENABLE_TIME_STATISTICS, "true");
    props.setProperty(STATISTIC_ARCHIVE_FILE, "stats.gfs");
    cache = new CacheFactory(props).create();
    ds = cache.getDistributedSystem();
    // NOTE(review): global static flag on DiskStoreImpl — restored in tearDown()
    // so other tests in the same JVM are not affected.
    DiskStoreImpl.SET_IGNORE_PREALLOCATE = true;
  }

  @After
  public void tearDown() throws Exception {
    cache.close();
    DiskStoreImpl.SET_IGNORE_PREALLOCATE = false;
  }

  /**
   * Make sure that if diskWriteAttributes sets sync then it shows up in the new apis.
   */
  @Test
  public void testSyncBit() throws Exception {
    // run twice with region destroy and once with plain close to cover both
    // tear-down paths between re-creations of the same region name
    doSyncBitTest(true);
    doSyncBitTest(false);
    doSyncBitTest(true);
  }

  /**
   * Creates regions with the deprecated DiskWriteAttributes sync flag set to true and then false,
   * and asserts the flag is visible through the new isDiskSynchronous() api — for a replicate
   * region, a persistent partitioned region (including each local bucket), and an overflow
   * partitioned region.
   *
   * @param destroyRegion true to tear each region down with localDestroyRegion(), false to close()
   */
  private void doSyncBitTest(boolean destroyRegion) throws Exception {
    DiskWriteAttributesFactory dwaf = new DiskWriteAttributesFactory();
    dwaf.setSynchronous(true);
    AttributesFactory af = new AttributesFactory();
    af.setDiskWriteAttributes(dwaf.create());
    af.setDataPolicy(DataPolicy.PERSISTENT_REPLICATE);
    Region r = cache.createRegion("r", af.create());
    assertEquals(true, r.getAttributes().isDiskSynchronous());
    if (destroyRegion) {
      r.localDestroyRegion();
    } else {
      r.close();
    }
    // NOTE(review): the condition is always true, so this only waits at least 1ms —
    // presumably a brief pause before re-creating the region; confirm intent.
    Awaitility.await().atLeast(1, TimeUnit.MILLISECONDS).until(() -> true);
    dwaf.setSynchronous(false);
    af.setDiskWriteAttributes(dwaf.create());
    r = cache.createRegion("r", af.create());
    assertEquals(false, r.getAttributes().isDiskSynchronous());
    if (destroyRegion) {
      r.localDestroyRegion();
    } else {
      r.close();
    }
    // now try it with a persistent pr
    dwaf.setSynchronous(true);
    af.setDiskWriteAttributes(dwaf.create());
    af.setDataPolicy(DataPolicy.PERSISTENT_PARTITION);
    r = cache.createRegion("r2", af.create());
    assertEquals(true, r.getAttributes().isDiskSynchronous());
    // a put is needed so at least one local bucket exists to inspect
    r.put("key", "value");
    {
      PartitionedRegion pr = (PartitionedRegion) r;
      PartitionedRegionDataStore prds = pr.getDataStore();
      Set<BucketRegion> s = prds.getAllLocalBucketRegions();
      assertTrue(s.size() > 0);
      for (BucketRegion br : s) {
        assertEquals(true, br.getAttributes().isDiskSynchronous());
      }
    }
    if (destroyRegion) {
      r.localDestroyRegion();
    } else {
      r.close();
    }
    dwaf.setSynchronous(false);
    af.setDiskWriteAttributes(dwaf.create());
    af.setDataPolicy(DataPolicy.PERSISTENT_PARTITION);
    r = cache.createRegion("r2", af.create());
    assertEquals(false, r.getAttributes().isDiskSynchronous());
    r.put("key", "value");
    {
      PartitionedRegion pr = (PartitionedRegion) r;
      PartitionedRegionDataStore prds = pr.getDataStore();
      Set<BucketRegion> s = prds.getAllLocalBucketRegions();
      assertTrue(s.size() > 0);
      for (BucketRegion br : s) {
        assertEquals(false, br.getAttributes().isDiskSynchronous());
      }
    }
    if (destroyRegion) {
      r.localDestroyRegion();
    } else {
      r.close();
    }
    // now try it with an overflow pr
    dwaf.setSynchronous(true);
    af.setDiskWriteAttributes(dwaf.create());
    af.setDataPolicy(DataPolicy.PARTITION);
    af.setEvictionAttributes(
        EvictionAttributes.createLRUEntryAttributes(1, EvictionAction.OVERFLOW_TO_DISK));
    r = cache.createRegion("r3", af.create());
    assertEquals(true, r.getAttributes().isDiskSynchronous());
    {
      // enough puts to force eviction/overflow to disk (LRU limit is 1 entry)
      for (int i = 0; i < 300; i++) {
        r.put("key" + i, "value" + i);
      }
      PartitionedRegion pr = (PartitionedRegion) r;
      PartitionedRegionDataStore prds = pr.getDataStore();
      Set<BucketRegion> s = prds.getAllLocalBucketRegions();
      assertTrue(s.size() > 0);
      for (BucketRegion br : s) {
        assertEquals(true, br.getAttributes().isDiskSynchronous());
      }
    }
    if (destroyRegion) {
      r.localDestroyRegion();
    } else {
      r.close();
    }
    dwaf.setSynchronous(false);
    af.setDiskWriteAttributes(dwaf.create());
    af.setDataPolicy(DataPolicy.PARTITION);
    af.setEvictionAttributes(
        EvictionAttributes.createLRUEntryAttributes(1, EvictionAction.OVERFLOW_TO_DISK));
    r = cache.createRegion("r3", af.create());
    assertEquals(false, r.getAttributes().isDiskSynchronous());
    {
      for (int i = 0; i < 300; i++) {
        r.put("key" + i, "value" + i);
      }
      PartitionedRegion pr = (PartitionedRegion) r;
      PartitionedRegionDataStore prds = pr.getDataStore();
      Set<BucketRegion> s = prds.getAllLocalBucketRegions();
      assertTrue(s.size() > 0);
      for (BucketRegion br : s) {
        assertEquals(false, br.getAttributes().isDiskSynchronous());
      }
    }
    if (destroyRegion) {
      r.localDestroyRegion();
    } else {
      r.close();
    }
  }

  /**
   * Make sure that if diskWriteAttributes are used that the diskStore that is created will use
   * them. Note that the isSync bit is tested by another method.
   */
  @Test
  public void testDWA_1() throws Exception {
    DiskWriteAttributesFactory dwaf = new DiskWriteAttributesFactory();
    dwaf.setMaxOplogSize(1);
    dwaf.setTimeInterval(333);
    dwaf.setBytesThreshold(666);
    AttributesFactory af = new AttributesFactory();
    af.setDiskWriteAttributes(dwaf.create());
    af.setDataPolicy(DataPolicy.PERSISTENT_REPLICATE);
    Region r = cache.createRegion("r", af.create());
    {
      LocalRegion lr = (LocalRegion) r;
      // NOTE(review): local 'ds' shadows the static DistributedSystem field 'ds'
      // declared on this class — intentional here, but easy to misread.
      DiskStoreImpl ds = lr.getDiskStore();
      assertEquals(1, ds.getMaxOplogSize());
      assertEquals(333, ds.getTimeInterval());
      // byteThreshold > 0 --> queueSize == 1
      assertEquals(1, ds.getQueueSize());
    }
    r.localDestroyRegion();
    // now try it with a pr
    af.setDiskWriteAttributes(dwaf.create());
    af.setDataPolicy(DataPolicy.PERSISTENT_PARTITION);
    r = cache.createRegion("r", af.create());
    {
      LocalRegion lr = (LocalRegion) r;
      DiskStoreImpl ds = lr.getDiskStore();
      assertEquals(1, ds.getMaxOplogSize());
      assertEquals(333, ds.getTimeInterval());
      // byteThreshold > 0 --> queueSize == 1
      assertEquals(1, ds.getQueueSize());
    }
    // a put is needed so at least one local bucket exists to inspect
    r.put("key", "value");
    {
      PartitionedRegion pr = (PartitionedRegion) r;
      PartitionedRegionDataStore prds = pr.getDataStore();
      Set<BucketRegion> s = prds.getAllLocalBucketRegions();
      assertTrue(s.size() > 0);
      for (BucketRegion br : s) {
        LocalRegion lr = (LocalRegion) br;
        DiskStoreImpl ds = lr.getDiskStore();
        assertEquals(1, ds.getMaxOplogSize());
        assertEquals(333, ds.getTimeInterval());
        // byteThreshold > 0 --> queueSize == 1
        assertEquals(1, ds.getQueueSize());
      }
    }
    r.localDestroyRegion();
  }

  /**
   * Same as testDWA_1 but with a zero bytesThreshold, which must map to a zero queue size on the
   * resulting disk store.
   */
  @Test
  public void testDWA_2() throws Exception {
    DiskWriteAttributesFactory dwaf = new DiskWriteAttributesFactory();
    dwaf.setMaxOplogSize(2);
    dwaf.setTimeInterval(1);
    dwaf.setBytesThreshold(0);
    AttributesFactory af = new AttributesFactory();
    af.setDiskWriteAttributes(dwaf.create());
    af.setDataPolicy(DataPolicy.PERSISTENT_REPLICATE);
    Region r = cache.createRegion("r", af.create());
    {
      LocalRegion lr = (LocalRegion) r;
      DiskStoreImpl ds = lr.getDiskStore();
      assertEquals(2, ds.getMaxOplogSize());
      assertEquals(1, ds.getTimeInterval());
      assertEquals(0, ds.getQueueSize());
    }
    r.localDestroyRegion();
    // now try it with a pr
    af.setDiskWriteAttributes(dwaf.create());
    af.setDataPolicy(DataPolicy.PERSISTENT_PARTITION);
    r = cache.createRegion("r", af.create());
    {
      LocalRegion lr = (LocalRegion) r;
      DiskStoreImpl ds = lr.getDiskStore();
      assertEquals(2, ds.getMaxOplogSize());
      assertEquals(1, ds.getTimeInterval());
      assertEquals(0, ds.getQueueSize());
    }
    r.put("key", "value");
    {
      PartitionedRegion pr = (PartitionedRegion) r;
      PartitionedRegionDataStore prds = pr.getDataStore();
      Set<BucketRegion> s = prds.getAllLocalBucketRegions();
      assertTrue(s.size() > 0);
      for (BucketRegion br : s) {
        LocalRegion lr = (LocalRegion) br;
        DiskStoreImpl ds = lr.getDiskStore();
        assertEquals(2, ds.getMaxOplogSize());
        assertEquals(1, ds.getTimeInterval());
        assertEquals(0, ds.getQueueSize());
      }
    }
    r.localDestroyRegion();
  }

  /**
   * Make sure the old diskDirs apis get mapped onto the diskStore.
   */
  @Test
  public void testDiskDirs() throws Exception {
    // NOTE(review): mkdir() return values are ignored; if the dirs already exist
    // from a previous aborted run this still works, but a creation failure would
    // surface later as a region-creation error.
    File f1 = new File("testDiskDir1");
    f1.mkdir();
    File f2 = new File("testDiskDir2");
    f2.mkdir();
    try {
      AttributesFactory af = new AttributesFactory();
      af.setDiskDirs(new File[] {f1, f2});
      af.setDataPolicy(DataPolicy.PERSISTENT_REPLICATE);
      Region r = cache.createRegion("r", af.create());
      {
        LocalRegion lr = (LocalRegion) r;
        DiskStoreImpl ds = lr.getDiskStore();
        File[] dirs = ds.getDiskDirs();
        assertEquals(2, dirs.length);
        assertEquals(f1, dirs[0]);
        assertEquals(f2, dirs[1]);
      }
      r.localDestroyRegion();
      // now try it with a pr
      af.setDataPolicy(DataPolicy.PERSISTENT_PARTITION);
      r = cache.createRegion("r", af.create());
      {
        LocalRegion lr = (LocalRegion) r;
        DiskStoreImpl ds = lr.getDiskStore();
        File[] dirs = ds.getDiskDirs();
        assertEquals(2, dirs.length);
        assertEquals(f1, dirs[0]);
        assertEquals(f2, dirs[1]);
      }
      // a put is needed so at least one local bucket exists to inspect
      r.put("key", "value");
      {
        PartitionedRegion pr = (PartitionedRegion) r;
        PartitionedRegionDataStore prds = pr.getDataStore();
        Set<BucketRegion> s = prds.getAllLocalBucketRegions();
        assertTrue(s.size() > 0);
        for (BucketRegion br : s) {
          LocalRegion lr = (LocalRegion) br;
          DiskStoreImpl ds = lr.getDiskStore();
          File[] dirs = ds.getDiskDirs();
          assertEquals(2, dirs.length);
          assertEquals(f1, dirs[0]);
          assertEquals(f2, dirs[1]);
        }
      }
      r.localDestroyRegion();
    } finally {
      // cache must be closed before the disk dirs can be deleted
      cache.close();
      removeDir(f1);
      removeDir(f2);
    }
  }

  /**
   * Make sure the old diskDirs apis get mapped onto the diskStore.
*/ @Test public void testDiskDirsAndSizes() throws Exception { File f1 = new File("testDiskDir1"); f1.mkdir(); File f2 = new File("testDiskDir2"); f2.mkdir(); try { AttributesFactory af = new AttributesFactory(); af.setDiskDirsAndSizes(new File[] {f1, f2}, new int[] {1, 2}); af.setDataPolicy(DataPolicy.PERSISTENT_REPLICATE); Region r = cache.createRegion("r", af.create()); { LocalRegion lr = (LocalRegion) r; DiskStoreImpl ds = lr.getDiskStore(); File[] dirs = ds.getDiskDirs(); assertEquals(2, dirs.length); assertEquals(f1, dirs[0]); assertEquals(f2, dirs[1]); int[] sizes = ds.getDiskDirSizes(); assertEquals(2, sizes.length); assertEquals(1, sizes[0]); assertEquals(2, sizes[1]); } r.localDestroyRegion(); // now try it with a pr af.setDataPolicy(DataPolicy.PERSISTENT_PARTITION); r = cache.createRegion("r", af.create()); { LocalRegion lr = (LocalRegion) r; DiskStoreImpl ds = lr.getDiskStore(); File[] dirs = ds.getDiskDirs(); assertEquals(2, dirs.length); assertEquals(f1, dirs[0]); assertEquals(f2, dirs[1]); int[] sizes = ds.getDiskDirSizes(); assertEquals(2, sizes.length); assertEquals(1, sizes[0]); assertEquals(2, sizes[1]); } r.put("key", "value"); { PartitionedRegion pr = (PartitionedRegion) r; PartitionedRegionDataStore prds = pr.getDataStore(); Set<BucketRegion> s = prds.getAllLocalBucketRegions(); assertTrue(s.size() > 0); for (BucketRegion br : s) { LocalRegion lr = (LocalRegion) br; DiskStoreImpl ds = lr.getDiskStore(); File[] dirs = ds.getDiskDirs(); assertEquals(2, dirs.length); assertEquals(f1, dirs[0]); assertEquals(f2, dirs[1]); int[] sizes = ds.getDiskDirSizes(); assertEquals(2, sizes.length); assertEquals(1, sizes[0]); assertEquals(2, sizes[1]); } } r.localDestroyRegion(); } finally { cache.close(); removeDir(f1); removeDir(f2); } } private static void removeDir(File dir) { File[] files = dir.listFiles(); for (int i = 0; i < files.length; i++) { files[i].delete(); } dir.delete(); } }
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.rest.service.api.identity;

import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.StringEntity;
import org.flowable.idm.api.Group;
import org.flowable.idm.api.User;
import org.flowable.rest.service.BaseSpringRestTestCase;
import org.flowable.rest.service.api.RestUrls;
import org.junit.Test;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

import static org.junit.Assert.*;

/**
 * REST tests for the group-membership resource: adding and removing users from a group via
 * POST/DELETE on the membership URLs, including conflict and not-found cases.
 *
 * @author Frederik Heremans
 */
public class GroupMembershipResourceTest extends BaseSpringRestTestCase {

    /**
     * POST a userId to a group's membership collection creates the membership and returns 201
     * with userId/groupId/url in the body.
     */
    @Test
    public void testCreateMembership() throws Exception {
        try {
            Group testGroup = identityService.newGroup("testgroup");
            testGroup.setName("Test group");
            testGroup.setType("Test type");
            identityService.saveGroup(testGroup);

            User testUser = identityService.newUser("testuser");
            identityService.saveUser(testUser);

            ObjectNode requestNode = objectMapper.createObjectNode();
            requestNode.put("userId", "testuser");

            HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_MEMBERSHIP_COLLECTION, "testgroup"));
            httpPost.setEntity(new StringEntity(requestNode.toString()));
            CloseableHttpResponse response = executeRequest(httpPost, HttpStatus.SC_CREATED);
            JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
            closeResponse(response);
            assertNotNull(responseNode);
            assertEquals("testuser", responseNode.get("userId").textValue());
            assertEquals("testgroup", responseNode.get("groupId").textValue());
            assertTrue(responseNode.get("url").textValue().endsWith(RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_MEMBERSHIP, testGroup.getId(), testUser.getId())));

            // the group itself must be untouched by the membership POST
            Group createdGroup = identityService.createGroupQuery().groupId("testgroup").singleResult();
            assertNotNull(createdGroup);
            assertEquals("Test group", createdGroup.getName());
            assertEquals("Test type", createdGroup.getType());

            // membership must be visible through the identity service
            assertNotNull(identityService.createUserQuery().memberOfGroup("testgroup").singleResult());
            assertEquals("testuser", identityService.createUserQuery().memberOfGroup("testgroup").singleResult().getId());
        } finally {
            try {
                identityService.deleteGroup("testgroup");
            } catch (Throwable ignore) {
                // Ignore, since the group may not have been created in the test
                // or already deleted
            }
            try {
                identityService.deleteUser("testuser");
            } catch (Throwable ignore) {
                // Ignore, since the group may not have been created in the test
                // or already deleted
            }
        }
    }

    /**
     * Creating a membership that already exists must answer 409 Conflict.
     */
    @Test
    public void testCreateMembershipAlreadyExisting() throws Exception {
        try {
            Group testGroup = identityService.newGroup("testgroup");
            testGroup.setName("Test group");
            testGroup.setType("Test type");
            identityService.saveGroup(testGroup);

            User testUser = identityService.newUser("testuser");
            identityService.saveUser(testUser);

            // pre-create the membership so the REST call collides with it
            identityService.createMembership("testuser", "testgroup");

            ObjectNode requestNode = objectMapper.createObjectNode();
            requestNode.put("userId", "testuser");

            HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_MEMBERSHIP_COLLECTION, "testgroup"));
            httpPost.setEntity(new StringEntity(requestNode.toString()));
            closeResponse(executeRequest(httpPost, HttpStatus.SC_CONFLICT));
        } finally {
            try {
                identityService.deleteGroup("testgroup");
            } catch (Throwable ignore) {
                // Ignore, since the group may not have been created in the test
                // or already deleted
            }
            try {
                identityService.deleteUser("testuser");
            } catch (Throwable ignore) {
                // Ignore, since the group may not have been created in the test
                // or already deleted
            }
        }
    }

    /**
     * DELETE on an existing membership answers 204 and actually removes the membership.
     */
    @Test
    public void testDeleteMembership() throws Exception {
        try {
            Group testGroup = identityService.newGroup("testgroup");
            testGroup.setName("Test group");
            testGroup.setType("Test type");
            identityService.saveGroup(testGroup);

            User testUser = identityService.newUser("testuser");
            identityService.saveUser(testUser);

            identityService.createMembership("testuser", "testgroup");

            HttpDelete httpDelete = new HttpDelete(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_MEMBERSHIP, "testgroup", "testuser"));
            CloseableHttpResponse response = executeRequest(httpDelete, HttpStatus.SC_NO_CONTENT);
            closeResponse(response);

            // Check if membership is actually deleted
            assertNull(identityService.createUserQuery().memberOfGroup("testgroup").singleResult());
        } finally {
            try {
                identityService.deleteGroup("testgroup");
            } catch (Throwable ignore) {
                // Ignore, since the group may not have been created in the test
                // or already deleted
            }
            try {
                identityService.deleteUser("testuser");
            } catch (Throwable ignore) {
                // Ignore, since the group may not have been created in the test
                // or already deleted
            }
        }
    }

    /**
     * Test delete membership that is no member in the group.
     */
    @Test
    public void testDeleteMembershipNoMember() throws Exception {
        try {
            Group testGroup = identityService.newGroup("testgroup");
            testGroup.setName("Test group");
            testGroup.setType("Test type");
            identityService.saveGroup(testGroup);

            User testUser = identityService.newUser("testuser");
            identityService.saveUser(testUser);

            // no membership created: DELETE must answer 404
            HttpDelete httpDelete = new HttpDelete(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_MEMBERSHIP, "testgroup", "testuser"));
            closeResponse(executeRequest(httpDelete, HttpStatus.SC_NOT_FOUND));
        } finally {
            try {
                identityService.deleteGroup("testgroup");
            } catch (Throwable ignore) {
                // Ignore, since the group may not have been created in the test
                // or already deleted
            }
            try {
                identityService.deleteUser("testuser");
            } catch (Throwable ignore) {
                // Ignore, since the group may not have been created in the test
                // or already deleted
            }
        }
    }

    /**
     * Test deleting member from an unexisting group.
     */
    @Test
    public void testDeleteMemberfromUnexistingGroup() throws Exception {
        HttpDelete httpDelete = new HttpDelete(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_MEMBERSHIP, "unexisting", "kermit"));
        closeResponse(executeRequest(httpDelete, HttpStatus.SC_NOT_FOUND));
    }

    /**
     * Test adding member to an unexisting group.
     */
    @Test
    public void testAddMemberToUnexistingGroup() throws Exception {
        HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_MEMBERSHIP_COLLECTION, "unexisting"));
        httpPost.setEntity(new StringEntity(objectMapper.createObjectNode().toString()));
        closeResponse(executeRequest(httpPost, HttpStatus.SC_NOT_FOUND));
    }

    /**
     * Test adding member to a group, without specifying userId
     */
    @Test
    public void testAddMemberNoUserId() throws Exception {
        HttpPost httpPost = new HttpPost(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_GROUP_MEMBERSHIP_COLLECTION, "admin"));
        httpPost.setEntity(new StringEntity(objectMapper.createObjectNode().toString()));
        closeResponse(executeRequest(httpPost, HttpStatus.SC_BAD_REQUEST));
    }
}
/**
 */
package kieker.model.analysismodel.deployment.impl;

import kieker.model.analysismodel.deployment.DeployedOperation;
import kieker.model.analysismodel.deployment.DeploymentPackage;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.BasicEMap;
import org.eclipse.emf.common.util.EMap;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>EString To Deployed Operation Map Entry</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link kieker.model.analysismodel.deployment.impl.EStringToDeployedOperationMapEntryImpl#getTypedKey <em>Key</em>}</li>
 *   <li>{@link kieker.model.analysismodel.deployment.impl.EStringToDeployedOperationMapEntryImpl#getTypedValue <em>Value</em>}</li>
 * </ul>
 *
 * @generated
 */
// NOTE(review): EMF-generated code (every member is marked @generated). Do not
// hand-edit; change the Ecore model and regenerate instead, or changes are lost.
public class EStringToDeployedOperationMapEntryImpl extends MinimalEObjectImpl.Container implements BasicEMap.Entry<String,DeployedOperation> {
	/**
	 * The default value of the '{@link #getTypedKey() <em>Key</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTypedKey()
	 * @generated
	 * @ordered
	 */
	protected static final String KEY_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getTypedKey() <em>Key</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTypedKey()
	 * @generated
	 * @ordered
	 */
	protected String key = KEY_EDEFAULT;

	/**
	 * The cached value of the '{@link #getTypedValue() <em>Value</em>}' containment reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getTypedValue()
	 * @generated
	 * @ordered
	 */
	protected DeployedOperation value;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected EStringToDeployedOperationMapEntryImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return DeploymentPackage.Literals.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getTypedKey() {
		return key;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setTypedKey(String newKey) {
		String oldKey = key;
		key = newKey;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__KEY, oldKey, key));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DeployedOperation getTypedValue() {
		return value;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetTypedValue(DeployedOperation newValue, NotificationChain msgs) {
		DeployedOperation oldValue = value;
		value = newValue;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__VALUE, oldValue, newValue);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setTypedValue(DeployedOperation newValue) {
		if (newValue != value) {
			NotificationChain msgs = null;
			if (value != null)
				msgs = ((InternalEObject)value).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__VALUE, null, msgs);
			if (newValue != null)
				msgs = ((InternalEObject)newValue).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__VALUE, null, msgs);
			msgs = basicSetTypedValue(newValue, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__VALUE, newValue, newValue));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__VALUE:
				return basicSetTypedValue(null, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__KEY:
				return getTypedKey();
			case DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__VALUE:
				return getTypedValue();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__KEY:
				setTypedKey((String)newValue);
				return;
			case DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__VALUE:
				setTypedValue((DeployedOperation)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__KEY:
				setTypedKey(KEY_EDEFAULT);
				return;
			case DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__VALUE:
				setTypedValue((DeployedOperation)null);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__KEY:
				return KEY_EDEFAULT == null ? key != null : !KEY_EDEFAULT.equals(key);
			case DeploymentPackage.ESTRING_TO_DEPLOYED_OPERATION_MAP_ENTRY__VALUE:
				return value != null;
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuilder result = new StringBuilder(super.toString());
		result.append(" (key: ");
		result.append(key);
		result.append(')');
		return result.toString();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected int hash = -1;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public int getHash() {
		if (hash == -1) {
			Object theKey = getKey();
			hash = (theKey == null ? 0 : theKey.hashCode());
		}
		return hash;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void setHash(int hash) {
		this.hash = hash;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String getKey() {
		return getTypedKey();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void setKey(String key) {
		setTypedKey(key);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public DeployedOperation getValue() {
		return getTypedValue();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public DeployedOperation setValue(DeployedOperation value) {
		DeployedOperation oldValue = getValue();
		setTypedValue(value);
		return oldValue;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	public EMap<String, DeployedOperation> getEMap() {
		EObject container = eContainer();
		return container == null ? null : (EMap<String, DeployedOperation>)container.eGet(eContainmentFeature());
	}

} //EStringToDeployedOperationMapEntryImpl
/*******************************************************************************
 * Copyright FUJITSU LIMITED 2017
 *******************************************************************************/

package org.oscm.ui.filter;

import java.io.IOException;
import java.net.URLDecoder;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;

import javax.servlet.ServletInputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;

import org.oscm.logging.Log4jLogger;
import org.oscm.logging.LoggerFactory;
import org.oscm.types.enumtypes.LogMessageIdentifier;
import org.oscm.ui.common.Constants;

/**
 * Request wrapper to perform a post request to the single sign on bridge.
 *
 */
public class SsoHttpServletRequestWrapper extends HttpServletRequestWrapper {

    private static final Log4jLogger logger = LoggerFactory
            .getLogger(SsoHttpServletRequestWrapper.class);

    private static final String HEADER_CONTENT_TYPE = "Content-type";
    private static final String HEADER_CONTENT_TYPE_FORM = "application/x-www-form-urlencoded";
    private static final String HEADER_CONTENT_LENGTH = "Content-length";

    private ServletInputStream servletInputStream;
    // form parameters parsed from the buffered request body
    private Map<String, String> parameters;
    private SsoParameters ssoParameters = null;
    private String method = null;
    private String requestURI = null;

    /**
     * Wraps the request and, if the input stream does not support
     * mark/reset, buffers the body and parses it as
     * application/x-www-form-urlencoded parameters.
     */
    public SsoHttpServletRequestWrapper(HttpServletRequest request) {
        super(request);
        // FIX: initialize unconditionally so getParameter() never NPEs — the
        // previous code only created the map on the buffering code path, so a
        // mark-supporting or failed stream left 'parameters' null.
        parameters = new HashMap<String, String>();
        try {
            servletInputStream = request.getInputStream();
            if (servletInputStream != null
                    && !servletInputStream.markSupported()) {
                servletInputStream = new BufferedServletInputStream(
                        servletInputStream);
                byte[] buf = new byte[1024];
                int len = buf.length;
                int offset = 0;
                // mark so the body can be re-read by later consumers
                servletInputStream.mark(buf.length);
                do {
                    int inputLen = servletInputStream.read(buf, offset,
                            len - offset);
                    if (inputLen <= 0) {
                        break;
                    }
                    offset += inputLen;
                } while ((len - offset) > 0);
                servletInputStream.reset();
                String str = new String(buf, 0, offset,
                        Constants.CHARACTER_ENCODING_UTF8);
                // parse key=value pairs; values are URL-decoded, keys are not
                String[] pairs = str.split("&");
                for (int i = 0; i < pairs.length; i++) {
                    int idx = pairs[i].indexOf('=');
                    if (idx >= 0) {
                        parameters.put(pairs[i].substring(0, idx), URLDecoder
                                .decode(pairs[i].substring(idx + 1),
                                        Constants.CHARACTER_ENCODING_UTF8));
                    }
                }
            }
        } catch (IOException e) {
            logger.logError(
                    Log4jLogger.SYSTEM_LOG,
                    e,
                    LogMessageIdentifier.ERROR_ACCESS_INPUT_STREAM_OF_REQUEST_FAILED);
        }
    }

    /**
     * Wraps the request and prepares a POST to the SSO bridge for the given
     * subscription: rewrites the request URI/method and populates the
     * {@link SsoParameters} that back the body and headers.
     */
    public SsoHttpServletRequestWrapper(HttpServletRequest request,
            String subKey, String loginUrl, String instanceId,
            String userToken, String contextPath) {
        this(request);
        if (loginUrl == null) {
            loginUrl = "";
        }
        setRequestURI(request.getContextPath() + Constants.SERVICE_BASE_URI
                + "/" + subKey + loginUrl);
        setMethod(Constants.REQ_METHOD_POST);
        // local renamed from 'ssoParameters' — it used to shadow the field
        SsoParameters params = new SsoParameters();
        if (contextPath == null) {
            contextPath = getParameter(Constants.REQ_PARAM_CONTEXT_PATH);
        }
        if (contextPath != null) {
            params.setContextPath(contextPath);
        }
        params.setInstanceId(instanceId);
        params.setSubscriptionKey(subKey);
        params.setBssId(request.getSession().getId());
        params.setUsertoken(userToken);
        setSsoParameters(params);
    }

    @Override
    public ServletInputStream getInputStream() throws IOException {
        // FIX: the plain wrapping constructor leaves ssoParameters null; the
        // previous code dereferenced it unconditionally and threw an NPE.
        // Fall back to the (buffered) original stream in that case.
        if (ssoParameters == null) {
            return servletInputStream;
        }
        return new BufferedServletInputStream(ssoParameters.getInputStream());
    }

    public void setInputStream(ServletInputStream servletInputStream) {
        this.servletInputStream = servletInputStream;
    }

    public SsoParameters getSsoParameters() {
        return ssoParameters;
    }

    public void setSsoParameters(SsoParameters ssoParameters) {
        this.ssoParameters = ssoParameters;
    }

    @Override
    public String getParameter(String name) {
        return parameters.get(name);
    }

    @Override
    public String getMethod() {
        if (method == null) {
            return super.getMethod();
        }
        return method;
    }

    @Override
    public String getContentType() {
        if (method == null) {
            return super.getContentType();
        }
        return HEADER_CONTENT_TYPE_FORM;
    }

    public void setMethod(String method) {
        this.method = method;
    }

    @Override
    public String getRequestURI() {
        if (requestURI == null) {
            return super.getRequestURI();
        }
        return requestURI;
    }

    public void setRequestURI(String requestURI) {
        this.requestURI = requestURI;
    }

    @Override
    public int getContentLength() {
        if (ssoParameters == null) {
            return super.getContentLength();
        }
        return ssoParameters.getContentLength();
    }

    /**
     * Returns the overridden value for the given header, or null if the
     * wrapped request's value should be used.
     */
    public String getHeaderInt(String name) {
        if (name.equalsIgnoreCase(HEADER_CONTENT_LENGTH)
                && ssoParameters != null) {
            return String.valueOf(ssoParameters.getContentLength());
        } else if (name.equalsIgnoreCase(HEADER_CONTENT_TYPE)
                && method != null
                && Constants.REQ_METHOD_POST.equalsIgnoreCase(method)) {
            return HEADER_CONTENT_TYPE_FORM;
        }
        return null;
    }

    @Override
    public String getHeader(String name) {
        String value = getHeaderInt(name);
        if (value != null) {
            return value;
        }
        return super.getHeader(name);
    }

    @Override
    public Enumeration<String> getHeaders(String name) {
        String value = getHeaderInt(name);
        if (value != null) {
            Vector<String> v = new Vector<String>();
            v.add(value);
            return v.elements();
        }
        return super.getHeaders(name);
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices; import com.google.common.base.Function; import com.google.common.collect.*; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.*; import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import 
org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.ShardLock; import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.*; import org.elasticsearch.index.aliases.IndexAliasesServiceModule; import org.elasticsearch.index.analysis.AnalysisModule; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.IndexCacheModule; import org.elasticsearch.index.fielddata.IndexFieldDataModule; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperServiceModule; import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.query.IndexQueryParserModule; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.settings.IndexSettingsModule; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityModule; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.IndexStoreModule; import org.elasticsearch.indices.analysis.IndicesAnalysisService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.IndexPluginsModule; import org.elasticsearch.plugins.PluginsService; import java.io.Closeable; import java.io.IOException; import java.nio.file.Files; import java.util.*; import java.util.concurrent.CountDownLatch; 
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import static com.google.common.collect.Maps.newHashMap;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;

/**
 * Node-level registry and lifecycle manager for all {@link IndexService} instances on this node.
 * <p>
 * Responsibilities visible in this class:
 * <ul>
 *   <li>creating an index (wiring its Guice child injector and modules) via {@link #createIndex},</li>
 *   <li>removing/deleting an index and closing all of its per-index services via {@code removeIndex},</li>
 *   <li>deleting index/shard stores on disk, guarded by shard locks from {@link NodeEnvironment},</li>
 *   <li>queueing deletes that could not be executed immediately ("pending deletes") and retrying them
 *       in {@link #processPendingDeletes},</li>
 *   <li>aggregating per-shard stats, including stats of already-closed shards, in {@link #stats}.</li>
 * </ul>
 * Thread-safety: the {@code indices} map is copy-on-write and replaced wholesale while holding the
 * monitor of {@code this}; {@code pendingDeletes} is guarded by its own monitor.
 */
public class IndicesService extends AbstractLifecycleComponent<IndicesService> implements Iterable<IndexService> {

    /** Settings key for how long {@link #doStop()} waits for all index shutdown tasks to finish. */
    public static final String INDICES_SHARDS_CLOSED_TIMEOUT = "indices.shards_closed_timeout";

    private final InternalIndicesLifecycle indicesLifecycle;
    private final IndicesAnalysisService indicesAnalysisService;
    private final Injector injector;
    private final PluginsService pluginsService;
    private final NodeEnvironment nodeEnv;
    // Upper bound on the wait in doStop(); defaults to 1 day (see constructor).
    private final TimeValue shardsClosedTimeout;

    // Copy-on-write view: index name -> (index service, its child injector).
    // Always replaced with a new immutable map under "synchronized (this)"; readers see a consistent snapshot.
    private volatile Map<String, Tuple<IndexService, Injector>> indices = ImmutableMap.of();

    // Deletes that could not run right away (e.g. a shard lock was still held). Guarded by its own monitor.
    private final Map<Index, List<PendingDelete>> pendingDeletes = new HashMap<>();

    // Accumulates stats of shards as they close, so stats(true) can still report them afterwards.
    private final OldShardsStats oldShardsStats = new OldShardsStats();

    /**
     * Injected constructor.
     *
     * @param settings              node-level settings (also the source of {@link #INDICES_SHARDS_CLOSED_TIMEOUT})
     * @param indicesLifecycle      lifecycle listener registry; must be an {@link InternalIndicesLifecycle} (cast below)
     * @param indicesAnalysisService node-level analysis components shared across indices
     * @param injector              node-level injector; parent of every per-index child injector
     * @param nodeEnv               node environment used for on-disk store deletion and shard locking
     */
    @Inject
    public IndicesService(Settings settings, IndicesLifecycle indicesLifecycle, IndicesAnalysisService indicesAnalysisService, Injector injector, NodeEnvironment nodeEnv) {
        super(settings);
        this.indicesLifecycle = (InternalIndicesLifecycle) indicesLifecycle;
        this.indicesAnalysisService = indicesAnalysisService;
        this.injector = injector;
        this.pluginsService = injector.getInstance(PluginsService.class);
        // Register the old-shards accumulator so it sees every beforeIndexShardClosed event.
        this.indicesLifecycle.addListener(oldShardsStats);
        this.nodeEnv = nodeEnv;
        this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS));
    }

    @Override
    protected void doStart() {
    }

    /**
     * Stops the service by removing every index concurrently (5 daemon threads) and waiting up to
     * {@link #shardsClosedTimeout} for all removals to finish. Indices are removed with reason
     * "shutdown" and {@code delete == false}, i.e. on-disk data is kept.
     */
    @Override
    protected void doStop() {
        // Snapshot of the index names; the local deliberately shadows the field to freeze the key set.
        ImmutableSet<String> indices = ImmutableSet.copyOf(this.indices.keySet());
        final CountDownLatch latch = new CountDownLatch(indices.size());
        final ExecutorService indicesStopExecutor = Executors.newFixedThreadPool(5, EsExecutors.daemonThreadFactory("indices_shutdown"));
        for (final String index : indices) {
            indicesStopExecutor.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        removeIndex(index, "shutdown", false);
                    } catch (Throwable e) {
                        logger.warn("failed to remove index on stop [" + index + "]", e);
                    } finally {
                        // Count down even on failure so doStop() cannot hang on a broken index.
                        latch.countDown();
                    }
                }
            });
        }
        try {
            if (latch.await(shardsClosedTimeout.seconds(), TimeUnit.SECONDS) == false) {
                logger.warn("Not all shards are closed yet, waited {}sec - stopping service", shardsClosedTimeout.seconds());
            }
        } catch (InterruptedException e) {
            // ignore
            // NOTE(review): the interrupt is swallowed without restoring the flag; best practice is
            // Thread.currentThread().interrupt() here — confirm whether callers rely on the flag.
        } finally {
            indicesStopExecutor.shutdown();
        }
    }

    @Override
    protected void doClose() {
        // Best-effort close of node-level resources; exceptions are logged, not propagated.
        IOUtils.closeWhileHandlingException(injector.getInstance(RecoverySettings.class), indicesAnalysisService);
    }

    /** Returns the lifecycle listener registry for indices on this node. */
    public IndicesLifecycle indicesLifecycle() {
        return this.indicesLifecycle;
    }

    /**
     * Returns the node stats indices stats. The <tt>includePrevious</tt> flag controls
     * if old shards stats will be aggregated as well (only for relevant stats, such as
     * refresh and indexing, not for docs/store).
     */
    public NodeIndicesStats stats(boolean includePrevious) {
        return stats(includePrevious, new CommonStatsFlags().all());
    }

    /**
     * Aggregates stats over all currently-open shards, grouped by index, optionally folding in the
     * accumulated stats of shards that were already closed ({@link OldShardsStats}).
     *
     * @param includePrevious whether to add closed-shard accumulators for the flags that support it
     * @param flags           which stat categories to collect
     */
    public NodeIndicesStats stats(boolean includePrevious, CommonStatsFlags flags) {
        CommonStats oldStats = new CommonStats(flags);
        if (includePrevious) {
            Flag[] setFlags = flags.getFlags();
            for (Flag flag : setFlags) {
                switch (flag) {
                    case Get:
                        oldStats.get.add(oldShardsStats.getStats);
                        break;
                    case Indexing:
                        oldStats.indexing.add(oldShardsStats.indexingStats);
                        break;
                    case Search:
                        oldStats.search.add(oldShardsStats.searchStats);
                        break;
                    case Merge:
                        oldStats.merge.add(oldShardsStats.mergeStats);
                        break;
                    case Refresh:
                        oldStats.refresh.add(oldShardsStats.refreshStats);
                        break;
                    case Recovery:
                        oldStats.recoveryStats.add(oldShardsStats.recoveryStats);
                        break;
                    case Flush:
                        oldStats.flush.add(oldShardsStats.flushStats);
                        break;
                }
            }
        }
        Map<Index, List<IndexShardStats>> statsByShard = Maps.newHashMap();
        for (Tuple<IndexService, Injector> value : indices.values()) {
            IndexService indexService = value.v1();
            for (IndexShard indexShard : indexService) {
                try {
                    // Shards without a routing entry are not (yet) part of the cluster routing; skip them.
                    if (indexShard.routingEntry() == null) {
                        continue;
                    }
                    IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard, indexShard.routingEntry(), flags) });
                    if (!statsByShard.containsKey(indexService.index())) {
                        statsByShard.put(indexService.index(), Lists.<IndexShardStats>newArrayList(indexShardStats));
                    } else {
                        statsByShard.get(indexService.index()).add(indexShardStats);
                    }
                } catch (IllegalIndexShardStateException e) {
                    // we can safely ignore illegal state on ones that are closing for example
                    logger.trace("{} ignoring shard stats", e, indexShard.shardId());
                }
            }
        }
        return new NodeIndicesStats(oldStats, statsByShard);
    }

    /**
     * Returns <tt>true</tt> if changes (adding / removing) indices, shards and so on are allowed.
     */
    public boolean changesAllowed() {
        // we check on stop here since we defined stop when we delete the indices
        return lifecycle.started();
    }

    /** Iterates over the {@link IndexService} of every index currently open on this node. */
    @Override
    public Iterator<IndexService> iterator() {
        return Iterators.transform(indices.values().iterator(), new Function<Tuple<IndexService, Injector>, IndexService>() {
            @Override
            public IndexService apply(Tuple<IndexService, Injector> input) {
                return input.v1();
            }
        });
    }

    /** Returns <code>true</code> if an index with the given name is open on this node. */
    public boolean hasIndex(String index) {
        return indices.containsKey(index);
    }

    /**
     * Returns an IndexService for the specified index if exists otherwise returns <code>null</code>.
     */
    @Nullable
    public IndexService indexService(String index) {
        Tuple<IndexService, Injector> indexServiceInjectorTuple = indices.get(index);
        if (indexServiceInjectorTuple == null) {
            return null;
        } else {
            return indexServiceInjectorTuple.v1();
        }
    }

    /**
     * Returns an IndexService for the specified index if exists otherwise a {@link IndexMissingException} is thrown.
     */
    public IndexService indexServiceSafe(String index) throws IndexMissingException {
        IndexService indexService = indexService(index);
        if (indexService == null) {
            throw new IndexMissingException(new Index(index));
        }
        return indexService;
    }

    /**
     * Creates a new index on this node: fires {@code beforeIndexCreated}, builds a Guice child
     * injector with all per-index modules, registers the resulting {@link IndexService} in the
     * copy-on-write {@code indices} map and fires {@code afterIndexCreated}.
     *
     * @param sIndexName  the name of the index to create
     * @param settings    index-level settings; merged on top of node settings below
     * @param localNodeId id of the local node, exposed to per-index modules
     * @throws IllegalStateException      if this service is not started
     * @throws IndexAlreadyExistsException if an index with that name is already open here
     * @throws IndexCreationException     if wiring the per-index injector fails
     */
    public synchronized IndexService createIndex(String sIndexName, @IndexSettings Settings settings, String localNodeId) {
        if (!lifecycle.started()) {
            throw new IllegalStateException("Can't create an index [" + sIndexName + "], node is closed");
        }
        Index index = new Index(sIndexName);
        if (indices.containsKey(index.name())) {
            throw new IndexAlreadyExistsException(index);
        }
        indicesLifecycle.beforeIndexCreated(index, settings);
        logger.debug("creating Index [{}], shards [{}]/[{}{}]",
                sIndexName,
                settings.get(SETTING_NUMBER_OF_SHARDS),
                settings.get(SETTING_NUMBER_OF_REPLICAS),
                IndexMetaData.isIndexUsingShadowReplicas(settings) ? "s" : "");
        // Index settings inherit node settings; index-level keys win.
        Settings indexSettings = settingsBuilder()
                .put(this.settings)
                .put(settings)
                .classLoader(settings.getClassLoader())
                .build();
        ModulesBuilder modules = new ModulesBuilder();
        modules.add(new IndexNameModule(index));
        modules.add(new LocalNodeIdModule(localNodeId));
        modules.add(new IndexSettingsModule(index, indexSettings));
        modules.add(new IndexPluginsModule(indexSettings, pluginsService));
        modules.add(new IndexStoreModule(indexSettings));
        modules.add(new AnalysisModule(indexSettings, indicesAnalysisService));
        modules.add(new SimilarityModule(indexSettings));
        modules.add(new IndexCacheModule(indexSettings));
        modules.add(new IndexFieldDataModule(indexSettings));
        modules.add(new MapperServiceModule());
        modules.add(new IndexQueryParserModule(indexSettings));
        modules.add(new IndexAliasesServiceModule());
        modules.add(new IndexModule(indexSettings));
        Injector indexInjector;
        try {
            indexInjector = modules.createChildInjector(injector);
        } catch (CreationException e) {
            // Unwrap Guice's aggregate so the cause points at the first real failure.
            throw new IndexCreationException(index, Injectors.getFirstErrorFailure(e));
        } catch (Throwable e) {
            throw new IndexCreationException(index, e);
        }
        IndexService indexService = indexInjector.getInstance(IndexService.class);
        indicesLifecycle.afterIndexCreated(indexService);
        // Publish atomically: build a new immutable map including the new entry.
        indices = newMapBuilder(indices).put(index.name(), new Tuple<>(indexService, indexInjector)).immutableMap();
        return indexService;
    }

    /**
     * Removes the given index from this service and releases all associated resources. Persistent parts of the index
     * like the shards files, state and transaction logs are kept around in the case of a disaster recovery.
     * @param index the index to remove
     * @param reason the high level reason causing this removal
     */
    public void removeIndex(String index, String reason) {
        removeIndex(index, reason, false);
    }

    /**
     * Shared implementation of remove and delete: unregisters the index under the monitor of
     * {@code this}, then (outside the lock) fires lifecycle events and closes every per-index
     * service in a fixed order. When {@code delete} is true, delete-specific lifecycle events fire
     * and the on-disk store is wiped afterwards.
     *
     * @param index  name of the index to remove
     * @param reason high-level reason, used for logging and lifecycle events
     * @param delete whether to also delete the on-disk store after closing
     */
    private void removeIndex(String index, String reason, boolean delete) {
        try {
            final IndexService indexService;
            final Injector indexInjector;
            synchronized (this) {
                if (indices.containsKey(index) == false) {
                    return;
                }
                logger.debug("[{}] closing ... (reason [{}])", index, reason);
                // Copy-on-write removal: mutate a copy, then republish as an immutable map.
                Map<String, Tuple<IndexService, Injector>> tmpMap = newHashMap(indices);
                Tuple<IndexService, Injector> remove = tmpMap.remove(index);
                indexService = remove.v1();
                indexInjector = remove.v2();
                indices = ImmutableMap.copyOf(tmpMap);
            }
            indicesLifecycle.beforeIndexClosed(indexService);
            if (delete) {
                indicesLifecycle.beforeIndexDeleted(indexService);
            }
            // Close plugin-contributed per-index services first.
            IOUtils.close(Iterables.transform(pluginsService.indexServices(), new Function<Class<? extends Closeable>, Closeable>() {
                @Override
                public Closeable apply(Class<? extends Closeable> input) {
                    return indexInjector.getInstance(input);
                }
            }));
            logger.debug("[{}] closing index service (reason [{}])", index, reason);
            indexService.close(reason, delete);
            logger.debug("[{}] closing index cache (reason [{}])", index, reason);
            indexInjector.getInstance(IndexCache.class).close();
            logger.debug("[{}] clearing index field data (reason [{}])", index, reason);
            indexInjector.getInstance(IndexFieldDataService.class).clear();
            logger.debug("[{}] closing analysis service (reason [{}])", index, reason);
            indexInjector.getInstance(AnalysisService.class).close();
            logger.debug("[{}] closing mapper service (reason [{}])", index, reason);
            indexInjector.getInstance(MapperService.class).close();
            logger.debug("[{}] closing index query parser service (reason [{}])", index, reason);
            indexInjector.getInstance(IndexQueryParserService.class).close();
            logger.debug("[{}] closing index service (reason [{}])", index, reason);
            indexInjector.getInstance(IndexStore.class).close();
            logger.debug("[{}] closed... (reason [{}])", index, reason);
            indicesLifecycle.afterIndexClosed(indexService.index(), indexService.settingsService().getSettings());
            if (delete) {
                final Settings indexSettings = indexService.getIndexSettings();
                indicesLifecycle.afterIndexDeleted(indexService.index(), indexSettings);
                // now we are done - try to wipe data on disk if possible
                deleteIndexStore(reason, indexService.index(), indexSettings);
            }
        } catch (IOException ex) {
            throw new ElasticsearchException("failed to remove index " + index, ex);
        }
    }

    /**
     * Lifecycle listener that accumulates the final stats of every shard as it closes, so the node
     * can keep reporting them after the shard is gone (see {@link #stats(boolean)}).
     */
    static class OldShardsStats extends IndicesLifecycle.Listener {

        final SearchStats searchStats = new SearchStats();
        final GetStats getStats = new GetStats();
        final IndexingStats indexingStats = new IndexingStats();
        final MergeStats mergeStats = new MergeStats();
        final RefreshStats refreshStats = new RefreshStats();
        final FlushStats flushStats = new FlushStats();
        final RecoveryStats recoveryStats = new RecoveryStats();

        // synchronized: shards may close concurrently and the stats objects are not thread-safe.
        @Override
        public synchronized void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, @IndexSettings Settings indexSettings) {
            if (indexShard != null) {
                getStats.add(indexShard.getStats());
                indexingStats.add(indexShard.indexingStats(), false);
                searchStats.add(indexShard.searchStats(), false);
                mergeStats.add(indexShard.mergeStats());
                refreshStats.add(indexShard.refreshStats());
                flushStats.add(indexShard.flushStats());
                recoveryStats.addAsOld(indexShard.recoveryStats());
            }
        }
    }

    /**
     * Deletes the given index. Persistent parts of the index
     * like the shards files, state and transaction logs are removed once all resources are released.
     *
     * Equivalent to {@link #removeIndex(String, String)} but fires
     * different lifecycle events to ensure pending resources of this index are immediately removed.
     * @param index the index to delete
     * @param reason the high level reason causing this delete
     */
    public void deleteIndex(String index, String reason) throws IOException {
        removeIndex(index, reason, true);
    }

    /**
     * Deletes the on-disk store of a closed index (one with no open {@link IndexService} here).
     * Refuses to delete while the index is still referenced by the cluster state; failures are
     * logged rather than propagated.
     *
     * @param reason       high-level reason for the deletion
     * @param metaData     metadata of the closed index (provides name, UUID and settings)
     * @param clusterState current cluster state used to verify the index is really gone
     */
    public void deleteClosedIndex(String reason, IndexMetaData metaData, ClusterState clusterState) {
        if (nodeEnv.hasNodeFile()) {
            String indexName = metaData.getIndex();
            try {
                if (clusterState.metaData().hasIndex(indexName)) {
                    final IndexMetaData index = clusterState.metaData().index(indexName);
                    throw new IllegalStateException("Can't delete closed index store for [" + indexName + "] - it's still part of the cluster state [" + index.getUUID() + "] [" + metaData.getUUID() + "]");
                }
                deleteIndexStore(reason, metaData, clusterState);
            } catch (IOException e) {
                logger.warn("[{}] failed to delete closed index", e, metaData.index());
            }
        }
    }

    /**
     * Deletes the index store trying to acquire all shards locks for this index.
     * This method will delete the metadata for the index even if the actual shards can't be locked.
     */
    public void deleteIndexStore(String reason, IndexMetaData metaData, ClusterState clusterState) throws IOException {
        if (nodeEnv.hasNodeFile()) {
            synchronized (this) {
                String indexName = metaData.index();
                if (indices.containsKey(indexName)) {
                    // NOTE(review): "localUUid" is a typo for "localUUID" — local variable only, no API impact.
                    String localUUid = indices.get(indexName).v1().indexUUID();
                    throw new IllegalStateException("Can't delete index store for [" + indexName + "] - it's still part of the indices service [" + localUUid + "] [" + metaData.getUUID() + "]");
                }
                if (clusterState.metaData().hasIndex(indexName) && (clusterState.nodes().localNode().masterNode() == true)) {
                    // we do not delete the store if it is a master eligible node and the index is still in the cluster state
                    // because we want to keep the meta data for indices around even if no shards are left here
                    final IndexMetaData index = clusterState.metaData().index(indexName);
                    throw new IllegalStateException("Can't delete closed index store for [" + indexName + "] - it's still part of the cluster state [" + index.getUUID() + "] [" + metaData.getUUID() + "]");
                }
            }
            Index index = new Index(metaData.index());
            final Settings indexSettings = buildIndexSettings(metaData);
            deleteIndexStore(reason, index, indexSettings);
        }
    }

    /**
     * Best-effort deletion of an index's on-disk store. If the shard locks cannot be obtained the
     * delete is queued as a pending delete; the index's meta-state files are removed in any case.
     */
    private void deleteIndexStore(String reason, Index index, Settings indexSettings) throws IOException {
        boolean success = false;
        try {
            // we are trying to delete the index store here - not a big deal if the lock can't be obtained
            // the store metadata gets wiped anyway even without the lock this is just best effort since
            // every shards deletes its content under the shard lock it owns.
            logger.debug("{} deleting index store reason [{}]", index, reason);
            if (canDeleteIndexContents(index, indexSettings)) {
                nodeEnv.deleteIndexDirectorySafe(index, 0, indexSettings);
            }
            success = true;
        } catch (LockObtainFailedException ex) {
            logger.debug("{} failed to delete index store - at least one shards is still locked", ex, index);
        } catch (Exception ex) {
            logger.warn("{} failed to delete index", ex, index);
        } finally {
            if (success == false) {
                // Retry later via processPendingDeletes once the locks become available.
                addPendingDelete(index, indexSettings);
            }
            // this is a pure protection to make sure this index doesn't get re-imported as a dangeling index.
            // we should in the future rather write a tombstone rather than wiping the metadata.
            MetaDataStateFormat.deleteMetaState(nodeEnv.indexPaths(index));
        }
    }

    /**
     * Deletes the shard with an already acquired shard lock.
     * @param reason the reason for the shard deletion
     * @param lock the lock of the shard to delete
     * @param indexSettings the shards index settings.
     * @throws IOException if an IOException occurs
     */
    public void deleteShardStore(String reason, ShardLock lock, Settings indexSettings) throws IOException {
        ShardId shardId = lock.getShardId();
        logger.trace("{} deleting shard reason [{}]", shardId, reason);
        nodeEnv.deleteShardDirectoryUnderLock(lock, indexSettings);
    }

    /**
     * This method deletes the shard contents on disk for the given shard ID. This method will fail if the shard deleting
     * is prevented by {@link #canDeleteShardContent(org.elasticsearch.index.shard.ShardId, org.elasticsearch.cluster.metadata.IndexMetaData)}
     * of if the shards lock can not be acquired.
     * @param reason the reason for the shard deletion
     * @param shardId the shards ID to delete
     * @param metaData the shards index metadata. This is required to access the indexes settings etc.
     * @throws IOException if an IOException occurs
     */
    public void deleteShardStore(String reason, ShardId shardId, IndexMetaData metaData) throws IOException {
        final Settings indexSettings = buildIndexSettings(metaData);
        if (canDeleteShardContent(shardId, indexSettings) == false) {
            throw new IllegalStateException("Can't delete shard " + shardId);
        }
        nodeEnv.deleteShardDirectorySafe(shardId, indexSettings);
        logger.trace("{} deleting shard reason [{}]", shardId, reason);
    }

    /**
     * This method returns true if the current node is allowed to delete the
     * given index. If the index uses a shared filesystem this method always
     * returns false.
     * @param index {@code Index} to check whether deletion is allowed
     * @param indexSettings {@code Settings} for the given index
     * @return true if the index can be deleted on this node
     */
    public boolean canDeleteIndexContents(Index index, Settings indexSettings) {
        final Tuple<IndexService, Injector> indexServiceInjectorTuple = this.indices.get(index.name());
        if (IndexMetaData.isOnSharedFilesystem(indexSettings) == false) {
            // Only deletable when the index is not open here and this node stores data locally.
            if (indexServiceInjectorTuple == null && nodeEnv.hasNodeFile()) {
                return true;
            }
        } else {
            logger.trace("{} skipping index directory deletion due to shadow replicas", index);
        }
        return false;
    }

    /**
     * Returns <code>true</code> iff the shards content for the given shard can be deleted.
     * This method will return <code>false</code> if:
     * <ul>
     * <li>if the shard is still allocated / active on this node</li>
     * <li>if for instance if the shard is located on shared and should not be deleted</li>
     * <li>if the shards data locations do not exists</li>
     * </ul>
     *
     * @param shardId the shard to delete.
     * @param metaData the shards index metadata. This is required to access the indexes settings etc.
     */
    public boolean canDeleteShardContent(ShardId shardId, IndexMetaData metaData) {
        // we need the metadata here since we have to build the complete settings
        // to decide where the shard content lives. In the future we might even need more info here ie. for shadow replicas
        // The plan was to make it harder to miss-use and ask for metadata instead of simple settings
        assert shardId.getIndex().equals(metaData.getIndex());
        final Settings indexSettings = buildIndexSettings(metaData);
        return canDeleteShardContent(shardId, indexSettings);
    }

    /**
     * Implementation of the shard-content deletability check against already-built index settings;
     * see {@link #canDeleteShardContent(ShardId, IndexMetaData)} for the rules.
     */
    private boolean canDeleteShardContent(ShardId shardId, @IndexSettings Settings indexSettings) {
        final Tuple<IndexService, Injector> indexServiceInjectorTuple = this.indices.get(shardId.getIndex());
        if (IndexMetaData.isOnSharedFilesystem(indexSettings) == false) {
            if (indexServiceInjectorTuple != null && nodeEnv.hasNodeFile()) {
                final IndexService indexService = indexServiceInjectorTuple.v1();
                // Index is open here: the shard may be deleted only if it is not allocated on this node.
                return indexService.hasShard(shardId.id()) == false;
            } else if (nodeEnv.hasNodeFile()) {
                // Index not open here: deletable only if the shard's data actually exists on disk.
                if (NodeEnvironment.hasCustomDataPath(indexSettings)) {
                    return Files.exists(nodeEnv.resolveCustomLocation(indexSettings, shardId));
                } else {
                    return FileSystemUtils.exists(nodeEnv.availableShardPaths(shardId));
                }
            }
        } else {
            logger.trace("{} skipping shard directory deletion due to shadow replicas", shardId);
        }
        return false;
    }

    /**
     * Builds effective index settings by layering the index metadata's settings on top of the
     * node-level settings.
     */
    private Settings buildIndexSettings(IndexMetaData metaData) {
        // play safe here and make sure that we take node level settings into account.
        // we might run on nodes where we use shard FS and then in the future don't delete
        // actual content.
        ImmutableSettings.Builder builder = settingsBuilder();
        builder.put(settings);
        builder.put(metaData.getSettings());
        return builder.build();
    }

    /**
     * Adds a pending delete for the given index shard.
     */
    public void addPendingDelete(ShardId shardId, @IndexSettings Settings settings) {
        if (shardId == null) {
            throw new IllegalArgumentException("shardId must not be null");
        }
        if (settings == null) {
            throw new IllegalArgumentException("settings must not be null");
        }
        PendingDelete pendingDelete = new PendingDelete(shardId, settings, false);
        addPendingDelete(shardId.index(), pendingDelete);
    }

    // Registers a pending delete under the pendingDeletes monitor, creating the per-index list lazily.
    private void addPendingDelete(Index index, PendingDelete pendingDelete) {
        synchronized (pendingDeletes) {
            List<PendingDelete> list = pendingDeletes.get(index);
            if (list == null) {
                list = new ArrayList<>();
                pendingDeletes.put(index, list);
            }
            list.add(pendingDelete);
        }
    }

    /**
     * Adds a pending delete for the given index shard.
     */
    public void addPendingDelete(Index index, @IndexSettings Settings settings) {
        // Index-level delete: no shard id, deleteIndex flag set.
        PendingDelete pendingDelete = new PendingDelete(null, settings, true);
        addPendingDelete(index, pendingDelete);
    }

    /**
     * A queued deletion of either a whole index ({@code deleteIndex == true}, {@code shardId == null})
     * or of a single shard. Ordered so index-level deletes sort before shard-level ones.
     */
    private static final class PendingDelete implements Comparable<PendingDelete> {

        final ShardId shardId;
        final Settings settings;
        final boolean deleteIndex;

        public PendingDelete(ShardId shardId, Settings settings, boolean deleteIndex) {
            this.shardId = shardId;
            this.settings = settings;
            this.deleteIndex = deleteIndex;
            assert deleteIndex || shardId != null;
        }

        @Override
        public String toString() {
            // NOTE(review): throws NPE for index-level deletes where shardId is null — confirm callers
            // never stringify such instances (e.g. the "still pending deletes" warn log above).
            return shardId.toString();
        }

        // Index-level deletes compare as -1 so timSort puts them before any shard delete.
        @Override
        public int compareTo(PendingDelete o) {
            int left = deleteIndex ? -1 : shardId.id();
            int right = o.deleteIndex ? -1 : o.shardId.id();
            return Integer.compare(left, right);
        }
    }

    /**
     * Processes all pending deletes for the given index. This method will acquire all locks for the given index and will
     * process all pending deletes for this index. Pending deletes might occur if the OS doesn't allow deletion of files because
     * they are used by a different process ie. on Windows where files might still be open by a virus scanner. On a shared
     * filesystem a replica might not have been closed when the primary is deleted causing problems on delete calls so we
     * schedule there deletes later.
     * @param index the index to process the pending deletes for
     * @param timeout the timeout used for processing pending deletes
     */
    public void processPendingDeletes(Index index, @IndexSettings Settings indexSettings, TimeValue timeout) throws IOException {
        logger.debug("{} processing pending deletes", index);
        final long startTime = System.currentTimeMillis();
        // Hold every shard lock of the index for the whole retry loop.
        final List<ShardLock> shardLocks = nodeEnv.lockAllForIndex(index, indexSettings, timeout.millis());
        try {
            Map<ShardId, ShardLock> locks = new HashMap<>();
            for (ShardLock lock : shardLocks) {
                locks.put(lock.getShardId(), lock);
            }
            // Drain this index's queue; new deletes arriving afterwards belong to a later run.
            final List<PendingDelete> remove;
            synchronized (pendingDeletes) {
                remove = pendingDeletes.remove(index);
            }
            if (remove != null && remove.isEmpty() == false) {
                CollectionUtil.timSort(remove); // make sure we delete indices first
                final long maxSleepTimeMs = 10 * 1000; // ensure we retry after 10 sec
                long sleepTime = 10;
                // Retry with exponential backoff until everything is deleted or the timeout elapses.
                do {
                    if (remove.isEmpty()) {
                        break;
                    }
                    Iterator<PendingDelete> iterator = remove.iterator();
                    while (iterator.hasNext()) {
                        PendingDelete delete = iterator.next();
                        if (delete.deleteIndex) {
                            logger.debug("{} deleting index store reason [{}]", index, "pending delete");
                            try {
                                nodeEnv.deleteIndexDirectoryUnderLock(index, indexSettings);
                                iterator.remove();
                            } catch (IOException ex) {
                                logger.debug("{} retry pending delete", ex, index);
                            }
                        } else {
                            ShardLock shardLock = locks.get(delete.shardId);
                            if (shardLock != null) {
                                try {
                                    deleteShardStore("pending delete", shardLock, delete.settings);
                                    iterator.remove();
                                } catch (IOException ex) {
                                    logger.debug("{} retry pending delete", ex, shardLock.getShardId());
                                }
                            } else {
                                // No lock means the shard path is not managed here; drop the entry.
                                logger.warn("{} no shard lock for pending delete", delete.shardId);
                                iterator.remove();
                            }
                        }
                    }
                    if (remove.isEmpty() == false) {
                        logger.warn("{} still pending deletes present for shards {} - retrying", index, remove.toString());
                        try {
                            Thread.sleep(sleepTime);
                            sleepTime = Math.min(maxSleepTimeMs, sleepTime * 2); // increase the sleep time gradually
                            logger.debug("{} schedule pending delete retry after {} ms", index, sleepTime);
                        } catch (InterruptedException e) {
                            // NOTE(review): Thread.interrupted() CLEARS the interrupt flag before returning;
                            // Thread.currentThread().interrupt() would preserve it — confirm intent.
                            Thread.interrupted();
                            return;
                        }
                    }
                } while ((System.currentTimeMillis() - startTime) < timeout.millis());
            }
        } finally {
            // Always release the shard locks, even on early return.
            IOUtils.close(shardLocks);
        }
    }

    /** Returns the number of queued pending deletes for the given index (0 if none). Test/diagnostic helper. */
    int numPendingDeletes(Index index) {
        synchronized (pendingDeletes) {
            List<PendingDelete> deleteList = pendingDeletes.get(index);
            if (deleteList == null) {
                return 0;
            }
            return deleteList.size();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.olingo.fit.tecsvc.client; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import java.net.URI; import org.apache.commons.io.IOUtils; import org.apache.olingo.client.api.communication.ODataClientErrorException; import org.apache.olingo.client.api.communication.request.ODataBasicRequest; import org.apache.olingo.client.api.communication.request.cud.ODataDeleteRequest; import org.apache.olingo.client.api.communication.request.cud.ODataEntityUpdateRequest; import org.apache.olingo.client.api.communication.request.cud.ODataPropertyUpdateRequest; import org.apache.olingo.client.api.communication.request.cud.ODataValueUpdateRequest; import org.apache.olingo.client.api.communication.request.cud.UpdateType; import org.apache.olingo.client.api.communication.request.retrieve.EdmMetadataRequest; import org.apache.olingo.client.api.communication.request.retrieve.ODataEntityRequest; import 
org.apache.olingo.client.api.communication.request.retrieve.ODataPropertyRequest; import org.apache.olingo.client.api.communication.request.retrieve.ODataServiceDocumentRequest; import org.apache.olingo.client.api.communication.request.retrieve.ODataValueRequest; import org.apache.olingo.client.api.communication.request.streamed.ODataMediaEntityUpdateRequest; import org.apache.olingo.client.api.communication.response.ODataDeleteResponse; import org.apache.olingo.client.api.communication.response.ODataRetrieveResponse; import org.apache.olingo.client.api.domain.ClientEntity; import org.apache.olingo.client.api.domain.ClientPrimitiveValue; import org.apache.olingo.client.api.domain.ClientProperty; import org.apache.olingo.client.api.domain.ClientServiceDocument; import org.apache.olingo.client.api.http.HttpClientException; import org.apache.olingo.commons.api.edm.Edm; import org.apache.olingo.commons.api.edm.FullQualifiedName; import org.apache.olingo.commons.api.http.HttpHeader; import org.apache.olingo.commons.api.http.HttpStatusCode; import org.apache.olingo.fit.tecsvc.TecSvcConst; import org.junit.Test; public class ConditionalITCase extends AbstractParamTecSvcITCase { private final URI uriEntity = getClient().newURIBuilder(TecSvcConst.BASE_URI) .appendEntitySetSegment("ESCompAllPrim").appendKeySegment(0).build(); private final URI uriProperty = getClient().newURIBuilder(uriEntity.toASCIIString()) .appendPropertySegment("PropertyComp").appendPropertySegment("PropertyDuration").build(); private final URI uriPropertyValue = getClient().newURIBuilder( uriProperty.toASCIIString()).appendValueSegment().build(); private final URI uriMedia = getClient().newURIBuilder(TecSvcConst.BASE_URI) .appendEntitySetSegment("ESMedia").appendKeySegment(1).appendValueSegment().build(); @Test public void readServiceDocument() throws Exception { ODataServiceDocumentRequest request = getClient().getRetrieveRequestFactory() .getServiceDocumentRequest(TecSvcConst.BASE_URI); 
// NOTE(review): this chunk begins inside a test method whose declaration lies above this
// excerpt. The statements below are the tail of a conditional-read test for the service
// document: plain read, conditional re-read via If-None-Match, then a failing If-Match.

// Initial read: expect 200 OK; keep the session cookie and the returned ETag.
setCookieHeader(request);
ODataRetrieveResponse<ClientServiceDocument> response = request.execute();
saveCookieHeader(response);
assertEquals(HttpStatusCode.OK.getStatusCode(), response.getStatusCode());

// Re-read with If-None-Match set to the ETag just received: expect 304 Not Modified.
request = getClient().getRetrieveRequestFactory().getServiceDocumentRequest(TecSvcConst.BASE_URI);
request.setIfNoneMatch(response.getETag());
setCookieHeader(request);
response = request.execute();
saveCookieHeader(response);
assertEquals(HttpStatusCode.NOT_MODIFIED.getStatusCode(), response.getStatusCode());

// Read with an If-Match value the service rejects: expect 412 Precondition Failed.
request = getClient().getRetrieveRequestFactory().getServiceDocumentRequest(TecSvcConst.BASE_URI);
request.setIfMatch("W/\"0\"");
setCookieHeader(request);
executeAndExpectError(request, HttpStatusCode.PRECONDITION_FAILED);
}

/** Same conditional-read cycle as the service-document test, but against $metadata. */
@Test
public void readMetadataDocument() throws Exception {
  // Initial read: expect 200 OK and an ETag.
  EdmMetadataRequest request = getClient().getRetrieveRequestFactory().getMetadataRequest(TecSvcConst.BASE_URI);
  setCookieHeader(request);
  ODataRetrieveResponse<Edm> response = request.execute();
  saveCookieHeader(response);
  assertEquals(HttpStatusCode.OK.getStatusCode(), response.getStatusCode());

  // Conditional re-read with the current ETag: expect 304 Not Modified.
  request = getClient().getRetrieveRequestFactory().getMetadataRequest(TecSvcConst.BASE_URI);
  request.setIfNoneMatch(response.getETag());
  setCookieHeader(request);
  response = request.execute();
  saveCookieHeader(response);
  assertEquals(HttpStatusCode.NOT_MODIFIED.getStatusCode(), response.getStatusCode());

  // Rejected If-Match: expect 412 Precondition Failed.
  request = getClient().getRetrieveRequestFactory().getMetadataRequest(TecSvcConst.BASE_URI);
  request.setIfMatch("W/\"0\"");
  setCookieHeader(request);
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_FAILED);
}

/** Entity read with a non-matching If-Match header must fail with 412 Precondition Failed. */
@Test
public void readWithWrongIfMatch() throws Exception {
  ODataEntityRequest<ClientEntity> request = getClient().getRetrieveRequestFactory().getEntityRequest(uriEntity);
  request.setIfMatch("W/\"1\"");
  assertNotNull(request);
  setCookieHeader(request);
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_FAILED);
}

/** Entity read with a matching If-None-Match header must yield 304 Not Modified. */
@Test
public void readNotModified() throws Exception {
  ODataEntityRequest<ClientEntity> request = getClient().getRetrieveRequestFactory().getEntityRequest(uriEntity);
  request.setIfNoneMatch("W/\"0\"");
  assertNotNull(request);
  setCookieHeader(request);
  final ODataRetrieveResponse<ClientEntity> response = request.execute();
  saveCookieHeader(response);
  assertEquals(HttpStatusCode.NOT_MODIFIED.getStatusCode(), response.getStatusCode());
}

/** Updating without any If-Match header must yield 428 Precondition Required. */
@Test
public void updateWithoutIfMatch() throws Exception {
  executeAndExpectError(
      getClient().getCUDRequestFactory().getEntityUpdateRequest(
          uriEntity, UpdateType.PATCH, getFactory().newEntity(new FullQualifiedName("olingo.Order"))),
      HttpStatusCode.PRECONDITION_REQUIRED);
}

/** Updating with a non-matching If-Match header must yield 412 Precondition Failed. */
@Test
public void updateWithWrongIfMatch() throws Exception {
  ODataEntityUpdateRequest<ClientEntity> request =
      getClient().getCUDRequestFactory().getEntityUpdateRequest(
          uriEntity, UpdateType.PATCH, getFactory().newEntity(new FullQualifiedName("olingo.Order")));
  request.setIfMatch("W/\"1\"");
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_FAILED);
}

/**
 * Media-entity update with a non-matching If-Match header must yield 412.
 * The media request fails through the payload manager, so the error surfaces as a
 * nested cause ({@code HttpClientException} wrapping {@code ODataClientErrorException})
 * rather than through {@code executeAndExpectError}.
 */
@Test
public void updateMediaWithWrongIfMatch() throws Exception {
  ODataMediaEntityUpdateRequest<ClientEntity> request =
      getClient().getCUDRequestFactory().getMediaEntityUpdateRequest(uriMedia, IOUtils.toInputStream("ignored"));
  request.setIfMatch("W/\"42\"");
  try {
    request.payloadManager().getResponse();
    fail("Expected Exception not thrown!");
  } catch (final HttpClientException e) {
    // The actual OData error is two causes down the chain.
    final ODataClientErrorException ex = (ODataClientErrorException) e.getCause().getCause();
    assertEquals(HttpStatusCode.PRECONDITION_FAILED.getStatusCode(), ex.getStatusLine().getStatusCode());
    assertThat(ex.getODataError().getMessage(), containsString("condition"));
  }
}

/** Deleting an entity with a non-matching If-Match header must yield 412. */
@Test
public void deleteWithWrongIfMatch() throws Exception {
  ODataDeleteRequest request = getClient().getCUDRequestFactory().getDeleteRequest(uriEntity);
  request.setIfMatch("W/\"1\"");
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_FAILED);
}

/** Deleting a media entity with a non-matching If-Match header must yield 412. */
@Test
public void deleteMediaWithWrongIfMatch() throws Exception {
  ODataDeleteRequest request = getClient().getCUDRequestFactory().getDeleteRequest(uriMedia);
  request.setIfMatch("W/\"42\"");
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_FAILED);
}

/**
 * Changing a property changes the containing entity's ETag: after the property delete,
 * an entity update using the old ETag must fail with 412.
 */
@Test
public void indirectEntityChange() throws Exception {
  final String eTag = "W/\"0\"";
  // Delete a property of the entity; this bumps the entity's ETag server-side.
  ODataDeleteRequest deleteRequest = getClient().getCUDRequestFactory().getDeleteRequest(uriProperty);
  deleteRequest.setIfMatch(eTag);
  final ODataDeleteResponse response = deleteRequest.execute();
  // Now try to update the entity with the stale ETag.
  ODataEntityUpdateRequest<ClientEntity> request = getClient().getCUDRequestFactory().getEntityUpdateRequest(
      uriEntity, UpdateType.PATCH, getFactory().newEntity(new FullQualifiedName("olingo.Order")));
  request.setIfMatch(eTag);
  // This request has to be in the same session as the first in order to access the same data provider.
  request.addCustomHeader(HttpHeader.COOKIE, response.getHeader(HttpHeader.SET_COOKIE).iterator().next());
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_FAILED);
}

/** Property read with a matching If-None-Match header must yield 304 Not Modified. */
@Test
public void readPropertyNotModified() throws Exception {
  ODataPropertyRequest<ClientProperty> request =
      getClient().getRetrieveRequestFactory().getPropertyRequest(uriProperty);
  request.setIfNoneMatch("W/\"0\"");
  setCookieHeader(request);
  final ODataRetrieveResponse<ClientProperty> response = request.execute();
  saveCookieHeader(response);
  assertEquals(HttpStatusCode.NOT_MODIFIED.getStatusCode(), response.getStatusCode());
}

/** Raw property-value read with a matching If-None-Match header must yield 304. */
@Test
public void readPropertyValueNotModified() throws Exception {
  ODataValueRequest request = getClient().getRetrieveRequestFactory().getPropertyValueRequest(uriPropertyValue);
  request.setIfNoneMatch("W/\"0\"");
  setCookieHeader(request);
  final ODataRetrieveResponse<ClientPrimitiveValue> response = request.execute();
  saveCookieHeader(response);
  assertEquals(HttpStatusCode.NOT_MODIFIED.getStatusCode(), response.getStatusCode());
}

/** Property update without If-Match must yield 428 Precondition Required. */
@Test
public void updatePropertyWithoutIfMatch() throws Exception {
  final ODataPropertyUpdateRequest request =
      getClient().getCUDRequestFactory().getPropertyPrimitiveValueUpdateRequest(
          uriProperty,
          getFactory().newPrimitiveProperty("PropertyDuration",
              getFactory().newPrimitiveValueBuilder().buildString("PT42S")));
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_REQUIRED);
}

/** Property update with a non-matching If-Match must yield 412 Precondition Failed. */
@Test
public void updatePropertyWithWrongIfMatch() throws Exception {
  ODataPropertyUpdateRequest request =
      getClient().getCUDRequestFactory().getPropertyPrimitiveValueUpdateRequest(
          uriProperty,
          getFactory().newPrimitiveProperty("PropertyDuration",
              getFactory().newPrimitiveValueBuilder().buildString("PT42S")));
  request.setIfMatch("W/\"1\"");
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_FAILED);
}

/** Raw property-value update without If-Match must yield 428 Precondition Required. */
@Test
public void updatePropertyValueWithoutIfMatch() throws Exception {
  final ODataValueUpdateRequest request =
      getClient().getCUDRequestFactory().getValueUpdateRequest(
          uriPropertyValue, UpdateType.REPLACE,
          getFactory().newPrimitiveValueBuilder().buildString("PT42S"));
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_REQUIRED);
}

/** Raw property-value update with a non-matching If-Match must yield 412. */
@Test
public void updatePropertyValueWithWrongIfMatch() throws Exception {
  ODataValueUpdateRequest request =
      getClient().getCUDRequestFactory().getValueUpdateRequest(
          uriPropertyValue, UpdateType.REPLACE,
          getFactory().newPrimitiveValueBuilder().buildString("PT42S"));
  request.setIfMatch("W/\"1\"");
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_FAILED);
}

/** Property delete without If-Match must yield 428 Precondition Required. */
@Test
public void deletePropertyWithoutIfMatch() throws Exception {
  final ODataDeleteRequest request = getClient().getCUDRequestFactory().getDeleteRequest(uriProperty);
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_REQUIRED);
}

/** Property delete with a non-matching If-Match must yield 412 Precondition Failed. */
@Test
public void deletePropertyWithWrongIfMatch() throws Exception {
  ODataDeleteRequest request = getClient().getCUDRequestFactory().getDeleteRequest(uriProperty);
  request.setIfMatch("W/\"1\"");
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_FAILED);
}

/**
 * Successful raw property-value delete with a matching If-Match: expect 204 No Content
 * and a NEW ETag (the delete is a state change, so the ETag must have moved on).
 */
@Test
public void deletePropertyValue() throws Exception {
  ODataDeleteRequest request = getClient().getCUDRequestFactory().getDeleteRequest(uriPropertyValue);
  request.setIfMatch("W/\"0\"");
  final ODataDeleteResponse response = request.execute();
  assertEquals(HttpStatusCode.NO_CONTENT.getStatusCode(), response.getStatusCode());
  assertNotNull(response.getETag());
  assertNotEquals(request.getIfMatch(), response.getETag());
}

/** Raw property-value delete without If-Match must yield 428 Precondition Required. */
@Test
public void deletePropertyValueWithoutIfMatch() throws Exception {
  final ODataDeleteRequest request = getClient().getCUDRequestFactory().getDeleteRequest(uriPropertyValue);
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_REQUIRED);
}

/** Raw property-value delete with a non-matching If-Match must yield 412. */
@Test
public void deletePropertyValueWithWrongIfMatch() throws Exception {
  ODataDeleteRequest request = getClient().getCUDRequestFactory().getDeleteRequest(uriPropertyValue);
  request.setIfMatch("W/\"1\"");
  executeAndExpectError(request, HttpStatusCode.PRECONDITION_FAILED);
}

/**
 * Executes the request, expecting it to fail with the given HTTP status and an error
 * message mentioning the failed precondition (contains "condition" or "match").
 */
private void executeAndExpectError(ODataBasicRequest<?> request, final HttpStatusCode status) {
  try {
    request.execute();
    fail("Expected Exception not thrown!");
  } catch (final ODataClientErrorException e) {
    assertEquals(status.getStatusCode(), e.getStatusLine().getStatusCode());
    assertThat(e.getODataError().getMessage(), anyOf(containsString("condition"), containsString("match")));
  }
}
}
package org.testcontainers.jdbc;

import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * This is an Immutable class holding JDBC Connection Url and its parsed components, used by {@link ContainerDatabaseDriver}.
 * <p>
 * {@link ConnectionUrl#parseUrl()} method must be called after instantiating this class.
 *
 * @author manikmagar
 */
@EqualsAndHashCode(of = "url")
@Getter
public class ConnectionUrl {

    /** The raw JDBC URL this instance was built from; also the identity for equals/hashCode. */
    private String url;

    /** Database type extracted from the URL, e.g. "mysql" or "postgresql". */
    private String databaseType;

    /** Docker image tag, if one was given between the database type and "://". */
    private Optional<String> imageTag;

    /**
     * This is a part of the connection string that may specify host:port/databasename.
     * It may vary for different clients and so clients can parse it as needed.
     */
    private String dbHostString;

    /** True when TC_DAEMON=true is present — the container outlives the last connection. */
    private boolean inDaemonMode = false;

    private Optional<String> databaseHost = Optional.empty();

    private Optional<Integer> databasePort = Optional.empty();

    private Optional<String> databaseName = Optional.empty();

    /** Path from the TC_INITSCRIPT parameter, if present. */
    private Optional<String> initScriptPath = Optional.empty();

    /** Init function (class::method) from the TC_INITFUNCTION parameter, if present. */
    private Optional<InitFunctionDef> initFunction = Optional.empty();

    /** Query string ("?k=v&..." form) rebuilt from the non-TC_* query parameters, if any. */
    private Optional<String> queryString;

    /** TestContainers (TC_*) parameters found anywhere in the URL. Unmodifiable. */
    private Map<String, String> containerParameters;

    /** Plain query parameters (TC_* excluded). Unmodifiable. */
    private Map<String, String> queryParameters;

    /**
     * Static factory: builds the instance and immediately parses the URL so that the
     * returned object is fully populated and effectively immutable.
     *
     * @param url the jdbc:tc: connection URL
     * @return a parsed {@link ConnectionUrl}
     * @throws IllegalArgumentException if the URL cannot be parsed
     * @throws NullPointerException if {@code url} is null
     */
    public static ConnectionUrl newInstance(final String url) {
        ConnectionUrl connectionUrl = new ConnectionUrl(url);
        connectionUrl.parseUrl();
        return connectionUrl;
    }

    private ConnectionUrl(final String url) {
        this.url = Objects.requireNonNull(url, "Connection URL cannot be null");
    }

    /**
     * @param url a JDBC URL
     * @return true if this driver should handle the URL (i.e. it has the jdbc:tc: prefix)
     */
    public static boolean accepts(final String url) {
        return url.startsWith("jdbc:tc:");
    }

    /**
     * This method applies various REGEX Patterns to parse the URL associated with this instance.
     * This is called from a @{@link ConnectionUrl#newInstance(String)} static factory method to create immutable instance of {@link ConnectionUrl}.
     * To avoid mutation after class is instantiated, this method should not be publicly accessible.
     *
     * @throws IllegalArgumentException if neither the generic nor the Oracle pattern matches
     */
    private void parseUrl() {
        /* Extract from the JDBC connection URL:
         * The database type (e.g. mysql, postgresql, ...)
         * The docker tag, if provided.
         * The URL query string, if provided
         */
        Matcher urlMatcher = Patterns.URL_MATCHING_PATTERN.matcher(this.getUrl());
        if (!urlMatcher.matches()) {
            //Try for Oracle pattern
            urlMatcher = Patterns.ORACLE_URL_MATCHING_PATTERN.matcher(this.getUrl());
            if (!urlMatcher.matches()) {
                throw new IllegalArgumentException("JDBC URL matches jdbc:tc: prefix but the database or tag name could not be identified");
            }
        }
        databaseType = urlMatcher.group(1);
        imageTag = Optional.ofNullable(urlMatcher.group(3));

        //String like hostname:port/database name, which may vary based on target database.
        //Clients can further parse it as needed.
        dbHostString = urlMatcher.group(4);

        //In case it matches to the default pattern
        Matcher dbInstanceMatcher = Patterns.DB_INSTANCE_MATCHING_PATTERN.matcher(dbHostString);
        if (dbInstanceMatcher.matches()) {
            databaseHost = Optional.of(dbInstanceMatcher.group(1));
            databasePort = Optional.ofNullable(dbInstanceMatcher.group(3)).map(Integer::valueOf);
            databaseName = Optional.of(dbInstanceMatcher.group(4));
        }

        queryParameters = Collections.unmodifiableMap(
            parseQueryParameters(Optional.ofNullable(urlMatcher.group(5)).orElse("")));

        // Rebuild the query string from the surviving (non-TC_*) parameters.
        String query = queryParameters
            .entrySet()
            .stream()
            .map(e -> e.getKey() + "=" + e.getValue())
            .collect(Collectors.joining("&"));

        // Collectors.joining never returns null, so only emptiness needs checking
        // (the previous "query == null" guard was dead code).
        if (query.trim().isEmpty()) {
            queryString = Optional.empty();
        } else {
            queryString = Optional.of("?" + query);
        }

        containerParameters = Collections.unmodifiableMap(parseContainerParameters());

        initScriptPath = Optional.ofNullable(containerParameters.get("TC_INITSCRIPT"));

        Matcher funcMatcher = Patterns.INITFUNCTION_MATCHING_PATTERN.matcher(this.getUrl());
        if (funcMatcher.matches()) {
            initFunction = Optional.of(new InitFunctionDef(funcMatcher.group(2), funcMatcher.group(4)));
        }

        Matcher daemonMatcher = Patterns.DAEMON_MATCHING_PATTERN.matcher(this.getUrl());
        inDaemonMode = daemonMatcher.matches() && Boolean.parseBoolean(daemonMatcher.group(2));
    }

    /**
     * Get the TestContainers Parameters such as Init Function, Init Script path etc.
     * Scans the whole URL for TC_* name=value pairs.
     *
     * @return {@link Map}
     */
    private Map<String, String> parseContainerParameters() {
        Map<String, String> results = new HashMap<>();

        Matcher matcher = Patterns.TC_PARAM_MATCHING_PATTERN.matcher(this.getUrl());
        while (matcher.find()) {
            String key = matcher.group(1);
            String value = matcher.group(2);
            results.put(key, value);
        }

        return results;
    }

    /**
     * Get all Query parameters specified in the Connection URL after ?. This DOES NOT include TestContainers (TC_*) parameters.
     *
     * @param queryString the raw query-string portion of the URL (may be empty)
     * @return {@link Map}
     */
    private Map<String, String> parseQueryParameters(final String queryString) {
        Map<String, String> results = new HashMap<>();
        Matcher matcher = Patterns.QUERY_PARAM_MATCHING_PATTERN.matcher(queryString);
        while (matcher.find()) {
            String key = matcher.group(1);
            String value = matcher.group(2);
            // TC_* parameters belong to containerParameters, not to the plain query map.
            if (!key.matches(Patterns.TC_PARAM_NAME_PATTERN)) {
                results.put(key, value);
            }
        }

        return results;
    }

    /**
     * This interface defines the Regex Patterns used by {@link ConnectionUrl}.
     *
     * @author manikmagar
     */
    public interface Patterns {
        Pattern URL_MATCHING_PATTERN = Pattern.compile("jdbc:tc:([a-z]+)(:([^:]+))?://([^\\?]+)(\\?.*)?");

        // NOTE(review): "[^(thin:)]" is a character class excluding the characters
        // ( t h i n : ) — NOT a negative match of the group "thin:". It happens to work
        // for typical tags but is misleading; verify before changing (behavior kept as-is).
        Pattern ORACLE_URL_MATCHING_PATTERN = Pattern.compile("jdbc:tc:([a-z]+)(:([^(thin:)]+))?:thin:@([^\\?]+)(\\?.*)?");

        //Matches to part of string - hostname:port/databasename
        Pattern DB_INSTANCE_MATCHING_PATTERN = Pattern.compile("([^:]+)(:([0-9]+))?/([^\\\\?]+)");

        Pattern DAEMON_MATCHING_PATTERN = Pattern.compile(".*([\\?&]?)TC_DAEMON=([^\\?&]+).*");
        Pattern INITSCRIPT_MATCHING_PATTERN = Pattern.compile(".*([\\?&]?)TC_INITSCRIPT=([^\\?&]+).*");

        Pattern INITFUNCTION_MATCHING_PATTERN = Pattern.compile(".*([\\?&]?)TC_INITFUNCTION="
            + "((\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*\\.)*\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)"
            + "::"
            + "(\\p{javaJavaIdentifierStart}\\p{javaJavaIdentifierPart}*)"
            + ".*");

        String TC_PARAM_NAME_PATTERN = "(TC_[A-Z_]+)";

        Pattern TC_PARAM_MATCHING_PATTERN = Pattern.compile(TC_PARAM_NAME_PATTERN + "=([^\\?&]+)");

        Pattern QUERY_PARAM_MATCHING_PATTERN = Pattern.compile("([^\\?&=]+)=([^\\?&]+)");
    }

    /**
     * Holder for a TC_INITFUNCTION definition (fully-qualified class name + method name).
     * NOTE(review): kept as a non-static inner class for binary compatibility; it uses no
     * outer state and could be made static in a breaking release.
     */
    @Getter
    @AllArgsConstructor
    public class InitFunctionDef {
        private String className;
        private String methodName;
    }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package git4idea.rebase;

import com.intellij.dvcs.DvcsUtil;
import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vcs.ProjectLevelVcsManager;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.VirtualFile;
import git4idea.GitPlatformFacade;
import git4idea.GitUtil;
import git4idea.GitVcs;
import git4idea.commands.*;
import git4idea.merge.GitConflictResolver;
import git4idea.update.GitUpdateResult;
import git4idea.util.GitUIUtil;
import git4idea.util.LocalChangesWouldBeOverwrittenHelper;
import git4idea.util.StringScanner;
import git4idea.util.UntrackedFilesNotifier;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import static git4idea.commands.GitLocalChangesWouldBeOverwrittenDetector.Operation.CHECKOUT;

/**
 * Drives {@code git rebase} for the IDE: starts/continues/aborts a rebase as a background
 * {@link GitTask} and classifies failures (merge conflicts, "no changes", untracked or local
 * files that would be overwritten) into user-facing recovery actions.
 *
 * @author Kirill Likhodedov
 */
public class GitRebaser {

  private final Project myProject;
  private GitVcs myVcs;
  // Commits dropped via 'git rebase --skip' while working around "no changes" errors.
  private List<GitRebaseUtils.CommitInfo> mySkippedCommits;
  private static final Logger LOG = Logger.getInstance(GitRebaser.class);
  @NotNull private final Git myGit;
  private @Nullable ProgressIndicator myProgressIndicator;

  public GitRebaser(Project project, @NotNull Git git, @Nullable ProgressIndicator progressIndicator) {
    myProject = project;
    myGit = git;
    myProgressIndicator = progressIndicator;
    myVcs = GitVcs.getInstance(project);
    mySkippedCommits = new ArrayList<GitRebaseUtils.CommitInfo>();
  }

  /**
   * Runs {@code git rebase} with the given parameters in a background task and maps the
   * outcome to a {@link GitUpdateResult}. Line listeners detect conflicts, untracked-file
   * and local-change collisions so that failures can be handled after the task completes.
   *
   * @param root         repository root to rebase
   * @param parameters   extra command-line arguments for {@code git rebase}
   * @param onCancel     invoked if the user cancels the task (may be null)
   * @param lineListener optional extra output listener
   */
  public GitUpdateResult rebase(@NotNull VirtualFile root,
                                @NotNull List<String> parameters,
                                @Nullable final Runnable onCancel,
                                @Nullable GitLineHandlerListener lineListener) {
    final GitLineHandler rebaseHandler = createHandler(root);
    rebaseHandler.setStdoutSuppressed(false);
    rebaseHandler.addParameters(parameters);
    if (lineListener != null) {
      rebaseHandler.addLineListener(lineListener);
    }

    // Attach detectors BEFORE execution; they scan git's output as it streams by.
    final GitRebaseProblemDetector rebaseConflictDetector = new GitRebaseProblemDetector();
    rebaseHandler.addLineListener(rebaseConflictDetector);
    GitUntrackedFilesOverwrittenByOperationDetector untrackedFilesDetector = new GitUntrackedFilesOverwrittenByOperationDetector(root);
    GitLocalChangesWouldBeOverwrittenDetector localChangesDetector = new GitLocalChangesWouldBeOverwrittenDetector(root, CHECKOUT);
    rebaseHandler.addLineListener(untrackedFilesDetector);
    rebaseHandler.addLineListener(localChangesDetector);

    String progressTitle = "Rebasing";
    GitTask rebaseTask = new GitTask(myProject, rebaseHandler, progressTitle);
    rebaseTask.setProgressIndicator(myProgressIndicator);
    rebaseTask.setProgressAnalyzer(new GitStandardProgressAnalyzer());
    final AtomicReference<GitUpdateResult> updateResult = new AtomicReference<GitUpdateResult>();
    final AtomicBoolean failure = new AtomicBoolean();
    // Suspend VFS change tracking for the duration of the working-tree modification.
    AccessToken token = DvcsUtil.workingTreeChangeStarted(myProject);
    try {
      rebaseTask.executeInBackground(true, new GitTaskResultHandlerAdapter() {
        @Override
        protected void onSuccess() {
          updateResult.set(GitUpdateResult.SUCCESS);
        }

        @Override
        protected void onCancel() {
          if (onCancel != null) {
            onCancel.run();
          }
          updateResult.set(GitUpdateResult.CANCEL);
        }

        @Override
        protected void onFailure() {
          failure.set(true);
        }
      });
      if (failure.get()) {
        // Failure classification happens after the task, on this thread.
        updateResult.set(handleRebaseFailure(rebaseHandler, root, rebaseConflictDetector, untrackedFilesDetector, localChangesDetector));
      }
    }
    finally {
      DvcsUtil.workingTreeChangeFinished(myProject, token);
    }
    return updateResult.get();
  }

  // Overridable factory for the rebase command handler (test seam / customization point).
  protected GitLineHandler createHandler(VirtualFile root) {
    return new GitLineHandler(myProject, root, GitCommand.REBASE);
  }

  /** Runs {@code git rebase --abort} asynchronously, notifying the user of the result. */
  public void abortRebase(@NotNull VirtualFile root) {
    LOG.info("abortRebase " + root);
    final GitLineHandler rh = new GitLineHandler(myProject, root, GitCommand.REBASE);
    rh.setStdoutSuppressed(false);
    rh.addParameters("--abort");
    GitTask task = new GitTask(myProject, rh, "Aborting rebase");
    task.setProgressIndicator(myProgressIndicator);
    task.executeAsync(new GitTaskResultNotificationHandler(myProject, "Rebase aborted", "Abort rebase cancelled", "Error aborting rebase"));
  }

  public boolean continueRebase(@NotNull VirtualFile root) {
    return continueRebase(root, "--continue");
  }

  /**
   * Runs 'git rebase --continue' on several roots consequently.
   * @return true if rebase successfully finished.
   */
  public boolean continueRebase(@NotNull Collection<VirtualFile> rebasingRoots) {
    AccessToken token = DvcsUtil.workingTreeChangeStarted(myProject);
    try {
      boolean success = true;
      for (VirtualFile root : rebasingRoots) {
        // Note: '&=' keeps iterating over remaining roots even after a failure.
        success &= continueRebase(root);
      }
      return success;
    }
    finally {
      DvcsUtil.workingTreeChangeFinished(myProject, token);
    }
  }

  // start operation may be "--continue" or "--skip" depending on the situation.
  private boolean continueRebase(final @NotNull VirtualFile root, @NotNull String startOperation) {
    LOG.info("continueRebase " + root + " " + startOperation);
    final GitLineHandler rh = new GitLineHandler(myProject, root, GitCommand.REBASE);
    rh.setStdoutSuppressed(false);
    rh.addParameters(startOperation);
    final GitRebaseProblemDetector rebaseConflictDetector = new GitRebaseProblemDetector();
    rh.addLineListener(rebaseConflictDetector);
    // Install a no-op editor so an interactive rebase doesn't block waiting for user input.
    makeContinueRebaseInteractiveEditor(root, rh);

    final GitTask rebaseTask = new GitTask(myProject, rh, "git rebase " + startOperation);
    rebaseTask.setProgressAnalyzer(new GitStandardProgressAnalyzer());
    rebaseTask.setProgressIndicator(myProgressIndicator);
    return executeRebaseTaskInBackground(root, rh, rebaseConflictDetector, rebaseTask);
  }

  protected void makeContinueRebaseInteractiveEditor(VirtualFile root, GitLineHandler rh) {
    GitRebaseEditorService rebaseEditorService = GitRebaseEditorService.getInstance();
    // TODO If interactive rebase with commit rewording was invoked, this should take the reworded message
    GitRebaser.TrivialEditor editor = new GitRebaser.TrivialEditor(rebaseEditorService, myProject, root, rh);
    Integer rebaseEditorNo = editor.getHandlerNo();
    rebaseEditorService.configureHandler(rh, rebaseEditorNo);
  }

  /**
   * @return Roots which have unfinished rebase process. May be empty.
   */
  public @NotNull Collection<VirtualFile> getRebasingRoots() {
    final Collection<VirtualFile> rebasingRoots = new HashSet<VirtualFile>();
    for (VirtualFile root : ProjectLevelVcsManager.getInstance(myProject).getRootsUnderVcs(myVcs)) {
      if (GitRebaseUtils.isRebaseInTheProgress(root)) {
        rebasingRoots.add(root);
      }
    }
    return rebasingRoots;
  }

  /**
   * Reorders commits so that the given commits go before others, just after the given parentCommit.
   * For example, if A->B->C->D are unpushed commits and B and D are supplied to this method, then after rebase the commits will
   * look like that: B->D->A->C.
   * NB: If there are merges in the unpushed commits being reordered, a conflict would happen. The calling code should probably
   * prohibit reordering merge commits.
   */
  public boolean reoderCommitsIfNeeded(@NotNull final VirtualFile root, @NotNull String parentCommit, @NotNull List<String> olderCommits) throws VcsException {
    // NOTE(review): allCommits is never populated (see TODO below), so the size comparison
    // only triggers when olderCommits is empty — the "all commits already first" shortcut
    // is effectively dead. Confirm intent before fixing.
    List<String> allCommits = new ArrayList<String>(); //TODO
    if (olderCommits.isEmpty() || olderCommits.size() == allCommits.size()) {
      LOG.info("Nothing to reorder. olderCommits: " + olderCommits + " allCommits: " + allCommits);
      return true;
    }

    final GitLineHandler h = new GitLineHandler(myProject, root, GitCommand.REBASE);
    h.setStdoutSuppressed(false);
    Integer rebaseEditorNo = null;
    GitRebaseEditorService rebaseEditorService = GitRebaseEditorService.getInstance();
    try {
      // Interactive rebase; the PushRebaseEditor rewrites the todo list non-interactively.
      h.addParameters("-i", "-m", "-v");
      h.addParameters(parentCommit);

      final GitRebaseProblemDetector rebaseConflictDetector = new GitRebaseProblemDetector();
      h.addLineListener(rebaseConflictDetector);

      final PushRebaseEditor pushRebaseEditor = new PushRebaseEditor(rebaseEditorService, root, olderCommits, false, h);
      rebaseEditorNo = pushRebaseEditor.getHandlerNo();
      rebaseEditorService.configureHandler(h, rebaseEditorNo);

      final GitTask rebaseTask = new GitTask(myProject, h, "Reordering commits");
      rebaseTask.setProgressIndicator(myProgressIndicator);
      return executeRebaseTaskInBackground(root, h, rebaseConflictDetector, rebaseTask);
    }
    finally {
      // TODO should be unregistered in the task.success
      // unregistering rebase service
      if (rebaseEditorNo != null) {
        rebaseEditorService.unregisterHandler(rebaseEditorNo);
      }
    }
  }

  // Runs the prepared rebase task; on failure delegates to handleRebaseFailure for recovery.
  private boolean executeRebaseTaskInBackground(VirtualFile root, GitLineHandler h, GitRebaseProblemDetector rebaseConflictDetector, GitTask rebaseTask) {
    final AtomicBoolean result = new AtomicBoolean();
    final AtomicBoolean failure = new AtomicBoolean();
    rebaseTask.executeInBackground(true, new GitTaskResultHandlerAdapter() {
      @Override
      protected void onSuccess() {
        result.set(true);
      }

      @Override
      protected void onCancel() {
        result.set(false);
      }

      @Override
      protected void onFailure() {
        failure.set(true);
      }
    });
    if (failure.get()) {
      result.set(handleRebaseFailure(root, h, rebaseConflictDetector));
    }
    return result.get();
  }

  /**
   * @return true if the failure situation was resolved successfully, false if we failed to resolve the problem.
   */
  private boolean handleRebaseFailure(final VirtualFile root, final GitLineHandler h, GitRebaseProblemDetector rebaseConflictDetector) {
    if (rebaseConflictDetector.isMergeConflict()) {
      // Let the user resolve conflicts, then continue the rebase.
      LOG.info("handleRebaseFailure merge conflict");
      return new GitConflictResolver(myProject, myGit, ServiceManager.getService(GitPlatformFacade.class), Collections.singleton(root), makeParamsForRebaseConflict()) {
        @Override
        protected boolean proceedIfNothingToMerge() {
          return continueRebase(root, "--continue");
        }

        @Override
        protected boolean proceedAfterAllMerged() {
          return continueRebase(root, "--continue");
        }
      }.merge();
    }
    else if (rebaseConflictDetector.isNoChangeError()) {
      // "No changes" can be a false positive, an unstaged patch, or a genuinely empty commit;
      // each case gets a different workaround.
      LOG.info("handleRebaseFailure no changes error detected");
      try {
        if (GitUtil.hasLocalChanges(true, myProject, root)) {
          LOG.error("The rebase detector incorrectly detected 'no changes' situation. Attempting to continue rebase.");
          return continueRebase(root);
        }
        else if (GitUtil.hasLocalChanges(false, myProject, root)) {
          LOG.warn("No changes from patch were not added to the index. Adding all changes from tracked files.");
          stageEverything(root);
          return continueRebase(root);
        }
        else {
          // Truly empty commit: remember it and skip it.
          GitRebaseUtils.CommitInfo commit = GitRebaseUtils.getCurrentRebaseCommit(root);
          LOG.info("no changes confirmed. Skipping commit " + commit);
          mySkippedCommits.add(commit);
          return continueRebase(root, "--skip");
        }
      }
      catch (VcsException e) {
        LOG.info("Failed to work around 'no changes' error.", e);
        String message = "Couldn't proceed with rebase. " + e.getMessage();
        GitUIUtil.notifyImportantError(myProject, "Error rebasing", message);
        return false;
      }
    }
    else {
      LOG.info("handleRebaseFailure error " + h.errors());
      GitUIUtil.notifyImportantError(myProject, "Error rebasing", GitUIUtil.stringifyErrors(h.errors()));
      return false;
    }
  }

  // Stages modifications of all tracked files ('git add --update').
  private void stageEverything(@NotNull VirtualFile root) throws VcsException {
    GitSimpleHandler handler = new GitSimpleHandler(myProject, root, GitCommand.ADD);
    handler.setSilent(false);
    handler.addParameters("--update");
    handler.run();
  }

  private static GitConflictResolver.Params makeParamsForRebaseConflict() {
    return new GitConflictResolver.Params().
      setReverse(true).
      setErrorNotificationTitle("Can't continue rebase").
      setMergeDescription("Merge conflicts detected. Resolve them before continuing rebase.").
      setErrorNotificationAdditionalDescription("Then you may <b>continue rebase</b>. <br/> " +
                                                "You also may <b>abort rebase</b> to restore the original branch and stop rebasing.");
  }

  /** Rebase editor that accepts git's proposed todo list unchanged (exit code 0, no edits). */
  public static class TrivialEditor extends GitInteractiveRebaseEditorHandler{
    public TrivialEditor(@NotNull GitRebaseEditorService service,
                         @NotNull Project project,
                         @NotNull VirtualFile root,
                         @NotNull GitHandler handler) {
      super(service, project, root, handler);
    }

    @Override
    public int editCommits(String path) {
      return 0;
    }
  }

  /**
   * Maps a failed rebase run (detected via the supplied detectors) to a {@link GitUpdateResult},
   * offering conflict resolution where possible and notifying the user otherwise.
   */
  @NotNull
  public GitUpdateResult handleRebaseFailure(@NotNull GitLineHandler handler,
                                             @NotNull VirtualFile root,
                                             @NotNull GitRebaseProblemDetector rebaseConflictDetector,
                                             @NotNull GitMessageWithFilesDetector untrackedWouldBeOverwrittenDetector,
                                             @NotNull GitLocalChangesWouldBeOverwrittenDetector localChangesDetector) {
    if (rebaseConflictDetector.isMergeConflict()) {
      LOG.info("handleRebaseFailure merge conflict");
      final boolean allMerged = new GitRebaser.ConflictResolver(myProject, myGit, root, this).merge();
      return allMerged ? GitUpdateResult.SUCCESS_WITH_RESOLVED_CONFLICTS : GitUpdateResult.INCOMPLETE;
    }
    else if (untrackedWouldBeOverwrittenDetector.wasMessageDetected()) {
      LOG.info("handleRebaseFailure: untracked files would be overwritten by checkout");
      UntrackedFilesNotifier.notifyUntrackedFilesOverwrittenBy(myProject, root,
                                                              untrackedWouldBeOverwrittenDetector.getRelativeFilePaths(), "rebase", null);
      return GitUpdateResult.ERROR;
    }
    else if (localChangesDetector.wasMessageDetected()) {
      LocalChangesWouldBeOverwrittenHelper.showErrorNotification(myProject, root, "rebase", localChangesDetector.getRelativeFilePaths());
      return GitUpdateResult.ERROR;
    }
    else {
      LOG.info("handleRebaseFailure error " + handler.errors());
      GitUIUtil.notifyImportantError(myProject, "Rebase error", GitUIUtil.stringifyErrors(handler.errors()));
      return GitUpdateResult.ERROR;
    }
  }

  /** Conflict resolver that continues the rebase once all conflicts are merged. */
  public static class ConflictResolver extends GitConflictResolver {
    @NotNull private final GitRebaser myRebaser;
    @NotNull private final VirtualFile myRoot;

    public ConflictResolver(@NotNull Project project, @NotNull Git git, @NotNull VirtualFile root, @NotNull GitRebaser rebaser) {
      super(project, git, ServiceManager.getService(GitPlatformFacade.class), Collections.singleton(root), makeParams());
      myRebaser = rebaser;
      myRoot = root;
    }

    private static Params makeParams() {
      Params params = new Params();
      params.setReverse(true);
      params.setMergeDescription("Merge conflicts detected. Resolve them before continuing rebase.");
      params.setErrorNotificationTitle("Can't continue rebase");
      params.setErrorNotificationAdditionalDescription("Then you may <b>continue rebase</b>. <br/> You also may <b>abort rebase</b> to restore the original branch and stop rebasing.");
      return params;
    }

    @Override
    protected boolean proceedIfNothingToMerge() throws VcsException {
      return myRebaser.continueRebase(myRoot);
    }

    @Override
    protected boolean proceedAfterAllMerged() throws VcsException {
      return myRebaser.continueRebase(myRoot);
    }
  }

  /**
   * The rebase editor that just overrides the list of commits
   */
  class PushRebaseEditor extends GitInteractiveRebaseEditorHandler {
    private final Logger LOG = Logger.getInstance(PushRebaseEditor.class);
    private final List<String> myCommits; // The reordered commits
    private final boolean myHasMerges; // true means that the root has merges

    /**
     * The constructor from fields that is expected to be
     * accessed only from {@link git4idea.rebase.GitRebaseEditorService}.
     *
     * @param rebaseEditorService the editor service to register with
     * @param root the git repository root
     * @param commits the reordered commits
     * @param hasMerges if true, the vcs root has merges
     */
    public PushRebaseEditor(GitRebaseEditorService rebaseEditorService,
                            final VirtualFile root,
                            List<String> commits,
                            boolean hasMerges,
                            GitHandler h) {
      super(rebaseEditorService, myProject, root, h);
      myCommits = commits;
      myHasMerges = hasMerges;
    }

    public int editCommits(String path) {
      if (!myRebaseEditorShown) {
        myRebaseEditorShown = true;
        if (myHasMerges) {
          // Reordering with merges present would conflict; leave the todo list untouched.
          return 0;
        }
        try {
          // Index the "pick <hash> <subject>" lines of git's todo file by commit hash.
          TreeMap<String, String> pickLines = new TreeMap<String, String>();
          StringScanner s = new StringScanner(new String(FileUtil.loadFileText(new File(path), CharsetToolkit.UTF8)));
          while (s.hasMoreData()) {
            if (!s.tryConsume("pick ")) {
              s.line();
              continue;
            }
            String commit = s.spaceToken();
            pickLines.put(commit, "pick " + commit + " " + s.line());
          }
          // Rewrite the todo file with the requested commits first, in the requested order.
          PrintWriter w = new PrintWriter(new OutputStreamWriter(new FileOutputStream(path), CharsetToolkit.UTF8));
          try {
            for (String commit : myCommits) {
              // Prefix lookup: myCommits may hold longer hashes than the todo file's abbreviations.
              // NOTE(review): TreeMap.headMap(...).lastKey() throws NoSuchElementException (it does
              // not return null) when the head map is empty — the null check below never fires;
              // confirm before relying on it.
              String key = pickLines.headMap(commit + "\u0000").lastKey();
              if (key == null || !commit.startsWith(key)) {
                continue; // commit from merged branch
              }
              w.print(pickLines.get(key) + "\n");
            }
          }
          finally {
            w.close();
          }
          return 0;
        }
        catch (Exception ex) {
          LOG.error("Editor failed: ", ex);
          return 1;
        }
      }
      else {
        return super.editCommits(path);
      }
    }
  }
}
/*
 * Copyright (c) 2009-2011, 2014, AllSeen Alliance. All rights reserved.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

package org.alljoyn.bus.ifaces;

import java.util.Arrays;

import org.alljoyn.bus.BusAttachment;
import org.alljoyn.bus.BusException;
import org.alljoyn.bus.ErrorReplyBusException;
import org.alljoyn.bus.Status;
import org.alljoyn.bus.ifaces.DBusProxyObj;
import org.alljoyn.bus.annotation.BusSignalHandler;

import junit.framework.TestCase;

/**
 * Exercises the standard org.freedesktop.DBus interface methods through AllJoyn's
 * {@link DBusProxyObj}. Requires the native alljoyn_java library and a reachable bus;
 * each test connects a fresh {@link BusAttachment} in setUp and tears it down after.
 */
public class DBusProxyObjTest extends TestCase {
    public DBusProxyObjTest(String name) {
        super(name);
    }

    static {
        // Native bindings must be loaded before any BusAttachment is created.
        System.loadLibrary("alljoyn_java");
    }

    private BusAttachment bus;

    private DBusProxyObj dbus;

    public void setUp() throws Exception {
        bus = new BusAttachment(getClass().getName());
        Status status = bus.connect();
        assertEquals(Status.OK, status);

        dbus = bus.getDBusProxyObj();
    }

    public void tearDown() throws Exception {
        dbus = null;
        bus.disconnect();
        bus = null;
    }

    public void testListNames() throws Exception {
        String[] names = dbus.ListNames();
        // all DBus proxy objects should have the org.freedesktop.DBus name
        // and org.alljoyn.Bus name as well as org.alljoyn.Daemon and org.alljoyn.sl
        // we only check for the first two.
        assertTrue(Arrays.asList(names).contains("org.freedesktop.DBus"));
        assertTrue(Arrays.asList(names).contains("org.alljoyn.Bus"));
    }

    public void testListActivatableNames() throws Exception {
        // Only checks the call succeeds; the set of activatable names is environment-dependent.
        String[] names = dbus.ListActivatableNames();
        assertNotNull(names);
    }

    /** Requesting a well-known name and releasing it again must both succeed. */
    public void testRequestReleaseName() throws Exception {
        String name = "org.alljoyn.bus.ifaces.testRequestReleaseName";
        DBusProxyObj.RequestNameResult res1 = dbus.RequestName(name, DBusProxyObj.REQUEST_NAME_NO_FLAGS);
        assertEquals(DBusProxyObj.RequestNameResult.PrimaryOwner, res1);
        DBusProxyObj.ReleaseNameResult res2 = dbus.ReleaseName(name);
        assertEquals(DBusProxyObj.ReleaseNameResult.Released, res2);
    }

    /** Requesting a null name must raise a BusException rather than succeed. */
    public void testRequestNullName() throws Exception {
        boolean thrown = false;
        try {
            /* This shows up an ER_ALLJOYN_BAD_VALUE_TYPE log error. */
            dbus.RequestName(null, DBusProxyObj.REQUEST_NAME_NO_FLAGS);
        } catch (BusException ex) {
            thrown = true;
        } finally {
            assertTrue(thrown);
        }
    }

    public void testNameHasOwner() throws Exception {
        // This test's own name is never requested, so it must be unowned; the bus name is owned.
        assertFalse(dbus.NameHasOwner("org.alljoyn.bus.ifaces.DBusProxyObjTest"));
        assertTrue(dbus.NameHasOwner("org.alljoyn.Bus"));
    }

    /** Starting an unknown service must raise ErrorReplyBusException. */
    public void testStartServiceByName() throws Exception {
        boolean thrown = false;
        try {
            DBusProxyObj.StartServiceByNameResult res = dbus.StartServiceByName("UNKNOWN_SERVICE", 0);
            fail("StartServiceByName returned " + res.name() + " expected ErrorReplyBusException");
        } catch (ErrorReplyBusException ex) {
            thrown = true;
        } finally {
            assertTrue(thrown);
        }
    }

    /** Querying the owner of an unowned name must raise ErrorReplyBusException. */
    public void testGetNameOwner() throws Exception {
        boolean thrown = false;
        try {
            String owner = dbus.GetNameOwner("name");
            fail("Call to GetNameOwner returned " + owner + " expected ErrorReplyBusException.");
        } catch (ErrorReplyBusException ex) {
            thrown = true;
        } finally {
            assertTrue(thrown);
        }
    }

    public void testGetConnectionUnixUser() throws Exception {
        if ( System.getProperty("os.name").startsWith("Windows")){
            /*
             * In windows there is no UnixUser. Calling the DBus method
             * GetConnectionUnixUser will result in a ErrorReplyBusEception when
             * running in windows.
             */
            String name = "org.alljoyn.bus.ifaces.testGetConnectionUnixUser";
            DBusProxyObj.RequestNameResult res1 = dbus.RequestName(name, DBusProxyObj.REQUEST_NAME_NO_FLAGS);
            assertEquals(DBusProxyObj.RequestNameResult.PrimaryOwner, res1);

            boolean thrown = false;
            try {
                int uid = dbus.GetConnectionUnixUser(name);
                fail("Got ConnectionUnixUser " + uid + " Expected ErrorReplyBusExcpetion.");
            } catch (ErrorReplyBusException ex) {
                thrown=true;
            } finally {
                assertTrue(thrown);
            }

            DBusProxyObj.ReleaseNameResult res2 = dbus.ReleaseName(name);
            assertEquals(DBusProxyObj.ReleaseNameResult.Released, res2);
        } else {
            // On Unix-like systems the call must return a positive uid for an owned name.
            String name = "org.alljoyn.bus.ifaces.testGetConnectionUnixUser";
            DBusProxyObj.RequestNameResult res1 = dbus.RequestName(name, DBusProxyObj.REQUEST_NAME_NO_FLAGS);
            assertEquals(DBusProxyObj.RequestNameResult.PrimaryOwner, res1);

            int uid = dbus.GetConnectionUnixUser(name);
            assertTrue(uid > 0);

            DBusProxyObj.ReleaseNameResult res2 = dbus.ReleaseName(name);
            assertEquals(DBusProxyObj.ReleaseNameResult.Released, res2);
        }
    }

    /** Asking for the Unix user of an unowned name must raise ErrorReplyBusException. */
    public void testGetConnectionUnixUserNoName() throws Exception {
        boolean thrown = false;
        try {
            int uid = dbus.GetConnectionUnixUser("name");
            fail("Got ConnectionUnixUser " + uid + " Expected ErrorReplyBusExcpetion.");
        } catch (ErrorReplyBusException ex) {
            thrown = true;
        } finally {
            assertTrue(thrown);
        }
    }

    public void testGetConnectionUnixProcessID() throws Exception {
        if ( System.getProperty("os.name").startsWith("Windows")){
            /*
             * In windows there is no UnixUser. Calling the DBus method
             * GetConnectionUnixProcessID will result in a ErrorReplyBusEception
             * when running in windows.
             */
            String name = "org.alljoyn.bus.ifaces.testGetConnectionUnixProcessID";
            DBusProxyObj.RequestNameResult res1 = dbus.RequestName(name, DBusProxyObj.REQUEST_NAME_NO_FLAGS);
            assertEquals(DBusProxyObj.RequestNameResult.PrimaryOwner, res1);

            boolean thrown = false;
            try {
                int pid = dbus.GetConnectionUnixProcessID(name);
                fail("Got ConnectionUnixProcessID " + pid + " Expected ErrorReplyBusExcpetion.");
            } catch (ErrorReplyBusException ex) {
                thrown = true;
            } finally {
                assertTrue(thrown);
            }

            DBusProxyObj.ReleaseNameResult res2 = dbus.ReleaseName(name);
            assertEquals(DBusProxyObj.ReleaseNameResult.Released, res2);
        } else {
            // On Unix-like systems the call must return a positive pid for an owned name.
            String name = "org.alljoyn.bus.ifaces.testGetConnectionUnixProcessID";
            DBusProxyObj.RequestNameResult res1 = dbus.RequestName(name, DBusProxyObj.REQUEST_NAME_NO_FLAGS);
            assertEquals(DBusProxyObj.RequestNameResult.PrimaryOwner, res1);

            int pid = dbus.GetConnectionUnixProcessID(name);
            assertTrue(pid > 0);

            DBusProxyObj.ReleaseNameResult res2 = dbus.ReleaseName(name);
            assertEquals(DBusProxyObj.ReleaseNameResult.Released, res2);
        }
    }

    /** Asking for the process id of an unowned name must raise ErrorReplyBusException. */
    public void testGetConnectionUnixProcessIDNoName() throws Exception {
        boolean thrown = false;
        try {
            int pid = dbus.GetConnectionUnixProcessID("name");
            fail("Got ConnectionUnitProcessID " + pid + " expected ErrorReplyBusException");
        } catch (ErrorReplyBusException ex) {
            thrown = true;
        } finally {
            assertTrue(thrown);
        }
    }

    /** Adding and removing a match rule must both complete without throwing. */
    public void testAddRemoveMatch() throws Exception {
        dbus.AddMatch("type='signal'");
        dbus.RemoveMatch("type='signal'");
    }

    /*
     * Ignored because DBus daemon returns both a METHOD_RET and ERROR
     * message for this. The ERROR message is discarded due to the
     * METHOD_RET (will see ER_ALLJOYN_UNMATCHED_REPLY_SERIAL in output),
     * so no exception.
     *
     * AllJoyn does not return an error.
*/ /* public void testRemoveUnknownMatch() throws Exception { boolean thrown = false; try { dbus.RemoveMatch("type='signal'"); } catch (BusException ex) { thrown = true; } finally { assertTrue(thrown); } } */ public void testGetId() throws Exception { String id = dbus.GetId(); // since the id is always a random GUID string of type // fe438dc401d2834ecd4f65cf7857196e we have no way of knowing what that // string will be until runtime. We will check that the string is not // empty and that it contains more than 4 letters. I don't know if the // length is always the same for that reason I am checking the length is // at least 4. assertFalse(id.equals("")); assertTrue(id.length() > 4); } private String newOwner; private String nameAcquired; @BusSignalHandler(iface="org.freedesktop.DBus", signal="NameOwnerChanged") public void nameOwnerChanged(String name, String oldOwner, String newOwner) throws BusException { this.newOwner = newOwner; synchronized (this) { notify(); } } @BusSignalHandler(iface="org.freedesktop.DBus", signal="NameLost") public void nameLost(String name) throws BusException { if (nameAcquired.equals(name)) { nameAcquired = ""; } synchronized (this) { notify(); } } @BusSignalHandler(iface="org.freedesktop.DBus", signal="NameAcquired") public void nameAcquired(String name) throws BusException { nameAcquired = name; synchronized (this) { notify(); } } public void testNameSignals() throws Exception { Status status = bus.registerSignalHandlers(this); if (Status.OK != status) { throw new BusException("Cannot register signal handler"); } String name = "org.alljoyn.bus.ifaces.testNameSignals"; newOwner = ""; nameAcquired = ""; int flags = DBusProxyObj.REQUEST_NAME_ALLOW_REPLACEMENT; DBusProxyObj.RequestNameResult res1 = dbus.RequestName(name, flags); assertEquals(DBusProxyObj.RequestNameResult.PrimaryOwner, res1); synchronized (this) { long start = System.currentTimeMillis(); while (newOwner.equals("") || !nameAcquired.equals(name)) { wait(1000); 
assertTrue("timed out waiting for name signals", (System.currentTimeMillis() - start) < 1000); } } DBusProxyObj.ReleaseNameResult res2 = dbus.ReleaseName(name); assertEquals(DBusProxyObj.ReleaseNameResult.Released, res2); synchronized (this) { long start = System.currentTimeMillis(); while (!newOwner.equals("") || !nameAcquired.equals("")) { wait(1000); assertTrue("timed out waiting for name signals", (System.currentTimeMillis() - start) < 1000); } } bus.unregisterSignalHandlers(this); } public void testListQueuedOwners() throws Exception { String name = "org.alljoyn.bus.ifaces.testListQueuedOwners"; String[] queuedNames; Status status = Status.OK; BusAttachment bus2; bus2 = new BusAttachment(getClass().getName()); status = bus2.connect(); assertEquals(Status.OK, status); BusAttachment bus3; bus3 = new BusAttachment(getClass().getName()); status = bus3.connect(); assertEquals(Status.OK, status); BusAttachment bus4; bus4 = new BusAttachment(getClass().getName()); status = bus4.connect(); assertEquals(Status.OK, status); /* * Test that no errors are returned when calling ListQueuedOwners when * there are no name owners */ queuedNames = dbus.ListQueuedOwners(name); assertEquals(queuedNames.length, 0); /* * Test that no names are returned when only the primary owner has the * name. 
*/ int flags = BusAttachment.ALLJOYN_NAME_FLAG_ALLOW_REPLACEMENT; status = bus.requestName(name, flags); assertEquals(Status.OK, status); queuedNames = dbus.ListQueuedOwners(name); assertEquals(queuedNames.length, 0); /* * Test that names that already have a primary owner are being queued */ flags = 0; status = bus2.requestName(name, flags); assertEquals(Status.DBUS_REQUEST_NAME_REPLY_IN_QUEUE, status); flags = 0; status = bus3.requestName(name, flags); assertEquals(Status.DBUS_REQUEST_NAME_REPLY_IN_QUEUE, status); queuedNames = dbus.ListQueuedOwners(name); assertEquals(queuedNames.length, 2); assertEquals(queuedNames[0], bus2.getUniqueName()); assertEquals(queuedNames[1], bus3.getUniqueName()); /* * Test that the ALLJOYN_NAME_FLAG_ALLOW_REPLACEMENT affecting the queue * as it should */ flags = BusAttachment.ALLJOYN_REQUESTNAME_FLAG_REPLACE_EXISTING; status = bus4.requestName(name, flags); assertEquals(Status.OK, status); queuedNames = dbus.ListQueuedOwners(name); assertEquals(queuedNames.length, 3); assertEquals(queuedNames[0], bus.getUniqueName()); assertEquals(queuedNames[1], bus2.getUniqueName()); assertEquals(queuedNames[2], bus3.getUniqueName()); //cleanup bus2.releaseName(name); bus3.releaseName(name); bus4.releaseName(name); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.utils;

import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.security.SecureRandom;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import com.google.common.primitives.Ints;

import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.locator.InetAddressAndPort;

/**
 * Generator for version 1 (time-based) UUIDs.
 * The goods are here: www.ietf.org/rfc/rfc4122.txt.
 */
public class UUIDGen
{
    // A grand day!  millis at 00:00:00.000 15 Oct 1582 (the UUID epoch).
    private static final long START_EPOCH = -12219292800000L;
    // Fixed least-significant-bits half (variant + clock sequence + node) used
    // for all UUIDs generated by this class; computed once at class load.
    private static final long clockSeqAndNode = makeClockSeqAndNode();

    public static final int UUID_LEN = 16;

    /*
     * The min and max possible lsb for a UUID.
     * Note that this is not 0 and all 1's because Cassandra TimeUUIDType
     * compares the lsb parts as a signed byte array comparison. So the min
     * value is 8 times -128 and the max is 8 times +127.
     *
     * Note that we ignore the uuid variant (namely, MIN_CLOCK_SEQ_AND_NODE
     * have variant 2 as it should, but MAX_CLOCK_SEQ_AND_NODE have variant 0).
     * I don't think that has any practical consequence and is more robust in
     * case someone provides a UUID with a broken variant.
     */
    private static final long MIN_CLOCK_SEQ_AND_NODE = 0x8080808080808080L;
    private static final long MAX_CLOCK_SEQ_AND_NODE = 0x7f7f7f7f7f7f7f7fL;

    private static final SecureRandom secureRandom = new SecureRandom();

    // placement of this singleton is important. It needs to be instantiated *AFTER* the other statics.
    private static final UUIDGen instance = new UUIDGen();

    // Last 100-ns timestamp handed out; advanced via CAS/increment in createTimeSafe().
    private AtomicLong lastNanos = new AtomicLong();

    private UUIDGen()
    {
        // make sure someone didn't whack the clockSeqAndNode by changing the order of instantiation.
        if (clockSeqAndNode == 0)
            throw new RuntimeException("singleton instantiation is misplaced.");
    }

    /**
     * Creates a type 1 UUID (time-based UUID).
     *
     * @return a UUID instance
     */
    public static UUID getTimeUUID()
    {
        return new UUID(instance.createTimeSafe(), clockSeqAndNode);
    }

    /**
     * Creates a type 1 UUID (time-based UUID) with the timestamp of @param when, in milliseconds.
     *
     * NOTE(review): unlike getTimeUUID(), repeated calls with the same 'when'
     * produce the same UUID — callers must ensure uniqueness themselves.
     *
     * @return a UUID instance
     */
    public static UUID getTimeUUID(long when)
    {
        return new UUID(createTime(fromUnixTimestamp(when)), clockSeqAndNode);
    }

    /**
     * Returns a version 1 UUID using the provided timestamp and the local clock and sequence.
     * <p>
     * Note that this method is generally only safe to use if you can guarantee that the provided
     * parameter is unique across calls (otherwise the returned UUID won't be unique across calls).
     *
     * @param whenInMicros a unix time in microseconds.
     * @return a new UUID {@code id} such that {@code microsTimestamp(id) == whenInMicros}. Please note that
     * multiple calls to this method with the same value of {@code whenInMicros} will return the <b>same</b>
     * UUID.
     */
    public static UUID getTimeUUIDFromMicros(long whenInMicros)
    {
        long whenInMillis = whenInMicros / 1000;
        long nanos = (whenInMicros - (whenInMillis * 1000)) * 10;
        return getTimeUUID(whenInMillis, nanos);
    }

    /**
     * Similar to {@link #getTimeUUIDFromMicros}, but randomize (using SecureRandom) the clock and sequence.
     * <p>
     * If you can guarantee that the {@code whenInMicros} argument is unique (for this JVM instance) for
     * every call, then you should prefer {@link #getTimeUUIDFromMicros} which is faster. If you can't
     * guarantee this however, this method will ensure the returned UUID are still unique (across calls)
     * through randomization.
     *
     * @param whenInMicros a unix time in microseconds.
     * @return a new UUID {@code id} such that {@code microsTimestamp(id) == whenInMicros}. The UUID returned
     * by different calls will be unique even if {@code whenInMicros} is not.
     */
    public static UUID getRandomTimeUUIDFromMicros(long whenInMicros)
    {
        long whenInMillis = whenInMicros / 1000;
        long nanos = (whenInMicros - (whenInMillis * 1000)) * 10;
        // NOTE(review): lsb here is a raw random long, so the RFC 4122 variant
        // bits are not set — intentional per the comment on MIN/MAX above.
        return new UUID(createTime(fromUnixTimestamp(whenInMillis, nanos)), secureRandom.nextLong());
    }

    /**
     * Builds a type 1 UUID from a millisecond timestamp plus a 100-ns offset,
     * using the fixed local clock sequence and node.
     */
    public static UUID getTimeUUID(long when, long nanos)
    {
        return new UUID(createTime(fromUnixTimestamp(when, nanos)), clockSeqAndNode);
    }

    /** Test-only variant that lets the caller supply the lsb half explicitly. */
    @VisibleForTesting
    public static UUID getTimeUUID(long when, long nanos, long clockSeqAndNode)
    {
        return new UUID(createTime(fromUnixTimestamp(when, nanos)), clockSeqAndNode);
    }

    /** creates a type 1 uuid from raw bytes. */
    public static UUID getUUID(ByteBuffer raw)
    {
        // Absolute reads: the buffer's position is not advanced.
        return new UUID(raw.getLong(raw.position()), raw.getLong(raw.position() + 8));
    }

    /** Serializes a UUID into a fresh 16-byte buffer, flipped ready for reading. */
    public static ByteBuffer toByteBuffer(UUID uuid)
    {
        ByteBuffer buffer = ByteBuffer.allocate(UUID_LEN);
        buffer.putLong(uuid.getMostSignificantBits());
        buffer.putLong(uuid.getLeastSignificantBits());
        buffer.flip();
        return buffer;
    }

    /** decomposes a uuid into raw bytes. */
    public static byte[] decompose(UUID uuid)
    {
        long most = uuid.getMostSignificantBits();
        long least = uuid.getLeastSignificantBits();
        byte[] b = new byte[16];
        for (int i = 0; i < 8; i++)
        {
            // Big-endian: msb bytes first, then lsb bytes.
            b[i] = (byte)(most >>> ((7-i) * 8));
            b[8+i] = (byte)(least >>> ((7-i) * 8));
        }
        return b;
    }

    /**
     * Returns a 16 byte representation of a type 1 UUID (a time-based UUID),
     * based on the current system time.
     *
     * @return a type 1 UUID represented as a byte[]
     */
    public static byte[] getTimeUUIDBytes()
    {
        return createTimeUUIDBytes(instance.createTimeSafe());
    }

    /**
     * Returns the smaller possible type 1 UUID having the provided timestamp.
     *
     * <b>Warning:</b> this method should only be used for querying as this
     * doesn't at all guarantee the uniqueness of the resulting UUID.
     */
    public static UUID minTimeUUID(long timestamp)
    {
        return new UUID(createTime(fromUnixTimestamp(timestamp)), MIN_CLOCK_SEQ_AND_NODE);
    }

    /**
     * Returns the biggest possible type 1 UUID having the provided timestamp.
     *
     * <b>Warning:</b> this method should only be used for querying as this
     * doesn't at all guarantee the uniqueness of the resulting UUID.
     */
    public static UUID maxTimeUUID(long timestamp)
    {
        // unix timestamp are milliseconds precision, uuid timestamp are 100's
        // nanoseconds precision. If we ask for the biggest uuid have unix
        // timestamp 1ms, then we should not extend 100's nanoseconds
        // precision by taking 10000, but rather 19999.
        long uuidTstamp = fromUnixTimestamp(timestamp + 1) - 1;
        return new UUID(createTime(uuidTstamp), MAX_CLOCK_SEQ_AND_NODE);
    }

    /**
     * @param uuid
     * @return milliseconds since Unix epoch
     */
    public static long unixTimestamp(UUID uuid)
    {
        return (uuid.timestamp() / 10000) + START_EPOCH;
    }

    /**
     * @param uuid
     * @return seconds since Unix epoch
     */
    public static int unixTimestampInSec(UUID uuid)
    {
        // checkedCast throws rather than silently truncating past year ~2038.
        return Ints.checkedCast(TimeUnit.MILLISECONDS.toSeconds(unixTimestamp(uuid)));
    }

    /**
     * @param uuid
     * @return microseconds since Unix epoch
     */
    public static long microsTimestamp(UUID uuid)
    {
        return (uuid.timestamp() / 10) + START_EPOCH * 1000;
    }

    /**
     * @param timestamp milliseconds since Unix epoch
     * @return the corresponding UUID timestamp in 100-ns units since the UUID epoch
     */
    private static long fromUnixTimestamp(long timestamp)
    {
        return fromUnixTimestamp(timestamp, 0L);
    }

    // nanos is an additional offset in 100-ns units within the millisecond.
    private static long fromUnixTimestamp(long timestamp, long nanos)
    {
        return ((timestamp - START_EPOCH) * 10000) + nanos;
    }

    /**
     * Converts a 100-nanoseconds precision timestamp into the 16 byte representation
     * of a type 1 UUID (a time-based UUID).
     *
     * To specify a 100-nanoseconds precision timestamp, one should provide a milliseconds timestamp and
     * a number {@code 0 <= n < 10000} such that n*100 is the number of nanoseconds within that millisecond.
     *
     * <p><i><b>Warning:</b> This method is not guaranteed to return unique UUIDs; Multiple
     * invocations using identical timestamps will result in identical UUIDs.</i></p>
     *
     * @return a type 1 UUID represented as a byte[]
     */
    public static byte[] getTimeUUIDBytes(long timeMillis, int nanos)
    {
        if (nanos >= 10000)
            throw new IllegalArgumentException();
        return createTimeUUIDBytes(instance.createTimeUnsafe(timeMillis, nanos));
    }

    // Serializes msb plus the fixed clockSeqAndNode into a big-endian byte[16].
    private static byte[] createTimeUUIDBytes(long msb)
    {
        long lsb = clockSeqAndNode;
        byte[] uuidBytes = new byte[16];

        for (int i = 0; i < 8; i++)
            uuidBytes[i] = (byte) (msb >>> 8 * (7 - i));

        for (int i = 8; i < 16; i++)
            uuidBytes[i] = (byte) (lsb >>> 8 * (7 - i));

        return uuidBytes;
    }

    /**
     * Returns a milliseconds-since-epoch value for a type-1 UUID.
     *
     * @param uuid a type-1 (time-based) UUID
     * @return the number of milliseconds since the unix epoch
     * @throws IllegalArgumentException if the UUID is not version 1
     */
    public static long getAdjustedTimestamp(UUID uuid)
    {
        if (uuid.version() != 1)
            throw new IllegalArgumentException("incompatible with uuid version: "+uuid.version());
        return (uuid.timestamp() / 10000) + START_EPOCH;
    }

    // Builds the fixed lsb half: variant 2 | random 14-bit clock sequence | node.
    private static long makeClockSeqAndNode()
    {
        long clock = new SecureRandom().nextLong();

        long lsb = 0;
        lsb |= 0x8000000000000000L;                 // variant (2 bits)
        lsb |= (clock & 0x0000000000003FFFL) << 48; // clock sequence (14 bits)
        lsb |= makeNode();                          // 6 bytes
        return lsb;
    }

    // needs to return two different values for the same when.
    // we can generate at most 10k UUIDs per ms.
    private long createTimeSafe()
    {
        long newLastNanos;
        while (true)
        {
            //Generate a candidate value for new lastNanos
            newLastNanos = (System.currentTimeMillis() - START_EPOCH) * 10000;
            long originalLastNanos = lastNanos.get();
            if (newLastNanos > originalLastNanos)
            {
                //Slow path once per millisecond do a CAS
                if (lastNanos.compareAndSet(originalLastNanos, newLastNanos))
                {
                    break;
                }
            }
            else
            {
                //Fast path do an atomic increment
                //Or when falling behind this will move time forward past the clock if necessary
                newLastNanos = lastNanos.incrementAndGet();
                break;
            }
        }
        return createTime(newLastNanos);
    }

    // "Unsafe" because it does not guard against duplicate timestamps.
    private long createTimeUnsafe(long when, int nanos)
    {
        long nanosSince = ((when - START_EPOCH) * 10000) + nanos;
        return createTime(nanosSince);
    }

    // Rearranges a 60-bit 100-ns timestamp into the version-1 msb layout
    // (time_low | time_mid | time_hi) and stamps the version nibble.
    private static long createTime(long nanosSince)
    {
        long msb = 0L;
        msb |= (0x00000000ffffffffL & nanosSince) << 32;
        msb |= (0x0000ffff00000000L & nanosSince) >>> 16;
        msb |= (0xffff000000000000L & nanosSince) >>> 48;
        msb |= 0x0000000000001000L; // sets the version to 1.
        return msb;
    }

    private static long makeNode()
    {
       /*
        * We don't have access to the MAC address but need to generate a node part
        * that identify this host as uniquely as possible.
        * The spec says that one option is to take as many source that identify
        * this node as possible and hash them together. That's what we do here by
        * gathering all the ip of this host.
        * Note that FBUtilities.getJustBroadcastAddress() should be enough to uniquely
        * identify the node *in the cluster* but it triggers DatabaseDescriptor
        * instanciation and the UUID generator is used in Stress for instance,
        * where we don't want to require the yaml.
        */
        Collection<InetAddressAndPort> localAddresses = getAllLocalAddresses();
        if (localAddresses.isEmpty())
            throw new RuntimeException("Cannot generate the node component of the UUID because cannot retrieve any IP addresses.");

        // ideally, we'd use the MAC address, but java doesn't expose that.
        byte[] hash = hash(localAddresses);
        long node = 0;
        for (int i = 0; i < Math.min(6, hash.length); i++)
            node |= (0x00000000000000ff & (long)hash[i]) << (5-i)*8;
        assert (0xff00000000000000L & node) == 0;

        // Since we don't use the mac address, the spec says that multicast
        // bit (least significant bit of the first octet of the node ID) must be 1.
        return node | 0x0000010000000000L;
    }

    // MD5 over all local addresses plus PID and class-loader identity, so two
    // JVMs on the same host get different node components.
    private static byte[] hash(Collection<InetAddressAndPort> data)
    {
        // Identify the host.
        Hasher hasher = Hashing.md5().newHasher();
        for(InetAddressAndPort addr : data)
        {
            hasher.putBytes(addr.addressBytes);
            hasher.putInt(addr.port);
        }

        // Identify the process on the load: we use both the PID and class loader hash.
        long pid = NativeLibrary.getProcessID();
        if (pid < 0)
            pid = new Random(System.currentTimeMillis()).nextLong();
        updateWithLong(hasher, pid);

        ClassLoader loader = UUIDGen.class.getClassLoader();
        int loaderId = loader != null ? System.identityHashCode(loader) : 0;
        updateWithInt(hasher, loaderId);

        return hasher.hash().asBytes();
    }

    // Feeds an int into the hasher big-endian, byte by byte.
    private static void updateWithInt(Hasher hasher, int val)
    {
        hasher.putByte((byte) ((val >>> 24) & 0xFF));
        hasher.putByte((byte) ((val >>> 16) & 0xFF));
        hasher.putByte((byte) ((val >>> 8) & 0xFF));
        hasher.putByte((byte) ((val >>> 0) & 0xFF));
    }

    // Feeds a long into the hasher big-endian, byte by byte.
    public static void updateWithLong(Hasher hasher, long val)
    {
        hasher.putByte((byte) ((val >>> 56) & 0xFF));
        hasher.putByte((byte) ((val >>> 48) & 0xFF));
        hasher.putByte((byte) ((val >>> 40) & 0xFF));
        hasher.putByte((byte) ((val >>> 32) & 0xFF));
        hasher.putByte((byte) ((val >>> 24) & 0xFF));
        hasher.putByte((byte) ((val >>> 16) & 0xFF));
        hasher.putByte((byte) ((val >>> 8) & 0xFF));
        hasher.putByte((byte) ((val >>> 0) & 0xFF));
    }

    /**
     * Helper function used exclusively by UUIDGen to create
     **/
    public static Collection<InetAddressAndPort> getAllLocalAddresses()
    {
        Set<InetAddressAndPort> localAddresses = new HashSet<>();
        try
        {
            Enumeration<NetworkInterface> nets = NetworkInterface.getNetworkInterfaces();
            if (nets != null)
            {
                while (nets.hasMoreElements())
                {
                    // Port 0 placeholder: only the address part matters for hashing.
                    Function<InetAddress, InetAddressAndPort> converter =
                        address -> InetAddressAndPort.getByAddressOverrideDefaults(address, 0);
                    List<InetAddressAndPort> addresses =
                        Collections.list(nets.nextElement().getInetAddresses()).stream().map(converter).collect(Collectors.toList());
                    localAddresses.addAll(addresses);
                }
            }
        }
        catch (SocketException e)
        {
            throw new AssertionError(e);
        }
        // Only consult configured broadcast/local addresses once the daemon has
        // initialized DatabaseDescriptor (avoids forcing yaml loading in tools).
        if (DatabaseDescriptor.isDaemonInitialized())
        {
            localAddresses.add(FBUtilities.getBroadcastAddressAndPort());
            localAddresses.add(FBUtilities.getBroadcastNativeAddressAndPort());
            localAddresses.add(FBUtilities.getLocalAddressAndPort());
        }
        return localAddresses;
    }
}

// for the curious, here is how I generated START_EPOCH
//        Calendar c = Calendar.getInstance(TimeZone.getTimeZone("GMT-0"));
//        c.set(Calendar.YEAR, 1582);
//        c.set(Calendar.MONTH, Calendar.OCTOBER);
//        c.set(Calendar.DAY_OF_MONTH, 15);
//        c.set(Calendar.HOUR_OF_DAY, 0);
//        c.set(Calendar.MINUTE, 0);
//        c.set(Calendar.SECOND, 0);
//        c.set(Calendar.MILLISECOND, 0);
//        long START_EPOCH = c.getTimeInMillis();
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android.dexer;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static java.nio.charset.StandardCharsets.UTF_8;

import com.android.dex.Dex;
import com.android.dex.DexFormat;
import com.android.dx.command.DxConsole;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.io.ByteStreams;
import com.google.devtools.build.android.Converters.ExistingPathConverter;
import com.google.devtools.build.android.Converters.PathConverter;
import com.google.devtools.common.options.EnumConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParser;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import java.util.zip.ZipOutputStream;

/**
 * Tool used by Bazel as a replacement for Android's {@code dx} tool that assembles a single or,
 * if allowed and necessary, multiple {@code .dex} files from a given archive of {@code .dex} and
 * {@code .class} files.  The tool merges the {@code .dex} files it encounters into a single file
 * and additionally encodes any {@code .class} files it encounters.  If multidex is allowed then
 * the tool will generate multiple files subject to the {@code .dex} file format's limits on the
 * number of methods and fields.
 */
class DexFileMerger {

  /**
   * Commandline options.
   */
  public static class Options extends OptionsBase {
    // Archive of .class/.dex files to merge.
    @Option(name = "input",
        defaultValue = "null",
        category = "input",
        converter = ExistingPathConverter.class,
        abbrev = 'i',
        help = "Input file to read to aggregate.")
    public Path inputArchive;

    // Destination archive for the merged classes*.dex file(s).
    @Option(name = "output",
        defaultValue = "classes.dex.jar",
        category = "output",
        converter = PathConverter.class,
        abbrev = 'o',
        help = "Output archive to write.")
    public Path outputArchive;

    @Option(name = "multidex",
        defaultValue = "off",
        category = "multidex",
        converter = MultidexStrategyConverter.class,
        help = "Allow more than one .dex file in the output.")
    public MultidexStrategy multidexMode;

    // Implies --multidex=minimal (see implicitRequirements).
    @Option(name = "main-dex-list",
        defaultValue = "null",
        category = "multidex",
        converter = ExistingPathConverter.class,
        implicitRequirements = "--multidex=minimal",
        help = "List of classes to be placed into \"main\" classes.dex file.")
    public Path mainDexListFile;

    @Option(name = "minimal-main-dex",
        defaultValue = "false",
        category = "multidex",
        implicitRequirements = "--multidex=minimal",
        help = "If true, *only* classes listed in --main_dex_list file are placed into \"main\" "
            + "classes.dex file.")
    public boolean minimalMainDex;

    @Option(name = "verbose",
        defaultValue = "false",
        category = "misc",
        help = "If true, print information about the merged files and resulting files to stdout.")
    public boolean verbose;

    @Option(name = "max-bytes-wasted-per-file",
        defaultValue = "0",
        category = "misc",
        help = "Limit on conservatively allocated but unused bytes per dex file, which can enable "
            + "faster merging.")
    public int wasteThresholdPerDex;

    // Undocumented dx option for testing multidex logic
    @Option(name = "set-max-idx-number",
        defaultValue = "" + (DexFormat.MAX_MEMBER_IDX + 1),
        category = "undocumented",
        help = "Limit on fields and methods in a single dex file.")
    public int maxNumberOfIdxPerDex;
  }

  /** Converts the --multidex flag string into a {@link MultidexStrategy} value. */
  public static class MultidexStrategyConverter extends EnumConverter<MultidexStrategy> {
    public MultidexStrategyConverter() {
      super(MultidexStrategy.class, "multidex strategy");
    }
  }

  /** CLI entry point: parses flags, then delegates to {@link #buildMergedDexFiles}. */
  public static void main(String[] args) throws Exception {
    OptionsParser optionsParser =
        OptionsParser.newOptionsParser(Options.class, Dexing.DexingOptions.class);
    optionsParser.parseAndExitUponError(args);

    buildMergedDexFiles(
        optionsParser.getOptions(Options.class),
        optionsParser.getOptions(Dexing.DexingOptions.class));
  }

  /**
   * Merges the input archive's .dex/.class entries into the output archive.
   * When --main-dex-list is given, entries are processed in two passes so the
   * listed classes land in the first (main) classes.dex file.
   */
  @VisibleForTesting
  static void buildMergedDexFiles(Options options, Dexing.DexingOptions dexingOptions)
      throws IOException {
    ImmutableSet<String> classesInMainDex = options.mainDexListFile != null
        ? ImmutableSet.copyOf(Files.readAllLines(options.mainDexListFile, UTF_8))
        : null;
    PrintStream originalStdOut = System.out;
    try (ZipFile zip = new ZipFile(options.inputArchive.toFile());
        DexFileAggregator out = createDexFileAggregator(options)) {
      if (!options.verbose) {
        // com.android.dx.merge.DexMerger prints tons of debug information to System.out that we
        // silence here unless it was explicitly requested.
        System.setOut(DxConsole.noop);
      }

      MergingDexer dexer =
          new MergingDexer(
              new Dexing(dexingOptions),
              out,
              options.multidexMode.isMultidexAllowed(),
              options.maxNumberOfIdxPerDex);
      if (classesInMainDex == null) {
        processClassAndDexFiles(zip, out, dexer, Predicates.<ZipEntry>alwaysTrue());
      } else {
        // Options parser should be making sure of this but let's be extra-safe as other modes
        // might result in classes from main dex list ending up in files other than classes.dex
        checkArgument(options.multidexMode == MultidexStrategy.MINIMAL, "Only minimal multidex "
            + "mode is supported with --main_dex_list, but mode is: %s", options.multidexMode);
        // To honor --main_dex_list make two passes:
        // 1. process only the classes listed in the given file
        // 2. process the remaining files
        Predicate<ZipEntry> classFileFilter = ZipEntryPredicates.classFileFilter(classesInMainDex);
        processClassAndDexFiles(zip, out, dexer, classFileFilter);
        dexer.flush(); // Add any main dex list classes we had to convert on-the-fly
        // Fail if main_dex_list is too big, following dx's example
        checkState(out.getDexFilesWritten() == 0, "Too many classes listed in main dex list file "
            + "%s, main dex capacity exceeded", options.mainDexListFile);
        if (options.minimalMainDex) {
          out.flush(); // Start new .dex file if requested
        }
        processClassAndDexFiles(zip, out, dexer, Predicates.not(classFileFilter));
      }
      // Add any classes to output archive that we had to convert on-the-fly
      dexer.finish();
    } finally {
      System.setOut(originalStdOut);
    }

    // Use input's timestamp for output file so the output file is stable.
    Files.setLastModifiedTime(options.outputArchive,
        Files.getLastModifiedTime(options.inputArchive));
  }

  /**
   * Feeds the archive's .class and .dex entries (matching extraFilter, in dx's
   * sort order) to the dexer/aggregator; other entry types are unexpected.
   */
  private static void processClassAndDexFiles(
      ZipFile zip, DexFileAggregator out, MergingDexer dexer, Predicate<ZipEntry> extraFilter)
      throws IOException {
    @SuppressWarnings("unchecked") // Predicates.and uses varargs parameter with generics
    ArrayList<? extends ZipEntry> filesToProcess =
        Lists.newArrayList(
            Iterators.filter(
                Iterators.forEnumeration(zip.entries()),
                Predicates.and(
                    Predicates.not(ZipEntryPredicates.isDirectory()),
                    ZipEntryPredicates.suffixes(".class", ".dex"),
                    extraFilter)));
    Collections.sort(filesToProcess, ZipEntryComparator.LIKE_DX);

    for (ZipEntry entry : filesToProcess) {
      String filename = entry.getName();
      try (InputStream content = zip.getInputStream(entry)) {
        if (filename.endsWith(".dex")) {
          // We don't want to use the Dex(InputStream) constructor because it closes the stream,
          // which will break the for loop, and it has its own bespoke way of reading the file into
          // a byte buffer before effectively calling Dex(byte[]) anyway.
          out.add(new Dex(ByteStreams.toByteArray(content)));
        } else if (filename.endsWith(".class")) {
          dexer.add(Dexing.parseClassFile(ByteStreams.toByteArray(content), filename));
        } else {
          throw new IllegalStateException("Shouldn't get here: " + filename);
        }
      }
    }
  }

  /** Builds the aggregator that writes merged dex files into the output zip. */
  private static DexFileAggregator createDexFileAggregator(Options options) throws IOException {
    return new DexFileAggregator(
        new DexFileArchive(
            new ZipOutputStream(
                new BufferedOutputStream(Files.newOutputStream(options.outputArchive)))),
        options.multidexMode,
        options.maxNumberOfIdxPerDex,
        options.wasteThresholdPerDex);
  }

  /**
   * Sorts java class names such that outer classes preceed their inner
   * classes and "package-info" preceeds all other classes in its package.
   *
   * @param a {@code non-null;} first class name
   * @param b {@code non-null;} second class name
   * @return {@code compareTo()}-style result
   */
  // Copied from com.android.dx.cf.direct.ClassPathOpener
  @VisibleForTesting
  static int compareClassNames(String a, String b) {
    // Ensure inner classes sort second
    a = a.replace('$', '0');
    b = b.replace('$', '0');

    /*
     * Assuming "package-info" only occurs at the end, ensures package-info
     * sorts first.
     */
    a = a.replace("package-info", "");
    b = b.replace("package-info", "");

    return a.compareTo(b);
  }

  /**
   * Comparator that orders {@link ZipEntry ZipEntries} {@link #LIKE_DX like Android's dx tool}.
   */
  private static enum ZipEntryComparator implements Comparator<ZipEntry> {
    /**
     * Comparator to order more or less order alphabetically by file name.  See
     * {@link DexFileMerger#compareClassNames} for the exact name comparison.
     */
    LIKE_DX;

    @Override
    // Copied from com.android.dx.cf.direct.ClassPathOpener
    public int compare (ZipEntry a, ZipEntry b) {
      return compareClassNames(a.getName(), b.getName());
    }
  }

  // Static utility class; not instantiable.
  private DexFileMerger() {
  }
}
package softwareheadconsulting.krankiesscheduleapp;

import android.annotation.TargetApi;
import android.app.LoaderManager.LoaderCallbacks;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.Loader;
import android.content.pm.PackageManager;
import android.database.Cursor;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.text.TextUtils;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.inputmethod.EditorInfo;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;

import static android.Manifest.permission.INTERNET;

/**
 * A login screen that offers login via username/password.
 *
 * <p>Authentication is performed against a remote PHP endpoint via HTTP POST
 * (see {@link UserLoginTask}). On success the user is forwarded to
 * {@code MainActivity} with the username passed as {@code "EXTRA_USERNAME"}.
 */
public class LoginActivity extends AppCompatActivity implements LoaderCallbacks<Cursor> {

    /** Id to identify the INTERNET permission request. */
    private static final int REQUEST_INTERNET = 0;

    /**
     * A dummy authentication store containing known user names and passwords.
     * TODO: remove after connecting to a real authentication system.
     */
    private static final String[] DUMMY_CREDENTIALS = new String[]{
            "foo@example.com:hello", "bar@example.com:world"
    };

    /** Keep track of the login task to ensure we can cancel it if requested. */
    private UserLoginTask mAuthTask = null;

    // UI references.
    private AutoCompleteTextView mUsernameView;
    private EditText mPasswordView;
    private View mProgressView;
    private View mLoginFormView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login);

        // Set up the login form.
        mUsernameView = (AutoCompleteTextView) findViewById(R.id.txtUsername);
        mayRequestInternet();

        mPasswordView = (EditText) findViewById(R.id.txtPassword);
        // Submit the form when the IME "done"/login action is pressed in the password field.
        mPasswordView.setOnEditorActionListener(new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView textView, int id, KeyEvent keyEvent) {
                if (id == R.id.login || id == EditorInfo.IME_NULL) {
                    attemptLogin();
                    return true;
                }
                return false;
            }
        });

        Button mUsernameSignInButton = (Button) findViewById(R.id.btnSignIn);
        mUsernameSignInButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
                attemptLogin();
            }
        });

        Button mForgotPasswordButton = (Button) findViewById(R.id.btnForgotPassword);
        mForgotPasswordButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent forgotPassword1 =
                        new Intent(LoginActivity.this, ForgotPasswordActivity1.class);
                startActivity(forgotPassword1);
            }
        });

        //mLoginFormView = findViewById(R.id.login_form);
        mProgressView = findViewById(R.id.login_progress);
    }

    /**
     * Confirms with the user before leaving the login screen via the back button.
     */
    @Override
    public void onBackPressed() {
        // Deliberately NOT calling super.onBackPressed(): we only finish if confirmed.
        AlertDialog.Builder backAlert = new AlertDialog.Builder(LoginActivity.this);
        backAlert.setMessage("Are you sure you want to leave?")
                .setPositiveButton("Yes", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        finish();
                    }
                })
                .setNegativeButton("No", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        //nothing
                    }
                });
        backAlert.show();
    }

    private void populateAutoComplete() {
        if (!mayRequestInternet()) {
            return;
        }
        getLoaderManager().initLoader(0, null, this);
    }

    /**
     * Checks (and if necessary requests) the INTERNET permission.
     *
     * @return true if the permission is already granted, false if a request
     *         was issued (the caller will be re-invoked from
     *         {@link #onRequestPermissionsResult} when it is granted).
     */
    private boolean mayRequestInternet() {
        // BUG FIX: removed a dead `Toast.makeText(...)` debug call that never
        // invoked .show() and therefore did nothing.
        // BUG FIX: checkSelfPermission/requestPermissions only exist on API 23+;
        // on older platforms the manifest declaration alone grants the permission.
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
            return true;
        }
        if (checkSelfPermission(INTERNET) == PackageManager.PERMISSION_GRANTED) {
            return true;
        }
        if (shouldShowRequestPermissionRationale(INTERNET)) {
            Snackbar.make(mUsernameView, R.string.permission_rationale,
                    Snackbar.LENGTH_INDEFINITE)
                    .setAction(android.R.string.ok, new View.OnClickListener() {
                        @Override
                        @TargetApi(Build.VERSION_CODES.M)
                        public void onClick(View v) {
                            requestPermissions(new String[]{INTERNET}, REQUEST_INTERNET);
                        }
                    })
                    .show(); // BUG FIX: the Snackbar was built but never shown.
        } else {
            requestPermissions(new String[]{INTERNET}, REQUEST_INTERNET);
        }
        return false;
    }

    /**
     * Callback received when a permissions request has been completed.
     */
    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        if (requestCode == REQUEST_INTERNET) {
            if (grantResults.length == 1
                    && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                populateAutoComplete();
            }
        }
    }

    /**
     * Attempts to sign in the account specified by the login form.
     * If there are form errors (missing fields), the errors are presented
     * and no actual login attempt is made.
     */
    private void attemptLogin() {
        if (mAuthTask != null) {
            return; // A login attempt is already in flight.
        }

        // Reset errors.
        mUsernameView.setError(null);
        mPasswordView.setError(null);

        // Store values at the time of the login attempt.
        String user = mUsernameView.getText().toString();
        String password = mPasswordView.getText().toString();

        boolean cancel = false;
        View focusView = null;

        // Only emptiness is validated here; isPasswordValid() is intentionally
        // not applied so existing short passwords keep working. TODO: decide
        // whether to enforce it once a real auth backend is connected.
        if (TextUtils.isEmpty(password)) {
            mPasswordView.setError(getString(R.string.error_field_required));
            focusView = mPasswordView;
            cancel = true;
        }
        if (TextUtils.isEmpty(user)) {
            mUsernameView.setError(getString(R.string.error_field_required));
            focusView = mUsernameView;
            cancel = true;
        }

        if (cancel) {
            focusView.requestFocus();
        } else {
            mAuthTask = new UserLoginTask(user, password);
            mAuthTask.execute();
        }
    }

    /** @return whether the password meets the minimum-length policy. */
    private boolean isPasswordValid(String password) {
        //TODO: Replace this with your own logic
        return password.length() > 4;
    }

    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle args) {
        // No autocomplete data source is wired up yet.
        return null;
    }

    @Override
    public void onLoadFinished(Loader<Cursor> cursorLoader, Cursor cursor) {
        List<String> emails = new ArrayList<>();
        cursor.moveToFirst();
        while (!cursor.isAfterLast()) {
            emails.add(cursor.getString(ProfileQuery.ADDRESS));
            cursor.moveToNext();
        }
        addEmailsToAutoComplete(emails);
    }

    @Override
    public void onLoaderReset(Loader<Cursor> cursorLoader) {
    }

    private void addEmailsToAutoComplete(List<String> emailAddressCollection) {
        ArrayAdapter<String> adapter =
                new ArrayAdapter<>(LoginActivity.this,
                        android.R.layout.simple_dropdown_item_1line, emailAddressCollection);
        mUsernameView.setAdapter(adapter);
    }

    private interface ProfileQuery {
        String[] PROJECTION = {
        };
        int ADDRESS = 0;
        int IS_PRIMARY = 1;
    }

    /**
     * Represents an asynchronous login/registration task used to authenticate
     * the user against the remote login endpoint.
     */
    public class UserLoginTask extends AsyncTask<Void, Void, Boolean> {

        private String mUsername;
        private String mPassword;

        UserLoginTask(String user, String password) {
            mUsername = user;
            mPassword = password;
        }

        // Raw server response: "1" = success, "0" = bad credentials,
        // anything else is an error message shown to the user.
        String strResult = "";

        @Override
        protected Boolean doInBackground(Void... params) {
            // NOTE(review/security): credentials are POSTed over cleartext HTTP;
            // this endpoint should be migrated to HTTPS.
            String login_URL = "http://167.160.84.186/login.php";
            HttpURLConnection httpUrlConnection = null;
            try {
                URL url = new URL(login_URL);
                httpUrlConnection = (HttpURLConnection) url.openConnection();
                httpUrlConnection.setRequestMethod("POST");
                httpUrlConnection.setDoOutput(true);
                httpUrlConnection.setDoInput(true);

                String post_data = URLEncoder.encode("user_name", "UTF-8") + "="
                        + URLEncoder.encode(mUsername, "UTF-8") + "&"
                        + URLEncoder.encode("password", "UTF-8") + "="
                        + URLEncoder.encode(mPassword, "UTF-8");
                // BUG FIX: no longer stash post_data in strResult — on failure the
                // old code could surface the URL-encoded password in a Toast.

                OutputStream outputStream = httpUrlConnection.getOutputStream();
                BufferedWriter bufferedWriter =
                        new BufferedWriter(new OutputStreamWriter(outputStream, "UTF-8"));
                bufferedWriter.write(post_data);
                bufferedWriter.flush();
                bufferedWriter.close();
                outputStream.close();

                InputStream inputStream = httpUrlConnection.getInputStream();
                BufferedReader bufferedReader =
                        new BufferedReader(new InputStreamReader(inputStream, "iso-8859-1"));
                // BUG FIX: use StringBuilder instead of O(n^2) string concatenation.
                StringBuilder result = new StringBuilder();
                String line;
                while ((line = bufferedReader.readLine()) != null) {
                    result.append(line);
                }
                bufferedReader.close();
                inputStream.close();
                strResult = result.toString();
            } catch (Exception e) {
                // BUG FIX: some exception types were previously swallowed silently;
                // record every failure so onPostExecute can surface it.
                e.printStackTrace();
                strResult = e.toString();
            } finally {
                // BUG FIX: disconnect in finally so the connection is released
                // even when an exception is thrown mid-request.
                if (httpUrlConnection != null) {
                    httpUrlConnection.disconnect();
                }
            }
            // BUG FIX: report the actual outcome instead of always returning false.
            return "1".equals(strResult);
        }

        @Override
        protected void onPostExecute(final Boolean success) {
            mAuthTask = null;
            //showProgress(false);
            if (strResult.equals("1")) {
                Toast.makeText(LoginActivity.this, "Login Successful",
                        Toast.LENGTH_SHORT).show();
                Intent main = new Intent(LoginActivity.this, MainActivity.class);
                main.putExtra("EXTRA_USERNAME", mUsername);
                mUsernameView.setText("");
                mPasswordView.setText("");
                startActivity(main);
            } else if (strResult.equals("0")) {
                mPasswordView.setError("Incorrect Username or Password");
                mPasswordView.requestFocus();
            } else {
                Toast.makeText(LoginActivity.this, strResult, Toast.LENGTH_SHORT).show();
            }
        }

        @Override
        protected void onCancelled() {
            mAuthTask = null;
            //showProgress(false);
        }
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v9/resources/customer_feed.proto package com.google.ads.googleads.v9.resources; /** * <pre> * A customer feed. * </pre> * * Protobuf type {@code google.ads.googleads.v9.resources.CustomerFeed} */ public final class CustomerFeed extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v9.resources.CustomerFeed) CustomerFeedOrBuilder { private static final long serialVersionUID = 0L; // Use CustomerFeed.newBuilder() to construct. private CustomerFeed(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CustomerFeed() { resourceName_ = ""; feed_ = ""; placeholderTypes_ = java.util.Collections.emptyList(); status_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new CustomerFeed(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CustomerFeed( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); resourceName_ = s; break; } case 24: { int rawValue = input.readEnum(); if (!((mutable_bitField0_ & 0x00000002) != 0)) { placeholderTypes_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000002; } placeholderTypes_.add(rawValue); break; } case 26: { int length = input.readRawVarint32(); int 
oldLimit = input.pushLimit(length); while(input.getBytesUntilLimit() > 0) { int rawValue = input.readEnum(); if (!((mutable_bitField0_ & 0x00000002) != 0)) { placeholderTypes_ = new java.util.ArrayList<java.lang.Integer>(); mutable_bitField0_ |= 0x00000002; } placeholderTypes_.add(rawValue); } input.popLimit(oldLimit); break; } case 34: { com.google.ads.googleads.v9.common.MatchingFunction.Builder subBuilder = null; if (matchingFunction_ != null) { subBuilder = matchingFunction_.toBuilder(); } matchingFunction_ = input.readMessage(com.google.ads.googleads.v9.common.MatchingFunction.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(matchingFunction_); matchingFunction_ = subBuilder.buildPartial(); } break; } case 40: { int rawValue = input.readEnum(); status_ = rawValue; break; } case 50: { java.lang.String s = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; feed_ = s; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) != 0)) { placeholderTypes_ = java.util.Collections.unmodifiableList(placeholderTypes_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v9.resources.CustomerFeedProto.internal_static_google_ads_googleads_v9_resources_CustomerFeed_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v9.resources.CustomerFeedProto.internal_static_google_ads_googleads_v9_resources_CustomerFeed_fieldAccessorTable 
.ensureFieldAccessorsInitialized( com.google.ads.googleads.v9.resources.CustomerFeed.class, com.google.ads.googleads.v9.resources.CustomerFeed.Builder.class); } private int bitField0_; public static final int RESOURCE_NAME_FIELD_NUMBER = 1; private volatile java.lang.Object resourceName_; /** * <pre> * Immutable. The resource name of the customer feed. * Customer feed resource names have the form: * `customers/{customer_id}/customerFeeds/{feed_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ @java.lang.Override public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } } /** * <pre> * Immutable. The resource name of the customer feed. * Customer feed resource names have the form: * `customers/{customer_id}/customerFeeds/{feed_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ @java.lang.Override public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FEED_FIELD_NUMBER = 6; private volatile java.lang.Object feed_; /** * <pre> * Immutable. The feed being linked to the customer. * </pre> * * <code>optional string feed = 6 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the feed field is set. 
*/ @java.lang.Override public boolean hasFeed() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Immutable. The feed being linked to the customer. * </pre> * * <code>optional string feed = 6 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The feed. */ @java.lang.Override public java.lang.String getFeed() { java.lang.Object ref = feed_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); feed_ = s; return s; } } /** * <pre> * Immutable. The feed being linked to the customer. * </pre> * * <code>optional string feed = 6 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for feed. */ @java.lang.Override public com.google.protobuf.ByteString getFeedBytes() { java.lang.Object ref = feed_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); feed_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PLACEHOLDER_TYPES_FIELD_NUMBER = 3; private java.util.List<java.lang.Integer> placeholderTypes_; private static final com.google.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType> placeholderTypes_converter_ = new com.google.protobuf.Internal.ListAdapter.Converter< java.lang.Integer, com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType>() { public com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType convert(java.lang.Integer from) { @SuppressWarnings("deprecation") com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType result = com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType.valueOf(from); return result == null ? 
com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType.UNRECOGNIZED : result; } }; /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @return A list containing the placeholderTypes. */ @java.lang.Override public java.util.List<com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType> getPlaceholderTypesList() { return new com.google.protobuf.Internal.ListAdapter< java.lang.Integer, com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType>(placeholderTypes_, placeholderTypes_converter_); } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @return The count of placeholderTypes. */ @java.lang.Override public int getPlaceholderTypesCount() { return placeholderTypes_.size(); } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @param index The index of the element to return. * @return The placeholderTypes at the given index. */ @java.lang.Override public com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType getPlaceholderTypes(int index) { return placeholderTypes_converter_.convert(placeholderTypes_.get(index)); } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @return A list containing the enum numeric values on the wire for placeholderTypes. 
*/ @java.lang.Override public java.util.List<java.lang.Integer> getPlaceholderTypesValueList() { return placeholderTypes_; } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @param index The index of the value to return. * @return The enum numeric value on the wire of placeholderTypes at the given index. */ @java.lang.Override public int getPlaceholderTypesValue(int index) { return placeholderTypes_.get(index); } private int placeholderTypesMemoizedSerializedSize; public static final int MATCHING_FUNCTION_FIELD_NUMBER = 4; private com.google.ads.googleads.v9.common.MatchingFunction matchingFunction_; /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. * </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> * @return Whether the matchingFunction field is set. */ @java.lang.Override public boolean hasMatchingFunction() { return matchingFunction_ != null; } /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. * </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> * @return The matchingFunction. */ @java.lang.Override public com.google.ads.googleads.v9.common.MatchingFunction getMatchingFunction() { return matchingFunction_ == null ? com.google.ads.googleads.v9.common.MatchingFunction.getDefaultInstance() : matchingFunction_; } /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. 
* </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> */ @java.lang.Override public com.google.ads.googleads.v9.common.MatchingFunctionOrBuilder getMatchingFunctionOrBuilder() { return getMatchingFunction(); } public static final int STATUS_FIELD_NUMBER = 5; private int status_; /** * <pre> * Output only. Status of the customer feed. * This field is read-only. * </pre> * * <code>.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus status = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The enum numeric value on the wire for status. */ @java.lang.Override public int getStatusValue() { return status_; } /** * <pre> * Output only. Status of the customer feed. * This field is read-only. * </pre> * * <code>.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus status = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The status. */ @java.lang.Override public com.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus getStatus() { @SuppressWarnings("deprecation") com.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus result = com.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus.valueOf(status_); return result == null ? 
com.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resourceName_); } if (getPlaceholderTypesList().size() > 0) { output.writeUInt32NoTag(26); output.writeUInt32NoTag(placeholderTypesMemoizedSerializedSize); } for (int i = 0; i < placeholderTypes_.size(); i++) { output.writeEnumNoTag(placeholderTypes_.get(i)); } if (matchingFunction_ != null) { output.writeMessage(4, getMatchingFunction()); } if (status_ != com.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus.UNSPECIFIED.getNumber()) { output.writeEnum(5, status_); } if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, feed_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resourceName_); } { int dataSize = 0; for (int i = 0; i < placeholderTypes_.size(); i++) { dataSize += com.google.protobuf.CodedOutputStream .computeEnumSizeNoTag(placeholderTypes_.get(i)); } size += dataSize; if (!getPlaceholderTypesList().isEmpty()) { size += 1; size += com.google.protobuf.CodedOutputStream .computeUInt32SizeNoTag(dataSize); }placeholderTypesMemoizedSerializedSize = dataSize; } if (matchingFunction_ != null) { size += com.google.protobuf.CodedOutputStream 
.computeMessageSize(4, getMatchingFunction()); } if (status_ != com.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus.UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream .computeEnumSize(5, status_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, feed_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v9.resources.CustomerFeed)) { return super.equals(obj); } com.google.ads.googleads.v9.resources.CustomerFeed other = (com.google.ads.googleads.v9.resources.CustomerFeed) obj; if (!getResourceName() .equals(other.getResourceName())) return false; if (hasFeed() != other.hasFeed()) return false; if (hasFeed()) { if (!getFeed() .equals(other.getFeed())) return false; } if (!placeholderTypes_.equals(other.placeholderTypes_)) return false; if (hasMatchingFunction() != other.hasMatchingFunction()) return false; if (hasMatchingFunction()) { if (!getMatchingFunction() .equals(other.getMatchingFunction())) return false; } if (status_ != other.status_) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getResourceName().hashCode(); if (hasFeed()) { hash = (37 * hash) + FEED_FIELD_NUMBER; hash = (53 * hash) + getFeed().hashCode(); } if (getPlaceholderTypesCount() > 0) { hash = (37 * hash) + PLACEHOLDER_TYPES_FIELD_NUMBER; hash = (53 * hash) + placeholderTypes_.hashCode(); } if (hasMatchingFunction()) { hash = (37 * hash) + MATCHING_FUNCTION_FIELD_NUMBER; hash = (53 * hash) + getMatchingFunction().hashCode(); } hash = (37 * hash) + 
STATUS_FIELD_NUMBER; hash = (53 * hash) + status_; hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v9.resources.CustomerFeed parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v9.resources.CustomerFeed parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v9.resources.CustomerFeed parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v9.resources.CustomerFeed parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v9.resources.CustomerFeed parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v9.resources.CustomerFeed parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v9.resources.CustomerFeed parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v9.resources.CustomerFeed parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 
.parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v9.resources.CustomerFeed parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v9.resources.CustomerFeed parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v9.resources.CustomerFeed parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v9.resources.CustomerFeed parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v9.resources.CustomerFeed prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * A customer feed. 
* </pre> * * Protobuf type {@code google.ads.googleads.v9.resources.CustomerFeed} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.resources.CustomerFeed) com.google.ads.googleads.v9.resources.CustomerFeedOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v9.resources.CustomerFeedProto.internal_static_google_ads_googleads_v9_resources_CustomerFeed_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v9.resources.CustomerFeedProto.internal_static_google_ads_googleads_v9_resources_CustomerFeed_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v9.resources.CustomerFeed.class, com.google.ads.googleads.v9.resources.CustomerFeed.Builder.class); } // Construct using com.google.ads.googleads.v9.resources.CustomerFeed.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); resourceName_ = ""; feed_ = ""; bitField0_ = (bitField0_ & ~0x00000001); placeholderTypes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); if (matchingFunctionBuilder_ == null) { matchingFunction_ = null; } else { matchingFunction_ = null; matchingFunctionBuilder_ = null; } status_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.ads.googleads.v9.resources.CustomerFeedProto.internal_static_google_ads_googleads_v9_resources_CustomerFeed_descriptor; } @java.lang.Override public com.google.ads.googleads.v9.resources.CustomerFeed getDefaultInstanceForType() { return com.google.ads.googleads.v9.resources.CustomerFeed.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v9.resources.CustomerFeed build() { com.google.ads.googleads.v9.resources.CustomerFeed result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v9.resources.CustomerFeed buildPartial() { com.google.ads.googleads.v9.resources.CustomerFeed result = new com.google.ads.googleads.v9.resources.CustomerFeed(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; result.resourceName_ = resourceName_; if (((from_bitField0_ & 0x00000001) != 0)) { to_bitField0_ |= 0x00000001; } result.feed_ = feed_; if (((bitField0_ & 0x00000002) != 0)) { placeholderTypes_ = java.util.Collections.unmodifiableList(placeholderTypes_); bitField0_ = (bitField0_ & ~0x00000002); } result.placeholderTypes_ = placeholderTypes_; if (matchingFunctionBuilder_ == null) { result.matchingFunction_ = matchingFunction_; } else { result.matchingFunction_ = matchingFunctionBuilder_.build(); } result.status_ = status_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder 
setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v9.resources.CustomerFeed) { return mergeFrom((com.google.ads.googleads.v9.resources.CustomerFeed)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v9.resources.CustomerFeed other) { if (other == com.google.ads.googleads.v9.resources.CustomerFeed.getDefaultInstance()) return this; if (!other.getResourceName().isEmpty()) { resourceName_ = other.resourceName_; onChanged(); } if (other.hasFeed()) { bitField0_ |= 0x00000001; feed_ = other.feed_; onChanged(); } if (!other.placeholderTypes_.isEmpty()) { if (placeholderTypes_.isEmpty()) { placeholderTypes_ = other.placeholderTypes_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensurePlaceholderTypesIsMutable(); placeholderTypes_.addAll(other.placeholderTypes_); } onChanged(); } if (other.hasMatchingFunction()) { mergeMatchingFunction(other.getMatchingFunction()); } if (other.status_ != 0) { setStatusValue(other.getStatusValue()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.ads.googleads.v9.resources.CustomerFeed parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = 
(com.google.ads.googleads.v9.resources.CustomerFeed) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object resourceName_ = ""; /** * <pre> * Immutable. The resource name of the customer feed. * Customer feed resource names have the form: * `customers/{customer_id}/customerFeeds/{feed_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The resourceName. */ public java.lang.String getResourceName() { java.lang.Object ref = resourceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The resource name of the customer feed. * Customer feed resource names have the form: * `customers/{customer_id}/customerFeeds/{feed_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for resourceName. */ public com.google.protobuf.ByteString getResourceNameBytes() { java.lang.Object ref = resourceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); resourceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The resource name of the customer feed. * Customer feed resource names have the form: * `customers/{customer_id}/customerFeeds/{feed_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The resourceName to set. * @return This builder for chaining. 
*/ public Builder setResourceName( java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceName_ = value; onChanged(); return this; } /** * <pre> * Immutable. The resource name of the customer feed. * Customer feed resource names have the form: * `customers/{customer_id}/customerFeeds/{feed_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearResourceName() { resourceName_ = getDefaultInstance().getResourceName(); onChanged(); return this; } /** * <pre> * Immutable. The resource name of the customer feed. * Customer feed resource names have the form: * `customers/{customer_id}/customerFeeds/{feed_id}` * </pre> * * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for resourceName to set. * @return This builder for chaining. */ public Builder setResourceNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceName_ = value; onChanged(); return this; } private java.lang.Object feed_ = ""; /** * <pre> * Immutable. The feed being linked to the customer. * </pre> * * <code>optional string feed = 6 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return Whether the feed field is set. */ public boolean hasFeed() { return ((bitField0_ & 0x00000001) != 0); } /** * <pre> * Immutable. The feed being linked to the customer. * </pre> * * <code>optional string feed = 6 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The feed. 
*/ public java.lang.String getFeed() { java.lang.Object ref = feed_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); feed_ = s; return s; } else { return (java.lang.String) ref; } } /** * <pre> * Immutable. The feed being linked to the customer. * </pre> * * <code>optional string feed = 6 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return The bytes for feed. */ public com.google.protobuf.ByteString getFeedBytes() { java.lang.Object ref = feed_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); feed_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * Immutable. The feed being linked to the customer. * </pre> * * <code>optional string feed = 6 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The feed to set. * @return This builder for chaining. */ public Builder setFeed( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; feed_ = value; onChanged(); return this; } /** * <pre> * Immutable. The feed being linked to the customer. * </pre> * * <code>optional string feed = 6 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @return This builder for chaining. */ public Builder clearFeed() { bitField0_ = (bitField0_ & ~0x00000001); feed_ = getDefaultInstance().getFeed(); onChanged(); return this; } /** * <pre> * Immutable. The feed being linked to the customer. * </pre> * * <code>optional string feed = 6 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code> * @param value The bytes for feed to set. * @return This builder for chaining. 
*/ public Builder setFeedBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); bitField0_ |= 0x00000001; feed_ = value; onChanged(); return this; } private java.util.List<java.lang.Integer> placeholderTypes_ = java.util.Collections.emptyList(); private void ensurePlaceholderTypesIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { placeholderTypes_ = new java.util.ArrayList<java.lang.Integer>(placeholderTypes_); bitField0_ |= 0x00000002; } } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @return A list containing the placeholderTypes. */ public java.util.List<com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType> getPlaceholderTypesList() { return new com.google.protobuf.Internal.ListAdapter< java.lang.Integer, com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType>(placeholderTypes_, placeholderTypes_converter_); } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @return The count of placeholderTypes. */ public int getPlaceholderTypesCount() { return placeholderTypes_.size(); } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @param index The index of the element to return. * @return The placeholderTypes at the given index. 
*/ public com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType getPlaceholderTypes(int index) { return placeholderTypes_converter_.convert(placeholderTypes_.get(index)); } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @param index The index to set the value at. * @param value The placeholderTypes to set. * @return This builder for chaining. */ public Builder setPlaceholderTypes( int index, com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType value) { if (value == null) { throw new NullPointerException(); } ensurePlaceholderTypesIsMutable(); placeholderTypes_.set(index, value.getNumber()); onChanged(); return this; } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @param value The placeholderTypes to add. * @return This builder for chaining. */ public Builder addPlaceholderTypes(com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType value) { if (value == null) { throw new NullPointerException(); } ensurePlaceholderTypesIsMutable(); placeholderTypes_.add(value.getNumber()); onChanged(); return this; } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @param values The placeholderTypes to add. * @return This builder for chaining. */ public Builder addAllPlaceholderTypes( java.lang.Iterable<? 
extends com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType> values) { ensurePlaceholderTypesIsMutable(); for (com.google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType value : values) { placeholderTypes_.add(value.getNumber()); } onChanged(); return this; } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @return This builder for chaining. */ public Builder clearPlaceholderTypes() { placeholderTypes_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @return A list containing the enum numeric values on the wire for placeholderTypes. */ public java.util.List<java.lang.Integer> getPlaceholderTypesValueList() { return java.util.Collections.unmodifiableList(placeholderTypes_); } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @param index The index of the value to return. * @return The enum numeric value on the wire of placeholderTypes at the given index. */ public int getPlaceholderTypesValue(int index) { return placeholderTypes_.get(index); } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @param index The index of the value to return. 
* @return The enum numeric value on the wire of placeholderTypes at the given index. * @return This builder for chaining. */ public Builder setPlaceholderTypesValue( int index, int value) { ensurePlaceholderTypesIsMutable(); placeholderTypes_.set(index, value); onChanged(); return this; } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @param value The enum numeric value on the wire for placeholderTypes to add. * @return This builder for chaining. */ public Builder addPlaceholderTypesValue(int value) { ensurePlaceholderTypesIsMutable(); placeholderTypes_.add(value); onChanged(); return this; } /** * <pre> * Indicates which placeholder types the feed may populate under the connected * customer. Required. * </pre> * * <code>repeated .google.ads.googleads.v9.enums.PlaceholderTypeEnum.PlaceholderType placeholder_types = 3;</code> * @param values The enum numeric values on the wire for placeholderTypes to add. * @return This builder for chaining. */ public Builder addAllPlaceholderTypesValue( java.lang.Iterable<java.lang.Integer> values) { ensurePlaceholderTypesIsMutable(); for (int value : values) { placeholderTypes_.add(value); } onChanged(); return this; } private com.google.ads.googleads.v9.common.MatchingFunction matchingFunction_; private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v9.common.MatchingFunction, com.google.ads.googleads.v9.common.MatchingFunction.Builder, com.google.ads.googleads.v9.common.MatchingFunctionOrBuilder> matchingFunctionBuilder_; /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. * </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> * @return Whether the matchingFunction field is set. 
*/ public boolean hasMatchingFunction() { return matchingFunctionBuilder_ != null || matchingFunction_ != null; } /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. * </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> * @return The matchingFunction. */ public com.google.ads.googleads.v9.common.MatchingFunction getMatchingFunction() { if (matchingFunctionBuilder_ == null) { return matchingFunction_ == null ? com.google.ads.googleads.v9.common.MatchingFunction.getDefaultInstance() : matchingFunction_; } else { return matchingFunctionBuilder_.getMessage(); } } /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. * </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> */ public Builder setMatchingFunction(com.google.ads.googleads.v9.common.MatchingFunction value) { if (matchingFunctionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } matchingFunction_ = value; onChanged(); } else { matchingFunctionBuilder_.setMessage(value); } return this; } /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. * </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> */ public Builder setMatchingFunction( com.google.ads.googleads.v9.common.MatchingFunction.Builder builderForValue) { if (matchingFunctionBuilder_ == null) { matchingFunction_ = builderForValue.build(); onChanged(); } else { matchingFunctionBuilder_.setMessage(builderForValue.build()); } return this; } /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. 
* </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> */ public Builder mergeMatchingFunction(com.google.ads.googleads.v9.common.MatchingFunction value) { if (matchingFunctionBuilder_ == null) { if (matchingFunction_ != null) { matchingFunction_ = com.google.ads.googleads.v9.common.MatchingFunction.newBuilder(matchingFunction_).mergeFrom(value).buildPartial(); } else { matchingFunction_ = value; } onChanged(); } else { matchingFunctionBuilder_.mergeFrom(value); } return this; } /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. * </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> */ public Builder clearMatchingFunction() { if (matchingFunctionBuilder_ == null) { matchingFunction_ = null; onChanged(); } else { matchingFunction_ = null; matchingFunctionBuilder_ = null; } return this; } /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. * </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> */ public com.google.ads.googleads.v9.common.MatchingFunction.Builder getMatchingFunctionBuilder() { onChanged(); return getMatchingFunctionFieldBuilder().getBuilder(); } /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. * </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> */ public com.google.ads.googleads.v9.common.MatchingFunctionOrBuilder getMatchingFunctionOrBuilder() { if (matchingFunctionBuilder_ != null) { return matchingFunctionBuilder_.getMessageOrBuilder(); } else { return matchingFunction_ == null ? 
com.google.ads.googleads.v9.common.MatchingFunction.getDefaultInstance() : matchingFunction_; } } /** * <pre> * Matching function associated with the CustomerFeed. * The matching function is used to filter the set of feed items selected. * Required. * </pre> * * <code>.google.ads.googleads.v9.common.MatchingFunction matching_function = 4;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v9.common.MatchingFunction, com.google.ads.googleads.v9.common.MatchingFunction.Builder, com.google.ads.googleads.v9.common.MatchingFunctionOrBuilder> getMatchingFunctionFieldBuilder() { if (matchingFunctionBuilder_ == null) { matchingFunctionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.ads.googleads.v9.common.MatchingFunction, com.google.ads.googleads.v9.common.MatchingFunction.Builder, com.google.ads.googleads.v9.common.MatchingFunctionOrBuilder>( getMatchingFunction(), getParentForChildren(), isClean()); matchingFunction_ = null; } return matchingFunctionBuilder_; } private int status_ = 0; /** * <pre> * Output only. Status of the customer feed. * This field is read-only. * </pre> * * <code>.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus status = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The enum numeric value on the wire for status. */ @java.lang.Override public int getStatusValue() { return status_; } /** * <pre> * Output only. Status of the customer feed. * This field is read-only. * </pre> * * <code>.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus status = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @param value The enum numeric value on the wire for status to set. * @return This builder for chaining. */ public Builder setStatusValue(int value) { status_ = value; onChanged(); return this; } /** * <pre> * Output only. Status of the customer feed. * This field is read-only. 
* </pre> * * <code>.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus status = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return The status. */ @java.lang.Override public com.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus getStatus() { @SuppressWarnings("deprecation") com.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus result = com.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus.valueOf(status_); return result == null ? com.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus.UNRECOGNIZED : result; } /** * <pre> * Output only. Status of the customer feed. * This field is read-only. * </pre> * * <code>.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus status = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @param value The status to set. * @return This builder for chaining. */ public Builder setStatus(com.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus value) { if (value == null) { throw new NullPointerException(); } status_ = value.getNumber(); onChanged(); return this; } /** * <pre> * Output only. Status of the customer feed. * This field is read-only. * </pre> * * <code>.google.ads.googleads.v9.enums.FeedLinkStatusEnum.FeedLinkStatus status = 5 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * @return This builder for chaining. 
*/ public Builder clearStatus() { status_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.resources.CustomerFeed) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v9.resources.CustomerFeed) private static final com.google.ads.googleads.v9.resources.CustomerFeed DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v9.resources.CustomerFeed(); } public static com.google.ads.googleads.v9.resources.CustomerFeed getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CustomerFeed> PARSER = new com.google.protobuf.AbstractParser<CustomerFeed>() { @java.lang.Override public CustomerFeed parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CustomerFeed(input, extensionRegistry); } }; public static com.google.protobuf.Parser<CustomerFeed> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CustomerFeed> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v9.resources.CustomerFeed getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Copyright The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.replication;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Tests replication from a single master cluster to two slave clusters,
 * including adding a peer after data has already been written and rolling
 * the WAL in the middle of the replication stream.
 */
@Category(LargeTests.class)
public class TestMultiSlaveReplication {

  // Was mistakenly LogFactory.getLog(TestReplication.class) — log under
  // this test's own class name so its output is attributable.
  private static final Log LOG = LogFactory.getLog(TestMultiSlaveReplication.class);

  private static Configuration conf1;
  private static Configuration conf2;
  private static Configuration conf3;

  private static HBaseTestingUtility utility1;
  private static HBaseTestingUtility utility2;
  private static HBaseTestingUtility utility3;

  // Polling parameters used while waiting for replication to propagate.
  private static final long SLEEP_TIME = 500;
  private static final int NB_RETRIES = 10;

  private static final byte[] tableName = Bytes.toBytes("test");
  private static final byte[] famName = Bytes.toBytes("f");
  private static final byte[] row = Bytes.toBytes("row");
  private static final byte[] row1 = Bytes.toBytes("row1");
  private static final byte[] row2 = Bytes.toBytes("row2");
  private static final byte[] row3 = Bytes.toBytes("row3");
  // Family with default (local) replication scope — must never replicate.
  private static final byte[] noRepfamName = Bytes.toBytes("norep");

  private static HTableDescriptor table;

  /**
   * Builds three cluster configurations that share one mini ZooKeeper
   * cluster under distinct znode parents (/1, /2, /3), and the table
   * descriptor used by the test: one globally-replicated family plus one
   * non-replicated family.
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    conf1 = HBaseConfiguration.create();
    conf1.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
    // smaller block size and capacity to trigger more operations
    // and test them
    conf1.setInt("hbase.regionserver.hlog.blocksize", 1024 * 20);
    conf1.setInt("replication.source.size.capacity", 1024);
    conf1.setLong("replication.source.sleepforretries", 100);
    conf1.setInt("hbase.regionserver.maxlogs", 10);
    conf1.setLong("hbase.master.logcleaner.ttl", 10);
    conf1.setBoolean(HConstants.REPLICATION_ENABLE_KEY, true);
    conf1.setBoolean("dfs.support.append", true);
    conf1.setLong(HConstants.THREAD_WAKE_FREQUENCY, 100);
    conf1.setStrings(CoprocessorHost.USER_REGION_COPROCESSOR_CONF_KEY,
        "org.apache.hadoop.hbase.replication.TestMasterReplication$CoprocessorCounter");

    utility1 = new HBaseTestingUtility(conf1);
    utility1.startMiniZKCluster();
    MiniZooKeeperCluster miniZK = utility1.getZkCluster();
    new ZooKeeperWatcher(conf1, "cluster1", null, true);

    conf2 = new Configuration(conf1);
    conf2.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/2");

    conf3 = new Configuration(conf1);
    conf3.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/3");

    utility2 = new HBaseTestingUtility(conf2);
    utility2.setZkCluster(miniZK);
    // Was "cluster3" — copy-paste error; this watcher belongs to cluster 2.
    new ZooKeeperWatcher(conf2, "cluster2", null, true);

    utility3 = new HBaseTestingUtility(conf3);
    utility3.setZkCluster(miniZK);
    new ZooKeeperWatcher(conf3, "cluster3", null, true);

    table = new HTableDescriptor(tableName);
    HColumnDescriptor fam = new HColumnDescriptor(famName);
    fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    table.addFamily(fam);
    fam = new HColumnDescriptor(noRepfamName);
    table.addFamily(fam);
  }

  /**
   * Scenario: replicate to slave 2, verify slave 3 sees nothing, roll the
   * WAL, then add slave 3 as a peer and verify that only edits from the
   * latest (post-roll) log reach it, while new edits reach both slaves.
   */
  @Test(timeout = 300000)
  public void testMultiSlaveReplication() throws Exception {
    // Was logging "testCyclicReplication" — wrong test name, copied from
    // another test.
    LOG.info("testMultiSlaveReplication");
    MiniHBaseCluster master = utility1.startMiniCluster();
    utility2.startMiniCluster();
    utility3.startMiniCluster();
    ReplicationAdmin admin1 = new ReplicationAdmin(conf1);

    // One admin per cluster, reused throughout (the original constructed a
    // fresh HBaseAdmin — and its connection — for every call and leaked it).
    HBaseAdmin hbaseAdmin1 = new HBaseAdmin(conf1);
    HBaseAdmin hbaseAdmin2 = new HBaseAdmin(conf2);
    HBaseAdmin hbaseAdmin3 = new HBaseAdmin(conf3);
    hbaseAdmin1.createTable(table);
    hbaseAdmin2.createTable(table);
    hbaseAdmin3.createTable(table);

    HTable htable1 = new HTable(conf1, tableName);
    htable1.setWriteBufferSize(1024);
    HTable htable2 = new HTable(conf2, tableName);
    htable2.setWriteBufferSize(1024);
    HTable htable3 = new HTable(conf3, tableName);
    htable3.setWriteBufferSize(1024);

    admin1.addPeer("1", utility2.getClusterKey());

    // put "row" and wait 'til it got around, then delete
    putAndWait(row, famName, htable1, htable2);
    deleteAndWait(row, htable1, htable2);
    // check it wasn't replication to cluster 3
    checkRow(row, 0, htable3);

    putAndWait(row2, famName, htable1, htable2);

    // now roll the region server's logs
    hbaseAdmin1.rollHLogWriter(master.getRegionServer(0).getServerName().toString());
    // after the log was rolled put a new row
    putAndWait(row3, famName, htable1, htable2);

    admin1.addPeer("2", utility3.getClusterKey());

    // put a row, check it was replicated to all clusters
    putAndWait(row1, famName, htable1, htable2, htable3);
    // delete and verify
    deleteAndWait(row1, htable1, htable2, htable3);

    // make sure row2 did not get replicated after
    // cluster 3 was added
    checkRow(row2, 0, htable3);

    // row3 will get replicated, because it was in the
    // latest log
    checkRow(row3, 1, htable3);

    Put p = new Put(row);
    p.add(famName, row, row);
    htable1.put(p);
    // now roll the logs again
    hbaseAdmin1.rollHLogWriter(master.getRegionServer(0).getServerName().toString());

    // cleanup "row2", also conveniently use this to wait replication
    // to finish
    deleteAndWait(row2, htable1, htable2, htable3);
    // Even if the log was rolled in the middle of the replication
    // "row" is still replication.
    checkRow(row, 1, htable2, htable3);
    // cleanup the rest
    deleteAndWait(row, htable1, htable2, htable3);
    deleteAndWait(row3, htable1, htable2, htable3);

    // Release client-side handles before shutting the clusters down
    // (the original leaked all three tables).
    htable1.close();
    htable2.close();
    htable3.close();

    utility3.shutdownMiniCluster();
    utility2.shutdownMiniCluster();
    utility1.shutdownMiniCluster();
  }

  /**
   * Asserts that {@code row} has exactly {@code count} cells in each of the
   * given tables.
   *
   * @param row    row key to look up
   * @param count  expected number of cells in the result
   * @param tables tables to check
   * @throws IOException if a Get fails
   */
  private void checkRow(byte[] row, int count, HTable... tables) throws IOException {
    Get get = new Get(row);
    // Renamed loop variable from "table" — it shadowed the static
    // HTableDescriptor field of the same name.
    for (HTable target : tables) {
      Result res = target.get(get);
      assertEquals(count, res.size());
    }
  }

  /**
   * Deletes {@code row} from {@code source} and polls until the deletion has
   * been replicated to every target, failing after NB_RETRIES attempts.
   *
   * @param row     row key to delete
   * @param source  table the delete is issued against
   * @param targets replicated tables that must eventually lose the row
   * @throws Exception on Get/Delete failure or interrupted sleep
   */
  private void deleteAndWait(byte[] row, HTable source, HTable... targets)
      throws Exception {
    Delete del = new Delete(row);
    source.delete(del);

    Get get = new Get(row);
    for (int i = 0; i < NB_RETRIES; i++) {
      // Last iteration is the deadline: fail instead of checking again.
      if (i == NB_RETRIES - 1) {
        fail("Waited too much time for del replication");
      }
      boolean removedFromAll = true;
      for (HTable target : targets) {
        Result res = target.get(get);
        if (res.size() >= 1) {
          LOG.info("Row not deleted");
          removedFromAll = false;
          break;
        }
      }
      if (removedFromAll) {
        break;
      } else {
        Thread.sleep(SLEEP_TIME);
      }
    }
  }

  /**
   * Puts a single cell ({@code fam:row=row}) into {@code source} and polls
   * until the edit has been replicated to every target, failing after
   * NB_RETRIES attempts.
   *
   * @param row     row key (also used as qualifier and value)
   * @param fam     column family to write to
   * @param source  table the put is issued against
   * @param targets replicated tables that must eventually contain the row
   * @throws Exception on Get/Put failure or interrupted sleep
   */
  private void putAndWait(byte[] row, byte[] fam, HTable source, HTable... targets)
      throws Exception {
    Put put = new Put(row);
    put.add(fam, row, row);
    source.put(put);

    Get get = new Get(row);
    for (int i = 0; i < NB_RETRIES; i++) {
      // Last iteration is the deadline: fail instead of checking again.
      if (i == NB_RETRIES - 1) {
        fail("Waited too much time for put replication");
      }
      boolean replicatedToAll = true;
      for (HTable target : targets) {
        Result res = target.get(get);
        if (res.size() == 0) {
          LOG.info("Row not available");
          replicatedToAll = false;
          break;
        } else {
          assertArrayEquals(res.value(), row);
        }
      }
      if (replicatedToAll) {
        break;
      } else {
        Thread.sleep(SLEEP_TIME);
      }
    }
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
      new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}
/****************************************************************************** ** ** Advanced Distributed Learning Co-Laboratory (ADL Co-Lab) Hub grants you ** ("Licensee") a non-exclusive, royalty free, license to use, modify and ** redistribute this software in source and binary code form, provided that ** i) this copyright notice and license appear on all copies of the software; ** and ii) Licensee does not utilize the software in a manner which is ** disparaging to ADL Co-Lab Hub. ** ** This software is provided "AS IS," without a warranty of any kind. ALL ** EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING ** ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE ** OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. ADL Co-Lab Hub AND ITS LICENSORS ** SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE AS A RESULT OF ** USING, MODIFYING OR DISTRIBUTING THE SOFTWARE OR ITS DERIVATIVES. IN NO ** EVENT WILL ADL Co-Lab Hub OR ITS LICENSORS BE LIABLE FOR ANY LOST REVENUE, ** PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL, ** INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE ** THEORY OF LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE ** SOFTWARE, EVEN IF ADL Co-Lab Hub HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH ** DAMAGES. 
**
******************************************************************************/

package org.adl.datamodels.nav;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;

import org.adl.datamodels.DMDelimiterDescriptor;
import org.adl.datamodels.DMElement;
import org.adl.datamodels.DMElementDescriptor;
import org.adl.datamodels.DMErrorCodes;
import org.adl.datamodels.DMProcessingInfo;
import org.adl.datamodels.DMRequest;
import org.adl.datamodels.DataModel;
import org.adl.datamodels.RequestToken;
import org.adl.datamodels.datatypes.URIValidator;
import org.adl.datamodels.datatypes.VocabularyValidator;
import org.adl.datamodels.ieee.IValidatorFactory;
import org.adl.sequencer.IValidRequests;
import org.adl.sequencer.SeqNavRequests;

/**
 * <strong>Filename:</strong> SCORM_2004_NAV_DM.java<br><br>
 *
 * <strong>Description:</strong> This class implements the set of navigation events
 * defined in the SCORM 2004.
 *
 * @author ADL Technical Team
 */
public class SCORM_2004_NAV_DM extends DataModel implements Serializable
{
   private static final long serialVersionUID = 1L;

   /**
    * Describes the current known 'valid' set of navigation requests.
    */
   private IValidRequests mNavRequests = null;

   /**
    * Describes the current navigation request.
    */
   private String mCurRequest = null;

   /**
    * Describes the dot-notation binding string for this data model.
    */
   private String mBinding = "adl";

   /**
    * Describes the data model elements managed by this data model,
    * keyed by their dot-notation binding (e.g. "nav").
    */
   private Map<String, DMElement> mElements = null;

   /**
    * Default constructor required for serialization support.  Creates a
    * complete set of navigation data model information: the writable
    * "adl.nav.request" element (with its "choice" target delimiter) and
    * the read-only "adl.nav.request_valid.*" status elements.
    */
   public SCORM_2004_NAV_DM()
   {
      List<DMElementDescriptor> children = null;
      SCORM_2004_NAV_DMElement element = null;
      DMElementDescriptor desc = null;
      DMDelimiterDescriptor del = null;

      mElements = new Hashtable<String, DMElement>();
      children = new ArrayList<DMElementDescriptor>();

      // request -- restricted to the SCORM 2004 navigation vocabulary
      String[] vocab = { "continue", "previous", "choice", "exit", "exitAll",
                         "abandon", "abandonAll", "suspendAll", "_none_" };

      desc = new DMElementDescriptor("request", "_none_",
                                     new VocabularyValidator(vocab));

      // The 'choice' request will include a delimiter
      del = new DMDelimiterDescriptor("target", null, new URIValidator());
      desc.mDelimiters = new ArrayList<DMDelimiterDescriptor>();
      desc.mDelimiters.add(del);

      children.add(desc);

      List<DMElementDescriptor> subChildren = new ArrayList<DMElementDescriptor>();

      // continue -- read-only validity flag
      String[] status = { "true", "false", "unknown" };

      desc = new DMElementDescriptor("continue", "unknown",
                                     new VocabularyValidator(status));
      desc.mIsWriteable = false;
      subChildren.add(desc);

      // previous -- read-only validity flag
      desc = new DMElementDescriptor("previous", "unknown",
                                     new VocabularyValidator(status));
      desc.mIsWriteable = false;
      subChildren.add(desc);

      // choice -- read-only validity flag
      desc = new DMElementDescriptor("choice", "unknown",
                                     new VocabularyValidator(status));
      desc.mIsWriteable = false;
      subChildren.add(desc);

      // request_valid -- parent of the three status elements above
      desc = new DMElementDescriptor("request_valid", subChildren);
      children.add(desc);

      desc = new DMElementDescriptor("nav", children);

      // Create and add this element to the data model
      element = new SCORM_2004_NAV_DMElement(desc, null, this);
      mElements.put(desc.mBinding, element);
   }

   /**
    * Processes an equals() request against this data model, validating the
    * provided value.  Delegates to {@link #equals(DMRequest, boolean)}.
    *
    * @param iRequest The request (<code>DMRequest</code>) being processed.
    *
    * @return An abstract data model error code indicating the result of this
    *         operation.
    */
   @Override
   public int equals(DMRequest iRequest)
   {
      return equals(iRequest, true);
   }

   /**
    * Processes an equals() request against this data model.  Compares two
    * values of the same data model element for equality.
    *
    * @param iRequest  The request (<code>DMRequest</code>) being processed.
    *
    * @param iValidate Indicates if the provided value should be validated.
    *
    * @return An abstract data model error code indicating the result of this
    *         operation.
    */
   @Override
   public int equals(DMRequest iRequest, boolean iValidate)
   {
      // Assume no processing errors
      int result = DMErrorCodes.NO_ERROR;

      // Create an 'out' variable
      DMProcessingInfo pi = new DMProcessingInfo();

      // Process this request
      result = findElement(iRequest, pi);

      // If we found the 'leaf' element, finish the request
      if (result == DMErrorCodes.NO_ERROR)
      {
         RequestToken tok = iRequest.getNextToken();

         // Before processing, make sure this is the last token in the request
         if (!iRequest.hasMoreTokens())
         {
            // Make sure this is a Value token
            if (tok.getType() == RequestToken.TOKEN_VALUE)
            {
               result = pi.mElement.equals(tok);
            }
            else
            {
               // Wrong type of token -- value expected
               result = DMErrorCodes.INVALID_REQUEST;
            }
         }
         else
         {
            // Too many tokens
            result = DMErrorCodes.INVALID_REQUEST;
         }
      }

      return result;
   }

   /**
    * Processes a data model request by finding the target leaf element.
    * If the requested value is found, it is returned in the parameter
    * oInfo.  Get requests may be followed by an argument token; set
    * requests by a value token -- any other trailing token type is an
    * invalid request.
    *
    * @param iRequest The (<code>DMRequest</code>) being processed.
    *
    * @param oInfo    Provides the value returned by this request.
    *
    * @return An abstract data model error code indicating the result of this
    *         operation.
    */
   private int findElement(DMRequest iRequest, DMProcessingInfo oInfo)
   {
      // Assume no processing errors
      int result = DMErrorCodes.NO_ERROR;

      // Get the first specified element
      RequestToken tok = iRequest.getNextToken();

      if (tok != null && tok.getType() == RequestToken.TOKEN_ELEMENT)
      {
         DMElement element = mElements.get(tok.getValue());

         if (element != null)
         {
            oInfo.mElement = element;

            // Check if we need to stop before the last token
            tok = iRequest.getCurToken();
            boolean done = false;

            if (tok != null)
            {
               if (iRequest.isGetValueRequest())
               {
                  if (tok.getType() == RequestToken.TOKEN_ARGUMENT)
                  {
                     // We're done
                     done = true;
                  }
                  else if (tok.getType() == RequestToken.TOKEN_VALUE)
                  {
                     // Get requests cannot have value tokens
                     result = DMErrorCodes.INVALID_REQUEST;
                     done = true;
                  }
               }
               else
               {
                  if (tok.getType() == RequestToken.TOKEN_VALUE)
                  {
                     // We're done
                     done = true;
                  }
                  else if (tok.getType() == RequestToken.TOKEN_ARGUMENT)
                  {
                     // Set requests cannot have argument tokens
                     result = DMErrorCodes.INVALID_REQUEST;
                     done = true;
                  }
               }
            }

            // Process remaining tokens, descending one element per iteration
            while (!done && iRequest.hasMoreTokens() && result == DMErrorCodes.NO_ERROR)
            {
               result = element.processRequest(iRequest, oInfo);

               // Move to the next element if processing was successful
               if (result == DMErrorCodes.NO_ERROR)
               {
                  element = oInfo.mElement;
               }
               else
               {
                  oInfo.mElement = null;
               }

               // Check if we need to stop before the last token
               tok = iRequest.getCurToken();

               if (tok != null)
               {
                  if (iRequest.isGetValueRequest())
                  {
                     if (tok.getType() == RequestToken.TOKEN_ARGUMENT)
                     {
                        // We're done
                        done = true;
                     }
                     else if (tok.getType() == RequestToken.TOKEN_VALUE)
                     {
                        // Get requests cannot have value tokens
                        result = DMErrorCodes.INVALID_REQUEST;
                        done = true;
                     }
                  }
                  else
                  {
                     if (tok.getType() == RequestToken.TOKEN_VALUE)
                     {
                        // We're done
                        done = true;
                     }
                     else if (tok.getType() == RequestToken.TOKEN_ARGUMENT)
                     {
                        // Set requests cannot have argument tokens
                        result = DMErrorCodes.INVALID_REQUEST;
                        done = true;
                     }
                  }
               }
            }
         }
         else
         {
            // Unknown element
            result = DMErrorCodes.UNDEFINED_ELEMENT;
         }
      }
      else
      {
         // No initial element specified
         result = DMErrorCodes.INVALID_REQUEST;
      }

      return result;
   }

   /**
    * Provides the current navigation request string as communicated by the
    * SCO (for a 'choice' request this includes the choice target).
    *
    * @return The current navigation request, or <code>null</code> if none
    *         has been set.
    */
   public String getChoiceEvent()
   {
      return mCurRequest;
   }

   /**
    * Describes this data model's binding string.
    *
    * @return This data model's binding string.
    */
   @Override
   public String getDMBindingString()
   {
      return mBinding;
   }

   /**
    * Provides the requested data model element.
    *
    * @param iElement Describes the requested element's dot-notation bound name.
    *
    * @return The <code>DMElement</code> corresponding to the requested element
    *         or <code>null</code> if the element does not exist in the data
    *         model.
    */
   @Override
   public DMElement getDMElement(String iElement)
   {
      DMElement element = mElements.get(iElement);
      return element;
   }

   /**
    * Provides the map of data model elements managed by this data model.
    *
    * @return The elements keyed by their dot-notation binding.
    */
   public Map<String, DMElement> getElements()
   {
      return mElements;
   }

   /**
    * Translates the current navigation request string into the
    * corresponding <code>SeqNavRequests</code> event constant.
    *
    * @return The sequencer navigation event, or <code>-1</code> if no
    *         request is set or the request has no direct event mapping
    *         (e.g. a 'choice' target).
    */
   public int getNavEvent()
   {
      int navEvent = -1;

      if (mCurRequest != null)
      {
         if (mCurRequest.equals("continue"))
         {
            navEvent = SeqNavRequests.NAV_CONTINUE;
         }
         else if (mCurRequest.equals("previous"))
         {
            navEvent = SeqNavRequests.NAV_PREVIOUS;
         }
         else if (mCurRequest.equals("exit"))
         {
            navEvent = SeqNavRequests.NAV_EXIT;
         }
         else if (mCurRequest.equals("exitAll"))
         {
            navEvent = SeqNavRequests.NAV_EXITALL;
         }
         else if (mCurRequest.equals("abandon"))
         {
            navEvent = SeqNavRequests.NAV_ABANDON;
         }
         else if (mCurRequest.equals("abandonAll"))
         {
            navEvent = SeqNavRequests.NAV_ABANDONALL;
         }
         else if (mCurRequest.equals("suspendAll"))
         {
            navEvent = SeqNavRequests.NAV_SUSPENDALL;
         }
         else if (mCurRequest.equals("_none_"))
         {
            navEvent = SeqNavRequests.NAV_NONE;
         }
      }

      return navEvent;
   }

   /**
    * Provides the current navigation request communicated by the SCO --
    * either the numeric event code as a string, or the raw request string
    * when it does not map to an event (a 'choice' target).
    *
    * @return The current navigation request.
    */
   public String getNavRequest()
   {
      String request = null;
      int navEvent = SeqNavRequests.NAV_NONE;

      if (mCurRequest != null)
      {
         navEvent = getNavEvent();

         if (navEvent == -1)
         {
            // This must be a target for choice
            request = mCurRequest;
         }
      }

      if (request == null)
      {
         request = Integer.toString(navEvent);
      }

      return request;
   }

   /**
    * Provides the current 'known' set of valid navigation requests.
    *
    * @return The current set of valid navigation requests, or
    *         <code>null</code> if none has been supplied.
    */
   public IValidRequests getNavRequests()
   {
      return mNavRequests;
   }

   /**
    * Processes a GetValue() request against this data model.
    *
    * @param iRequest The (<code>DMRequest</code>) being processed.
    *
    * @param oInfo    Provides the value returned by this request.
    *
    * @return A data model error code indicating the result of this
    *         operation.
    */
   @Override
   public int getValue(DMRequest iRequest, DMProcessingInfo oInfo)
   {
      // Assume no processing errors
      int result = DMErrorCodes.NO_ERROR;

      // Create an 'out' variable
      DMProcessingInfo pi = new DMProcessingInfo();

      // Process this request
      result = findElement(iRequest, pi);

      // If we found the 'leaf' element, finish the request
      if (result == DMErrorCodes.NO_ERROR)
      {
         RequestToken tok = iRequest.getNextToken();

         // Before processing, make sure this is the last token in the request
         if (!iRequest.hasMoreTokens())
         {
            result = pi.mElement.getValue(tok, iRequest.isAdminRequest(),
                                          iRequest.supplyDefaultDelimiters(),
                                          oInfo);
         }
         else
         {
            // Too many tokens
            result = DMErrorCodes.INVALID_REQUEST;
         }
      }

      return result;
   }

   /**
    * Performs data model specific initialization.  This data model requires
    * none.
    *
    * @return An abstract data model error code indicating the result of this
    *         operation.
    */
   @Override
   public int initialize()
   {
      return DMErrorCodes.NO_ERROR;
   }

   /**
    * Directly sets the current navigation request string.
    *
    * @param navRequest The navigation request communicated by the SCO.
    */
   public void setNavRequest(String navRequest)
   {
      this.mCurRequest = navRequest;
   }

   /**
    * Sets the current 'known' set of valid navigation requests for the SCO
    * to the SCO's instance of the SCORM Navigation Data Model.
    *
    * @param iValid The current 'known' set of valid navigation requests.
    */
   public void setValidRequests(IValidRequests iValid)
   {
      mNavRequests = iValid;
   }

   /**
    * Processes a SetValue() request against this data model.  Checks the
    * request for validity.
    *
    * @param iRequest         The request (<code>DMRequest</code>) being processed.
    *
    * @param validatorFactory Supplies the validators used when setting the
    *                         element's value.
    *
    * @return A data model error code indicating the result of this
    *         operation.
    */
   @Override
   public int setValue(DMRequest iRequest, IValidatorFactory validatorFactory)
   {
      // Assume no processing errors
      int result = DMErrorCodes.NO_ERROR;

      // Create an 'out' variable
      DMProcessingInfo pi = new DMProcessingInfo();

      // Process this request
      result = findElement(iRequest, pi);

      // If we found the 'leaf' element, finish the request
      if (result == DMErrorCodes.NO_ERROR)
      {
         RequestToken tok = iRequest.getNextToken();

         // Before processing, make sure this is the last token in the request
         if (!iRequest.hasMoreTokens())
         {
            // Make sure this is a Value token
            if (tok.getType() == RequestToken.TOKEN_VALUE)
            {
               // result is necessarily NO_ERROR at this point
               if (result == DMErrorCodes.NO_ERROR)
               {
                  result = pi.mElement.setValue(tok, iRequest.isAdminRequest(),
                                                validatorFactory);
               }
            }
            else
            {
               // Wrong type of token -- value expected
               result = DMErrorCodes.INVALID_REQUEST;
            }
         }
         else
         {
            // Too many tokens
            result = DMErrorCodes.INVALID_REQUEST;
         }
      }

      return result;
   }

   /**
    * Displays the contents of the entire data model.
    */
   @Override
   public void showAllElements()
   {
      // Not implemented at this time
   }

   /**
    * Performs data model specific termination: resets "adl.nav.request" to
    * "_none_" and clears the cached request and valid-request state.
    *
    * @param validatorFactory Supplies the validators used for the reset.
    *
    * @return A data model error code indicating the result of this
    *         operation.
    */
   @Override
   public int terminate(IValidatorFactory validatorFactory)
   {
      // Clear the current nav request
      DMRequest req = new DMRequest("adl.nav.request", "_none_", true);

      // Remove the data model token, since we do not need the return
      // value, there is no need to assign it to a local variable
      req.getNextToken();

      // Invoke a SetValue() method call sending in the DMRequest.  There is
      // no need to capture the return from the setValue(), therefore there
      // is no need to assign it to a local variable
      setValue(req, validatorFactory);

      mCurRequest = null;

      // Clear the current set of valid navigation requests
      mNavRequests = null;

      return DMErrorCodes.NO_ERROR;
   }

   /**
    * Processes a validate() request against this data model.
    *
    * @param iRequest The (<code>DMRequest</code>) being processed.
    *
    * @return A data model error code indicating the result of this
    *         operation.
    */
   @Override
   public int validate(DMRequest iRequest)
   {
      // Assume no processing errors
      int result = DMErrorCodes.NO_ERROR;

      // Create an 'out' variable
      DMProcessingInfo pi = new DMProcessingInfo();

      // Process this request
      result = findElement(iRequest, pi);

      // If we found the 'leaf' element, finish the request
      if (result == DMErrorCodes.NO_ERROR)
      {
         RequestToken tok = iRequest.getNextToken();

         // Before processing, make sure this is the last token in the request
         if (!iRequest.hasMoreTokens())
         {
            // Make sure this is a Value token
            if (tok.getType() == RequestToken.TOKEN_VALUE)
            {
               result = pi.mElement.validate(tok);
            }
            else
            {
               // Wrong type of token -- value expected
               result = DMErrorCodes.INVALID_REQUEST;
            }
         }
         else
         {
            // Too many tokens
            result = DMErrorCodes.INVALID_REQUEST;
         }
      }

      return result;
   }

} // end SCORM_2004_NAV_DM
package com.netflix.conductor.jedis;

import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import redis.clients.jedis.BinaryClient.LIST_POSITION;
import redis.clients.jedis.BitPosParams;
import redis.clients.jedis.GeoCoordinate;
import redis.clients.jedis.GeoRadiusResponse;
import redis.clients.jedis.GeoUnit;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisCommands;
import redis.clients.jedis.JedisSentinelPool;
import redis.clients.jedis.ScanParams;
import redis.clients.jedis.ScanResult;
import redis.clients.jedis.SortingParams;
import redis.clients.jedis.Tuple;
import redis.clients.jedis.params.geo.GeoRadiusParam;
import redis.clients.jedis.params.sortedset.ZAddParams;
import redis.clients.jedis.params.sortedset.ZIncrByParams;

/**
 * {@link JedisCommands} implementation backed by a {@link JedisSentinelPool}.
 *
 * Every method follows the same delegation pattern: borrow a {@link Jedis}
 * connection from the pool, forward the call verbatim, and return the
 * connection to the pool in a finally block (jedis.close() returns a pooled
 * connection rather than destroying it).  No state is held besides the pool,
 * so instances are as thread-safe as the pool itself.
 */
public class JedisClusterSentinel implements JedisCommands {

    /** Sentinel-managed connection pool that all commands borrow from. */
    private final JedisSentinelPool jedisPool;

    public JedisClusterSentinel(JedisSentinelPool jedisPool) {
        this.jedisPool = jedisPool;
    }

    // ---- string commands -------------------------------------------------

    @Override public String set(String key, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.set(key, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String set(String key, String value, String nxxx, String expx, long time) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.set(key, value, nxxx, expx, time); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String set(String key, String value, String nxxx) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.set(key, value, nxxx); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String get(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.get(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Boolean exists(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.exists(key); } finally { if (jedis != null) jedis.close(); }
    }

    // ---- key lifetime commands -------------------------------------------

    @Override public Long persist(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.persist(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String type(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.type(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long expire(String key, int seconds) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.expire(key, seconds); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long pexpire(String key, long milliseconds) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.pexpire(key, milliseconds); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long expireAt(String key, long unixTime) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.expireAt(key, unixTime); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long pexpireAt(String key, long millisecondsTimestamp) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.pexpireAt(key, millisecondsTimestamp); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long ttl(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.ttl(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long pttl(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.pttl(key); } finally { if (jedis != null) jedis.close(); }
    }

    // ---- bit/range commands ----------------------------------------------

    @Override public Boolean setbit(String key, long offset, boolean value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.setbit(key, offset, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Boolean setbit(String key, long offset, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.setbit(key, offset, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Boolean getbit(String key, long offset) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.getbit(key, offset); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long setrange(String key, long offset, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.setrange(key, offset, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String getrange(String key, long startOffset, long endOffset) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.getrange(key, startOffset, endOffset); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String getSet(String key, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.getSet(key, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long setnx(String key, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.setnx(key, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String setex(String key, int seconds, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.setex(key, seconds, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String psetex(String key, long milliseconds, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.psetex(key, milliseconds, value); } finally { if (jedis != null) jedis.close(); }
    }

    // ---- counter commands ------------------------------------------------

    @Override public Long decrBy(String key, long integer) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.decrBy(key, integer); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long decr(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.decr(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long incrBy(String key, long integer) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.incrBy(key, integer); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Double incrByFloat(String key, double value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.incrByFloat(key, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long incr(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.incr(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long append(String key, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.append(key, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String substr(String key, int start, int end) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.substr(key, start, end); } finally { if (jedis != null) jedis.close(); }
    }

    // ---- hash commands ---------------------------------------------------

    @Override public Long hset(String key, String field, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hset(key, field, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String hget(String key, String field) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hget(key, field); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long hsetnx(String key, String field, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hsetnx(key, field, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String hmset(String key, Map<String, String> hash) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hmset(key, hash); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public List<String> hmget(String key, String... fields) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hmget(key, fields); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long hincrBy(String key, String field, long value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hincrBy(key, field, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Double hincrByFloat(String key, String field, double value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hincrByFloat(key, field, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Boolean hexists(String key, String field) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hexists(key, field); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long hdel(String key, String... field) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hdel(key, field); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long hlen(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hlen(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> hkeys(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hkeys(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public List<String> hvals(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hvals(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Map<String, String> hgetAll(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.hgetAll(key); } finally { if (jedis != null) jedis.close(); }
    }

    // ---- list commands ---------------------------------------------------

    @Override public Long rpush(String key, String... string) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.rpush(key, string); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long lpush(String key, String... string) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.lpush(key, string); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long llen(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.llen(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public List<String> lrange(String key, long start, long end) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.lrange(key, start, end); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String ltrim(String key, long start, long end) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.ltrim(key, start, end); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String lindex(String key, long index) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.lindex(key, index); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String lset(String key, long index, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.lset(key, index, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long lrem(String key, long count, String value) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.lrem(key, count, value); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String lpop(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.lpop(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String rpop(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.rpop(key); } finally { if (jedis != null) jedis.close(); }
    }

    // ---- set commands ----------------------------------------------------

    @Override public Long sadd(String key, String... member) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.sadd(key, member); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> smembers(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.smembers(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long srem(String key, String... member) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.srem(key, member); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String spop(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.spop(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> spop(String key, long count) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.spop(key, count); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long scard(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.scard(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Boolean sismember(String key, String member) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.sismember(key, member); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public String srandmember(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.srandmember(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public List<String> srandmember(String key, int count) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.srandmember(key, count); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long strlen(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.strlen(key); } finally { if (jedis != null) jedis.close(); }
    }

    // ---- sorted-set commands ---------------------------------------------

    @Override public Long zadd(String key, double score, String member) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zadd(key, score, member); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long zadd(String key, double score, String member, ZAddParams params) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zadd(key, score, member, params); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long zadd(String key, Map<String, Double> scoreMembers) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zadd(key, scoreMembers); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long zadd(String key, Map<String, Double> scoreMembers, ZAddParams params) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zadd(key, scoreMembers, params); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> zrange(String key, long start, long end) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrange(key, start, end); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long zrem(String key, String... member) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrem(key, member); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Double zincrby(String key, double score, String member) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zincrby(key, score, member); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Double zincrby(String key, double score, String member, ZIncrByParams params) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zincrby(key, score, member, params); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long zrank(String key, String member) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrank(key, member); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long zrevrank(String key, String member) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrevrank(key, member); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> zrevrange(String key, long start, long end) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrevrange(key, start, end); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<Tuple> zrangeWithScores(String key, long start, long end) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrangeWithScores(key, start, end); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<Tuple> zrevrangeWithScores(String key, long start, long end) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrevrangeWithScores(key, start, end); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long zcard(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zcard(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Double zscore(String key, String member) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zscore(key, member); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public List<String> sort(String key) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.sort(key); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public List<String> sort(String key, SortingParams sortingParameters) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.sort(key, sortingParameters); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long zcount(String key, double min, double max) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zcount(key, min, max); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Long zcount(String key, String min, String max) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zcount(key, min, max); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> zrangeByScore(String key, double min, double max) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrangeByScore(key, min, max); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> zrangeByScore(String key, String min, String max) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrangeByScore(key, min, max); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> zrevrangeByScore(String key, double max, double min) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrevrangeByScore(key, max, min); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> zrangeByScore(String key, double min, double max, int offset, int count) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrangeByScore(key, min, max, offset, count); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> zrevrangeByScore(String key, String max, String min) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrevrangeByScore(key, max, min); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> zrangeByScore(String key, String min, String max, int offset, int count) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrangeByScore(key, min, max, offset, count); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> zrevrangeByScore(String key, double max, double min, int offset, int count) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrevrangeByScore(key, max, min, offset, count); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<Tuple> zrangeByScoreWithScores(String key, double min, double max) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrangeByScoreWithScores(key, min, max); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<Tuple> zrevrangeByScoreWithScores(String key, double max, double min) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrevrangeByScoreWithScores(key, max, min); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<Tuple> zrangeByScoreWithScores(String key, double min, double max, int offset, int count) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrangeByScoreWithScores(key, min, max, offset, count); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<String> zrevrangeByScore(String key, String max, String min, int offset, int count) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrevrangeByScore(key, max, min, offset, count); } finally { if (jedis != null) jedis.close(); }
    }

    @Override public Set<Tuple> zrangeByScoreWithScores(String key, String min, String max) {
        Jedis jedis = null;
        try { jedis = jedisPool.getResource(); return jedis.zrangeByScoreWithScores(key,
min, max); } finally { if (jedis != null) jedis.close(); } } @Override public Set<Tuple> zrevrangeByScoreWithScores(String key, String max, String min) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zrevrangeByScoreWithScores(key, max, min); } finally { if (jedis != null) jedis.close(); } } @Override public Set<Tuple> zrangeByScoreWithScores(String key, String min, String max, int offset, int count) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zrangeByScoreWithScores(key, min, max, offset, count); } finally { if (jedis != null) jedis.close(); } } @Override public Set<Tuple> zrevrangeByScoreWithScores(String key, double max, double min, int offset, int count) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zrevrangeByScoreWithScores(key, max, min, offset, count); } finally { if (jedis != null) jedis.close(); } } @Override public Set<Tuple> zrevrangeByScoreWithScores(String key, String max, String min, int offset, int count) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zrevrangeByScoreWithScores(key, max, min, offset, count); } finally { if (jedis != null) jedis.close(); } } @Override public Long zremrangeByRank(String key, long start, long end) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zremrangeByRank(key, start, end); } finally { if (jedis != null) jedis.close(); } } @Override public Long zremrangeByScore(String key, double start, double end) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zremrangeByScore(key, start, end); } finally { if (jedis != null) jedis.close(); } } @Override public Long zremrangeByScore(String key, String start, String end) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zremrangeByScore(key, start, end); } finally { if (jedis != null) jedis.close(); } } @Override public Long zlexcount(String key, String min, String max) { Jedis jedis = null; try { 
jedis = jedisPool.getResource(); return jedis.zlexcount(key, min, max); } finally { if (jedis != null) jedis.close(); } } @Override public Set<String> zrangeByLex(String key, String min, String max) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zrangeByLex(key, min, max); } finally { if (jedis != null) jedis.close(); } } @Override public Set<String> zrangeByLex(String key, String min, String max, int offset, int count) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zrangeByLex(key, min, max, offset, count); } finally { if (jedis != null) jedis.close(); } } @Override public Set<String> zrevrangeByLex(String key, String max, String min) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zrevrangeByLex(key, max, min); } finally { if (jedis != null) jedis.close(); } } @Override public Set<String> zrevrangeByLex(String key, String max, String min, int offset, int count) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zrevrangeByLex(key, max, min, offset, count); } finally { if (jedis != null) jedis.close(); } } @Override public Long zremrangeByLex(String key, String min, String max) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zremrangeByLex(key, min, max); } finally { if (jedis != null) jedis.close(); } } @Override public Long linsert(String key, LIST_POSITION where, String pivot, String value) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.linsert(key, where, pivot, value); } finally { if (jedis != null) jedis.close(); } } @Override public Long lpushx(String key, String... string) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.lpushx(key, string); } finally { if (jedis != null) jedis.close(); } } @Override public Long rpushx(String key, String... 
string) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.rpushx(key, string); } finally { if (jedis != null) jedis.close(); } } @Override @Deprecated public List<String> blpop(String arg) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.blpop(arg); } finally { if (jedis != null) jedis.close(); } } @Override public List<String> blpop(int timeout, String key) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.blpop(timeout, key); } finally { if (jedis != null) jedis.close(); } } @Override @Deprecated public List<String> brpop(String arg) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.brpop(arg); } finally { if (jedis != null) jedis.close(); } } @Override public List<String> brpop(int timeout, String key) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.brpop(timeout, key); } finally { if (jedis != null) jedis.close(); } } @Override public Long del(String key) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.del(key); } finally { if (jedis != null) jedis.close(); } } @Override public String echo(String string) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.echo(string); } finally { if (jedis != null) jedis.close(); } } @Override public Long move(String key, int dbIndex) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.move(key, dbIndex); } finally { if (jedis != null) jedis.close(); } } @Override public Long bitcount(String key) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.bitcount(key); } finally { if (jedis != null) jedis.close(); } } @Override public Long bitcount(String key, long start, long end) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.bitcount(key, start, end); } finally { if (jedis != null) jedis.close(); } } @Override public Long bitpos(String key, boolean value) { Jedis jedis = null; try { jedis = 
jedisPool.getResource(); return jedis.bitpos(key, value); } finally { if (jedis != null) jedis.close(); } } @Override public Long bitpos(String key, boolean value, BitPosParams params) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.bitpos(key, value, params); } finally { if (jedis != null) jedis.close(); } } @Override @Deprecated public ScanResult<Entry<String, String>> hscan(String key, int cursor) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.hscan(key, cursor); } finally { if (jedis != null) jedis.close(); } } @Override @Deprecated public ScanResult<String> sscan(String key, int cursor) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.sscan(key, cursor); } finally { if (jedis != null) jedis.close(); } } @Override @Deprecated public ScanResult<Tuple> zscan(String key, int cursor) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zscan(key, cursor); } finally { if (jedis != null) jedis.close(); } } @Override public ScanResult<Entry<String, String>> hscan(String key, String cursor) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.hscan(key, cursor); } finally { if (jedis != null) jedis.close(); } } @Override public ScanResult<Entry<String, String>> hscan(String key, String cursor, ScanParams params) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.hscan(key, cursor, params); } finally { if (jedis != null) jedis.close(); } } @Override public ScanResult<String> sscan(String key, String cursor) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.sscan(key, cursor); } finally { if (jedis != null) jedis.close(); } } @Override public ScanResult<String> sscan(String key, String cursor, ScanParams params) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.sscan(key, cursor, params); } finally { if (jedis != null) jedis.close(); } } @Override public ScanResult<Tuple> 
zscan(String key, String cursor) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zscan(key, cursor); } finally { if (jedis != null) jedis.close(); } } @Override public ScanResult<Tuple> zscan(String key, String cursor, ScanParams params) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.zscan(key, cursor, params); } finally { if (jedis != null) jedis.close(); } } @Override public Long pfadd(String key, String... elements) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.pfadd(key, elements); } finally { if (jedis != null) jedis.close(); } } @Override public long pfcount(String key) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.pfcount(key); } finally { if (jedis != null) jedis.close(); } } @Override public Long geoadd(String key, double longitude, double latitude, String member) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.geoadd(key, longitude, latitude, member); } finally { if (jedis != null) jedis.close(); } } @Override public Long geoadd(String key, Map<String, GeoCoordinate> memberCoordinateMap) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.geoadd(key, memberCoordinateMap); } finally { if (jedis != null) jedis.close(); } } @Override public Double geodist(String key, String member1, String member2) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.geodist(key, member1, member2); } finally { if (jedis != null) jedis.close(); } } @Override public Double geodist(String key, String member1, String member2, GeoUnit unit) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.geodist(key, member1, member2, unit); } finally { if (jedis != null) jedis.close(); } } @Override public List<String> geohash(String key, String... 
members) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.geohash(key, members); } finally { if (jedis != null) jedis.close(); } } @Override public List<GeoCoordinate> geopos(String key, String... members) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.geopos(key, members); } finally { if (jedis != null) jedis.close(); } } @Override public List<GeoRadiusResponse> georadius(String key, double longitude, double latitude, double radius, GeoUnit unit) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.georadius(key, longitude, latitude, radius, unit); } finally { if (jedis != null) jedis.close(); } } @Override public List<GeoRadiusResponse> georadius(String key, double longitude, double latitude, double radius, GeoUnit unit, GeoRadiusParam param) { return null; } @Override public List<GeoRadiusResponse> georadiusByMember(String key, String member, double radius, GeoUnit unit) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.georadiusByMember(key, member, radius, unit); } finally { if (jedis != null) jedis.close(); } } @Override public List<GeoRadiusResponse> georadiusByMember(String key, String member, double radius, GeoUnit unit, GeoRadiusParam param) { return null; } @Override public List<Long> bitfield(String key, String... arguments) { Jedis jedis = null; try { jedis = jedisPool.getResource(); return jedis.bitfield(key, arguments); } finally { if (jedis != null) jedis.close(); } } }
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package android.speech;

import android.annotation.SdkConstant;
import android.annotation.SdkConstant.SdkConstantType;
import android.app.Service;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Binder;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.RemoteException;
import android.util.Log;

import java.lang.ref.WeakReference;

/**
 * This class provides a base class for recognition service implementations. This class should be
 * extended only in case you wish to implement a new speech recognizer. Please note that the
 * implementation of this service is stateless.
 */
public abstract class RecognitionService extends Service {
    /**
     * The {@link Intent} that must be declared as handled by the service.
     */
    @SdkConstant(SdkConstantType.SERVICE_ACTION)
    public static final String SERVICE_INTERFACE = "android.speech.RecognitionService";

    /**
     * Name under which a RecognitionService component publishes information about itself.
     * This meta-data should reference an XML resource containing a
     * <code>&lt;{@link android.R.styleable#RecognitionService recognition-service}&gt;</code> tag.
     */
    public static final String SERVICE_META_DATA = "android.speech";

    /** Log messages identifier */
    private static final String TAG = "RecognitionService";

    /** Debugging flag */
    private static final boolean DBG = false;

    /** Binder of the recognition service */
    private RecognitionServiceBinder mBinder = new RecognitionServiceBinder(this);

    /**
     * The current callback of an application that invoked the
     * {@link RecognitionService#onStartListening(Intent, Callback)} method.
     * Null means the service is idle; only one client may be active at a time.
     */
    private Callback mCurrentCallback = null;

    // Message codes used to funnel all binder requests through mHandler.
    private static final int MSG_START_LISTENING = 1;
    private static final int MSG_STOP_LISTENING = 2;
    private static final int MSG_CANCEL = 3;
    private static final int MSG_RESET = 4;

    /**
     * Serializes all client requests (start/stop/cancel/reset) by dispatching
     * them as messages, so the dispatch* methods below never run concurrently.
     */
    private final Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case MSG_START_LISTENING:
                    StartListeningArgs args = (StartListeningArgs) msg.obj;
                    dispatchStartListening(args.mIntent, args.mListener, args.mCallingUid);
                    break;
                case MSG_STOP_LISTENING:
                    dispatchStopListening((IRecognitionListener) msg.obj);
                    break;
                case MSG_CANCEL:
                    dispatchCancel((IRecognitionListener) msg.obj);
                    break;
                case MSG_RESET:
                    dispatchClearCallback();
                    break;
            }
        }
    };

    /**
     * Starts a recognition session for {@code listener} if the service is idle;
     * otherwise rejects the call with ERROR_RECOGNIZER_BUSY.
     */
    private void dispatchStartListening(Intent intent, final IRecognitionListener listener,
            int callingUid) {
        if (mCurrentCallback == null) {
            if (DBG) Log.d(TAG, "created new mCurrentCallback, listener = " + listener.asBinder());
            try {
                // If the client process dies mid-session, post a cancel so the
                // service state is cleaned up instead of staying busy forever.
                listener.asBinder().linkToDeath(new IBinder.DeathRecipient() {
                    @Override
                    public void binderDied() {
                        mHandler.sendMessage(mHandler.obtainMessage(MSG_CANCEL, listener));
                    }
                }, 0);
            } catch (RemoteException re) {
                // Client already dead before we could register; nothing to start.
                Log.e(TAG, "dead listener on startListening");
                return;
            }
            mCurrentCallback = new Callback(listener, callingUid);
            RecognitionService.this.onStartListening(intent, mCurrentCallback);
        } else {
            try {
                listener.onError(SpeechRecognizer.ERROR_RECOGNIZER_BUSY);
            } catch (RemoteException e) {
                Log.d(TAG, "onError call from startListening failed");
            }
            Log.i(TAG, "concurrent startListening received - ignoring this call");
        }
    }

    /**
     * Stops the active session, but only when the caller is the same client
     * that started it; mismatched or stray callers just get an error callback.
     */
    private void dispatchStopListening(IRecognitionListener listener) {
        try {
            if (mCurrentCallback == null) {
                listener.onError(SpeechRecognizer.ERROR_CLIENT);
                Log.w(TAG, "stopListening called with no preceding startListening - ignoring");
            } else if (mCurrentCallback.mListener.asBinder() != listener.asBinder()) {
                listener.onError(SpeechRecognizer.ERROR_RECOGNIZER_BUSY);
                Log.w(TAG, "stopListening called by other caller than startListening - ignoring");
            } else { // the correct state
                RecognitionService.this.onStopListening(mCurrentCallback);
            }
        } catch (RemoteException e) { // occurs if onError fails
            Log.d(TAG, "onError call from stopListening failed");
        }
    }

    /**
     * Cancels the active session for the client that started it; clears the
     * callback so a new client can start listening.
     */
    private void dispatchCancel(IRecognitionListener listener) {
        if (mCurrentCallback == null) {
            if (DBG) Log.d(TAG, "cancel called with no preceding startListening - ignoring");
        } else if (mCurrentCallback.mListener.asBinder() != listener.asBinder()) {
            Log.w(TAG, "cancel called by client who did not call startListening - ignoring");
        } else { // the correct state
            RecognitionService.this.onCancel(mCurrentCallback);
            mCurrentCallback = null;
            if (DBG) Log.d(TAG, "canceling - setting mCurrentCallback to null");
        }
    }

    /** Returns the service to the idle state (posted via MSG_RESET). */
    private void dispatchClearCallback() {
        mCurrentCallback = null;
    }

    /** Immutable argument bundle for a MSG_START_LISTENING message. */
    private class StartListeningArgs {
        public final Intent mIntent;
        public final IRecognitionListener mListener;
        public final int mCallingUid;

        public StartListeningArgs(Intent intent, IRecognitionListener listener, int callingUid) {
            this.mIntent = intent;
            this.mListener = listener;
            this.mCallingUid = callingUid;
        }
    }

    /**
     * Checks whether the caller has sufficient permissions
     *
     * @param listener to send the error message to in case of error
     * @return {@code true} if the caller has enough permissions, {@code false} otherwise
     */
    private boolean checkPermissions(IRecognitionListener listener) {
        if (DBG) Log.d(TAG, "checkPermissions");
        if (RecognitionService.this.checkCallingOrSelfPermission(
                android.Manifest.permission.RECORD_AUDIO) == PackageManager.PERMISSION_GRANTED) {
            return true;
        }
        try {
            Log.e(TAG, "call for recognition service without RECORD_AUDIO permissions");
            listener.onError(SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS);
        } catch (RemoteException re) {
            Log.e(TAG, "sending ERROR_INSUFFICIENT_PERMISSIONS message failed", re);
        }
        return false;
    }

    /**
     * Notifies the service that it should start listening for speech.
     *
     * @param recognizerIntent contains parameters for the recognition to be performed. The intent
     *        may also contain optional extras, see {@link RecognizerIntent}. If these values are
     *        not set explicitly, default values should be used by the recognizer.
     * @param listener that will receive the service's callbacks
     */
    protected abstract void onStartListening(Intent recognizerIntent, Callback listener);

    /**
     * Notifies the service that it should cancel the speech recognition.
     */
    protected abstract void onCancel(Callback listener);

    /**
     * Notifies the service that it should stop listening for speech. Speech captured so far should
     * be recognized as if the user had stopped speaking at this point. This method is only called
     * if the application calls it explicitly.
     */
    protected abstract void onStopListening(Callback listener);

    @Override
    public final IBinder onBind(final Intent intent) {
        if (DBG) Log.d(TAG, "onBind, intent=" + intent);
        return mBinder;
    }

    @Override
    public void onDestroy() {
        if (DBG) Log.d(TAG, "onDestroy");
        mCurrentCallback = null;
        // Drop the binder's back-reference so a leaked binder cannot keep this
        // destroyed Service instance reachable.
        mBinder.clearReference();
        super.onDestroy();
    }

    /**
     * This class receives callbacks from the speech recognition service and forwards them to the
     * user. An instance of this class is passed to the
     * {@link RecognitionService#onStartListening(Intent, Callback)} method. Recognizers may call
     * these methods on any thread.
     */
    public class Callback {
        private final IRecognitionListener mListener;
        private final int mCallingUid;

        private Callback(IRecognitionListener listener, int callingUid) {
            mListener = listener;
            mCallingUid = callingUid;
        }

        /**
         * The service should call this method when the user has started to speak.
         */
        public void beginningOfSpeech() throws RemoteException {
            if (DBG) Log.d(TAG, "beginningOfSpeech");
            mListener.onBeginningOfSpeech();
        }

        /**
         * The service should call this method when sound has been received. The purpose of this
         * function is to allow giving feedback to the user regarding the captured audio.
         *
         * @param buffer a buffer containing a sequence of big-endian 16-bit integers representing a
         *        single channel audio stream. The sample rate is implementation dependent.
         */
        public void bufferReceived(byte[] buffer) throws RemoteException {
            mListener.onBufferReceived(buffer);
        }

        /**
         * The service should call this method after the user stops speaking.
         */
        public void endOfSpeech() throws RemoteException {
            mListener.onEndOfSpeech();
        }

        /**
         * The service should call this method when a network or recognition error occurred.
         *
         * @param error code is defined in {@link SpeechRecognizer}
         */
        public void error(int error) throws RemoteException {
            // A terminal event: post a reset so the service becomes idle again.
            Message.obtain(mHandler, MSG_RESET).sendToTarget();
            mListener.onError(error);
        }

        /**
         * The service should call this method when partial recognition results are available. This
         * method can be called at any time between {@link #beginningOfSpeech()} and
         * {@link #results(Bundle)} when partial results are ready. This method may be called zero,
         * one or multiple times for each call to {@link SpeechRecognizer#startListening(Intent)},
         * depending on the speech recognition service implementation.
         *
         * @param partialResults the returned results. To retrieve the results in
         *        ArrayList&lt;String&gt; format use {@link Bundle#getStringArrayList(String)} with
         *        {@link SpeechRecognizer#RESULTS_RECOGNITION} as a parameter
         */
        public void partialResults(Bundle partialResults) throws RemoteException {
            mListener.onPartialResults(partialResults);
        }

        /**
         * The service should call this method when the endpointer is ready for the user to start
         * speaking.
         *
         * @param params parameters set by the recognition service. Reserved for future use.
         */
        public void readyForSpeech(Bundle params) throws RemoteException {
            mListener.onReadyForSpeech(params);
        }

        /**
         * The service should call this method when recognition results are ready.
         *
         * @param results the recognition results. To retrieve the results in {@code
         *        ArrayList<String>} format use {@link Bundle#getStringArrayList(String)} with
         *        {@link SpeechRecognizer#RESULTS_RECOGNITION} as a parameter
         */
        public void results(Bundle results) throws RemoteException {
            // A terminal event: post a reset so the service becomes idle again.
            Message.obtain(mHandler, MSG_RESET).sendToTarget();
            mListener.onResults(results);
        }

        /**
         * The service should call this method when the sound level in the audio stream has changed.
         * There is no guarantee that this method will be called.
         *
         * @param rmsdB the new RMS dB value
         */
        public void rmsChanged(float rmsdB) throws RemoteException {
            mListener.onRmsChanged(rmsdB);
        }

        /**
         * Return the Linux uid assigned to the process that sent you the current transaction that
         * is being processed. This is obtained from {@link Binder#getCallingUid()}.
         */
        public int getCallingUid() {
            return mCallingUid;
        }
    }

    /**
     * Binder of the recognition service. Holds the service via a WeakReference
     * so incoming binder calls after onDestroy() become no-ops rather than
     * keeping the Service alive.
     */
    private static final class RecognitionServiceBinder extends IRecognitionService.Stub {
        private final WeakReference<RecognitionService> mServiceRef;

        public RecognitionServiceBinder(RecognitionService service) {
            mServiceRef = new WeakReference<RecognitionService>(service);
        }

        @Override
        public void startListening(Intent recognizerIntent, IRecognitionListener listener) {
            if (DBG) Log.d(TAG, "startListening called by:" + listener.asBinder());
            final RecognitionService service = mServiceRef.get();
            if (service != null && service.checkPermissions(listener)) {
                service.mHandler.sendMessage(Message.obtain(service.mHandler,
                        MSG_START_LISTENING, service.new StartListeningArgs(
                                recognizerIntent, listener, Binder.getCallingUid())));
            }
        }

        @Override
        public void stopListening(IRecognitionListener listener) {
            if (DBG) Log.d(TAG, "stopListening called by:" + listener.asBinder());
            final RecognitionService service = mServiceRef.get();
            if (service != null && service.checkPermissions(listener)) {
                service.mHandler.sendMessage(Message.obtain(service.mHandler,
                        MSG_STOP_LISTENING, listener));
            }
        }

        @Override
        public void cancel(IRecognitionListener listener) {
            if (DBG) Log.d(TAG, "cancel called by:" + listener.asBinder());
            final RecognitionService service = mServiceRef.get();
            if (service != null && service.checkPermissions(listener)) {
                service.mHandler.sendMessage(Message.obtain(service.mHandler,
                        MSG_CANCEL, listener));
            }
        }

        /** Severs the weak back-reference; called from the service's onDestroy(). */
        public void clearReference() {
            mServiceRef.clear();
        }
    }
}
/* The following code was generated by JFlex 1.4.3 on 03.01.12 11:50 */ /* * Copyright (c) 2008-2012, Matthias Mann * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of Matthias Mann nor the names of its contributors may * be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

package de.matthiasmann.twl.textarea;

/**
 * This class is a scanner generated by <a href="http://www.jflex.de/">JFlex</a>
 * 1.4.3 on 03.01.12 11:50 from the specification file <tt>parser.flex</tt>
 *
 * NOTE(review): this is machine-generated code. Do not edit it by hand;
 * regenerate it from <tt>parser.flex</tt> instead. Comments below were added
 * for readability only.
 */
class Parser {

    /** This character denotes the end of file */
    public static final int YYEOF = -1;

    /** initial size of the lookahead buffer */
    private static final int ZZ_BUFFERSIZE = 16384;

    /**
     * Lexical states. Note the values are 2*k on purpose: the generated
     * ZZ_LEXSTATE table was replaced by the expression
     * {@code zzState = zzLexicalState / 2} in yylex() (see commented-out
     * table below).
     */
    public static final int YYSTRING1 = 6;
    public static final int YYINITIAL = 0;
    public static final int YYSTYLE = 2;
    public static final int YYVALUE = 4;
    public static final int YYSTRING2 = 8;

    /**
     * ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l
     * ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l at the
     * beginning of a line l is of the form l = 2*k, k a non negative integer
     */
    // private static final int ZZ_LEXSTATE[] = {
    // 0, 0, 1, 1, 2, 2, 3, 3, 4, 4
    // };

    /**
     * Translates characters to character classes (run-length encoded:
     * pairs of count, value — see zzUnpackCMap).
     */
    private static final String ZZ_CMAP_PACKED = "\11\0\1\3\1\2\1\0\1\3\1\1\22\0\1\3\1\0\1\23"
            + "\1\14\3\0\1\22\2\0\1\5\1\0\1\12\1\6\1\11\1\4"
            + "\12\10\1\15\1\21\2\0\1\13\1\0\1\16\32\7\4\0\1\7"
            + "\1\0\32\7\1\17\1\0\1\20\uff82\0";

    /**
     * Translates characters to character classes
     */
    private static final char[] ZZ_CMAP = zzUnpackCMap(ZZ_CMAP_PACKED);

    /**
     * Translates DFA states to action switch labels.
     */
    private static final int[] ZZ_ACTION = zzUnpackAction();

    private static final String ZZ_ACTION_PACKED_0 = "\5\0\1\1\1\2\1\1\1\3\1\1\1\4\1\5"
            + "\1\6\1\7\1\10\1\11\1\12\1\13\2\14\1\1"
            + "\1\15\1\16\1\17\1\20\1\21\1\22\1\23\1\20"
            + "\1\24\1\20\1\25\4\0";

    private static int[] zzUnpackAction() {
        int[] result = new int[36];
        zzUnpackAction(ZZ_ACTION_PACKED_0, 0, result);
        return result;
    }

    // Unpacks run-length encoded (count, value) char pairs into result.
    private static int zzUnpackAction(String packed, int offset, int[] result) {
        int i = 0; /* index in packed string */
        int j = offset; /* index in unpacked array */
        int l = packed.length();
        while (i < l) {
            int count = packed.charAt(i++);
            int value = packed.charAt(i++);
            do
                result[j++] = value;
            while (--count > 0);
        }
        return j;
    }

    /**
     * Translates a state to a row index in the transition table
     */
    private static final int[] ZZ_ROWMAP = zzUnpackRowMap();

    private static final String ZZ_ROWMAP_PACKED_0 = "\0\0\0\24\0\50\0\74\0\120\0\144\0\170\0\214"
            + "\0\144\0\240\0\264\0\144\0\144\0\144\0\144\0\144"
            + "\0\144\0\144\0\310\0\144\0\334\0\360\0\144\0\144"
            + "\0\u0104\0\144\0\144\0\144\0\u0118\0\144\0\u012c\0\144"
            + "\0\u0140\0\u0154\0\u0168\0\u017c";

    private static int[] zzUnpackRowMap() {
        int[] result = new int[36];
        zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, 0, result);
        return result;
    }

    // Row map values are stored as (high char, low char) pairs forming an int.
    private static int zzUnpackRowMap(String packed, int offset, int[] result) {
        int i = 0; /* index in packed string */
        int j = offset; /* index in unpacked array */
        int l = packed.length();
        while (i < l) {
            int high = packed.charAt(i++) << 16;
            result[j++] = high | packed.charAt(i++);
        }
        return j;
    }

    /**
     * The transition table of the DFA
     */
    private static final int[] ZZ_TRANS = zzUnpackTrans();

    private static final String ZZ_TRANS_PACKED_0 = "\1\6\3\7\1\10\1\11\1\12\1\13\1\6\1\14"
            + "\1\15\1\16\1\17\1\20\1\21\1\22\5\6\1\23"
            + "\2\24\1\10\1\6\1\25\1\26\5\6\1\27\2\6"
            + "\1\30\3\6\20\31\1\30\1\32\1\33\1\34\22\35"
            + "\1\36\1\35\23\37\1\40\25\0\3\7\25\0\1\41"
            + "\25\0\1\13\22\0\3\13\15\0\1\24\30\0\1\26"
            + "\22\0\3\26\13\0\20\31\4\0\22\35\1\0\1\35"
            + "\23\37\1\0\5\42\1\43\23\42\1\44\16\42\4\0"
            + "\1\24\1\43\16\0\4\42\1\24\1\44\16\42";

    private static int[] zzUnpackTrans() {
        int[] result = new int[400];
        zzUnpackTrans(ZZ_TRANS_PACKED_0, 0, result);
        return result;
    }

    // Same RLE scheme as zzUnpackAction, but values are stored off-by-one
    // so that 0 in the packed string can represent the "no transition" (-1).
    private static int zzUnpackTrans(String packed, int offset, int[] result) {
        int i = 0; /* index in packed string */
        int j = offset; /* index in unpacked array */
        int l = packed.length();
        while (i < l) {
            int count = packed.charAt(i++);
            int value = packed.charAt(i++);
            value--;
            do
                result[j++] = value;
            while (--count > 0);
        }
        return j;
    }

    /**
     * ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
     * (bit 0 = accepting state, bit 3 = "stop scanning immediately").
     */
    private static final int[] ZZ_ATTRIBUTE = zzUnpackAttribute();

    private static final String ZZ_ATTRIBUTE_PACKED_0 = "\5\0\1\11\2\1\1\11\2\1\7\11\1\1\1\11"
            + "\2\1\2\11\1\1\3\11\1\1\1\11\1\1\1\11"
            + "\4\0";

    private static int[] zzUnpackAttribute() {
        int[] result = new int[36];
        zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, 0, result);
        return result;
    }

    private static int zzUnpackAttribute(String packed, int offset, int[] result) {
        int i = 0; /* index in packed string */
        int j = offset; /* index in unpacked array */
        int l = packed.length();
        while (i < l) {
            int count = packed.charAt(i++);
            int value = packed.charAt(i++);
            do
                result[j++] = value;
            while (--count > 0);
        }
        return j;
    }

    /** the input device */
    private java.io.Reader zzReader;

    /** the current state of the DFA */
    private int zzState;

    /** the current lexical state */
    private int zzLexicalState = YYINITIAL;

    /**
     * this buffer contains the current text to be matched and is the source of
     * the yytext() string
     */
    private char zzBuffer[] = new char[ZZ_BUFFERSIZE];

    /** the textposition at the last accepting state */
    private int zzMarkedPos;

    /** the current text position in the buffer */
    private int zzCurrentPos;

    /** startRead marks the beginning of the yytext() string in the buffer */
    private int zzStartRead;

    /**
     * endRead marks the last character in the buffer, that has been read from
     * input
     */
    private int zzEndRead;

    /** number of newlines encountered up to the start of the matched text */
    private int yyline;

    /**
     * the number of characters from the last newline up to the start of the
     * matched text
     */
    private int yycolumn;

    /** zzAtEOF == true <=> the scanner is at the EOF */
    private boolean zzAtEOF;

    /* user code: */
    // Token type constants returned by yylex() (CSS-like selector grammar).
    static final int EOF = 0;
    static final int IDENT = 1;
    static final int STAR = 2;
    static final int DOT = 3;
    static final int HASH = 4;
    static final int GT = 5;
    static final int COMMA = 6;
    static final int STYLE_BEGIN = 7;
    static final int STYLE_END = 8;
    static final int COLON = 9;
    static final int SEMICOLON = 10;
    static final int ATRULE = 11;

    // Set by the whitespace/STAR/IDENT actions; presumably consumed by the
    // parser that drives this scanner — TODO confirm against the caller.
    boolean sawWhitespace;
    // Accumulates property-value / string text across tokens.
    final StringBuilder sb = new StringBuilder();

    // Appends the current match (yytext()) to sb without allocating a String.
    private void append() {
        sb.append(zzBuffer, zzStartRead, zzMarkedPos - zzStartRead);
    }

    // Reports the current match as a syntax error with line/column info.
    public void unexpected() throws java.io.IOException {
        throw new java.io.IOException("Unexpected \"" + yytext() + "\" at line " + yyline + ", column " + yycolumn);
    }

    // Consumes the next token and fails unless it is the expected one.
    public void expect(int token) throws java.io.IOException {
        if (yylex() != token)
            unexpected();
    }

    /**
     * Creates a new scanner There is also a java.io.InputStream version of this
     * constructor.
     *
     * @param in
     *            the java.io.Reader to read input from.
     */
    Parser(java.io.Reader in) {
        this.zzReader = in;
    }

    /**
     * Unpacks the compressed character translation table.
     *
     * Note: the loop bound 72 is the hard-coded length of ZZ_CMAP_PACKED
     * (generated constant); the remainder of the 0x10000-entry map stays 0.
     *
     * @param packed
     *            the packed character translation table
     * @return the unpacked character translation table
     */
    private static char[] zzUnpackCMap(String packed) {
        char[] map = new char[0x10000];
        int i = 0; /* index in packed string */
        int j = 0; /* index in unpacked array */
        while (i < 72) {
            int count = packed.charAt(i++);
            char value = packed.charAt(i++);
            do
                map[j++] = value;
            while (--count > 0);
        }
        return map;
    }

    /**
     * Refills the input buffer.
     *
     * @return <code>false</code>, iff there was new input.
     *
     * @exception java.io.IOException
     *                if any I/O-Error occurs
     */
    private boolean zzRefill() throws java.io.IOException {

        /* first: make room (if you can) */
        if (zzStartRead > 0) {
            System.arraycopy(zzBuffer, zzStartRead, zzBuffer, 0, zzEndRead - zzStartRead);

            /* translate stored positions */
            zzEndRead -= zzStartRead;
            zzCurrentPos -= zzStartRead;
            zzMarkedPos -= zzStartRead;
            zzStartRead = 0;
        }

        /* is the buffer big enough? */
        if (zzCurrentPos >= zzBuffer.length) {
            /* if not: blow it up */
            char newBuffer[] = new char[zzCurrentPos * 2];
            System.arraycopy(zzBuffer, 0, newBuffer, 0, zzBuffer.length);
            zzBuffer = newBuffer;
        }

        /* finally: fill the buffer with new input */
        int numRead = zzReader.read(zzBuffer, zzEndRead, zzBuffer.length - zzEndRead);

        if (numRead > 0) {
            zzEndRead += numRead;
            return false;
        }
        // unlikely but not impossible: read 0 characters, but not at end of
        // stream
        if (numRead == 0) {
            int c = zzReader.read();
            if (c == -1) {
                return true;
            } else {
                zzBuffer[zzEndRead++] = (char) c;
                return false;
            }
        }

        // numRead < 0
        return true;
    }

    /**
     * Enters a new lexical state
     *
     * @param newState
     *            the new lexical state
     */
    public final void yybegin(int newState) {
        zzLexicalState = newState;
    }

    /**
     * Returns the text matched by the current regular expression.
     */
    public final String yytext() {
        return new String(zzBuffer, zzStartRead, zzMarkedPos - zzStartRead);
    }

    /**
     * Reports an error that occurred while scanning.
     *
     * In a well-formed scanner (no or only correct usage of yypushback(int) and
     * a match-all fallback rule) this method will only be called with things
     * that "Can't Possibly Happen". If this method is called, something is
     * seriously wrong (e.g. a JFlex bug producing a faulty scanner etc.).
     *
     * Usual syntax/scanner level error handling should be done in error
     * fallback rules.
     *
     * @param message
     *            the errormessage to display
     */
    private void zzScanError(String message) {
        throw new Error(message);
    }

    /**
     * Resumes scanning until the next regular expression is matched, the end of
     * input is encountered or an I/O-Error occurs.
     *
     * @return the next token
     * @exception java.io.IOException
     *                if any I/O-Error occurs
     */
    public int yylex() throws java.io.IOException {
        int zzInput;
        int zzAction;

        // cached fields:
        int zzCurrentPosL;
        int zzMarkedPosL;
        int zzEndReadL = zzEndRead;
        char[] zzBufferL = zzBuffer;
        char[] zzCMapL = ZZ_CMAP;

        int[] zzTransL = ZZ_TRANS;
        int[] zzRowMapL = ZZ_ROWMAP;
        int[] zzAttrL = ZZ_ATTRIBUTE;

        while (true) {
            zzMarkedPosL = zzMarkedPos;

            // Line/column accounting over the previously matched text.
            // zzR tracks a pending '\r' so "\r\n" counts as one newline.
            boolean zzR = false;
            for (zzCurrentPosL = zzStartRead; zzCurrentPosL < zzMarkedPosL; zzCurrentPosL++) {
                switch (zzBufferL[zzCurrentPosL]) {
                    case '\u000B':
                    case '\u000C':
                    case '\u0085':
                    case '\u2028':
                    case '\u2029':
                        yyline++;
                        yycolumn = 0;
                        zzR = false;
                        break;
                    case '\r':
                        yyline++;
                        yycolumn = 0;
                        zzR = true;
                        break;
                    case '\n':
                        if (zzR)
                            zzR = false;
                        else {
                            yyline++;
                            yycolumn = 0;
                        }
                        break;
                    default:
                        zzR = false;
                        yycolumn++;
                }
            }

            if (zzR) {
                // peek one character ahead if it is \n (if we have counted one
                // line too much)
                boolean zzPeek;
                if (zzMarkedPosL < zzEndReadL)
                    zzPeek = zzBufferL[zzMarkedPosL] == '\n';
                else if (zzAtEOF)
                    zzPeek = false;
                else {
                    boolean eof = zzRefill();
                    zzEndReadL = zzEndRead;
                    zzMarkedPosL = zzMarkedPos;
                    zzBufferL = zzBuffer;
                    if (eof)
                        zzPeek = false;
                    else
                        zzPeek = zzBufferL[zzMarkedPosL] == '\n';
                }
                if (zzPeek)
                    yyline--;
            }
            zzAction = -1;

            zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;

            // zzState = ZZ_LEXSTATE[zzLexicalState];
            // Hand-optimized replacement for the lookup table (states are 2*k).
            zzState = zzLexicalState / 2;

            // DFA main loop: follow transitions until no transition exists,
            // remembering the last accepting state in zzAction.
            zzForAction: {
                while (true) {

                    if (zzCurrentPosL < zzEndReadL)
                        zzInput = zzBufferL[zzCurrentPosL++];
                    else if (zzAtEOF) {
                        zzInput = YYEOF;
                        break zzForAction;
                    } else {
                        // store back cached positions
                        zzCurrentPos = zzCurrentPosL;
                        zzMarkedPos = zzMarkedPosL;
                        boolean eof = zzRefill();
                        // get translated positions and possibly new buffer
                        zzCurrentPosL = zzCurrentPos;
                        zzMarkedPosL = zzMarkedPos;
                        zzBufferL = zzBuffer;
                        zzEndReadL = zzEndRead;
                        if (eof) {
                            zzInput = YYEOF;
                            break zzForAction;
                        } else {
                            zzInput = zzBufferL[zzCurrentPosL++];
                        }
                    }
                    int zzNext = zzTransL[zzRowMapL[zzState] + zzCMapL[zzInput]];
                    if (zzNext == -1)
                        break zzForAction;
                    zzState = zzNext;

                    int zzAttributes = zzAttrL[zzState];
                    if ((zzAttributes & 1) == 1) {
                        zzAction = zzState;
                        zzMarkedPosL = zzCurrentPosL;
                        if ((zzAttributes & 8) == 8)
                            break zzForAction;
                    }

                }
            }

            // store back cached position
            zzMarkedPos = zzMarkedPosL;

            // Dispatch the action for the matched rule. The "case N: { ... }
            // case M: break;" pairs are a JFlex code-generation idiom: each
            // action deliberately falls through to the following break label.
            switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
                case 6: {
                    return COMMA;
                }
                case 22:
                    break;
                case 20: {
                    yybegin(YYVALUE);
                    sb.append('\'');
                }
                case 23:
                    break;
                case 10: {
                    return ATRULE;
                }
                case 24:
                    break;
                case 3: {
                    sawWhitespace = false;
                    return STAR;
                }
                case 25:
                    break;
                case 18: {
                    yybegin(YYSTRING1);
                    sb.append('\'');
                }
                case 26:
                    break;
                case 19: {
                    yybegin(YYSTRING2);
                    sb.append('\"');
                }
                case 27:
                    break;
                case 16: {
                    append();
                }
                case 28:
                    break;
                case 4: {
                    sawWhitespace = false;
                    return IDENT;
                }
                case 29:
                    break;
                case 21: {
                    yybegin(YYVALUE);
                    sb.append('\"');
                }
                case 30:
                    break;
                case 9: {
                    return COLON;
                }
                case 31:
                    break;
                case 2: {
                    sawWhitespace = true;
                }
                case 32:
                    break;
                case 15: {
                    yybegin(YYINITIAL);
                    return STYLE_END;
                }
                case 33:
                    break;
                case 17: {
                    yybegin(YYSTYLE);
                    return SEMICOLON;
                }
                case 34:
                    break;
                case 14: {
                    yybegin(YYVALUE);
                    sb.setLength(0);
                    return COLON;
                }
                case 35:
                    break;
                case 7: {
                    return GT;
                }
                case 36:
                    break;
                case 11: {
                    yybegin(YYSTYLE);
                    return STYLE_BEGIN;
                }
                case 37:
                    break;
                case 13: {
                    return IDENT;
                }
                case 38:
                    break;
                case 1: {
                    unexpected();
                }
                case 39:
                    break;
                case 5: {
                    return DOT;
                }
                case 40:
                    break;
                case 8: {
                    return HASH;
                }
                case 41:
                    break;
                case 12: { /* ignore */
                }
                case 42:
                    break;
                default:
                    if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
                        zzAtEOF = true;
                        {
                            return EOF;
                        }
                    } else {
                        zzScanError("Error: could not match input");
                    }
            }
        }
    }
}
/*
 * Copyright 2014 Attila Szegedi, Daniel Dekany, Jonathan Revusky
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package freemarker.core;

import java.util.Date;

import freemarker.ext.beans.BeanModel;
import freemarker.ext.beans._BeansAPI;
import freemarker.template.TemplateBooleanModel;
import freemarker.template.TemplateCollectionModel;
import freemarker.template.TemplateDateModel;
import freemarker.template.TemplateException;
import freemarker.template.TemplateModel;
import freemarker.template.TemplateModelException;
import freemarker.template.TemplateNumberModel;
import freemarker.template.TemplateScalarModel;
import freemarker.template.TemplateSequenceModel;

/**
 * Internally used static utilities for evaluation expressions.
 */
class EvalUtil {
    static final int CMP_OP_EQUALS = 1;
    static final int CMP_OP_NOT_EQUALS = 2;
    static final int CMP_OP_LESS_THAN = 3;
    static final int CMP_OP_GREATER_THAN = 4;
    static final int CMP_OP_LESS_THAN_EQUALS = 5;
    static final int CMP_OP_GREATER_THAN_EQUALS = 6;
    // If you add a new operator here, update the "compare" and "cmpOpToString" methods!

    // Prevents instantiation.
    private EvalUtil() { }

    /**
     * Extracts the {@link String} stored in a scalar model, failing loudly on a stored null.
     *
     * @param expr {@code null} is allowed, but may result in less helpful error messages
     * @param env {@code null} is allowed, but may result in lower performance in classic-compatible mode
     */
    static String modelToString(TemplateScalarModel model, Expression expr, Environment env)
    throws TemplateModelException {
        String value = model.getAsString();
        if (value == null) {
            if (env == null) env = Environment.getCurrentEnvironment();
            if (env != null && env.isClassicCompatible()) {
                // Classic-compatible mode silently treats a stored null as "".
                return "";
            } else {
                throw newModelHasStoredNullException(String.class, model, expr);
            }
        }
        return value;
    }

    /**
     * Extracts the {@link Number} stored in a number model, failing loudly on a stored null.
     *
     * @param expr {@code null} is allowed, but may result in less helpful error messages
     */
    static Number modelToNumber(TemplateNumberModel model, Expression expr)
        throws TemplateModelException {
        Number value = model.getAsNumber();
        if (value == null) throw newModelHasStoredNullException(Number.class, model, expr);
        return value;
    }

    /**
     * Extracts the {@link Date} stored in a date model, failing loudly on a stored null.
     *
     * @param expr {@code null} is allowed, but may result in less helpful error messages
     */
    static Date modelToDate(TemplateDateModel model, Expression expr)
        throws TemplateModelException {
        Date value = model.getAsDate();
        if (value == null) throw newModelHasStoredNullException(Date.class, model, expr);
        return value;
    }

    /** Signals the buggy case where we have a non-null model, but its wraps a null. */
    private static TemplateModelException newModelHasStoredNullException(
            Class expected, TemplateModel model, Expression expr) {
        return new _TemplateModelException(expr,
                _TemplateModelException.modelHasStoredNullDescription(expected, model));
    }

    /**
     * Compares two expressions according the rules of the FTL comparator operators.
     *
     * @param leftExp not {@code null}
     * @param operator one of the {@code COMP_OP_...} constants, like {@link #CMP_OP_EQUALS}.
     * @param operatorString can be {@code null}; the actual operator used, used for more accurate error message.
     * @param rightExp not {@code null}
     * @param env {@code null} is tolerated, but should be avoided
     */
    static boolean compare(
            Expression leftExp,
            int operator, String operatorString,
            Expression rightExp,
            Expression defaultBlamed,
            Environment env) throws TemplateException {
        TemplateModel ltm = leftExp.eval(env);
        TemplateModel rtm = rightExp.eval(env);
        return compare(
                ltm, leftExp,
                operator, operatorString,
                rtm, rightExp,
                defaultBlamed, false,
                false, false,
                env);
    }

    /**
     * Compares values according the rules of the FTL comparator operators; if the {@link Expression}-s are
     * accessible, use {@link #compare(Expression, int, String, Expression, Expression, Environment)} instead, as
     * that gives better error messages.
     *
     * @param leftValue maybe {@code null}, which will usually cause the appropriate {@link TemplateException}.
     * @param operator one of the {@code COMP_OP_...} constants, like {@link #CMP_OP_EQUALS}.
     * @param rightValue maybe {@code null}, which will usually cause the appropriate {@link TemplateException}.
     * @param env {@code null} is tolerated, but should be avoided
     */
    static boolean compare(
            TemplateModel leftValue, int operator, TemplateModel rightValue,
            Environment env) throws TemplateException {
        return compare(
                leftValue, null,
                operator, null,
                rightValue, null,
                null, false,
                false, false,
                env);
    }

    /**
     * Same as {@link #compare(TemplateModel, int, TemplateModel, Environment)}, but if the two types are incompatible,
     * they are treated as non-equal instead of throwing an exception. Comparing dates of different types will
     * still throw an exception, however.
     */
    static boolean compareLenient(
            TemplateModel leftValue, int operator, TemplateModel rightValue,
            Environment env) throws TemplateException {
        return compare(
                leftValue, null,
                operator, null,
                rightValue, null,
                null, true,
                false, false,
                env);
    }

    private static final String VALUE_OF_THE_COMPARISON_IS_UNKNOWN_DATE_LIKE
            = "value of the comparison is a date-like value where "
              + "it's not known if it's a date (no time part), time, or date-time, "
              + "and thus can't be used in a comparison.";

    /**
     * The workhorse comparison: handles null operands, then dispatches on the operand types
     * (number, date, string, boolean), with a classic-compatible fallback.
     *
     * @param leftExp {@code null} is allowed, but may result in less helpful error messages
     * @param operator one of the {@code COMP_OP_...} constants, like {@link #CMP_OP_EQUALS}.
     * @param operatorString can be {@code null}; the actual operator used, used for more accurate error message.
     * @param rightExp {@code null} is allowed, but may result in less helpful error messages
     * @param defaultBlamed {@code null} allowed; the expression who to which error will point to if something goes
     *        wrong that is not specific to the left or right side expression, or if that expression is {@code null}.
     * @param typeMismatchMeansNotEqual If the two types are incompatible, they are treated as non-equal instead
     *        of throwing an exception. Comparing dates of different types will still throw an exception, however.
     * @param leftNullReturnsFalse if {@code true}, a {@code null} left value will not cause exception, but make the
     *        expression {@code false}.
     * @param rightNullReturnsFalse if {@code true}, a {@code null} right value will not cause exception, but make the
     *        expression {@code false}.
     */
    static boolean compare(
            TemplateModel leftValue, Expression leftExp,
            int operator, String operatorString,
            TemplateModel rightValue, Expression rightExp,
            Expression defaultBlamed,
            boolean typeMismatchMeansNotEqual,
            boolean leftNullReturnsFalse, boolean rightNullReturnsFalse,
            Environment env) throws TemplateException {
        if (leftValue == null) {
            if (env != null && env.isClassicCompatible()) {
                leftValue = TemplateScalarModel.EMPTY_STRING;
            } else {
                if (leftNullReturnsFalse) {
                    return false;
                } else {
                    if (leftExp != null) {
                        throw InvalidReferenceException.getInstance(leftExp, env);
                    } else {
                        throw new _MiscTemplateException(defaultBlamed, env,
                                    "The left operand of the comparison was undefined or null.");
                    }
                }
            }
        }

        if (rightValue == null) {
            if (env != null && env.isClassicCompatible()) {
                rightValue = TemplateScalarModel.EMPTY_STRING;
            } else {
                if (rightNullReturnsFalse) {
                    return false;
                } else {
                    if (rightExp != null) {
                        throw InvalidReferenceException.getInstance(rightExp, env);
                    } else {
                        throw new _MiscTemplateException(defaultBlamed, env,
                                    "The right operand of the comparison was undefined or null.");
                    }
                }
            }
        }

        final int cmpResult;
        if (leftValue instanceof TemplateNumberModel && rightValue instanceof TemplateNumberModel) {
            Number leftNum = EvalUtil.modelToNumber((TemplateNumberModel) leftValue, leftExp);
            Number rightNum = EvalUtil.modelToNumber((TemplateNumberModel) rightValue, rightExp);
            ArithmeticEngine ae =
                    env != null
                        ? env.getArithmeticEngine()
                        : (leftExp != null
                            ? leftExp.getTemplate().getArithmeticEngine()
                            : ArithmeticEngine.BIGDECIMAL_ENGINE);
            try {
                cmpResult = ae.compareNumbers(leftNum, rightNum);
            } catch (RuntimeException e) {
                throw new _MiscTemplateException(defaultBlamed, e, env, new Object[]
                        { "Unexpected error while comparing two numbers: ", e });
            }
        } else if (leftValue instanceof TemplateDateModel && rightValue instanceof TemplateDateModel) {
            TemplateDateModel leftDateModel = (TemplateDateModel) leftValue;
            TemplateDateModel rightDateModel = (TemplateDateModel) rightValue;

            int leftDateType = leftDateModel.getDateType();
            int rightDateType = rightDateModel.getDateType();

            if (leftDateType == TemplateDateModel.UNKNOWN || rightDateType == TemplateDateModel.UNKNOWN) {
                String sideName;
                Expression sideExp;
                if (leftDateType == TemplateDateModel.UNKNOWN) {
                    sideName = "left";
                    sideExp = leftExp;
                } else {
                    sideName = "right";
                    sideExp = rightExp;
                }

                throw new _MiscTemplateException(sideExp != null ? sideExp : defaultBlamed, env, new Object[] {
                        "The ", sideName, " ", VALUE_OF_THE_COMPARISON_IS_UNKNOWN_DATE_LIKE });
            }

            if (leftDateType != rightDateType) {
                throw new _MiscTemplateException(defaultBlamed, env, new Object[] {
                        "Can't compare dates of different types. Left date type is ",
                        TemplateDateModel.TYPE_NAMES.get(leftDateType), ", right date type is ",
                        TemplateDateModel.TYPE_NAMES.get(rightDateType), "." });
            }

            Date leftDate = EvalUtil.modelToDate(leftDateModel, leftExp);
            Date rightDate = EvalUtil.modelToDate(rightDateModel, rightExp);
            cmpResult = leftDate.compareTo(rightDate);
        } else if (leftValue instanceof TemplateScalarModel && rightValue instanceof TemplateScalarModel) {
            // Strings only support equality comparisons.
            if (operator != CMP_OP_EQUALS && operator != CMP_OP_NOT_EQUALS) {
                throw new _MiscTemplateException(defaultBlamed, env, new Object[] {
                        "Can't use operator \"", cmpOpToString(operator, operatorString), "\" on string values." });
            }
            String leftString = EvalUtil.modelToString((TemplateScalarModel) leftValue, leftExp, env);
            String rightString = EvalUtil.modelToString((TemplateScalarModel) rightValue, rightExp, env);
            // FIXME NBC: Don't use the Collator here. That's locale-specific, but ==/!= should not be.
            cmpResult = env.getCollator().compare(leftString, rightString);
        } else if (leftValue instanceof TemplateBooleanModel && rightValue instanceof TemplateBooleanModel) {
            // Booleans only support equality comparisons.
            if (operator != CMP_OP_EQUALS && operator != CMP_OP_NOT_EQUALS) {
                throw new _MiscTemplateException(defaultBlamed, env, new Object[] {
                        "Can't use operator \"", cmpOpToString(operator, operatorString), "\" on boolean values." });
            }
            boolean leftBool = ((TemplateBooleanModel) leftValue).getAsBoolean();
            boolean rightBool = ((TemplateBooleanModel) rightValue).getAsBoolean();
            cmpResult = (leftBool ? 1 : 0) - (rightBool ? 1 : 0);
        } else if (env.isClassicCompatible()) {
            // Classic mode: coerce both sides to strings and compare those.
            String leftString = leftExp.evalAndCoerceToString(env);
            String rightString = rightExp.evalAndCoerceToString(env);
            cmpResult = env.getCollator().compare(leftString, rightString);
        } else {
            if (typeMismatchMeansNotEqual) {
                if (operator == CMP_OP_EQUALS) {
                    return false;
                } else if (operator == CMP_OP_NOT_EQUALS) {
                    return true;
                }
                // Falls through
            }
            throw new _MiscTemplateException(defaultBlamed, env, new Object[] {
                    "Can't compare values of these types. ",
                    "Allowed comparisons are between two numbers, two strings, two dates, or two booleans.\n",
                    "Left hand operand is ", new _DelayedAOrAn(new _DelayedFTLTypeDescription(leftValue)), ".\n",
                    "Right hand operand is ", new _DelayedAOrAn(new _DelayedFTLTypeDescription(rightValue)), "." });
        }

        switch (operator) {
            case CMP_OP_EQUALS: return cmpResult == 0;
            case CMP_OP_NOT_EQUALS: return cmpResult != 0;
            case CMP_OP_LESS_THAN: return cmpResult < 0;
            case CMP_OP_GREATER_THAN: return cmpResult > 0;
            case CMP_OP_LESS_THAN_EQUALS: return cmpResult <= 0;
            case CMP_OP_GREATER_THAN_EQUALS: return cmpResult >= 0;
            default: throw new BugException("Unsupported comparator operator code: " + operator);
        }
    }

    /** Returns the human-readable name of a comparison operator for error messages. */
    private static String cmpOpToString(int operator, String operatorString) {
        if (operatorString != null) {
            return operatorString;
        } else {
            switch (operator) {
                case CMP_OP_EQUALS: return "equals";
                case CMP_OP_NOT_EQUALS: return "not-equals";
                case CMP_OP_LESS_THAN: return "less-than";
                case CMP_OP_GREATER_THAN: return "greater-than";
                case CMP_OP_LESS_THAN_EQUALS: return "less-than-equals";
                case CMP_OP_GREATER_THAN_EQUALS: return "greater-than-equals";
                default: return "???";
            }
        }
    }

    /**
     * Coerces a model to a String following FTL's output rules (numbers and dates are
     * formatted with the environment, booleans depend on the classic_compatible setting).
     *
     * @param seqHint if non-{@code null} and the model is a sequence/collection, it's used in the error message.
     */
    static String coerceModelToString(TemplateModel tm, Expression exp, String seqHint, Environment env)
            throws TemplateException {
        if (tm instanceof TemplateNumberModel) {
            return env.formatNumber(modelToNumber((TemplateNumberModel) tm, exp));
        } else if (tm instanceof TemplateDateModel) {
            return env.formatDate((TemplateDateModel) tm, exp);
        } else if (tm instanceof TemplateScalarModel) {
            return modelToString((TemplateScalarModel) tm, exp, env);
        } else if (tm == null) {
            if (env.isClassicCompatible()) {
                return "";
            } else {
                if (exp != null) {
                    throw InvalidReferenceException.getInstance(exp, env);
                } else {
                    throw new InvalidReferenceException(
                            "Null/missing value (no more information available)",
                            env);
                }
            }
        } else if (tm instanceof TemplateBooleanModel) {
            // This should be before TemplateScalarModel, but automatic boolean-to-string is only non-error since 2.3.20
            // (and before that when classic_compatible was true), so to keep backward compatibility we couldn't insert
            // this before TemplateScalarModel.
            boolean booleanValue = ((TemplateBooleanModel) tm).getAsBoolean();
            int compatMode = env.getClassicCompatibleAsInt();
            if (compatMode == 0) {
                return env.formatBoolean(booleanValue, false);
            } else {
                if (compatMode == 1) {
                    return booleanValue ? MiscUtil.C_TRUE : "";
                } else if (compatMode == 2) {
                    if (tm instanceof BeanModel) {
                        // In 2.1, bean-wrapped booleans where strings, so that has overridden the boolean behavior:
                        return _BeansAPI.getAsClassicCompatibleString((BeanModel) tm);
                    } else {
                        return booleanValue ? MiscUtil.C_TRUE : "";
                    }
                } else {
                    throw new BugException("Unsupported classic_compatible variation: " + compatMode);
                }
            }
        } else {
            if (env.isClassicCompatible() && tm instanceof BeanModel) {
                return _BeansAPI.getAsClassicCompatibleString((BeanModel) tm);
            }
            if (seqHint != null && (tm instanceof TemplateSequenceModel || tm instanceof TemplateCollectionModel)) {
                throw new NonStringException(exp, tm, seqHint, env);
            } else {
                throw new NonStringException(exp, tm, env);
            }
        }
    }
}
// Copyright 2012 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.adwords.axis.utility.extension.util; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import java.util.List; /** * The SelectorField class contains all the possible fields that each service * can use. * * <p>https://developers.google.com/adwords/api/docs/appendix/selectorfields * * @author Julian Toledo */ public final class SelectorFields { /** * Specific FieldType for the API Object/Service fields. */ public interface FieldType<T> { public String getField(); public boolean getFilterable(); } /** * Helper method that generates the String[] * needed by the AdWords Services from the constant list. * * @param enums The list of constant Enums of the <T> type of object * that the Service works with, for example Campaign, AdGroup, etc. * @return the String[] needed by the AdWords Services */ public static <T extends FieldType<?>> String[] getFields(List<T> enums) { return Lists.transform(enums, new Function<FieldType<?>, String>() { public String apply(FieldType<?> selectorField) { return selectorField.getField(); } }).toArray(new String[] {}); } /** * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.mcm.ManagedCustomer}. 
*/ public enum ManagedCustomer implements FieldType<com.google.api.ads.adwords.axis.v201409.mcm.ManagedCustomer> { CAN_MANAGE_CLIENTS("CanManageClients", true), COMPANY_NAME("CompanyName", true), CURRENCY_CODE("CurrencyCode", true), CUSTOMER_ID("CustomerId", true), NAME("Name", true), TEST_ACCOUNT("TestAccount", false); private String field; private boolean filterable; private ManagedCustomer(String field, boolean filterable) { this.field = field; this.filterable = filterable; } public String getField() { return field; } public boolean getFilterable() { return filterable; } public static ImmutableList<ManagedCustomer> all() { return ImmutableList.copyOf(SelectorFields.ManagedCustomer.values()); } public static String[] allFields() { return getFields(all()); } } /** * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.Campaign}. */ public enum Campaign implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.Campaign> { AD_SERVING_OPTIMIZATION_STATUS("AdServingOptimizationStatus", false), AMOUNT("Amount", true), BID_CEILING("BidCeiling", false), BID_TYPE("BidType", true), BIDDING_STRATEGY_TYPE("BiddingStrategyType", true), BUDGET_ID("BudgetId", true), BUDGET_NAME("BudgetName", true), BUDGET_REFERENCE_COUNT("BudgetReferenceCount", false), BUDGET_STATUS("BudgetStatus", true), DELIVERY_METHOD("DeliveryMethod", false), ELIGIBLE("Eligible", false), END_DATE("EndDate", true), ENHANCED_CPC_ENABLED("EnhancedCpcEnabled", true), FREQUENCY_CAP_MAX_IMPRESSIONS("FrequencyCapMaxImpressions", true), ID("Id", true), IS_BUDGET_EXPLICITLY_SHARED("IsBudgetExplicitlyShared", true), LABELS("Labels", true), LEVEL("Level", true), NAME("Name", true), PERIOD("Period", false), PRICING_MODE("PricingMode", true), REJECTION_REASONS("RejectionReasons", false), SERVING_STATUS("ServingStatus", true), SETTINGS("Settings", false), START_DATE("StartDate", true), STATUS("Status", true), TARGET_CONTENT_NETWORK("TargetContentNetwork", true), 
TARGET_GOOGLE_SEARCH("TargetGoogleSearch", true), TARGET_PARTNER_SEARCH_NETWORK("TargetPartnerSearchNetwork", true), TARGET_SEARCH_NETWORK("TargetSearchNetwork", true), TIME_UNIT("TimeUnit", true); private String field; private boolean filterable; private Campaign(String field, boolean filterable) { this.field = field; this.filterable = filterable; } public String getField() { return field; } public boolean getFilterable() { return filterable; } public static ImmutableList<Campaign> all() { return ImmutableList.copyOf(SelectorFields.Campaign.values()); } public static String[] allFields() { return getFields(all()); } } /** * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.AdGroupAd}. */ public enum AdGroupAd implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.AdGroupAd> { ADGROUPAD_TRADEMARK_DISAPPROVED("AdGroupAdTrademarkDisapproved", false), ADGROUP_CREATIVE_APPROVAL_STATUS("AdGroupCreativeApprovalStatus", true), ADGROUP_ID("AdGroupId", true), ADVERTISING_ID("AdvertisingId", true), CREATION_TIME("CreationTime", true), DESCRIPTION1("Description1", true), DESCRIPTION2("Description2", true), DEVICE_PREFERENCE("DevicePreference", true), DIMENSIONS("Dimensions", false), DISPLAY_URL("DisplayUrl", true), EXPANDING_DIRECTIONS("ExpandingDirections", false), EXPERIMENT_DATA_STATUS("ExperimentDataStatus", false), EXPERIMENT_DELTA_STATUS("ExperimentDeltaStatus", false), EXPERIMENT_ID("ExperimentId", true), FILE_SIZE("FileSize", true), HEADLINE("Headline", true), HEIGHT("Height", true), ID("Id", true), IMAGE_CREATIVE_NAME("ImageCreativeName", true), INDUSTRY_STANDARD_COMMERCIAL_IDENTIFIER("IndustryStandardCommercialIdentifier", true), IS_COOKIE_TARGETED("IsCookieTargeted", false), IS_TAGGED("IsTagged", false), IS_USER_INTEREST_TARGETED("IsUserInterestTargeted", false), LABELS("Labels", true), MEDIA_ID("MediaId", true), MIME_TYPE("MimeType", true), MOBILEAD_BUSINESS_NAME("MobileAdBusinessName", false), 
MOBILEAD_COUNTRY_CODE("MobileAdCountryCode", false), MOBILEAD_DESCRIPTION("MobileAdDescription", false), MOBILEAD_HEADLINE("MobileAdHeadline", false), MOBILEAD_MARKUP_LANGUAGES("MobileAdMarkupLanguages", false), MOBILEAD_MOBILE_CARRIERS("MobileAdMobileCarriers", false), MOBILEAD_PHONE_NUMBER("MobileAdPhoneNumber", false), MOBILEIMAGEAD_MARKUP_LANGUAGES("MobileImageAdMarkupLanguages", false), MOBILEIMAGEAD_MOBILE_CARRIERS("MobileImageAdMobileCarriers", false), PROMOTION_LINE("PromotionLine", true), READY_TO_PLAY_ON_THE_WEB("ReadyToPlayOnTheWeb", false), REFERENCE_ID("ReferenceId", true), RICHMEDIAAD_CERTIFIED_VENDOR_FORMAT_ID("RichMediaAdCertifiedVendorFormatId", false), RICHMEDIAAD_DURATION("RichMediaAdDuration", false), RICHMEDIAAD_IMPRESSION_BEACON_URL("RichMediaAdImpressionBeaconUrl", false), RICHMEDIAAD_NAME("RichMediaAdName", false), RICHMEDIAAD_SNIPPET("RichMediaAdSnippet", false), RICHMEDIAAD_SOURCE_URL("RichMediaAdSourceUrl", false), RICHMEDIAAD_TYPE("RichMediaAdType", false), SOURCE_URL("SourceUrl", true), STATUS("Status", true), TEMPLATEAD_DURATION("TemplateAdDuration", false), TEMPLATEAD_NAME("TemplateAdName", false), TEMPLATEAD_UNIONID("TemplateAdUnionId", false), TEMPLATE_ELEMENT_FIELD_NAME("TemplateElementFieldName", false), TEMPLATE_ELEMENT_FIELD_TEXT("TemplateElementFieldText", false), TEMPLATE_ELEMENT_FIELD_TYPE("TemplateElementFieldType", false), TEMPLATE_ID("TemplateId", false), UNIQUE_NAME("UniqueName", false), URL("Url", true), URLS("Urls", false), VIDEO_TYPES("VideoTypes", false), WIDTH("Width", true), YOUTUBEVIDEO_ID_STRING("YouTubeVideoIdString", true); private String field; private boolean filterable; private AdGroupAd(String field, boolean filterable) { this.field = field; this.filterable = filterable; } public String getField() { return field; } public boolean getFilterable() { return filterable; } public static ImmutableList<AdGroupAd> all() { return ImmutableList.copyOf(SelectorFields.AdGroupAd.values()); } public static String[] 
allFields() { return getFields(all()); } } /** * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.AdGroupCriterion}. */ public enum AdGroupCriterion implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.AdGroupCriterion> { ADGROUP_ID("AdGroupId", true), AGE_RANGE_TYPE("AgeRangeType", false), APPROVAL_STATUS("ApprovalStatus", true), ARGUMENT("Argument", false), BID_TYPE("BidType", true), BIDDING_STRATEGY_TYPE("BiddingStrategyType", true), CPC_BID("CpcBid", true), CPC_BID_SOURCE("CpcBidSource", true), CPM_BID("CpmBid", true), CPM_BID_SOURCE("CpmBidSource", true), CRITERIA_COVERAGE("CriteriaCoverage", false), CRITERIA_SAMPLES("CriteriaSamples", false), CRITERIA_TYPE("CriteriaType", true), CRITERION_USE("CriterionUse", true), DESTINATION_URL("DestinationUrl", true), DISPLAY_NAME("DisplayName", true), ENHANCED_CPC_ENABLED("EnhancedCpcEnabled", true), EXPERIMENT_BID_MULTIPLIER("ExperimentBidMultiplier", true), EXPERIMENT_DATA_STATUS("ExperimentDataStatus", false), EXPERIMENT_DELTA_STATUS("ExperimentDeltaStatus", true), EXPERIMENT_ID("ExperimentId", true), FIRST_PAGE_CPC("FirstPageCpc", true), GENDER_TYPE("GenderType", false), ID("Id", true), IS_KEYWORD_AD_RELEVANCE_ACCEPTABLE("IsKeywordAdRelevanceAcceptable", true), IS_LANDING_PAGE_LATENCY_ACCEPTABLE("IsLandingPageLatencyAcceptable", true), IS_LANDING_PAGE_QUALITY_ACCEPTABLE("IsLandingPageQualityAcceptable", true), KEYWORD_MATCH_TYPE("KeywordMatchType", true), KEYWORD_TEXT("KeywordText", true), LABELS("Labels", true), MOBILE_APP_CATEGORY_ID("MobileAppCategoryId", false), OPERAND("Operand", false), PARAMETER("Parameter", true), // Beta PATH("Path", true), PERCENT_CPA_BID("PercentCpaBid", true), PERCENT_CPA_BID_SOURCE("PercentCpaBidSource", true), PLACEMENT_URL("PlacementUrl", true), QUALITY_SCORE("QualityScore", true), STATUS("Status", true), SYSTEM_SERVING_STATUS("SystemServingStatus", true), TEXT("Text", false), TOP_OF_PAGE_CPC("TopOfPageCpc", true), // Beta 
USER_INTEREST_ID("UserInterestId", false), USER_INTEREST_NAME("UserInterestName", false), USER_LIST_ID("UserListId", false), USER_LIST_MEMBERSHIP_STATUS("UserListMembershipStatus", true), USER_LIST_NAME("UserListName", false), VERTICAL_ID("VerticalId", false), VERTICAL_PARENT_ID("VerticalParentId", false); private String field; private boolean filterable; private AdGroupCriterion(String field, boolean filterable) { this.field = field; this.filterable = filterable; } public String getField() { return field; } public boolean getFilterable() { return filterable; } // Returns all fields that are not marked as beta public static ImmutableList<AdGroupCriterion> all(boolean includeBetaFields) { if (includeBetaFields) { return ImmutableList.copyOf(SelectorFields.AdGroupCriterion.values()); } else { return ImmutableList.<AdGroupCriterion>builder() .add(ADGROUP_ID) .add(AGE_RANGE_TYPE) .add(APPROVAL_STATUS) .add(ARGUMENT) .add(BID_TYPE) .add(BIDDING_STRATEGY_TYPE) .add(CPC_BID) .add(CPC_BID_SOURCE) .add(CPM_BID) .add(CPM_BID_SOURCE) .add(CRITERIA_COVERAGE) .add(CRITERIA_SAMPLES) .add(CRITERIA_TYPE) .add(CRITERION_USE) .add(DESTINATION_URL) .add(DISPLAY_NAME) .add(ENHANCED_CPC_ENABLED) .add(EXPERIMENT_BID_MULTIPLIER) .add(EXPERIMENT_DATA_STATUS) .add(EXPERIMENT_DELTA_STATUS) .add(EXPERIMENT_ID) .add(FIRST_PAGE_CPC) .add(GENDER_TYPE) .add(ID) .add(IS_KEYWORD_AD_RELEVANCE_ACCEPTABLE) .add(IS_LANDING_PAGE_LATENCY_ACCEPTABLE) .add(IS_LANDING_PAGE_QUALITY_ACCEPTABLE) .add(KEYWORD_MATCH_TYPE) .add(KEYWORD_TEXT) .add(LABELS) .add(MOBILE_APP_CATEGORY_ID) .add(OPERAND) .add(PATH) .add(PERCENT_CPA_BID) .add(PERCENT_CPA_BID_SOURCE) .add(PLACEMENT_URL) .add(QUALITY_SCORE) .add(STATUS) .add(SYSTEM_SERVING_STATUS) .add(TEXT) .add(USER_INTEREST_ID) .add(USER_INTEREST_NAME) .add(USER_LIST_ID) .add(USER_LIST_MEMBERSHIP_STATUS) .add(USER_LIST_NAME) .add(VERTICAL_ID) .add(VERTICAL_PARENT_ID).build(); } } public static String[] allFields(boolean includeBetaFields) { return 
getFields(all(includeBetaFields)); } } /** * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.AdGroup}. */ public enum AdGroup implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.AdGroup> { BID_TYPE("BidType", true), BIDDING_STRATEGY_TYPE("BiddingStrategyType", true), CAMPAIGN_ID("CampaignId", true), CAMPAIGN_NAME("CampaignName", true), CONTENT_BID_CRITERION_TYPE_GROUP("ContentBidCriterionTypeGroup", true), CPC_BID("CpcBid", true), CPM_BID("CpmBid", true), ENHANCED_CPC_ENABLED("EnhancedCpcEnabled", true), EXPERIMENT_DELTA_STATUS("ExperimentDeltaStatus", false), EXPERIMENT_ID("ExperimentId", true), ID("Id", true), LABELS("Labels", true), MAX_CONTENT_CPC_MULTIPLIER("MaxContentCpcMultiplier", true), MAX_CPC_MULTIPLIER("MaxCpcMultiplier", true), MAX_CPM_MULTIPLIER("MaxCpmMultiplier", true), NAME("Name", true), PERCENT_CPA_BID("PercentCpaBid", true), SETTINGS("Settings", false), STATUS("Status", true), TARGET_CPA_BID("TargetCpaBid", true); private String field; private boolean filterable; private AdGroup(String field, boolean filterable) { this.field = field; this.filterable = filterable; } public String getField() { return field; } public boolean getFilterable() { return filterable; } public static ImmutableList<AdGroup> all() { return ImmutableList.copyOf(SelectorFields.AdGroup.values()); } public static String[] allFields() { return getFields(all()); } } /** * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.AdGroupFeed}. 
*/
  public enum AdGroupFeed
      implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.AdGroupFeed> {
    ADGROUP_ID("AdGroupId", true),
    FEED_ID("FeedId", true),
    MATCHING_FUNCTION("MatchingFunction", false),
    PLACEHOLDER_TYPES("PlaceholderTypes", true),
    STATUS("Status", true);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    AdGroupFeed(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the AdGroupFeed service. */
    public static ImmutableList<AdGroupFeed> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the AdGroupFeed service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.AdParam}.
   */
  public enum AdParam implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.AdParam> {
    ADGROUP_ID("AdGroupId", true),
    CRITERION_ID("CriterionId", true),
    INSERTION_TEXT("InsertionText", false),
    PARAM_INDEX("ParamIndex", false);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    AdParam(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the AdParam service. */
    public static ImmutableList<AdParam> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the AdParam service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.Budget}.
*/ public enum Budget implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.Budget> { AMOUNT("Amount", true), BUDGET_ID("BudgetId", true), BUDGET_NAME("BudgetName", true), BUDGET_REFERENCE_COUNT("BudgetReferenceCount", false), BUDGET_STATUS("BudgetStatus", true), DELIVERY_METHOD("DeliveryMethod", false), IS_BUDGET_EXPLICITLY_SHARED("IsBudgetExplicitlyShared", false), PERIOD("Period", true); private String field; private boolean filterable; private Budget(String field, boolean filterable) { this.field = field; this.filterable = filterable; } public String getField() { return field; } public boolean getFilterable() { return filterable; } public static ImmutableList<Budget> all() { return ImmutableList.copyOf(SelectorFields.Budget.values()); } public static String[] allFields() { return getFields(all()); } } /** * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.billing.BudgetOrder}. */ public enum BudgetOrder implements FieldType<com.google.api.ads.adwords.axis.v201409.billing.BudgetOrder> { BILLING_ACCOUNT_ID("BillingAccountId", true), BILLING_ACCOUNT_NAME("BillingAccountName", true), //New Billing BUDGET_ORDER_NAME("BudgetOrderName", true), //New Billing END_DATE_TIME("EndDateTime", true), ID("Id", true), LAST_REQUEST("LastRequest", false), //New Billing PO_NUMBER("PoNumber", true), //New Billing PRIMARY_BILLING_ID("PrimaryBillingId", true), //New Billing SECONDARY_BILLING_ID("SecondaryBillingId", true), //New Billing SPENDING_LIMIT("SpendingLimit", true), START_DATE_TIME("StartDateTime", true); private String field; private boolean filterable; private BudgetOrder(String field, boolean filterable) { this.field = field; this.filterable = filterable; } public String getField() { return field; } public boolean getFilterable() { return filterable; } public static ImmutableList<BudgetOrder> all(boolean newBilling) { if (newBilling) { return ImmutableList.copyOf(SelectorFields.BudgetOrder.values()); } else { return ImmutableList.of( 
BILLING_ACCOUNT_ID, END_DATE_TIME, ID, SPENDING_LIMIT, START_DATE_TIME); } } public static String[] allFields(boolean newBilling) { return getFields(all(newBilling)); } } /** * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.CampaignAdExtension}. */ public enum CampaignAdExtension implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.CampaignAdExtension> { AD_EXTENSION_ID("AdExtensionId", true), ADDRESS("Address", false), ADVERTISER_NAME("AdvertiserName", false), APP_ID("AppId", false), APP_STORE("AppStore", false), APPROVAL_STATUS("ApprovalStatus", true), ARGUMENT("Argument", false), CAMPAIGN_ID("CampaignId", true), COMPANY_NAME("CompanyName", false), COUNTRY_CODE("CountryCode", false), DESTINATION_URL("DestinationUrl", false), DISPLAY_TEXT("DisplayText", false), DOMAIN_NAME("DomainName", false), EMAIL("Email", false), ENCODED_LOCATION("EncodedLocation", false), GEO_POINT("GeoPoint", false), GOOGLE_BASE_CUSTOMER_ID("GoogleBaseCustomerId", false), ICON_MEDIA_ID("IconMediaId", false), IMAGE_MEDIA_ID("ImageMediaId", false), IN_APP_LINK_TEXT("InAppLinkText", false), IN_APP_LINK_URL("InAppLinkUrl", false), IS_CALL_ONLY("IsCallOnly", false), IS_CALL_TRACKING_ENABLED("IsCallTrackingEnabled", false), // Beta LANGUAGE_CODE("LanguageCode", false), LOCATION_EXTENSION_SOURCE("LocationExtensionSource", true), OPERAND("Operand", false), PHONE_NUMBER("PhoneNumber", false), SHOULD_SYNC_URL("ShouldSyncUrl", false), STATUS("Status", true); private String field; private boolean filterable; private CampaignAdExtension(String field, boolean filterable) { this.field = field; this.filterable = filterable; } public String getField() { return field; } public boolean getFilterable() { return filterable; } // Returns all fields that are not marked as beta public static ImmutableList<CampaignAdExtension> all(boolean includeBetaFields) { if (includeBetaFields) { return ImmutableList.copyOf(SelectorFields.CampaignAdExtension.values()); } else { return 
ImmutableList.<CampaignAdExtension>builder() .add(AD_EXTENSION_ID) .add(ADDRESS) .add(ADVERTISER_NAME) .add(APP_ID) .add(APP_STORE) .add(APPROVAL_STATUS) .add(ARGUMENT) .add(CAMPAIGN_ID) .add(DESTINATION_URL) .add(DISPLAY_TEXT) .add(DOMAIN_NAME) .add(EMAIL) .add(ENCODED_LOCATION) .add(GEO_POINT) .add(GOOGLE_BASE_CUSTOMER_ID) .add(ICON_MEDIA_ID) .add(IMAGE_MEDIA_ID) .add(IN_APP_LINK_TEXT) .add(IN_APP_LINK_URL) .add(IS_CALL_ONLY) .add(LANGUAGE_CODE) .add(LOCATION_EXTENSION_SOURCE) .add(OPERAND) .add(PHONE_NUMBER) .add(SHOULD_SYNC_URL) .add(STATUS).build(); } } public static String[] allFields(boolean includeBetaFields) { return getFields(all(includeBetaFields)); } } /** * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.CampaignCriterion}. */ public enum CampaignCriterion implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.CampaignCriterion> { ADDRESS("Address", false), AGE_RANGE_TYPE("AgeRangeType", false), ARGUMENT("Argument", false), BID_MODIFIER("BidModifier", true), CAMPAIGN_ID("CampaignId", true), CARRIER_COUNTRY_CODE("CarrierCountryCode", false), CARRIER_NAME("CarrierName", false), CONTENT_LABEL_TYPE("ContentLabelType", true), CRITERIA_TYPE("CriteriaType", true), DAY_OF_WEEK("DayOfWeek", false), DEVICE_NAME("DeviceName", false), DEVICE_TYPE("DeviceType", false), DISPLAY_NAME("DisplayName", true), DISPLAY_TYPE("DisplayType", false), END_HOUR("EndHour", false), END_MINUTE("EndMinute", false), GENDER_TYPE("GenderType", false), GEO_POINT("GeoPoint", false), ID("Id", true), IS_NEGATIVE("IsNegative", true), KEYWORD_MATCH_TYPE("KeywordMatchType", true), KEYWORD_TEXT("KeywordText", true), LANGUAGE_CODE("LanguageCode", false), LANGUAGE_NAME("LanguageName", false), LOCATION_NAME("LocationName", true), MANUFACTURER_NAME("ManufacturerName", false), MOBILE_APP_CATEGORY_ID("MobileAppCategoryId", false), OPERAND("Operand", false), OPERATING_SYSTEM_NAME("OperatingSystemName", false), OPERATOR_TYPE("OperatorType", false), 
OS_MAJOR_VERSION("OsMajorVersion", false), OS_MINOR_VERSION("OsMinorVersion", false), PARENT_LOCATIONS("ParentLocations", false), PATH("Path", false), PLACEMENT_URL("PlacementUrl", false), PLATFORM_NAME("PlatformName", false), RADIUS_DISTANCE_UNITS("RadiusDistanceUnits", false), RADIUS_IN_UNITS("RadiusInUnits", false), START_HOUR("StartHour", false), START_MINUTE("StartMinute", false), TARGETING_STATUS("TargetingStatus", false), TEXT("Text", false), USER_INTEREST_ID("UserInterestId", false), USER_INTEREST_NAME("UserInterestName", false), USER_LIST_ID("UserListId", false), USER_LIST_MEMBERSHIP_STATUS("UserListMembershipStatus", true), USER_LIST_NAME("UserListName", false), VERTICAL_ID("VerticalId", false), VERTICAL_PARENT_ID("VerticalParentId", false), VERTICES("Vertices", false); private String field; private boolean filterable; private CampaignCriterion(String field, boolean filterable) { this.field = field; this.filterable = filterable; } public String getField() { return field; } public boolean getFilterable() { return filterable; } public static ImmutableList<CampaignCriterion> all() { return ImmutableList.copyOf(SelectorFields.CampaignCriterion.values()); } } /** * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.CampaignFeed}. 
*/
  public enum CampaignFeed
      implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.CampaignFeed> {
    CAMPAIGN_ID("CampaignId", true),
    FEED_ID("FeedId", true),
    MATCHING_FUNCTION("MatchingFunction", false),
    PLACEHOLDER_TYPES("PlaceholderTypes", true),
    STATUS("Status", true);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    CampaignFeed(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the CampaignFeed service. */
    public static ImmutableList<CampaignFeed> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the CampaignFeed service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.ConversionTracker}.
   */
  public enum ConversionTracker
      implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.ConversionTracker> {
    APP_ID("AppId", true),
    APP_PLATFORM("AppPlatform", true),
    BACKGROUND_COLOR("BackgroundColor", true),
    CATEGORY("Category", true),
    CONVERSION_PAGE_LANGUAGE("ConversionPageLanguage", true),
    CONVERSION_VALUE("ConversionValue", true),
    ID("Id", true),
    IS_PRODUCT_ADS_CHARGEABLE("IsProductAdsChargeable", true),
    MARKUP_LANGUAGE("MarkupLanguage", true),
    MOST_RECENT_CONVERSION_DATE("MostRecentConversionDate", true),
    NAME("Name", true),
    NUM_CONVERSION_EVENTS("NumConversionEvents", true),
    NUM_CONVERTED_CLICKS("NumConvertedClicks", true),
    PRODUCT_ADS_CHARGEABLE_CONVERSION_WINDOW("ProductAdsChargeableConversionWindow", true),
    STATUS("Status", true),
    TEXT_FORMAT("TextFormat", true),
    TRACKING_CODE_TYPE("TrackingCodeType", true),
    VIEWTHROUGH_LOOKBACK_WINDOW("ViewthroughLookbackWindow", true);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    ConversionTracker(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the ConversionTracker service. */
    public static ImmutableList<ConversionTracker> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the ConversionTracker service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.AdGroupBidLandscape}.
   */
  public enum AdGroupBidLandscape
      implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.AdGroupBidLandscape> {
    ADGROUP_ID("AdGroupId", true),
    BID("Bid", true),
    CAMPAIGN_ID("CampaignId", true),
    END_DATE("EndDate", false),
    LANDSCAPE_CURRENT("LandscapeCurrent", true),
    LANDSCAPE_TYPE("LandscapeType", true),
    LOCAL_CLICKS("LocalClicks", true),
    LOCAL_COST("LocalCost", true),
    LOCAL_IMPRESSIONS("LocalImpressions", true),
    MARGINAL_CPC("MarginalCpc", true),
    PROMOTED_IMPRESSIONS("PromotedImpressions", true),
    START_DATE("StartDate", false);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    AdGroupBidLandscape(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the AdGroupBidLandscape service. */
    public static ImmutableList<AdGroupBidLandscape> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the AdGroupBidLandscape service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.CriterionBidLandscape}.
   */
  public enum CriterionBidLandscape
      implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.CriterionBidLandscape> {
    ADGROUP_ID("AdGroupId", true),
    BID("Bid", true),
    CAMPAIGN_ID("CampaignId", true),
    CRITERION_ID("CriterionId", true),
    END_DATE("EndDate", false),
    LOCAL_CLICKS("LocalClicks", true),
    LOCAL_COST("LocalCost", true),
    LOCAL_IMPRESSIONS("LocalImpressions", true),
    MARGINAL_CPC("MarginalCpc", true),
    PROMOTED_IMPRESSIONS("PromotedImpressions", true),
    START_DATE("StartDate", false);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    CriterionBidLandscape(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the CriterionBidLandscape service. */
    public static ImmutableList<CriterionBidLandscape> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the CriterionBidLandscape service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.Experiment}.
*/
  public enum Experiment
      implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.Experiment> {
    ADGROUP_ADS_COUNT("AdGroupAdsCount", false),
    ADGROUP_CRITERIA_COUNT("AdGroupCriteriaCount", false),
    ADGROUPS_COUNT("AdGroupsCount", false),
    CAMPAIGN_ID("CampaignId", true),
    CONTROL_ID("ControlId", true),
    END_DATE_TIME("EndDateTime", true),
    ID("Id", true),
    LAST_MODIFIED_DATE_TIME("LastModifiedDateTime", true),
    NAME("Name", true),
    QUERY_PERCENTAGE("QueryPercentage", true),
    SERVING_STATUS("ServingStatus", false),
    START_DATE_TIME("StartDateTime", true),
    STATUS("Status", true);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    Experiment(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the Experiment service. */
    public static ImmutableList<Experiment> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the Experiment service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.Feed}.
   */
  public enum Feed implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.Feed> {
    ATTRIBUTES("Attributes", false),
    FEED_STATUS("FeedStatus", true),
    ID("Id", true),
    NAME("Name", false),
    ORIGIN("Origin", true);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    Feed(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the Feed service. */
    public static ImmutableList<Feed> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the Feed service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.FeedItem}.
   */
  public enum FeedItem implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.FeedItem> {
    ATTRIBUTE_VALUES("AttributeValues", false),
    DEVICE_PREFERENCE("DevicePreference", true),
    END_TIME("EndTime", false),
    FEED_ID("FeedId", true),
    FEED_ITEM_ID("FeedItemId", true),
    START_TIME("StartTime", false),
    STATUS("Status", true),
    VALIDATION_DETAILS("ValidationDetails", false);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    FeedItem(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the FeedItem service. */
    public static ImmutableList<FeedItem> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the FeedItem service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.FeedMapping}.
   */
  public enum FeedMapping
      implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.FeedMapping> {
    ATTRIBUTE_FIELD_MAPPINGS("AttributeFieldMappings", true),
    FEED_ID("FeedId", true),
    FEED_MAPPING_ID("FeedMappingId", true),
    STATUS("Status", true);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    FeedMapping(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the FeedMapping service. */
    public static ImmutableList<FeedMapping> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the FeedMapping service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.LocationCriterion}.
*/
  public enum LocationCriterion
      implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.LocationCriterion> {
    CANONICAL_NAME("CanonicalName", false),
    CRITERIA_TYPE("CriteriaType", true),
    DISPLAY_TYPE("DisplayType", false),
    ID("Id", true),
    LOCATION_NAME("LocationName", true),
    PARENT_LOCATIONS("ParentLocations", false),
    REACH("Reach", false),
    TARGETING_STATUS("TargetingStatus", false);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    LocationCriterion(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the LocationCriterion service. */
    public static ImmutableList<LocationCriterion> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the LocationCriterion service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.Media}.
   */
  public enum Media implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.Media> {
    ADVERTISING_ID("AdvertisingId", true),
    CREATION_TIME("CreationTime", true),
    DIMENSIONS("Dimensions", false),
    DURATION_MILLIS("DurationMillis", true),
    FILE_SIZE("FileSize", true),
    INDUSTRY_STANDARD_COMMERCIAL_IDENTIFIER("IndustryStandardCommercialIdentifier", true),
    MEDIA_ID("MediaId", true),
    MIME_TYPE("MimeType", true),
    NAME("Name", true),
    READY_TO_PLAY_ON_THE_WEB("ReadyToPlayOnTheWeb", false),
    REFERENCE_ID("ReferenceId", true),
    SOURCE_URL("SourceUrl", true),
    STREAMING_URL("StreamingUrl", false),
    TYPE("Type", true),
    URLS("Urls", false),
    YOUTUBE_VIDEO_ID_STRING("YouTubeVideoIdString", true);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    Media(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the Media service. */
    public static ImmutableList<Media> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the Media service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.rm.UserList}.
   */
  public enum UserList implements FieldType<com.google.api.ads.adwords.axis.v201409.rm.UserList> {
    ACCESS_REASON("AccessReason", true),
    ACCOUNT_USER_LIST_STATUS("AccountUserListStatus", true),
    CONVERSION_TYPES("ConversionTypes", false),
    DESCRIPTION("Description", false),
    ID("Id", true),
    IS_READ_ONLY("IsReadOnly", false),
    MEMBERSHIP_LIFE_SPAN("MembershipLifeSpan", true),
    NAME("Name", true),
    RULES("Rules", false),
    SIZE("Size", true),
    SIZE_RANGE("SizeRange", false),
    STATUS("Status", true),
    // Declared after STATUS in the original; kept here to preserve ordinal order.
    LIST_TYPE("ListType", true);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    UserList(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the UserList service. */
    public static ImmutableList<UserList> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the UserList service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.AdGroupBidModifier}.
   */
  public enum AdGroupBidModifier
      implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.AdGroupBidModifier> {
    ADGROUP_ID("AdGroupId", true),
    BID_MODIFIER("BidModifier", true),
    // NOTE(review): constant name misspells "SOURCE"; the field string is correct.
    // Renaming the constant would break existing callers, so it is kept as-is.
    BID_MODIFIER_SORUCE("BidModifierSource", true),
    CAMPAIGN_ID("CampaignId", true),
    CRITERIA_TYPE("CriteriaType", true),
    ID("Id", true),
    PLATFORM_NAME("PlatformName", false);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    AdGroupBidModifier(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the AdGroupBidModifier service. */
    public static ImmutableList<AdGroupBidModifier> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the AdGroupBidModifier service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }

  /**
   * SelectorFields for {@link com.google.api.ads.adwords.axis.v201409.cm.Label}.
   */
  public enum Label implements FieldType<com.google.api.ads.adwords.axis.v201409.cm.Label> {
    ID("LabelId", true),
    NAME("LabelName", true),
    STATUS("LabelStatus", true);

    /** AdWords selector field name. */
    private final String field;
    /** Whether the AdWords API allows filtering on this field. */
    private final boolean filterable;

    Label(String field, boolean filterable) {
      this.field = field;
      this.filterable = filterable;
    }

    public String getField() {
      return field;
    }

    public boolean getFilterable() {
      return filterable;
    }

    /** Returns every selectable field for the Label service. */
    public static ImmutableList<Label> all() {
      return ImmutableList.copyOf(values());
    }

    /** Returns every selectable field name for the Label service. */
    public static String[] allFields() {
      return getFields(all());
    }
  }
}
/************************************************************************* * * * Open Dynamics Engine, Copyright (C) 2001,2002 Russell L. Smith. * * All rights reserved. Email: russ@q12.org Web: www.q12.org * * Open Dynamics Engine 4J, Copyright (C) 2009-2014 Tilmann Zaeschke * * All rights reserved. Email: ode4j@gmx.de Web: www.ode4j.org * * * * This library is free software; you can redistribute it and/or * * modify it under the terms of EITHER: * * (1) The GNU Lesser General Public License as published by the Free * * Software Foundation; either version 2.1 of the License, or (at * * your option) any later version. The text of the GNU Lesser * * General Public License is included with this library in the * * file LICENSE.TXT. * * (2) The BSD-style license that is included with this library in * * the file ODE-LICENSE-BSD.TXT and ODE4J-LICENSE-BSD.TXT. * * * * This library is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the files * * LICENSE.TXT, ODE-LICENSE-BSD.TXT and ODE4J-LICENSE-BSD.TXT for more * * details. * * * *************************************************************************/ package org.ode4j.ode; import org.ode4j.math.DVector3; /** * In real life a joint is something like a hinge, that is used to connect two * objects. * In ODE a joint is very similar: It is a relationship that is enforced between * two bodies so that they can only have certain positions and orientations * relative to each other. * This relationship is called a constraint -- the words joint and * constraint are often used interchangeably. * <p> * A joint has a set of parameters that can be set. These include: * <p> * * <li> dParamLoStop Low stop angle or position. Setting this to * -dInfinity (the default value) turns off the low stop. * For rotational joints, this stop must be greater than -pi to be * effective. 
* <li> dParamHiStop High stop angle or position. Setting this to * dInfinity (the default value) turns off the high stop. * For rotational joints, this stop must be less than pi to be * effective. * If the high stop is less than the low stop then both stops will * be ineffective. * <li> dParamVel Desired motor velocity (this will be an angular or * linear velocity). * <li> dParamFMax The maximum force or torque that the motor will use to * achieve the desired velocity. * This must always be greater than or equal to zero. * Setting this to zero (the default value) turns off the motor. * <li> dParamFudgeFactor The current joint stop/motor implementation has * a small problem: * when the joint is at one stop and the motor is set to move it away * from the stop, too much force may be applied for one time step, * causing a ``jumping'' motion. * This fudge factor is used to scale this excess force. * It should have a value between zero and one (the default value). * If the jumping motion is too visible in a joint, the value can be * reduced. * Making this value too small can prevent the motor from being able to * move the joint away from a stop. * <li> dParamBounce The bouncyness of the stops. * This is a restitution parameter in the range 0..1. * 0 means the stops are not bouncy at all, 1 means maximum bouncyness. * <li> dParamCFM The constraint force mixing (CFM) value used when not * at a stop. * <li> dParamStopERP The error reduction parameter (ERP) used by the * stops. * <li> dParamStopCFM The constraint force mixing (CFM) value used by the * stops. Together with the ERP value this can be used to get spongy or * soft stops. * Note that this is intended for unpowered joints, it does not really * work as expected when a powered joint reaches its limit. * <li> dParamSuspensionERP Suspension error reduction parameter (ERP). * Currently this is only implemented on the hinge-2 joint. * <li> dParamSuspensionCFM Suspension constraint force mixing (CFM) value. 
* Currently this is only implemented on the hinge-2 joint. * * If a particular parameter is not implemented by a given joint, setting it * will have no effect. * These parameter names can be optionally followed by a digit (2 or 3) * to indicate the second or third set of parameters, e.g. for the second axis * in a hinge-2 joint, or the third axis in an AMotor joint. */ public interface DJoint { public static class DJointFeedback { /** force applied to body 1 */ public DVector3 f1 = new DVector3(); /** torque applied to body 1 */ public DVector3 t1 = new DVector3(); /** force applied to body 2 */ public DVector3 f2 = new DVector3(); /** torque applied to body 2 */ public DVector3 t2 = new DVector3(); } //virtual ~dJoint() // : void DESTRUCTOR(); // /** // * Destroy a joint. // * // * disconnects it from its attached bodies and removing it from the world. // * However, if the joint is a member of a group then this function has no // * effect - to destroy that joint the group must be emptied or destroyed. // */ // void destroy(); /** * Return the number of bodies attached to the joint. */ int getNumBodies(); /** * Attach the joint to some new bodies. * <p> * If the joint is already attached, it will be detached from the old bodies * first. * To attach this joint to only one body, set body1 or body2 to zero - a zero * body refers to the static environment. * Setting both bodies to zero puts the joint into "limbo", i.e. it will * have no effect on the simulation. * * <p>REMARK: * Some joints, like hinge-2 need to be attached to two bodies to work. */ void attach (DBody body1, DBody body2); /** * Set the user-data pointer. */ void setData (Object data); /** * Get the user-data pointer. */ Object getData(); //public dJointType getType(); /** * Return the bodies that this joint connects. * * <p> REMARK: * If one of these returned body IDs is zero, the joint connects the other body * to the static environment. 
* If both body IDs are zero, the joint is in ``limbo'' and has no effect on * the simulation. * * @param index return the first (0) or second (1) body. */ DBody getBody (int index); /** * Sets the datastructure that is to receive the feedback. * <p> * The feedback can be used by the user, so that it is known how * much force an individual joint exerts. */ void setFeedback(DJoint.DJointFeedback fb); /** * Gets the datastructure that is to receive the feedback. */ DJoint.DJointFeedback getFeedback(); /** If not implemented it will do nothing as describe in the doc. */ void setParam (PARAM_N type, double value); /** * Get joint parameter. * <p> * If not implemented it will do nothing as describe in the doc. */ double getParam (PARAM_N type); /** * Manually enable a joint. */ void enable(); /** * Manually disable a joint. * <p> REMARK: * A disabled joint will not affect the simulation, but will maintain the anchors and * axes so it can be enabled later. */ void disable(); /** * Check whether a joint is enabled. * @return 1 if a joint is currently enabled or 0 if it is disabled. 
*/
	boolean isEnabled();

	/** Offsets that select the first/second/third parameter set (axis) in PARAM_N. */
	static final int P_OFS_1 = 0x000;
	static final int P_OFS_2 = 0x100;
	static final int P_OFS_3 = 0x200;

	/**
	 * Joint parameter codes without an axis offset (see PARAM_N for the
	 * per-axis variants).
	 */
	public enum PARAM {
		/* parameters for limits and motors */
		dParamLoStop(0),
		dParamHiStop(1),
		dParamVel(2),
		dParamLoVel(3),
		dParamHiVel(4),
		dParamFMax(5),
		dParamFudgeFactor(6),
		dParamBounce(7),
		dParamCFM(8),
		dParamStopERP(9),
		dParamStopCFM(10),
		/* parameters for suspension */
		dParamSuspensionERP(11),
		dParamSuspensionCFM(12),
		dParamERP(13);

		/** Numeric parameter code (matches the native ODE dParam* values). */
		private final int _x;

		private PARAM(int x) {
			_x = x;
		}

		/**
		 * Returns the PARAM whose code equals this code masked with {@code i}.
		 * @throws IllegalArgumentException if no parameter has the masked code
		 */
		public PARAM and(int i) {
			int n = _x & i;
			for (PARAM param : values()) {
				if (param._x == n) {
					return param;
				}
			}
			// FIX: the message previously ended with the literal text "= n"
			// instead of the computed value of n.
			throw new IllegalArgumentException(name() + "->" + _x + " & " + i + " = " + n);
		}

		/**
		 * Maps a raw parameter code back to its PARAM constant.
		 * @throws IllegalArgumentException if {@code n} matches no parameter
		 */
		public static PARAM toEnum(int n) {
			for (PARAM param : values()) {
				if (param._x == n) {
					return param;
				}
			}
			throw new IllegalArgumentException("n = " + n);
		}
	}

	/**
	 * Joint parameter codes combined with an axis offset (P_OFS_1/2/3), used by
	 * {@link DJoint#setParam(PARAM_N, double)} and {@link DJoint#getParam(PARAM_N)}.
	 */
	public enum PARAM_N {
		/* parameters for limits and motors (first axis) */
		dParamLoStop1(0, P_OFS_1),
		dParamHiStop1(1, P_OFS_1),
		dParamVel1(2, P_OFS_1),
		// NOTE(review): the next two first-axis constants lack the "1" suffix used by
		// their siblings; renaming them would break the public API, so they are kept.
		dParamLoVel(3, P_OFS_1),
		dParamHiVel(4, P_OFS_1),
		dParamFMax1(5, P_OFS_1),
		dParamFudgeFactor1(6, P_OFS_1),
		dParamBounce1(7, P_OFS_1),
		dParamCFM1(8, P_OFS_1),
		dParamStopERP1(9, P_OFS_1),
		dParamStopCFM1(10, P_OFS_1),
		/* parameters for suspension (first axis) */
		dParamSuspensionERP1(11, P_OFS_1),
		dParamSuspensionCFM1(12, P_OFS_1),
		dParamERP1(13, P_OFS_1),
		/* parameters for limits and motors (second axis) */
		dParamLoStop2(0, P_OFS_2),
		dParamHiStop2(1, P_OFS_2),
		dParamVel2(2, P_OFS_2),
		dParamLoVel2(3, P_OFS_2),
		dParamHiVel2(4, P_OFS_2),
		dParamFMax2(5, P_OFS_2),
		dParamFudgeFactor2(6, P_OFS_2),
		dParamBounce2(7, P_OFS_2),
		dParamCFM2(8, P_OFS_2),
		dParamStopERP2(9, P_OFS_2),
		dParamStopCFM2(10, P_OFS_2),
		/* parameters for suspension (second axis) */
		dParamSuspensionERP2(11, P_OFS_2),
		dParamSuspensionCFM2(12, P_OFS_2),
		dParamERP2(13, P_OFS_2),
		/* parameters for limits and motors (third axis) */
		dParamLoStop3(0, P_OFS_3),
		dParamHiStop3(1, P_OFS_3),
		dParamVel3(2, P_OFS_3),
		dParamLoVel3(3, P_OFS_3),
		dParamHiVel3(4, P_OFS_3),
		dParamFMax3(5, P_OFS_3),
		dParamFudgeFactor3(6, P_OFS_3),
		dParamBounce3(7, P_OFS_3),
		dParamCFM3(8, P_OFS_3),
		dParamStopERP3(9, P_OFS_3),
		dParamStopCFM3(10, P_OFS_3),
		/* parameters for suspension (third axis) */
		dParamSuspensionERP3(11, P_OFS_3),
		dParamSuspensionCFM3(12, P_OFS_3),
		dParamERP3(13, P_OFS_3);

		/** Combined code: parameter code plus the axis offset. */
		private final int _x;
		/** Axis group this parameter belongs to. */
		private final PARAM_GROUPS _group;
		/** Axis-independent parameter. */
		private final PARAM _sub;

		private PARAM_N(int x, int g) {
			_x = x + g;
			switch (g) {
			case P_OFS_1: _group = PARAM_GROUPS.dParamGroup1; break;
			case P_OFS_2: _group = PARAM_GROUPS.dParamGroup2; break;
			case P_OFS_3: _group = PARAM_GROUPS.dParamGroup3; break;
			default: throw new IllegalArgumentException(name() + " g=" + g);
			}
			_sub = PARAM.toEnum(x);
		}

		/** Returns the axis group (first/second/third parameter set). */
		public PARAM_GROUPS toGROUP() {
			return _group;
		}

		/** Returns the axis-independent parameter. */
		public PARAM toSUB() {
			return _sub;
		}

		public boolean isGroup1() {
			return _group == PARAM_GROUPS.dParamGroup1;
		}

		public boolean isGroup2() {
			return _group == PARAM_GROUPS.dParamGroup2;
		}

		public boolean isGroup3() {
			return _group == PARAM_GROUPS.dParamGroup3;
		}

		/**
		 * Maps a raw combined code back to its PARAM_N constant.
		 * @throws IllegalArgumentException if {@code n} matches no parameter
		 */
		public static PARAM_N toEnum(int n) {
			for (PARAM_N param : values()) {
				if (param._x == n) {
					return param;
				}
			}
			throw new IllegalArgumentException("n = " + n);
		}
	}

	/** The three per-axis parameter groups. */
	public enum PARAM_GROUPS {
		dParamGroup1(P_OFS_1, 0),
		dParamGroup2(P_OFS_2, 1),
		dParamGroup3(P_OFS_3, 2);

		/** Zero-based axis index of this group. */
		private final int _index;

		// The first argument (the P_OFS_* offset) is accepted for symmetry with
		// PARAM_N but is not currently stored — kept to preserve the public shape.
		private PARAM_GROUPS(int x, int index) {
			_index = index;
		}

		public int getIndex() {
			return _index;
		}
	}

	/* transmission joint mode numbers */
	public enum TRANSMISSION {
		dTransmissionParallelAxes,     // = 0
		dTransmissionIntersectingAxes, // = 1
		dTransmissionChainDrive,       // = 2
	}

	/**
	 * Destroys the joint: disconnects it from its attached bodies and removes it
	 * from the world.
	 */
	void destroy();
}
/* * Hibernate Validator, declare and validate application constraints * * License: Apache License, Version 2.0 * See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>. */ package org.hibernate.validator.test.cfg; import java.lang.annotation.ElementType; import java.util.Set; import javax.validation.ConstraintDeclarationException; import javax.validation.ConstraintViolation; import javax.validation.ConstraintViolationException; import javax.validation.Validator; import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; import javax.validation.groups.Default; import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import org.hibernate.validator.HibernateValidator; import org.hibernate.validator.HibernateValidatorConfiguration; import org.hibernate.validator.cfg.ConstraintMapping; import org.hibernate.validator.cfg.GenericConstraintDef; import org.hibernate.validator.cfg.defs.NotNullDef; import org.hibernate.validator.cfg.defs.SizeDef; import org.hibernate.validator.testutil.TestForIssue; import static org.hibernate.validator.testutil.ConstraintViolationAssert.assertCorrectConstraintViolationMessages; import static org.hibernate.validator.testutil.ConstraintViolationAssert.assertCorrectPropertyPaths; import static org.hibernate.validator.testutil.ConstraintViolationAssert.assertThat; import static org.hibernate.validator.testutil.ConstraintViolationAssert.pathWith; import static org.hibernate.validator.testutil.ValidatorUtil.getConfiguration; import static org.hibernate.validator.testutil.ValidatorUtil.getValidatingProxy; import static org.testng.Assert.assertNull; import static org.testng.Assert.fail; /** * Tests the definition of method constraints with the programmatic API. 
* * @author Kevin Pollet &lt;kevin.pollet@serli.com&gt; (C) 2011 SERLI */ public class MethodConstraintMappingTest { private HibernateValidatorConfiguration config; private GreetingService wrappedObject; @BeforeClass public void setUp() { wrappedObject = new GreetingServiceImpl(); } @BeforeMethod public void setUpTest() { config = getConfiguration( HibernateValidator.class ); } @Test public void testCascadingMethodReturnDefinition() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", User.class ) .returnValue() .valid(); config.addMapping( mapping ); GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); try { service.greet( new User( "foo" ) ); fail( "Expected exception wasn't thrown." ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "may not be null" ); assertCorrectPropertyPaths( e, "greet.<return value>.message" ); } } @Test public void testCascadingMethodReturnDefinitionWithGroupConversion() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", User.class ) .returnValue() .valid() .convertGroup( Default.class ).to( TestGroup.class ) .type( Message.class ) .property( "message", ElementType.FIELD ) .constraint( new NotNullDef() .message( "message must not be null" ) .groups( TestGroup.class ) ); config.addMapping( mapping ); GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); try { service.greet( new User( "foo" ) ); fail( "Expected exception wasn't thrown." 
); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "message must not be null" ); assertCorrectPropertyPaths( e, "greet.<return value>.message" ); } } @Test public void testCascadingMethodParameterDefinition() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", User.class ) .parameter( 0 ) .valid(); config.addMapping( mapping ); GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); try { service.greet( new User( null ) ); fail( "Expected exception wasn't thrown." ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "may not be null" ); assertCorrectPropertyPaths( e, "greet.arg0.name" ); } } @Test( expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "HV[0-9]*: Type .*GreetingService doesn't have a method greet().*" ) public void testCascadingDefinitionOnMissingMethod() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet" ) .returnValue() .valid(); config.buildValidatorFactory().getValidator(); } @Test( expectedExceptions = IllegalArgumentException.class, expectedExceptionsMessageRegExp = "HV000056.*" ) public void testCascadingDefinitionOnInvalidMethodParameter() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", User.class ) .parameter( 1 ) .valid(); config.buildValidatorFactory().getValidator(); } @Test public void testOverridingMethodMayDefineSameConstraintsAsOverriddenMethod() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", String.class ) .parameter( 0 ) .constraint( new SizeDef().min( 5 ).max( 10 ) ) .type( GreetingServiceImpl.class ) .method( "greet", String.class ) .parameter( 0 ) .constraint( new SizeDef().min( 5 ).max( 
10 ) ); config.addMapping( mapping ); GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); try { service.greet( "Hi" ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "size must be between 5 and 10" ); assertCorrectPropertyPaths( e, "greet.arg0" ); } } @Test public void testParameterCanMarkedAsCascadedSeveralTimesInTheHierarchy() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", User.class ) .parameter( 0 ) .valid() .type( GreetingServiceImpl.class ) .method( "greet", User.class ) .parameter( 0 ) .valid(); config.addMapping( mapping ); GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); try { service.greet( new User( null ) ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "may not be null" ); assertCorrectPropertyPaths( e, "greet.arg0.name" ); } } @Test(expectedExceptions = ConstraintDeclarationException.class, expectedExceptionsMessageRegExp = "HV000151.*") public void testCascadingMethodParameterDefinedOnlyOnSubType() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingServiceImpl.class ) .method( "greet", User.class ) .parameter( 0 ) .valid(); config.addMapping( mapping ); GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.greet( new User( null ) ); } @Test public void testParameterConstraint() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", User.class ) .parameter( 0 ) .constraint( new NotNullDef() ); config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.greet( (User) null ); fail( "Expected exception wasn't thrown." 
); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "may not be null" ); assertCorrectPropertyPaths( e, "greet.arg0" ); } } @Test public void testGenericParameterConstraint() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", String.class ) .parameter( 0 ) .constraint( new GenericConstraintDef<Size>( Size.class ).param( "min", 1 ).param( "max", 10 ) ); config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.greet( "" ); fail( "Expected exception wasn't thrown." ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "size must be between 1 and 10" ); assertCorrectPropertyPaths( e, "greet.arg0" ); } } @Test public void testMultipleParameterConstraintsAtSameParameter() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", String.class ) .parameter( 0 ) .constraint( new SizeDef().min( 1 ).max( 10 ) ) .constraint( new SizeDef().min( 2 ).max( 10 ) ); config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.greet( "" ); fail( "Expected exception wasn't thrown." 
); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "size must be between 1 and 10", "size must be between 2 and 10" ); assertCorrectPropertyPaths( e, "greet.arg0", "greet.arg0" ); } } @Test public void testMultipleParameterConstraintsAtDifferentParameters() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", String.class, String.class ) .parameter( 0 ) .constraint( new SizeDef().min( 1 ).max( 10 ) ) .parameter( 1 ) .constraint( new SizeDef().min( 1 ).max( 10 ) ); config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.greet( "", "" ); fail( "Expected exception wasn't thrown." ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "size must be between 1 and 10", "size must be between 1 and 10" ); assertCorrectPropertyPaths( e, "greet.arg0", "greet.arg1" ); } } @Test public void testProgrammaticAndAnnotationParameterConstraintsAddUp() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "sayHello", String.class ) .parameter( 0 ) .constraint( new SizeDef().min( 2 ).max( 10 ) ); config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.sayHello( "" ); fail( "Expected exception wasn't thrown." 
); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "size must be between 1 and 10", "size must be between 2 and 10" ); assertCorrectPropertyPaths( e, "sayHello.arg0", "sayHello.arg0" ); } } @Test public void testConstraintAtCascadedParameter() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", User.class ) .parameter( 0 ) .constraint( new NotNullDef() ) .valid(); config.addMapping( mapping ); GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); try { service.greet( (User) null ); fail( "Expected exception wasn't thrown." ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "may not be null" ); assertCorrectPropertyPaths( e, "greet.arg0" ); } try { service.greet( new User( null ) ); fail( "Expected exception wasn't thrown." ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "may not be null" ); assertCorrectPropertyPaths( e, "greet.arg0.name" ); } } @Test public void testReturnValueConstraint() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", String.class ) .returnValue() .constraint( new SizeDef().min( 1 ).max( 10 ) ); config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.greet( "Hello" ); fail( "Expected exception wasn't thrown." 
); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "size must be between 1 and 10" ); assertCorrectPropertyPaths( e, "greet.<return value>" ); } } @Test public void testMultipleReturnValueConstraints() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", String.class ) .returnValue() .constraint( new SizeDef().min( 1 ).max( 10 ) ) .constraint( new SizeDef().min( 2 ).max( 10 ) ); config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.greet( "Hello" ); fail( "Expected exception wasn't thrown." ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "size must be between 1 and 10", "size must be between 2 and 10" ); assertCorrectPropertyPaths( e, "greet.<return value>", "greet.<return value>" ); } } @Test public void testGenericReturnValueConstraint() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", String.class ) .returnValue() .constraint( new GenericConstraintDef<Size>( Size.class ).param( "min", 1 ).param( "max", 10 ) ); config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.greet( "" ); fail( "Expected exception wasn't thrown." 
); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "size must be between 1 and 10" ); assertCorrectPropertyPaths( e, "greet.<return value>" ); } } @Test public void testProgrammaticAndAnnotationReturnValueConstraintsAddUp() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", String.class, String.class ) .returnValue() .constraint( new SizeDef().min( 2 ).max( 10 ) ); config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.greet( "Hello", "World" ); fail( "Expected exception wasn't thrown." ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "size must be between 1 and 10", "size must be between 2 and 10" ); assertCorrectPropertyPaths( e, "greet.<return value>", "greet.<return value>" ); } } @Test public void constraintConfiguredOnPropertyIsEvaluatedByMethodValidation() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .property( "hello", ElementType.METHOD ) .constraint( new NotNullDef() ); config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.getHello(); fail( "Expected exception wasn't thrown." 
); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "may not be null" ); assertCorrectPropertyPaths( e, "getHello.<return value>" ); } } @Test public void cascadeConfiguredOnPropertyIsEvaluatedByMethodValidation() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .property( "user", ElementType.METHOD ) .valid(); config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.getUser(); fail( "Expected exception wasn't thrown." ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "may not be null" ); assertCorrectPropertyPaths( e, "getUser.<return value>.name" ); } } @Test public void constraintConfiguredOnFieldIsNotEvaluatedByMethodValidation() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingServiceImpl.class ) .property( "hello", ElementType.FIELD ) .constraint( new NotNullDef() ); config.addMapping( mapping ); GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); assertNull( service.getHello() ); } @Test public void cascadeConfiguredOnFieldIsNotEvaluatedByMethodValidation() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingServiceImpl.class ) .property( "user", ElementType.FIELD ) .valid(); config.addMapping( mapping ); GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); assertNull( service.getUser().getName() ); } @Test public void constraintConfiguredOnMethodIsEvaluatedByPropertyValidation() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "getHello" ) .returnValue() .constraint( new NotNullDef() ); config.addMapping( mapping ); Validator validator = config.buildValidatorFactory().getValidator(); 
Set<ConstraintViolation<GreetingServiceImpl>> violations = validator.validateProperty( new GreetingServiceImpl(), "hello" ); assertCorrectConstraintViolationMessages( violations, "may not be null" ); assertCorrectPropertyPaths( violations, "hello" ); } @Test public void cascadeConfiguredOnMethodIsEvaluatedByPropertyValidation() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "getUser" ) .returnValue() .valid(); config.addMapping( mapping ); Validator validator = config.buildValidatorFactory().getValidator(); Set<ConstraintViolation<GreetingServiceImpl>> violations = validator.validate( new GreetingServiceImpl() ); assertCorrectConstraintViolationMessages( violations, "may not be null" ); assertCorrectPropertyPaths( violations, "user.name" ); } @Test @TestForIssue(jiraKey = "HV-769") public void shouldDetermineConstraintTargetForReturnValueConstraint() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", String.class, String.class ) .returnValue() .constraint( new GenericConstraintDef<GenericAndCrossParameterConstraint>( GenericAndCrossParameterConstraint.class ) ); config.addMapping( mapping ); Validator validator = config.buildValidatorFactory().getValidator(); GreetingService service = getValidatingProxy( wrappedObject, validator ); try { service.greet( null, null ); fail( "Expected exception wasn't thrown" ); } catch ( ConstraintViolationException cve ) { assertThat( cve.getConstraintViolations() ).containsOnlyPaths( pathWith().method( "greet" ).returnValue() ); } } @Test @TestForIssue(jiraKey = "HV-642") public void crossParameterConstraint() { ConstraintMapping mapping = config.createConstraintMapping(); mapping.type( GreetingService.class ) .method( "greet", String.class, String.class ) .crossParameter() .constraint( new GenericConstraintDef<GenericAndCrossParameterConstraint>( GenericAndCrossParameterConstraint.class ) ); 
config.addMapping( mapping ); try { GreetingService service = getValidatingProxy( wrappedObject, config.buildValidatorFactory().getValidator() ); service.greet( "", "" ); fail( "Expected exception wasn't thrown." ); } catch ( ConstraintViolationException e ) { assertCorrectConstraintViolationMessages( e, "default message" ); assertCorrectPropertyPaths( e, "greet.<cross-parameter>" ); } } private interface TestGroup { } public class User { @NotNull private final String name; public User(String name) { this.name = name; } public String getName() { return name; } } public class Message { @NotNull private final String message; public Message(String message) { this.message = message; } } public interface GreetingService { Message greet(User user); String greet(String string); @Size(min = 1, max = 10) String greet(String string1, String string2); Message sayHello(@Size(min = 1, max = 10) String name); Message getHello(); User getUser(); } public class GreetingServiceImpl implements GreetingService { @SuppressWarnings("unused") private Message hello; @SuppressWarnings("unused") private User user; @Override public Message greet(User user) { return new Message( null ); } @Override public String greet(String string) { return ""; } @Override public String greet(String string1, String string2) { return ""; } @Override public Message sayHello(String name) { return null; } @Override public Message getHello() { return null; } @Override public User getUser() { return new User( null ); } } }
package com.tom.factory.tileentity; import net.minecraft.block.state.IBlockState; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.init.Blocks; import net.minecraft.init.Items; import net.minecraft.inventory.ISidedInventory; import net.minecraft.inventory.InventoryBasic; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.util.EnumFacing; import net.minecraft.util.math.BlockPos; import net.minecraft.util.text.ITextComponent; import net.minecraft.util.text.TextComponentString; import com.tom.api.tileentity.TileEntityTomsMod; import com.tom.factory.FactoryInit; import com.tom.factory.block.BlockBlastFurnace; import com.tom.recipes.handler.MachineCraftingHandler; import com.tom.recipes.handler.MachineCraftingHandler.ItemStackChecker; import com.tom.util.TomsModUtils; public class TileEntityBlastFurnace extends TileEntityTomsMod implements ISidedInventory { private InventoryBasic inv = new InventoryBasic("", false, getSizeInventory()); protected int progress = -1; protected int burnTime = 0; private int totalBurnTime = 0; private int maxProgress = 0; private static final int[] SLOTS = new int[]{0, 1, 2}; @Override public NBTTagCompound writeToNBT(NBTTagCompound tag) { super.writeToNBT(tag); tag.setTag("inventory", TomsModUtils.saveAllItems(inv)); tag.setInteger("progress", progress); tag.setInteger("burnTime", burnTime); tag.setInteger("maxProgress", maxProgress); tag.setInteger("totalBurnTime", totalBurnTime); return tag; } @Override public void readFromNBT(NBTTagCompound tag) { super.readFromNBT(tag); TomsModUtils.loadAllItems(tag.getTagList("inventory", 10), inv); this.progress = tag.getInteger("progress"); this.burnTime = tag.getInteger("burnTime"); this.maxProgress = tag.getInteger("maxProgress"); this.totalBurnTime = tag.getInteger("totalBurnTime"); } @Override public int getSizeInventory() { return 3; } @Override public int getInventoryStackLimit() { return 64; } 
@Override public boolean isUsableByPlayer(EntityPlayer player) { return TomsModUtils.isUsable(pos, player, world, this); } @Override public void openInventory(EntityPlayer player) { } @Override public void closeInventory(EntityPlayer player) { } @Override public boolean isItemValidForSlot(int index, ItemStack stack) { return true; } @Override public int getField(int id) { return id == 0 ? progress : id == 1 ? maxProgress : id == 2 ? burnTime : id == 3 ? totalBurnTime : 0; } @Override public void setField(int id, int value) { if (id == 0) burnTime = value; else if (id == 1) progress = value; // else if(id == 1)maxProgress = value; } @Override public int getFieldCount() { return 1; } @Override public String getName() { return "blastFurnace"; } @Override public boolean hasCustomName() { return false; } @Override public ITextComponent getDisplayName() { return new TextComponentString(getName()); } @Override public int[] getSlotsForFace(EnumFacing side) { return SLOTS; } @Override public boolean canInsertItem(int index, ItemStack is, EnumFacing direction) { return index == 0 || (index == 2 && is != null && (is.getItem() == FactoryInit.coalCoke || (is.getItem() == Items.COAL && is.getMetadata() == 1))); } @Override public boolean canExtractItem(int index, ItemStack stack, EnumFacing direction) { return index == 1; } @Override public void updateEntity(IBlockState state) { if (!world.isRemote) { if (checkIfMerged(state)) { if (progress > 0) { if (burnTime > 0) { updateProgress(); } else { if ((inv.getStackInSlot(2).getItem() == FactoryInit.coalCoke || (inv.getStackInSlot(2).getItem() == Items.COAL && inv.getStackInSlot(2).getMetadata() == 1) || inv.getStackInSlot(2).getItem() == Item.getItemFromBlock(FactoryInit.blockCoalCoke))) { totalBurnTime = burnTime = inv.getStackInSlot(2).getItem() == FactoryInit.coalCoke ? 3200 : (inv.getStackInSlot(2).getItem() == Item.getItemFromBlock(FactoryInit.blockCoalCoke) ? 
28800 : 1600); decrStackSize(2, 1); } } } else if (progress == 0) { ItemStackChecker s = MachineCraftingHandler.getBlastFurnaceOutput(inv.getStackInSlot(0), ItemStack.EMPTY, 0); if (s != null) { if (!inv.getStackInSlot(1).isEmpty()) { if (TomsModUtils.areItemStacksEqual(inv.getStackInSlot(1), s.getStack(), true, true, false) && inv.getStackInSlot(1).getCount() + s.getStack().getCount() <= s.getStack().getMaxStackSize() && inv.getStackInSlot(0).getCount() >= s.getExtra()) { inv.getStackInSlot(1).grow(s.getStack().getCount()); progress = -1; maxProgress = 0; decrStackSize(0, s.getExtra()); } } else { progress = -1; maxProgress = 0; inv.setInventorySlotContents(1, s.getStack()); decrStackSize(0, s.getExtra()); } } else { progress = -1; maxProgress = 0; } } else { ItemStackChecker s = MachineCraftingHandler.getBlastFurnaceOutput(inv.getStackInSlot(0), ItemStack.EMPTY, 0); if (s != null) { if (!inv.getStackInSlot(1).isEmpty()) { if (TomsModUtils.areItemStacksEqual(inv.getStackInSlot(1), s.getStack(), true, true, false) && inv.getStackInSlot(1).getCount() + s.getStack().getCount() <= s.getStack().getMaxStackSize() && inv.getStackInSlot(0).getCount() >= s.getExtra()) { maxProgress = s.getExtra3(); progress = maxProgress; } } else { maxProgress = s.getExtra3(); progress = maxProgress; } } TomsModUtils.setBlockStateWithCondition(world, pos, BlockBlastFurnace.STATE, progress > 0 ? 
2 : 1); } } else { TomsModUtils.setBlockStateWithCondition(world, pos, BlockBlastFurnace.STATE, 0); } } } protected void updateProgress() { progress--; burnTime--; } public boolean checkIfMerged(IBlockState state) { EnumFacing facing = state.getValue(BlockBlastFurnace.FACING); BlockPos center = pos.offset(facing, 2); boolean isValid = check3x3(center.up(3)) && check3x3(center); isValid = isValid && checkMid(center.up()) && checkMid(center.up(2)); return isValid; } private boolean check3x3(BlockPos center) { boolean ret = isWall(center); if (ret) { for (EnumFacing f : EnumFacing.HORIZONTALS) { ret = ret && isWall(center.offset(f)) && isWall(center.offset(f).offset(f.rotateY())); if (!ret) return false; } } return ret; } private boolean checkMid(BlockPos center) { boolean ret = world.getBlockState(center).getBlock() == Blocks.LAVA; if (ret) { for (EnumFacing f : EnumFacing.HORIZONTALS) { ret = ret && isWall(center.offset(f)) && isWall(center.offset(f).offset(f.rotateY())); if (!ret) return false; } } return ret; } private boolean isWall(BlockPos pos) { return world.getBlockState(pos).getBlock() == FactoryInit.blastFurnaceWall; } @Override public ItemStack getStackInSlot(int index) { return inv.getStackInSlot(index); } @Override public ItemStack decrStackSize(int index, int count) { return inv.decrStackSize(index, count); } @Override public ItemStack removeStackFromSlot(int index) { return inv.removeStackFromSlot(index); } @Override public void setInventorySlotContents(int index, ItemStack stack) { inv.setInventorySlotContents(index, stack); } @Override public boolean isEmpty() { return inv.isEmpty(); } @Override public void clear() { inv.clear(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.arrow.vector; import org.apache.arrow.memory.BufferAllocator; import org.apache.arrow.vector.complex.impl.UInt1ReaderImpl; import org.apache.arrow.vector.complex.reader.FieldReader; import org.apache.arrow.vector.holders.NullableUInt1Holder; import org.apache.arrow.vector.holders.UInt1Holder; import org.apache.arrow.vector.types.Types.MinorType; import org.apache.arrow.vector.types.pojo.FieldType; import org.apache.arrow.vector.util.TransferPair; /** * UInt1Vector implements a fixed width (1 bytes) vector of * integer values which could be null. A validity buffer (bit vector) is * maintained to track which elements in the vector are null. 
*/

/**
 * UInt1Vector: a fixed-width value vector of single-byte (TYPE_WIDTH = 1)
 * unsigned integer values ({@code MinorType.UINT1}). Nullability is tracked
 * in the inherited {@code validityBuffer}; raw values live in the inherited
 * {@code valueBuffer}. The "safe" setter variants grow the buffers (via
 * {@code handleSafe}) before writing; the plain setters assume sufficient
 * capacity.
 */
public class UInt1Vector extends BaseFixedWidthVector {
  // Width in bytes of a single element; also used as the stride into valueBuffer.
  private static final byte TYPE_WIDTH = 1;
  // Reader facade over this vector, created once in the constructor.
  private final FieldReader reader;

  /**
   * Creates a nullable UINT1 vector with the given name and allocator.
   *
   * @param name      name of the vector
   * @param allocator allocator used for the underlying buffers
   */
  public UInt1Vector(String name, BufferAllocator allocator) {
    this(name, FieldType.nullable(MinorType.UINT1.getType()), allocator);
  }

  /**
   * Creates a UINT1 vector with an explicit {@link FieldType}.
   *
   * @param name      name of the vector
   * @param fieldType type (incl. nullability) of the field backing this vector
   * @param allocator allocator used for the underlying buffers
   */
  public UInt1Vector(String name, FieldType fieldType, BufferAllocator allocator) {
    super(name, allocator, fieldType, TYPE_WIDTH);
    reader = new UInt1ReaderImpl(UInt1Vector.this);
  }

  /** @return the reader bound to this vector */
  @Override
  public FieldReader getReader() {
    return reader;
  }

  /** @return {@link MinorType#UINT1} */
  @Override
  public MinorType getMinorType() {
    return MinorType.UINT1;
  }

  /*----------------------------------------------------------------*
   |                                                                |
   |          vector value retrieval methods                        |
   |                                                                |
   *----------------------------------------------------------------*/

  /**
   * Get the element at the given index from the vector.
   *
   * @param index position of element
   * @return element at given index
   * @throws IllegalStateException if the value at {@code index} is null
   */
  public byte get(int index) throws IllegalStateException {
    if (isSet(index) == 0) {
      throw new IllegalStateException("Value at index is null");
    }
    return valueBuffer.getByte(index * TYPE_WIDTH);
  }

  /**
   * Get the element at the given index from the vector and
   * sets the state in holder. If element at given index
   * is null, holder.isSet will be zero.
   *
   * @param index position of element
   */
  public void get(int index, NullableUInt1Holder holder) {
    if (isSet(index) == 0) {
      holder.isSet = 0;
      return;
    }
    holder.isSet = 1;
    holder.value = valueBuffer.getByte(index * TYPE_WIDTH);
  }

  /**
   * Same as {@link #get(int)}, except a null slot yields {@code null}
   * instead of throwing. Note the boxed return is the raw signed byte;
   * no unsigned widening is performed here.
   *
   * @param index position of element
   * @return element at given index
   */
  public Byte getObject(int index) {
    if (isSet(index) == 0) {
      return null;
    } else {
      return valueBuffer.getByte(index * TYPE_WIDTH);
    }
  }

  /**
   * Copies a value (and its null/set state) from {@code from} at
   * {@code fromIndex} into this vector at {@code thisIndex}.
   * Does NOT grow buffers; see {@link #copyFromSafe(int, int, UInt1Vector)}.
   *
   * @param fromIndex position to read in the source vector
   * @param thisIndex position to write in this vector
   * @param from      source vector
   */
  public void copyFrom(int fromIndex, int thisIndex, UInt1Vector from) {
    BitVectorHelper.setValidityBit(validityBuffer, thisIndex, from.isSet(fromIndex));
    final byte value = from.valueBuffer.getByte(fromIndex * TYPE_WIDTH);
    valueBuffer.setByte(thisIndex * TYPE_WIDTH, value);
  }

  /**
   * Same as {@link #copyFrom(int, int, UInt1Vector)} but grows this
   * vector's buffers first if {@code thisIndex} is beyond current capacity.
   *
   * @param fromIndex position to read in the source vector
   * @param thisIndex position to write in this vector
   * @param from      source vector
   */
  public void copyFromSafe(int fromIndex, int thisIndex, UInt1Vector from) {
    handleSafe(thisIndex);
    copyFrom(fromIndex, thisIndex, from);
  }

  /*----------------------------------------------------------------*
   |                                                                |
   |          vector value setter methods                           |
   |                                                                |
   *----------------------------------------------------------------*/

  // Raw value write (int overload); caller is responsible for the validity bit.
  private void setValue(int index, int value) {
    valueBuffer.setByte(index * TYPE_WIDTH, value);
  }

  // Raw value write (byte overload); caller is responsible for the validity bit.
  private void setValue(int index, byte value) {
    valueBuffer.setByte(index * TYPE_WIDTH, value);
  }

  /**
   * Set the element at the given index to the given value.
   *
   * @param index position of element
   * @param value value of element
   */
  public void set(int index, int value) {
    BitVectorHelper.setValidityBitToOne(validityBuffer, index);
    setValue(index, value);
  }

  /**
   * Set the element at the given index to the given value.
   *
   * @param index position of element
   * @param value value of element
   */
  public void set(int index, byte value) {
    BitVectorHelper.setValidityBitToOne(validityBuffer, index);
    setValue(index, value);
  }

  /**
   * Set the element at the given index to the value set in data holder.
   * If the value in holder is not indicated as set, element in the
   * at the given index will be null.
   *
   * @param index  position of element
   * @param holder nullable data holder for value of element
   * @throws IllegalArgumentException if {@code holder.isSet} is negative
   */
  public void set(int index, NullableUInt1Holder holder) throws IllegalArgumentException {
    if (holder.isSet < 0) {
      throw new IllegalArgumentException();
    } else if (holder.isSet > 0) {
      BitVectorHelper.setValidityBitToOne(validityBuffer, index);
      setValue(index, holder.value);
    } else {
      BitVectorHelper.setValidityBit(validityBuffer, index, 0);
    }
  }

  /**
   * Set the element at the given index to the value set in data holder.
   *
   * @param index  position of element
   * @param holder data holder for value of element
   */
  public void set(int index, UInt1Holder holder) {
    BitVectorHelper.setValidityBitToOne(validityBuffer, index);
    setValue(index, holder.value);
  }

  /**
   * Same as {@link #set(int, int)} except that it handles the
   * case when index is greater than or equal to existing
   * value capacity {@link #getValueCapacity()}.
   *
   * @param index position of element
   * @param value value of element
   */
  public void setSafe(int index, int value) {
    handleSafe(index);
    set(index, value);
  }

  /**
   * Same as {@link #set(int, byte)} except that it handles the
   * case when index is greater than or equal to existing
   * value capacity {@link #getValueCapacity()}.
   *
   * @param index position of element
   * @param value value of element
   */
  public void setSafe(int index, byte value) {
    handleSafe(index);
    set(index, value);
  }

  /**
   * Same as {@link #set(int, NullableUInt1Holder)} except that it handles the
   * case when index is greater than or equal to existing
   * value capacity {@link #getValueCapacity()}.
   *
   * @param index  position of element
   * @param holder nullable data holder for value of element
   */
  public void setSafe(int index, NullableUInt1Holder holder) throws IllegalArgumentException {
    handleSafe(index);
    set(index, holder);
  }

  /**
   * Same as {@link #set(int, UInt1Holder)} except that it handles the
   * case when index is greater than or equal to existing
   * value capacity {@link #getValueCapacity()}.
   *
   * @param index  position of element
   * @param holder data holder for value of element
   */
  public void setSafe(int index, UInt1Holder holder) {
    handleSafe(index);
    set(index, holder);
  }

  /**
   * Set the element at the given index to null.
   *
   * @param index position of element
   */
  public void setNull(int index) {
    handleSafe(index);
    // not really needed to set the bit to 0 as long as
    // the buffer always starts from 0.
    BitVectorHelper.setValidityBit(validityBuffer, index, 0);
  }

  /**
   * Sets value at {@code index} when {@code isSet} is positive; otherwise
   * marks the slot null. Mirrors the NullableUInt1Holder convention.
   *
   * @param index position of element
   * @param isSet positive to store {@code value}, otherwise store null
   * @param value value of element (ignored when slot is marked null)
   */
  public void set(int index, int isSet, byte value) {
    if (isSet > 0) {
      set(index, value);
    } else {
      BitVectorHelper.setValidityBit(validityBuffer, index, 0);
    }
  }

  /**
   * Same as {@link #set(int, int, byte)} but grows buffers first if
   * {@code index} is beyond current value capacity.
   *
   * @param index position of element
   * @param isSet positive to store {@code value}, otherwise store null
   * @param value value of element (ignored when slot is marked null)
   */
  public void setSafe(int index, int isSet, byte value) {
    handleSafe(index);
    set(index, isSet, value);
  }

  /*----------------------------------------------------------------*
   |                                                                |
   |                      vector transfer                           |
   |                                                                |
   *----------------------------------------------------------------*/

  /** Creates a transfer pair targeting a new vector named {@code ref}. */
  @Override
  public TransferPair getTransferPair(String ref, BufferAllocator allocator) {
    return new TransferImpl(ref, allocator);
  }

  /** Creates a transfer pair targeting an existing UInt1Vector {@code to}. */
  @Override
  public TransferPair makeTransferPair(ValueVector to) {
    return new TransferImpl((UInt1Vector) to);
  }

  /**
   * TransferPair implementation delegating to the inherited
   * {@code transferTo}/{@code splitAndTransferTo} buffer-transfer helpers.
   */
  private class TransferImpl implements TransferPair {
    UInt1Vector to;

    // Target is a freshly allocated vector with this vector's field type.
    public TransferImpl(String ref, BufferAllocator allocator) {
      to = new UInt1Vector(ref, field.getFieldType(), allocator);
    }

    // Target is a caller-supplied vector.
    public TransferImpl(UInt1Vector to) {
      this.to = to;
    }

    @Override
    public UInt1Vector getTo() {
      return to;
    }

    @Override
    public void transfer() {
      transferTo(to);
    }

    @Override
    public void splitAndTransfer(int startIndex, int length) {
      splitAndTransferTo(startIndex, length, to);
    }

    @Override
    public void copyValueSafe(int fromIndex, int toIndex) {
      to.copyFromSafe(fromIndex, toIndex, UInt1Vector.this);
    }
  }
}
/* * Copyright (c) 2013 Cosmin Stejerean, Karl Heinz Marbaise, and contributors. * * Distributed under the MIT license: http://opensource.org/licenses/MIT */ package com.offbytwo.jenkins; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.BDDMockito.given; import static org.mockito.Matchers.anyMap; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import java.io.IOException; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; import org.apache.http.entity.ContentType; import org.junit.Before; import org.junit.Test; import org.mockito.ArgumentCaptor; import com.offbytwo.jenkins.client.JenkinsHttpClient; import com.offbytwo.jenkins.client.JenkinsHttpConnection; import com.offbytwo.jenkins.model.FolderJob; import com.offbytwo.jenkins.model.Job; import com.offbytwo.jenkins.model.JobWithDetails; import com.offbytwo.jenkins.model.MainView; import com.offbytwo.jenkins.model.View; import java.net.URI; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class JenkinsServerTest extends BaseUnitTest { private JenkinsHttpConnection client = mock(JenkinsHttpClient.class); private JenkinsServer server = new JenkinsServer(client); private MainView mainView = new MainView(new Job("Hello", "http://localhost/job/Hello/")); @Before public void setUp() throws Exception { given(client.get("/", MainView.class)).willReturn(mainView); } @Test public void shouldReturnListOfJobs() throws Exception { assertTrue(server.getJobs().containsKey("Hello")); } @Test public void testGetJobXml() throws Exception { // given String xmlString = "<xml>some xml goes here</xml>"; String jobName = "pr"; given(client.get(anyString())).willReturn(xmlString); // when String xmlReturn = 
server.getJobXml(jobName); // then verify(client).get("/job/pr/config.xml"); assertEquals(xmlString, xmlReturn); } @Test public void testFolderGetJobs() throws Exception { String[] jobNames = { "job-the-first", "Job-The-Next", "Job-the-Next" }; // given String path = "http://localhost/jobs/someFolder/"; Job someJob = new Job("jobname", path + "jobname"); FolderJob folderJob = new FolderJob("someFolder", path); List<Job> someJobs = createTestJobs(path, jobNames); MainView mv = createTestView(someJobs); given(client.get(eq(path), eq(MainView.class))).willReturn(mv); // when Map<String, Job> map = server.getJobs(folderJob); // then verify(client).get(path, MainView.class); for (String name : jobNames) assertTrue(someJobs.contains(map.get(name))); assertEquals(jobNames.length, map.size()); } @Test public void testFolderGetJob() throws Exception { // given String path = "http://localhost/jobs/someFolder/"; JobWithDetails someJob = mock(JobWithDetails.class); FolderJob folderJob = new FolderJob("someFolder", path); given(client.get(eq(path + "job/jobname"), eq(JobWithDetails.class))).willReturn(someJob); // when Job jobResult = server.getJob(folderJob, "jobname"); // then verify(client).get(path + "job/jobname", JobWithDetails.class); assertEquals(someJob, jobResult); } @Test public void testFolderGetView() throws Exception { // given String path = "http://localhost/jobs/someFolder/"; FolderJob folderJob = new FolderJob("someFolder", path); View someView = mock(View.class); given(client.get(eq(path + "view/viewname/"), eq(View.class))).willReturn(someView); // when View viewResult = server.getView(folderJob, "viewname"); // then verify(client).get(path + "view/viewname/", View.class); assertEquals(someView, viewResult); } @Test public void testGetFolderJob() throws Exception { // given String path = "http://localhost/jobs/someFolder/"; Job someJob = new Job("someFolder", path); FolderJob folderJob = mock(FolderJob.class); given(folderJob.isFolder()).willReturn(true); 
given(client.get(eq(path), eq(FolderJob.class))).willReturn(folderJob); // when Optional<FolderJob> oj = server.getFolderJob(someJob); // then verify(client).get(path, FolderJob.class); assertEquals(folderJob, oj.get()); } @Test public void testGetFolderJobInvalidFolder() throws Exception { // given String path = "http://localhost/jobs/someFolder/"; Job someJob = new Job("someFolder", path); FolderJob folderJob = mock(FolderJob.class); given(folderJob.isFolder()).willReturn(false); given(client.get(eq(path), eq(FolderJob.class))).willReturn(folderJob); // when Optional<FolderJob> oj = server.getFolderJob(someJob); // then verify(client).get(path, FolderJob.class); assertEquals(false, oj.isPresent()); } @Test public void testCreateFolderJob() throws Exception { // when server.createFolder("someFolder"); // then verify(client).post_form(eq("/createItem?"), anyMap(), eq(false)); } @Test public void testCreateSubFolderJob() throws Exception { // given String path = "http://localhost/jobs/someFolder/"; FolderJob folderJob = mock(FolderJob.class); given(folderJob.getUrl()).willReturn(path); // when server.createFolder(folderJob, "someFolder"); // then verify(client).post_form(eq(path + "createItem?"), anyMap(), eq(false)); } @Test public void testUpdateJobXml() throws Exception { // given String jobName = "pr"; String xmlString = "<xml>some xml goes here</xml>"; given(client.post_xml(anyString(), eq(xmlString))).willReturn(xmlString); // when server.updateJob(jobName, xmlString); // then ArgumentCaptor<String> captureString = ArgumentCaptor.forClass(String.class); verify(client).post_xml(eq("/job/pr/config.xml"), captureString.capture(), eq(true)); assertEquals(xmlString, captureString.getValue()); } @Test public void testCreateJob() throws Exception { // given String jobName = "test-job-" + UUID.randomUUID().toString(); String xmlString = "<xml>some xml goes here</xml>"; // when server.createJob(jobName, xmlString); // then ArgumentCaptor<String> captureString = 
ArgumentCaptor.forClass(String.class); verify(client).post_xml(eq("/createItem?name=" + jobName), captureString.capture(), eq(false)); String xmlReturn = captureString.getValue(); assertEquals(xmlReturn, xmlString); } @Test public void testJenkinsConnectivity() throws IOException { // given given(client.get("/")).willReturn("<xml>not a real response</xml>"); // then assertEquals(server.isRunning(), true); } @Test public void testJenkinsConnectivityBroken() throws IOException { // given given(client.get("/")).willThrow(IOException.class); // then assertEquals(server.isRunning(), false); } @Test public void testQuietDown() throws IOException { server.quietDown(); server.cancelQuietDown(); } @Test public void testScriptPosts() throws IOException, URISyntaxException { given(client.post_text("/scriptText", "script=script", ContentType.APPLICATION_FORM_URLENCODED, false)) .willReturn("result"); String result = server.runScript("script"); verify(client).post_text("/scriptText", "script=script", ContentType.APPLICATION_FORM_URLENCODED, false); assertEquals("result", result); } public void testJenkinsPathEncoding() throws IOException { given(client.get("/job/encoded%2Fproperly%3F/config.xml")).willReturn("<xml>not a real response</xml>"); assertEquals("<xml>not a real response</xml>", server.getJobXml("encoded/properly?")); } private MainView createTestView(List<Job> jobs) { return new MainView(jobs.toArray(new Job[0])); } private MainView createTestView(String baseUrl, String... jobNames) { return createTestView(createTestJobs(baseUrl, jobNames)); } private List<Job> createTestJobs(String baseUrl, String... 
jobNames) { List<Job> jobs = new ArrayList<Job>(); for (String name : jobNames) { jobs.add(new Job(name, baseUrl + name)); } return jobs; } @Test public void testReturnSingleJob() throws Exception { shouldReturnListOfJobs("hello"); } @Test public void testReturnListOfJobs() throws Exception { shouldReturnListOfJobs("hello", "Hello", "HeLLo"); } @Test public void testFolderGetSingleJob() throws Exception { shouldGetFolderJobs("jobname"); } private void shouldReturnListOfJobs(String... jobNames) throws Exception { MainView mainView = createTestView("http://localhost/job/", jobNames); given(client.get("/", MainView.class)).willReturn(mainView); Map<String, Job> jobs = server.getJobs(); for (String name : jobNames) assertTrue(jobs.containsKey(name)); assertEquals(jobNames.length, jobs.size()); } @Test public void testGetJobXmls() throws Exception { shouldGetJobXml("pr"); shouldGetJobXml("hello"); shouldGetJobXml("Hello"); shouldGetJobXml("HeLLo"); } @Test public void getVersionShouldNotFailWithNPE() throws Exception { when (client.get( "/" )).thenReturn( "TheAnswer"); when (client.getJenkinsVersion()).thenReturn( "1.23"); JenkinsServer server = new JenkinsServer( client); server.getVersion(); verify( client, times( 1 )).isJenkinsVersionSet(); verify( client, times( 1 )).get( "/" ); verify( client, times( 1 )).getJenkinsVersion(); } @Test(expected=IllegalStateException.class) public void testClose() throws Exception { final String uri = "http://localhost/jenkins"; JenkinsServer srv = new JenkinsServer(new URI(uri)); srv.close(); srv.close(); //check multiple calls yield no errors srv.getComputers(); } private void shouldGetFolderJobs(String... 
jobNames) throws IOException { // given String path = "http://localhost/jobs/someFolder/"; FolderJob folderJob = new FolderJob("someFolder", path); List<Job> someJobs = createTestJobs(path, jobNames); MainView mv = createTestView(someJobs); given(client.get(eq(path), eq(MainView.class))).willReturn(mv); // when Map<String, Job> map = server.getJobs(folderJob); // then verify(client).get(path, MainView.class); for (String name : jobNames) assertTrue(someJobs.contains(map.get(name))); assertEquals(jobNames.length, map.size()); } private void shouldGetJobXml(String jobName) throws Exception { // given String xmlString = "<xml>some xml goes here</xml>"; given(client.get(anyString())).willReturn(xmlString); // when String xmlReturn = server.getJobXml(jobName); // then verify(client).get("/job/" + jobName + "/config.xml"); verify(client).get("/job/" + jobName + "/config.xml"); assertEquals(xmlString, xmlReturn); } }
/** * DownloadComparisonResults.java * @author Vagisha Sharma * May 4, 2009 * @version 1.0 */ package org.yeastrc.www.compare; import java.io.PrintWriter; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Logger; import org.apache.struts.action.Action; import org.apache.struts.action.ActionErrors; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; import org.yeastrc.ms.dao.ProteinferDAOFactory; import org.yeastrc.ms.dao.protinfer.ibatis.ProteinferPeptideDAO; import org.yeastrc.ms.dao.protinfer.ibatis.ProteinferProteinDAO; import org.yeastrc.ms.domain.protinfer.PeptideDefinition; import org.yeastrc.ms.domain.protinfer.ProteinferIon; import org.yeastrc.ms.domain.protinfer.ProteinferPeptide; import org.yeastrc.ms.domain.protinfer.ProteinferRun; import org.yeastrc.ms.domain.protinfer.ProteinferSpectrumMatch; import org.yeastrc.ms.domain.search.MsSearchResult; import org.yeastrc.ms.domain.search.Program; import org.yeastrc.ms.service.ModifiedSequenceBuilderException; import org.yeastrc.ms.util.StringUtils; import org.yeastrc.ms.util.TimeUtils; import org.yeastrc.nr_seq.listing.ProteinReference; import org.yeastrc.www.compare.dataset.Dataset; import org.yeastrc.www.compare.dataset.DatasetProteinInformation; import org.yeastrc.www.compare.dataset.DatasetSource; import org.yeastrc.www.compare.graph.ComparisonProteinGroup; import org.yeastrc.www.proteinfer.MsResultLoader; /** * */ public class DownloadComparisonResults extends Action { private static final Logger log = Logger.getLogger(DownloadComparisonResults.class.getName()); @Override public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest 
request, HttpServletResponse response) throws Exception { log.info("Downloading comparison results"); long startTime = System.currentTimeMillis(); ProteinSetComparisonForm myForm = (ProteinSetComparisonForm) request.getAttribute("comparisonForm"); if(myForm == null) { ActionErrors errors = new ActionErrors(); errors.add(ActionErrors.GLOBAL_ERROR, new ActionMessage("error.general.errorMessage", "Comparison form not found in request")); saveErrors( request, errors ); return mapping.findForward("Failure"); } response.setContentType("text/plain"); response.setHeader("Content-Disposition","attachment; filename=\"ProteinSetComparison.txt\""); response.setHeader("cache-control", "no-cache"); PrintWriter writer = response.getWriter(); writer.write("\n\n"); writer.write("Date: "+new Date()+"\n\n"); // Is the data clustered if(myForm.isCluster()) { writer.write("Clustered Spectrum Counts = TRUE\n\n"); } if(!myForm.getGroupIndistinguishableProteins()) { ProteinComparisonDataset comparison = (ProteinComparisonDataset) request.getAttribute("comparisonDataset"); if(comparison == null) { ActionErrors errors = new ActionErrors(); errors.add(ActionErrors.GLOBAL_ERROR, new ActionMessage("error.general.errorMessage", "Comparison dataset not found in request")); saveErrors( request, errors ); return mapping.findForward("Failure"); } comparison.setDisplayColumns(myForm.getDisplayColumns()); // columns we will print comparison.setDatasetOrder(myForm.getAllSelectedRunIdsOrdered()); // dataset order long s = System.currentTimeMillis(); writeResults(writer, comparison, myForm.getSelectedBooleanFilters(), myForm); writer.close(); long e = System.currentTimeMillis(); log.info("Results written in: "+TimeUtils.timeElapsedMinutes(s,e)+" minutes"); } else { ProteinGroupComparisonDataset grpComparison = (ProteinGroupComparisonDataset) request.getAttribute("comparisonGroupDataset"); if(grpComparison == null) { ActionErrors errors = new ActionErrors(); errors.add(ActionErrors.GLOBAL_ERROR, new 
ActionMessage("error.general.errorMessage", "Comparison dataset not found in request")); saveErrors( request, errors ); return mapping.findForward("Failure"); } grpComparison.setDisplayColumns(myForm.getDisplayColumns()); // columns we will print grpComparison.setDatasetOrder(myForm.getAllSelectedRunIdsOrdered()); // dataset order long s = System.currentTimeMillis(); writeResults(writer, grpComparison, myForm.getSelectedBooleanFilters(), myForm); writer.close(); long e = System.currentTimeMillis(); log.info("Results written in: "+TimeUtils.timeElapsedMinutes(s,e)+" minutes"); } long e = System.currentTimeMillis(); log.info("DownloadComparisonResults results in: "+TimeUtils.timeElapsedMinutes(startTime,e)+" minutes"); return null; } private void writeResults(PrintWriter writer, ProteinComparisonDataset comparison, DatasetBooleanFilters filters, ProteinSetComparisonForm form) { writer.write("Total protein count: "+comparison.getTotalProteinCount()+"\n"); writer.write("Filtered protein count: "+comparison.getFilteredProteinCount()+"\n"); writer.write("\n\n"); // Boolean Filters writeBooleanFilters(writer, filters); // Protein filters writeProteinFilters(writer, form.getProteinPropertiesFilters()); // Datasets writer.write("Datasets: \n"); int idx = 0; for(Dataset dataset: comparison.getDatasets()) { String dsString = getDatasetStringWithSource(dataset); writer.write(dsString+ ": Proteins "+comparison.getProteinCount(idx++)+ "; SpectrumCount(max.) 
"+dataset.getSpectrumCount()+"("+dataset.getMaxProteinSpectrumCount()+")\n"); } writer.write("\n\n"); // Common protein groups writer.write("Common Proteins:\n"); writer.write("\t"); for(Dataset dataset: comparison.getDatasets()) { String dsString = getDatasetString(dataset); writer.write(dsString+"\t"); } writer.write("\n"); for(int i = 0; i < comparison.getDatasetCount(); i++) { Dataset dataset = comparison.getDatasets().get(i); String dsString = getDatasetString(dataset); writer.write(dsString+"\t"); for(int j = 0; j < comparison.getDatasetCount(); j++) writer.write(comparison.getCommonProteinCount(i, j)+"\t"); writer.write("\n"); } writer.write("\n\n"); // legend writeLegend(writer); // write the header writeHeader(writer, comparison.getDatasets(), comparison.getDisplayColumns(), form.isIncludeDescriptions(), false, form.isIncludePeptides()); // Remove any sorting criteria so that all fields get initialized properly. comparison.setSortBy(null); comparison.setSortOrder(null); // write information for each protein for(ComparisonProtein protein: comparison.getProteins()) { comparison.initializeProteinInfo(protein); writeComparisonProtein(writer, comparison.getDatasets(), comparison.getDisplayColumns(), form.isIncludeDescriptions(), protein, false, form.isIncludePeptides()); } writer.write("\n\n"); } private String getDatasetString(Dataset dataset) { String dsString = "ID_"+dataset.getDatasetId(); String dsName = dataset.getDatasetName(); if(dsName != null && dsName.length() > 0) { dsString += "("+dataset.getDatasetName()+")"; } return dsString; } private String getDatasetStringWithSource(Dataset dataset) { String dsString = dataset.getSourceString()+" ID "+dataset.getDatasetId(); String dsName = dataset.getDatasetName(); if(dsName != null && dsName.length() > 0) { dsString += " ("+dataset.getDatasetName()+")"; } return dsString; } private void writeHeader(PrintWriter writer, List<? 
extends Dataset> datasets, DisplayColumns displayColumns, boolean printDescription, boolean writeProteinGroupsHeader, boolean printPeptides) { // print the header writer.write("ProteinID\t"); if(writeProteinGroupsHeader) writer.write("ProteinGroupID\t"); if(displayColumns.isShowPresent()) { for(Dataset dataset: datasets) { writer.write(dataset.getSourceString()+"("+getDatasetString(dataset)+")\t"); } } if(displayColumns.isShowFastaId()) writer.write("Fasta ID\t"); if(displayColumns.isShowCommonName()) writer.write("CommonName\t"); if(displayColumns.isShowMolWt()) writer.write("Mol.Wt.\t"); if(displayColumns.isShowPi()) writer.write("pI\t"); if(displayColumns.isShowTotalSeq()) writer.write("NumSeq\t"); // sequence, ion, unique ion, spectrum count column headers. for(Dataset dataset: datasets) { if(displayColumns.isShowNumSeq()) writer.write("#Seq("+getDatasetString(dataset)+")\t"); if(displayColumns.isShowNumIons()) writer.write("#Ion("+getDatasetString(dataset)+")\t"); if(displayColumns.isShowNumUniqIons()) writer.write("#U.Ion("+getDatasetString(dataset)+")\t"); if(displayColumns.isShowSpectrumCount()) { writer.write("SC("+getDatasetString(dataset)+")\t"); writer.write("SC_Norm("+getDatasetString(dataset)+")\t"); } if(displayColumns.isShowNsaf()) // NSAF column headers. 
writer.write("NSAF("+getDatasetString(dataset)+")\t"); } if(printPeptides) writer.write("Peptides\t"); if(printDescription) writer.write("Description"); writer.write("\n"); } private void writeLegend(PrintWriter writer) { // legend // * Present and Parsimonious // = Present and NOT parsimonious // g group protein // - NOT present writer.write("\n\n"); writer.write("* Protein present and parsimonious\n"); writer.write("= Protein present and NOT parsimonious\n"); writer.write("- Protein NOT present\n"); writer.write("g Group protein\n"); writer.write("NOTE: for ProteinProphet data-sets parsimonious = NOT subsumed."); writer.write("\n\n"); } private void writeProteinFilters(PrintWriter writer, ProteinPropertiesFilters filters) { // Accession string filter if(filters.hasAccessionFilter()) { writer.write("Filtering for FASTA ID(s): "+filters.getAccessionLike()+"\n\n"); } // Description string filter if(filters.hasDescriptionLikeFilter()) { writer.write("Filtering for description term(s) LIKE: "+filters.getDescriptionLike()+"\n\n"); } // Description string filter if(filters.hasDescriptionNotLikeFilter()) { writer.write("Filtering for description term(s) NOT LIKE: "+filters.getDescriptionNotLike()+"\n\n"); } // Was "Search All" checked for description search if(filters.hasDescriptionLikeFilter() || filters.hasDescriptionNotLikeFilter()) { if(filters.isSearchAllDescriptions()) { writer.write("Descriptions in Swiss-Prot and NCBI-NR were searched\n\n"); } } // Molecular wt. filter if(filters.hasMolecularWtFilter()) { writer.write("Molecular Wt. 
Min: "+filters.getMinMolecularWt()); if(filters.getMaxMolecularWt() < Double.MAX_VALUE) writer.write("\tMax: "+filters.getMaxMolecularWt()); writer.write("\n\n"); } // pI filter if(filters.hasPiFilter()) { writer.write("pI Min: "+filters.getMinPi()); if(filters.getMaxPi() < Double.MAX_VALUE) writer.write("\tMax: "+filters.getMaxPi()); writer.write("\n\n"); } // Min and max peptides if(filters.hasPeptideCountFilter()) { writer.write("# Peptides Min: "+filters.getMinPeptideCount()); if(filters.getMaxPeptideCount()< Integer.MAX_VALUE) writer.write("\tMax: "+filters.getMaxPeptideCount()); writer.write("\n"); } if(filters.hasUniquePeptideCountFilter()) { writer.write("# Uniqie Peptides Min: "+filters.getMinUniqPeptideCount()); if(filters.getMaxUniqPeptideCount() < Integer.MAX_VALUE) writer.write("\tMax: "+filters.getMaxUniqPeptideCount()); writer.write("\n"); } //ProteinProphetFilters if(filters.getHasProteinProphetFilters()) { writer.write("\nProteinProphet FILTERS\n"); writer.write("ProteinProphetError: "+filters.getProteinProphetError()+"\n"); writer.write("Use ProteinProphet group probability: "+filters.isUseGroupProbability()+"\n"); writer.write("Min. peptide probability: "+filters.getPeptideProbability()+"\n"); writer.write("Apply peptide probability to: "); if(filters.isApplyToPeptide()) writer.write("# Peptides"); if(filters.isApplyToUniqPeptide()) writer.write(" # Uniq. 
Peptides"); writer.write("\n\n"); } } private void writeBooleanFilters(PrintWriter writer, DatasetBooleanFilters filters) { boolean filtersFound = false; writer.write("Boolean Filters: \n"); if(filters.getAndFilters().size() > 0) { filtersFound = true; writer.write("AND:\n"); for(Dataset ds: filters.getAndFilters()) { writer.write("\t"+ds.getDatasetId()+" "+ds.getDatasetComments()+"\n"); } } if(filters.getOrFilters().size() > 0) { filtersFound = true; writer.write("OR:\n"); for(Dataset ds: filters.getOrFilters()) { writer.write("\t"+ds.getDatasetId()+" "+ds.getDatasetComments()+"\n"); } } if(filters.getNotFilters().size() > 0) { filtersFound = true; writer.write("NOT:\n"); for(Dataset ds: filters.getNotFilters()) { writer.write("\t"+ds.getDatasetId()+" "+ds.getDatasetComments()+"\n"); } } if(filters.getXorFilters().size() > 0) { filtersFound = true; writer.write("XOR:\n"); for(Dataset ds: filters.getXorFilters()) { writer.write("\t"+ds.getDatasetId()+" "+ds.getDatasetComments()+"\n"); } } if(filtersFound) writer.write("\n\n"); else writer.write("No filters found\n\n"); } private void writeResults(PrintWriter writer, ProteinGroupComparisonDataset comparison, DatasetBooleanFilters filters, ProteinSetComparisonForm form) { writer.write("Total Protein Groups (Total Proteins): "+comparison.getTotalProteinGroupCount()+" ("+comparison.getTotalProteinCount()+")\n"); writer.write("\n\n"); // Boolean Filters writeBooleanFilters(writer, filters); // Protein filters writeProteinFilters(writer, form.getProteinPropertiesFilters()); // Datasets writer.write("Datasets: \n"); int idx = 0; for(Dataset dataset: comparison.getDatasets()) { String dsString = getDatasetStringWithSource(dataset); writer.write(dsString+ ": Proteins Groups (# Proteins) "+comparison.getProteinGroupCount(idx)+" ("+comparison.getProteinCount(idx++)+") "+ "; SpectrumCount(max.) 
"+dataset.getSpectrumCount()+"("+dataset.getMaxProteinSpectrumCount()+")\n"); } writer.write("\n\n"); // Common protein groups writer.write("Common Proteins:\n"); writer.write("\t"); for(Dataset dataset: comparison.getDatasets()) { String dsString = getDatasetString(dataset); writer.write(dsString+"\t"); } writer.write("\n"); for(int i = 0; i < comparison.getDatasetCount(); i++) { Dataset dataset = comparison.getDatasets().get(i); String dsString = getDatasetString(dataset); writer.write(dsString+"\t"); for(int j = 0; j < comparison.getDatasetCount(); j++) { writer.write(comparison.getCommonProteinGroupCount(i, j)+" ("+comparison.getCommonProteinGroupsPerc(i, j)+"%)\t"); } writer.write("\n"); } writer.write("\n\n"); // legend writeLegend(writer); // print the proteins in each protein group writeHeader(writer, comparison.getDatasets(), comparison.getDisplayColumns(), form.isIncludeDescriptions(), true, form.isIncludePeptides()); // Remove any sorting criteria so that all fields get initialized properly. comparison.setSortBy(null); comparison.setSortOrder(null); if(!form.isCollapseProteinGroups()) writeSplitProteinGroup(writer, comparison, form.isIncludeDescriptions(), form.isIncludePeptides()); else writeCollapsedProteinGroup(writer, comparison, form.isIncludeDescriptions(), form.isIncludePeptides()); writer.write("\n\n"); } private void writeSplitProteinGroup(PrintWriter writer, ProteinGroupComparisonDataset comparison, boolean printDescription, boolean printPeptides) { for(ComparisonProteinGroup grpProtein: comparison.getProteinsGroups()) { for(ComparisonProtein protein: grpProtein.getProteins()) { comparison.initializeProteinInfo(protein); writeComparisonProtein(writer, comparison.getDatasets(), comparison.getDisplayColumns(), printDescription, protein, true, printPeptides); } } } private void writeComparisonProtein(PrintWriter writer, List<? 
extends Dataset> datasets, DisplayColumns displayColumns, boolean printDescription, ComparisonProtein protein, boolean printGroupId, boolean printPeptides) { writer.write(protein.getNrseqId()+"\t"); if(printGroupId) writer.write(protein.getGroupId()+"\t"); if(displayColumns.isShowPresent()) { for(Dataset dataset: datasets) { DatasetProteinInformation dpi = protein.getDatasetProteinInformation(dataset); if(dpi == null || !dpi.isPresent()) { writer.write("-"); } else { if(dpi.isParsimonious()) { writer.write("*"); } else { writer.write("="); } if(dpi.isGrouped()) writer.write("g"); } writer.write("\t"); } } if(displayColumns.isShowFastaId()) { try { writer.write(protein.getAccessionsCommaSeparated()+"\t"); } catch (SQLException e) { log.error("Error getting accession", e); writer.write("ERROR\t"); } } if(displayColumns.isShowCommonName()) { try { writer.write(protein.getCommonNamesCommaSeparated()+"\t"); } catch (SQLException e) { log.error("Error getting common name", e); writer.write("ERROR\t"); } } if(displayColumns.isShowMolWt()) writer.write(protein.getMolecularWeight()+"\t"); if(displayColumns.isShowPi()) writer.write(protein.getPi()+"\t"); if(displayColumns.isShowTotalSeq()) writer.write(protein.getTotalPeptideSeqCount()+"\t"); // writer.write(protein.getMaxPeptideIonCount()+"\t"); // spectrum count information for(Dataset dataset: datasets) { DatasetProteinInformation dpi = protein.getDatasetProteinInformation(dataset); if(dpi == null || !dpi.isPresent()) { if(displayColumns.isShowNumSeq()) writer.write("0\t"); // # seq. 
if(displayColumns.isShowNumIons()) writer.write("0\t"); // #ions if(displayColumns.isShowNumUniqIons()) writer.write("0\t"); // # uniq ions if(displayColumns.isShowSpectrumCount()) { writer.write("0\t"); // SC writer.write("0\t"); // SC_Norm } } else { if(displayColumns.isShowNumSeq()) writer.write(dpi.getSequenceCount()+"\t"); if(displayColumns.isShowNumIons()) writer.write(dpi.getIonCount()+"\t"); if(displayColumns.isShowNumUniqIons()) writer.write(dpi.getUniqueIonCount()+"\t"); if(displayColumns.isShowSpectrumCount()) { writer.write(dpi.getSpectrumCount()+"\t"); writer.write(dpi.getNormalizedSpectrumCountRounded()+"\t"); } } // NSAF information if(displayColumns.isShowNsaf()) { if(!dataset.getSource().isIdPicker()) { writer.write("-1\t"); continue; // NSAF information is available only for IDPicker results. } else { if(dpi == null || !dpi.isPresent()) { writer.write("0\t"); } else { writer.write(dpi.getNsafFormatted()+"\t"); } } } } if(printPeptides) { Set<String> peptides = getPeptides(protein.getNrseqId(), datasets); if(peptides == null) writer.write("ERROR_GETTTING_PEPTIDES"); else { writer.write(StringUtils.makeCommaSeparated(peptides)); } writer.write("\t"); } if(printDescription) { List<ProteinReference> descRefs = protein.getProteinListing().getDescriptionReferences(); if(descRefs != null && descRefs.size() > 0) { // TODO Which descriptions do we want to print?? 
writer.write(descRefs.get(0).getDescription()); } } writer.write("\n"); } private void writeCollapsedProteinGroup(PrintWriter writer, ProteinGroupComparisonDataset comparison, boolean includeDescription, boolean includePeptides) { DisplayColumns displayColumns = comparison.getDisplayColumns(); for(ComparisonProteinGroup grpProtein: comparison.getProteinsGroups()) { String nrseqIdString = ""; String nameString = ""; String commonNameString = ""; String molWtString = ""; String piString = ""; String nsafStrings[] = new String[comparison.getDatasetCount()]; for(int i = 0; i < comparison.getDatasetCount(); i++) nsafStrings[i] = ""; String descriptionString = ""; for(ComparisonProtein protein: grpProtein.getProteins()) { comparison.initializeProteinInfo(protein); nrseqIdString += ","+protein.getNrseqId(); try { nameString += ","+protein.getAccessionsCommaSeparated(); } catch (SQLException e1) { log.error("Error getting accession", e1); nameString += ",ERROR"; } try { String cn = protein.getCommonNamesCommaSeparated(); if(cn != null && cn.trim().length() > 0) commonNameString += ","+cn; } catch (SQLException e1) { log.error("Error getting common name", e1); commonNameString += ",ERROR"; } molWtString += ","+protein.getMolecularWeight(); piString += ","+protein.getPi(); if(includeDescription) { List<ProteinReference> descRefs = protein.getProteinListing().getDescriptionReferences(); if(descRefs != null && descRefs.size() > 0) { // TODO Which descriptions do we want to print?? 
descriptionString += ","+descRefs.get(0).getDescription(); } } // NSAF information int dsIdx = 0; for(Dataset dataset: comparison.getDatasets()) { if(!dataset.getSource().isIdPicker()) { nsafStrings[dsIdx] = ",-1"; // NSAF information is available only for IDPicker results } else { DatasetProteinInformation dpi = protein.getDatasetProteinInformation(dataset); if(dpi == null || !dpi.isPresent()) { nsafStrings[dsIdx] += ",0"; } else { nsafStrings[dsIdx] += ","+dpi.getNsafFormatted(); } } dsIdx++; } } writer.write(nrseqIdString.substring(1)+"\t"); writer.write(grpProtein.getGroupId()+"\t"); ComparisonProtein oneProtein = grpProtein.getProteins().get(0); // The value of isParsimonious will be the same for all proteins in a group if(displayColumns.isShowPresent()) { for(Dataset dataset: comparison.getDatasets()) { DatasetProteinInformation dpi = oneProtein.getDatasetProteinInformation(dataset); if(dpi == null || !dpi.isPresent()) { writer.write("-"); } else { if(dpi.isParsimonious()) { writer.write("*"); } else { writer.write("="); } if(dpi.isGrouped()) writer.write("g"); } writer.write("\t"); } } if(displayColumns.isShowFastaId()) writer.write(nameString.substring(1)+"\t"); if(displayColumns.isShowCommonName()) { if(commonNameString.length() > 0) writer.write(commonNameString.substring(1)+"\t"); else writer.write("\t"); } if(displayColumns.isShowMolWt()) writer.write(molWtString.substring(1)+"\t"); if(displayColumns.isShowPi()) writer.write(piString.substring(1)+"\t"); if(displayColumns.isShowTotalSeq()) writer.write(grpProtein.getTotalPeptideSeqCount()+"\t"); // The spectrum count information will be the same for all proteins in a group int dsIndex = 0; for(Dataset dataset: comparison.getDatasets()) { DatasetProteinInformation dpi = oneProtein.getDatasetProteinInformation(dataset); if(dpi == null || !dpi.isPresent()) { if(displayColumns.isShowNumSeq()) writer.write("0\t"); // # seq. 
if(displayColumns.isShowNumIons()) writer.write("0\t"); // #ions if(displayColumns.isShowNumUniqIons()) writer.write("0\t"); // # uniq ions if(displayColumns.isShowSpectrumCount()) { writer.write("0\t"); // SC writer.write("0\t"); // SC_Norm } } else { if(displayColumns.isShowNumSeq()) writer.write(dpi.getSequenceCount()+"\t"); if(displayColumns.isShowNumIons()) writer.write(dpi.getIonCount()+"\t"); if(displayColumns.isShowNumUniqIons()) writer.write(dpi.getUniqueIonCount()+"\t"); if(displayColumns.isShowSpectrumCount()) { writer.write(dpi.getSpectrumCount()+"\t"); writer.write(dpi.getNormalizedSpectrumCountRounded()+"\t"); } } if(displayColumns.isShowNsaf()) { String nsafStr = nsafStrings[dsIndex++]; writer.write(nsafStr.substring(1)+"\t"); } } // print peptides, if required // peptide information will be the same for all proteins in a group if(includePeptides) { Set<String> peptides = getPeptides(oneProtein.getNrseqId(), comparison.getDatasets()); if(peptides == null) writer.write("ERROR_GETTTING_PEPTIDES"); else { writer.write(StringUtils.makeCommaSeparated(peptides)); } writer.write("\t"); } // print description, if required if(includeDescription) writer.write(descriptionString.substring(1)+"\n"); else writer.write("\n"); } } private Set<String> getPeptides(int nrseqProteinId, List<? 
extends Dataset> datasets) { Set<String> allPeptides = new HashSet<String>(); ProteinferDAOFactory daoFactory = ProteinferDAOFactory.instance(); ProteinferProteinDAO protDao = daoFactory.getProteinferProteinDao(); ProteinferPeptideDAO peptDao = daoFactory.getProteinferPeptideDao(); ArrayList<Integer> nrseqIds = new ArrayList<Integer>(1); nrseqIds.add(nrseqProteinId); MsResultLoader resLoader = MsResultLoader.getInstance(); PeptideDefinition peptDef = new PeptideDefinition(); peptDef.setUseCharge(true); peptDef.setUseMods(true); for(Dataset dataset: datasets) { ProteinferRun run = daoFactory.getProteinferRunDao().loadProteinferRun(dataset.getDatasetId()); Program inputGenerator = run.getInputGenerator(); if(dataset.getSource() != DatasetSource.DTA_SELECT) { List<Integer> piProteinIds = protDao.getProteinIdsForNrseqIds(dataset.getDatasetId(), nrseqIds); for(int piProteinId: piProteinIds) { List<ProteinferPeptide> peptides = peptDao.loadPeptidesForProteinferProtein(piProteinId); for(ProteinferPeptide pept: peptides) { for(ProteinferIon ion: pept.getIonList()) { ProteinferSpectrumMatch psm = ion.getBestSpectrumMatch(); int resultId = psm.getResultId(); MsSearchResult result = resLoader.getResult(resultId, inputGenerator); try { allPeptides.add(result.getResultPeptide().getModifiedPeptide()); } catch (ModifiedSequenceBuilderException e) { log.error("Error building modified sequence for ion: "+ion.getId()); return null; } } } } } } return allPeptides; } }
package org.knowm.xchange.globitex;

import java.math.BigDecimal;
import java.util.*;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.account.AccountInfo;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.Fee;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.meta.CurrencyMetaData;
import org.knowm.xchange.dto.meta.CurrencyPairMetaData;
import org.knowm.xchange.dto.meta.ExchangeMetaData;
import org.knowm.xchange.dto.meta.FeeTier;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.OpenOrders;
import org.knowm.xchange.dto.trade.UserTrade;
import org.knowm.xchange.dto.trade.UserTrades;
import org.knowm.xchange.globitex.dto.account.GlobitexAccounts;
import org.knowm.xchange.globitex.dto.marketdata.*;
import org.knowm.xchange.globitex.dto.trade.GlobitexActiveOrders;
import org.knowm.xchange.globitex.dto.trade.GlobitexUserTrade;
import org.knowm.xchange.globitex.dto.trade.GlobitexUserTrades;
import org.knowm.xchange.utils.jackson.CurrencyPairDeserializer;

/**
 * Adapters between Globitex REST API DTOs and the generic XChange DTOs.
 *
 * <p>Globitex identifies Bitcoin with the ISO-style code "XBT" while XChange uses "BTC".
 * The private {@link #convertBTCtoXBT(String)} / {@link #convertXBTtoBTC(String)} helpers
 * translate in both directions; every adapter that parses a raw Globitex symbol must run it
 * through {@code convertXBTtoBTC} first so XBT-quoted instruments map to XChange BTC pairs.
 */
public class GlobitexAdapters {

  /** Turns an XChange currency pair (e.g. {@code BTC/EUR}) into a Globitex symbol (e.g. {@code XBTEUR}). */
  public static String adaptCurrencyPairToGlobitexSymbol(CurrencyPair currencyPair) {
    String symbol = currencyPair.toString().replace("/", "");
    return convertBTCtoXBT(symbol);
  }

  /** Builds a {@link CurrencyPair} from a Globitex symbol DTO, mapping XBT back to BTC on both legs. */
  private static CurrencyPair adaptGlobitexSymbolToCurrencyPair(GlobitexSymbol globitexSymbol) {
    return new CurrencyPair(
        convertXBTtoBTC(globitexSymbol.getCommodity()),
        convertXBTtoBTC(globitexSymbol.getCurrency()));
  }

  private static String convertBTCtoXBT(String symbol) {
    return symbol.contains("BTC") ? symbol.replace("BTC", "XBT") : symbol;
  }

  private static String convertXBTtoBTC(String symbol) {
    return symbol.contains("XBT") ? symbol.replace("XBT", "BTC") : symbol;
  }

  /** Maps an XChange order type to the side string ("sell"/"buy") expected by the Globitex API. */
  public static String adaptOrderType(Order.OrderType orderType) {
    return orderType.equals(Order.OrderType.ASK) ? "sell" : "buy";
  }

  /** Adapts the public recent-trades response, sorted by timestamp. */
  public static Trades adaptToTrades(GlobitexTrades globitexTrades, CurrencyPair currencyPair) {
    List<Trade> trades = new ArrayList<>();
    for (GlobitexTrade globitexTrade : globitexTrades.getRecentTrades()) {
      trades.add(adaptToTrade(globitexTrade, currencyPair));
    }
    return new Trades(trades, Trades.TradeSortType.SortByTimestamp);
  }

  private static Trade adaptToTrade(GlobitexTrade globitexTrade, CurrencyPair currencyPair) {
    // Globitex reports the taker side; "sell" maps to ASK, anything else to BID.
    Order.OrderType type =
        globitexTrade.getSide().equals("sell") ? Order.OrderType.ASK : Order.OrderType.BID;
    return new Trade(
        type,
        globitexTrade.getAmount(),
        currencyPair,
        globitexTrade.getPrice(),
        new Date(globitexTrade.getTimestamp()),
        String.valueOf(globitexTrade.getTid()));
  }

  /** Adapts a single ticker, converting the raw symbol (possibly XBT-based) to an XChange pair. */
  public static Ticker adaptToTicker(GlobitexTicker globitexTicker) {
    String symbol = convertXBTtoBTC(globitexTicker.getSymbol());
    return new Ticker.Builder()
        .ask(globitexTicker.getAsk())
        .bid(globitexTicker.getBid())
        .high(globitexTicker.getHigh())
        .low(globitexTicker.getLow())
        .open(globitexTicker.getOpen())
        .quoteVolume(globitexTicker.getVolumeQuote())
        .volume(globitexTicker.getVolume())
        .currencyPair(CurrencyPairDeserializer.getCurrencyPairFromString(symbol))
        .build();
  }

  public static List<Ticker> adaptToListTicker(GlobitexTickers globitexTickers) {
    List<Ticker> tickers = new ArrayList<>();
    for (GlobitexTicker globitexTicker : globitexTickers.getGlobitexTickerList()) {
      tickers.add(adaptToTicker(globitexTicker));
    }
    return tickers;
  }

  /** Adapts the order book; no exchange timestamp is provided, so {@code null} is passed through. */
  public static OrderBook adaptToOrderBook(
      GlobitexOrderBook globitexOrderBook, CurrencyPair currencyPair) {
    List<LimitOrder> asks = new ArrayList<>();
    List<LimitOrder> bids = new ArrayList<>();
    for (GlobitexOrder globitexOrder : globitexOrderBook.getAsks()) {
      asks.add(adaptToLimitOrder(globitexOrder, Order.OrderType.ASK, currencyPair));
    }
    for (GlobitexOrder globitexOrder : globitexOrderBook.getBids()) {
      bids.add(adaptToLimitOrder(globitexOrder, Order.OrderType.BID, currencyPair));
    }
    return new OrderBook(null, asks, bids, true);
  }

  private static LimitOrder adaptToLimitOrder(
      GlobitexOrder globitexOrder, Order.OrderType orderType, CurrencyPair currencyPair) {
    // Book levels carry no order id or timestamp, hence the two nulls.
    return new LimitOrder(
        orderType, globitexOrder.getVolume(), currencyPair, null, null, globitexOrder.getPrice());
  }

  /** Adapts the account listing; only the first account is exposed, matching the wallet adapter. */
  public static AccountInfo adaptToAccountInfo(GlobitexAccounts globitexAccounts) {
    return new AccountInfo(
        globitexAccounts.getAccounts().get(0).getAccount(), adaptToWallet(globitexAccounts));
  }

  private static Wallet adaptToWallet(GlobitexAccounts globitexAccounts) {
    List<Balance> balances = new ArrayList<>();
    for (GlobitexBalance globitexBalance : globitexAccounts.getAccounts().get(0).getBalance()) {
      // Total = available + reserved; Globitex reports the two parts separately.
      Balance balance =
          new Balance(
              new Currency(convertXBTtoBTC(globitexBalance.getCurrency())),
              globitexBalance.getAvailable().add(globitexBalance.getReserved()),
              globitexBalance.getAvailable(),
              globitexBalance.getReserved());
      balances.add(balance);
    }
    return new Wallet(
        globitexAccounts.getAccounts().get(0).getAccount(),
        globitexAccounts.getAccounts().get(0).getAccount(),
        balances);
  }

  public static UserTrades adaptToUserTrades(GlobitexUserTrades globitexUserTrades) {
    List<UserTrade> userTrades = new ArrayList<>();
    for (GlobitexUserTrade globitexUserTrade : globitexUserTrades.getUserTrades()) {
      userTrades.add(adaptToUserTrade(globitexUserTrade));
    }
    return new UserTrades(userTrades, Trades.TradeSortType.SortByTimestamp);
  }

  private static UserTrade adaptToUserTrade(GlobitexUserTrade globitexUserTrade) {
    Order.OrderType type =
        globitexUserTrade.getSide().equals("sell") ? Order.OrderType.ASK : Order.OrderType.BID;
    return new UserTrade(
        type,
        globitexUserTrade.getQuantity(),
        CurrencyPairDeserializer.getCurrencyPairFromString(
            convertXBTtoBTC(globitexUserTrade.getSymbol())),
        globitexUserTrade.getPrice(),
        new Date(globitexUserTrade.getTimestamp()),
        String.valueOf(globitexUserTrade.getOriginalOrderId()),
        globitexUserTrade.getClientOrderId(),
        globitexUserTrade.getFee(),
        new Currency(convertXBTtoBTC(globitexUserTrade.getFeeCurrency())));
  }

  public static OpenOrders adaptToOpenOrders(GlobitexActiveOrders globitexActiveOrders) {
    List<LimitOrder> openOrders = new ArrayList<>();
    for (GlobitexActiveOrder order : globitexActiveOrders.getOrders()) {
      Order.OrderType type =
          order.getSide().equals("sell") ? Order.OrderType.ASK : Order.OrderType.BID;
      openOrders.add(
          new LimitOrder(
              type,
              order.getOrderQuantity(),
              // FIX: convert XBT -> BTC before parsing, consistent with adaptToTicker and
              // adaptToUserTrade; previously the raw symbol was parsed and XBT pairs came out wrong.
              CurrencyPairDeserializer.getCurrencyPairFromString(convertXBTtoBTC(order.getSymbol())),
              order.getClientOrderId(),
              new Date(order.getLastTimestamp()),
              order.getOrderPrice(),
              order.getAvgPrice(),
              order.getCumQuantity(),
              null,
              Order.OrderStatus.valueOf(order.getOrderStatus().toUpperCase())));
    }
    return new OpenOrders(openOrders);
  }

  /** Builds exchange metadata from the symbols listing; a flat default fee tier is assumed. */
  public static ExchangeMetaData adaptToExchangeMetaData(GlobitexSymbols globitexSymbols) {
    Map<CurrencyPair, CurrencyPairMetaData> currencyPairs = new HashMap<>();
    Map<Currency, CurrencyMetaData> currencies = new HashMap<>();
    List<FeeTier> resultFeeTiers = new ArrayList<>();
    // Default maker/taker fees (0.1% / 0.2%); the symbols endpoint does not report fees.
    resultFeeTiers.add(
        new FeeTier(BigDecimal.ONE, new Fee(BigDecimal.valueOf(0.001), BigDecimal.valueOf(0.002))));
    for (GlobitexSymbol globitexSymbol : globitexSymbols.getSymbols()) {
      currencyPairs.put(
          adaptGlobitexSymbolToCurrencyPair(globitexSymbol),
          new CurrencyPairMetaData(
              BigDecimal.valueOf(0.002),
              globitexSymbol.getSizeMin(),
              null,
              globitexSymbol.getPriceIncrement().scale(),
              resultFeeTiers.toArray(new FeeTier[0])));
      // Quote currency precision comes from the price increment, base from the size increment.
      currencies.put(
          new Currency(convertXBTtoBTC(globitexSymbol.getCurrency())),
          new CurrencyMetaData(globitexSymbol.getPriceIncrement().scale(), null));
      currencies.put(
          new Currency(convertXBTtoBTC(globitexSymbol.getCommodity())),
          new CurrencyMetaData(globitexSymbol.getSizeIncrement().scale(), null));
    }
    return new ExchangeMetaData(currencyPairs, currencies, null, null, null);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.cql3;

import java.nio.ByteBuffer;

import org.apache.cassandra.db.ColumnFamily;
import org.apache.cassandra.db.marshal.CollectionType;
import org.apache.cassandra.db.marshal.CounterColumnType;
import org.apache.cassandra.db.marshal.ListType;
import org.apache.cassandra.exceptions.InvalidRequestException;

/**
 * An UPDATE or DELETE operation.
 *
 * For UPDATE this includes:
 *   - setting a constant
 *   - counter operations
 *   - collections operations
 * and for DELETE:
 *   - deleting a column
 *   - deleting an element of collection column
 *
 * Fine grained operations are obtained from their raw counterpart (Operation.Raw, which
 * corresponds to a parsed, non-checked operation) by providing the receiver for the operation.
 */
public abstract class Operation
{
    // Name of the column the operation applies to
    public final ColumnIdentifier columnName;

    // Term involved in the operation. In theory this should not be here since some operations
    // may require none or more than one term, but most need exactly one, so it simplifies things a bit.
    protected final Term t;

    protected Operation(ColumnIdentifier columnName, Term t)
    {
        this.columnName = columnName;
        this.t = t;
    }

    /**
     * @return whether the operation requires a read of the previous value to be executed
     * (only lists setterByIdx, discard and discardByIdx require that).
     */
    public boolean requiresRead()
    {
        return false;
    }

    /**
     * Collects the column specification for the bind variables of this operation.
     *
     * @param boundNames the list of column specification where to collect the
     * bind variables of this term in.
     */
    public void collectMarkerSpecification(VariableSpecifications boundNames)
    {
        if (t != null)
            t.collectMarkerSpecification(boundNames);
    }

    /**
     * Execute the operation.
     *
     * @param rowKey row key for the update.
     * @param cf the column family to which to add the updates generated by this operation.
     * @param namePrefix the prefix that identifies the CQL3 row this operation applies to (callers should not reuse
     * the ColumnNameBuilder they pass here).
     * @param params parameters of the update.
     */
    public abstract void execute(ByteBuffer rowKey, ColumnFamily cf, ColumnNameBuilder namePrefix, UpdateParameters params) throws InvalidRequestException;

    /**
     * A parsed raw UPDATE operation.
     *
     * This can be one of:
     *   - Setting a value: c = v
     *   - Setting an element of a collection: c[x] = v
     *   - An addition/subtraction to a variable: c = c +/- v (where v can be a collection literal)
     *   - A prepend operation: c = v + c
     */
    public interface RawUpdate
    {
        /**
         * This method validates the operation (i.e. validates it is well typed)
         * based on the specification of the receiver of the operation.
         *
         * It returns an Operation which can be thought of as a post-preparation well-typed
         * Operation.
         *
         * @param receiver the "column" this operation applies to. Note that
         * contrary to the method of same name in Term.Raw, the receiver should always
         * be a true column.
         * @return the prepared update operation.
         */
        public Operation prepare(CFDefinition.Name receiver) throws InvalidRequestException;

        /**
         * @return whether this operation can be applied alongside the {@code
         * other} update (in the same UPDATE statement for the same column).
         */
        public boolean isCompatibleWith(RawUpdate other);
    }

    /**
     * A parsed raw DELETE operation.
     *
     * This can be one of:
     *   - Deleting a column
     *   - Deleting an element of a collection
     */
    public interface RawDeletion
    {
        /**
         * The name of the column affected by this delete operation.
         */
        public ColumnIdentifier affectedColumn();

        /**
         * This method validates the operation (i.e. validates it is well typed)
         * based on the specification of the column affected by the operation (i.e the
         * one returned by affectedColumn()).
         *
         * It returns an Operation which can be thought of as a post-preparation well-typed
         * Operation.
         *
         * @param receiver the "column" this operation applies to.
         * @return the prepared delete operation.
         */
        public Operation prepare(ColumnSpecification receiver) throws InvalidRequestException;
    }

    public static class SetValue implements RawUpdate
    {
        private final Term.Raw value;

        public SetValue(Term.Raw value)
        {
            this.value = value;
        }

        public Operation prepare(CFDefinition.Name receiver) throws InvalidRequestException
        {
            Term v = value.prepare(receiver);

            if (receiver.type instanceof CounterColumnType)
                throw new InvalidRequestException(String.format("Cannot set the value of counter column %s (counters can only be incremented/decremented, not set)", receiver));

            if (!(receiver.type instanceof CollectionType))
                return new Constants.Setter(receiver.kind == CFDefinition.Name.Kind.VALUE_ALIAS ? null : receiver.name, v);

            switch (((CollectionType)receiver.type).kind)
            {
                case LIST:
                    return new Lists.Setter(receiver.name, v);
                case SET:
                    return new Sets.Setter(receiver.name, v);
                case MAP:
                    return new Maps.Setter(receiver.name, v);
            }
            throw new AssertionError();
        }

        protected String toString(ColumnSpecification column)
        {
            return String.format("%s = %s", column, value);
        }

        public boolean isCompatibleWith(RawUpdate other)
        {
            // We don't allow setting multiple times the same column, because 1)
            // it's stupid and 2) the result would seem random to the user.
            return false;
        }
    }

    public static class SetElement implements RawUpdate
    {
        private final Term.Raw selector;
        private final Term.Raw value;

        public SetElement(Term.Raw selector, Term.Raw value)
        {
            this.selector = selector;
            this.value = value;
        }

        public Operation prepare(CFDefinition.Name receiver) throws InvalidRequestException
        {
            if (!(receiver.type instanceof CollectionType))
                throw new InvalidRequestException(String.format("Invalid operation (%s) for non collection column %s", toString(receiver), receiver));

            switch (((CollectionType)receiver.type).kind)
            {
                case LIST:
                    Term idx = selector.prepare(Lists.indexSpecOf(receiver));
                    Term lval = value.prepare(Lists.valueSpecOf(receiver));
                    return new Lists.SetterByIndex(receiver.name, idx, lval);
                case SET:
                    throw new InvalidRequestException(String.format("Invalid operation (%s) for set column %s", toString(receiver), receiver));
                case MAP:
                    Term key = selector.prepare(Maps.keySpecOf(receiver));
                    Term mval = value.prepare(Maps.valueSpecOf(receiver));
                    return new Maps.SetterByKey(receiver.name, key, mval);
            }
            throw new AssertionError();
        }

        protected String toString(ColumnSpecification column)
        {
            return String.format("%s[%s] = %s", column, selector, value);
        }

        public boolean isCompatibleWith(RawUpdate other)
        {
            // TODO: we could check that the other operation is not setting the same element
            // too (but since the index/key set may be a bind variable we can't always do it at this point)
            return !(other instanceof SetValue);
        }
    }

    public static class Addition implements RawUpdate
    {
        private final Term.Raw value;

        public Addition(Term.Raw value)
        {
            this.value = value;
        }

        public Operation prepare(CFDefinition.Name receiver) throws InvalidRequestException
        {
            Term v = value.prepare(receiver);

            if (!(receiver.type instanceof CollectionType))
            {
                if (!(receiver.type instanceof CounterColumnType))
                    throw new InvalidRequestException(String.format("Invalid operation (%s) for non counter column %s", toString(receiver), receiver));
                return new Constants.Adder(receiver.kind == CFDefinition.Name.Kind.VALUE_ALIAS ? null : receiver.name, v);
            }

            switch (((CollectionType)receiver.type).kind)
            {
                case LIST:
                    return new Lists.Appender(receiver.name, v);
                case SET:
                    return new Sets.Adder(receiver.name, v);
                case MAP:
                    return new Maps.Putter(receiver.name, v);
            }
            throw new AssertionError();
        }

        protected String toString(ColumnSpecification column)
        {
            return String.format("%s = %s + %s", column, column, value);
        }

        public boolean isCompatibleWith(RawUpdate other)
        {
            return !(other instanceof SetValue);
        }
    }

    public static class Substraction implements RawUpdate
    {
        private final Term.Raw value;

        public Substraction(Term.Raw value)
        {
            this.value = value;
        }

        public Operation prepare(CFDefinition.Name receiver) throws InvalidRequestException
        {
            Term v = value.prepare(receiver);

            if (!(receiver.type instanceof CollectionType))
            {
                if (!(receiver.type instanceof CounterColumnType))
                    throw new InvalidRequestException(String.format("Invalid operation (%s) for non counter column %s", toString(receiver), receiver));
                return new Constants.Substracter(receiver.kind == CFDefinition.Name.Kind.VALUE_ALIAS ? null : receiver.name, v);
            }

            switch (((CollectionType)receiver.type).kind)
            {
                case LIST:
                    return new Lists.Discarder(receiver.name, v);
                case SET:
                    return new Sets.Discarder(receiver.name, v);
                case MAP:
                    throw new InvalidRequestException(String.format("Invalid operation (%s) for map column %s", toString(receiver), receiver));
            }
            throw new AssertionError();
        }

        protected String toString(ColumnSpecification column)
        {
            return String.format("%s = %s - %s", column, column, value);
        }

        public boolean isCompatibleWith(RawUpdate other)
        {
            return !(other instanceof SetValue);
        }
    }

    public static class Prepend implements RawUpdate
    {
        private final Term.Raw value;

        public Prepend(Term.Raw value)
        {
            this.value = value;
        }

        public Operation prepare(CFDefinition.Name receiver) throws InvalidRequestException
        {
            Term v = value.prepare(receiver);

            if (!(receiver.type instanceof ListType))
                throw new InvalidRequestException(String.format("Invalid operation (%s) for non list column %s", toString(receiver), receiver));

            return new Lists.Prepender(receiver.name, v);
        }

        protected String toString(ColumnSpecification column)
        {
            // FIX: a prepend is "c = v + c" (see class javadoc); this previously rendered
            // with a '-' which produced a misleading error message.
            return String.format("%s = %s + %s", column, value, column);
        }

        public boolean isCompatibleWith(RawUpdate other)
        {
            return !(other instanceof SetValue);
        }
    }

    public static class ColumnDeletion implements RawDeletion
    {
        private final ColumnIdentifier id;

        public ColumnDeletion(ColumnIdentifier id)
        {
            this.id = id;
        }

        public ColumnIdentifier affectedColumn()
        {
            return id;
        }

        public Operation prepare(ColumnSpecification receiver) throws InvalidRequestException
        {
            // No validation, deleting a column is always "well typed"
            return new Constants.Deleter(id, receiver.type instanceof CollectionType);
        }
    }

    public static class ElementDeletion implements RawDeletion
    {
        private final ColumnIdentifier id;
        private final Term.Raw element;

        public ElementDeletion(ColumnIdentifier id, Term.Raw element)
        {
            this.id = id;
            this.element = element;
        }

        public ColumnIdentifier affectedColumn()
        {
            return id;
        }

        public Operation prepare(ColumnSpecification receiver) throws InvalidRequestException
        {
            if (!(receiver.type instanceof CollectionType))
                throw new InvalidRequestException(String.format("Invalid deletion operation for non collection column %s", receiver));

            switch (((CollectionType)receiver.type).kind)
            {
                case LIST:
                    Term idx = element.prepare(Lists.indexSpecOf(receiver));
                    return new Lists.DiscarderByIndex(id, idx);
                case SET:
                    Term elt = element.prepare(Sets.valueSpecOf(receiver));
                    // FIX: was Lists.Discarder — wrong collection kind for a set column;
                    // Sets.Discarder is the discarder used for sets (see Substraction above).
                    return new Sets.Discarder(id, elt);
                case MAP:
                    Term key = element.prepare(Maps.keySpecOf(receiver));
                    return new Maps.DiscarderByKey(id, key);
            }
            throw new AssertionError();
        }
    }
}
package de.felixbruns.jotify.gateway.stream;

import java.io.IOException;
import java.io.OutputStream;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;

import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;

import com.sun.net.httpserver.HttpExchange;

import de.felixbruns.jotify.cache.SubstreamCache;
import de.felixbruns.jotify.exceptions.ProtocolException;
import de.felixbruns.jotify.media.Track;
import de.felixbruns.jotify.player.SpotifyOggHeader;
import de.felixbruns.jotify.protocol.Protocol;
import de.felixbruns.jotify.protocol.channel.Channel;
import de.felixbruns.jotify.protocol.channel.ChannelListener;

/**
 * Streams a Spotify track over an HTTP exchange: requests encrypted substreams
 * via {@link Protocol}, deinterleaves and AES/CTR-decrypts each 1024-byte block,
 * strips the Spotify Ogg header from the first chunk, and writes the plaintext
 * to the HTTP response body. Substreams are cached in a {@link SubstreamCache}.
 */
public class ChannelStreamer implements ChannelListener {
	/* Decryption stuff. */
	private Cipher cipher;
	private Key    key;
	private byte[] iv;

	/* Requesting and loading stuff. */
	private Track            track;
	private Protocol         protocol;
	private int              channelOffset;
	private int              channelLength;
	private int              channelTotal;
	private SpotifyOggHeader header;
	private HttpExchange     exchange;
	private OutputStream     output;

	/* Caching of substreams. */
	private SubstreamCache cache;
	private byte[]         cacheData;

	private int total = 0;

	public ChannelStreamer(Protocol protocol, Track track, byte[] key, HttpExchange exchange){
		/* Set output stream and cache. */
		this.exchange = exchange;
		this.output   = exchange.getResponseBody();
		this.cache    = new SubstreamCache();

		/* Get AES cipher instance. */
		try {
			this.cipher = Cipher.getInstance("AES/CTR/NoPadding");
		}
		catch (NoSuchAlgorithmException e){
			System.err.println("AES not available! Aargh!");
		}
		catch (NoSuchPaddingException e){
			System.err.println("No padding not available... haha!");
		}

		/* Create secret key from bytes. */
		this.key = new SecretKeySpec(key, "AES");

		/* Set IV. */
		this.iv = new byte[]{
			(byte)0x72, (byte)0xe0, (byte)0x67, (byte)0xfb,
			(byte)0xdd, (byte)0xcb, (byte)0xcf, (byte)0x77,
			(byte)0xeb, (byte)0xe8, (byte)0xbc, (byte)0x64,
			(byte)0x3f, (byte)0x63, (byte)0x0d, (byte)0x93
		};

		/* Initialize cipher with key and iv in encrypt mode. */
		try {
			this.cipher.init(Cipher.ENCRYPT_MODE, this.key, new IvParameterSpec(this.iv));
		}
		catch (InvalidKeyException e){
			System.err.println("Invalid key!");
		}
		catch (InvalidAlgorithmParameterException e){
			System.err.println("Invalid IV!");
		}

		/* Set track. */
		this.track = track;

		/* Set protocol. */
		this.protocol = protocol;

		/* Set offset and length. */
		this.channelOffset = 0;
		this.channelLength = 160 * 1024 * 5 / 8; /* 160 kbit * 5 seconds. */

		/* Header and semaphore to wait on. */
		this.header = null;

		/* Send first substream request (cache is always non-null here;
		 * the previous redundant null-check was removed). */
		String hash = this.cache.hash(this.track, this.channelOffset, this.channelLength);

		if(this.cache.contains("substream", hash)){
			this.cache.load("substream", hash, this);
		}
		else{
			try{
				this.protocol.sendSubstreamRequest(this, this.track, this.channelOffset, this.channelLength);
			}
			catch(ProtocolException e){
				return;
			}
		}
	}

	public void channelHeader(Channel channel, byte[] header){
		/* Reset the per-substream cache buffer and byte counter. */
		this.cacheData = new byte[this.channelLength];

		this.channelTotal = 0;
	}

	public void channelData(Channel channel, byte[] data){
		/* Offsets needed for deinterleaving. */
		int off, w, x, y, z;

		/* Copy data to cache buffer. */
		for(int i = 0; i < data.length; i++){
			this.cacheData[this.channelTotal + i] = data[i];
		}

		/* Allocate space for ciphertext. */
		byte[] ciphertext = new byte[data.length + 1024];
		byte[] keystream  = new byte[16];

		/* Decrypt each 1024 byte block. */
		for(int block = 0; block < data.length / 1024; block++){
			/* Deinterleave the 4x256 byte blocks. */
			off = block * 1024;
			w   = block * 1024 + 0 * 256;
			x   = block * 1024 + 1 * 256;
			y   = block * 1024 + 2 * 256;
			z   = block * 1024 + 3 * 256;

			for(int i = 0; i < 1024 && (block * 1024 + i) < data.length; i += 4){
				ciphertext[off++] = data[w++];
				ciphertext[off++] = data[x++];
				ciphertext[off++] = data[y++];
				ciphertext[off++] = data[z++];
			}

			/* Decrypt 1024 bytes block. This will fail for the last block. */
			for(int i = 0; i < 1024 && (block * 1024 + i) < data.length; i += 16){
				/* Produce 16 bytes of keystream from the IV. */
				try{
					keystream = this.cipher.doFinal(this.iv);
				}
				catch(IllegalBlockSizeException e){
					e.printStackTrace();
				}
				catch(BadPaddingException e){
					e.printStackTrace();
				}

				/*
				 * Produce plaintext by XORing ciphertext with keystream.
				 * And somehow I also need to XOR with the IV... Please
				 * somebody tell me what I'm doing wrong, or is it the
				 * Java implementation of AES? At least it works like this.
				 */
				for(int j = 0; j < 16; j++){
					ciphertext[block * 1024 + i + j] ^= keystream[j] ^ this.iv[j];
				}

				/* Update IV counter (big-endian increment with carry). */
				for(int j = 15; j >= 0; j--){
					this.iv[j] += 1;

					if((int)(this.iv[j] & 0xFF) != 0){
						break;
					}
				}

				/* Set new IV. */
				try{
					this.cipher.init(Cipher.ENCRYPT_MODE, this.key, new IvParameterSpec(this.iv));
				}
				catch(InvalidKeyException e){
					e.printStackTrace();
				}
				catch(InvalidAlgorithmParameterException e){
					e.printStackTrace();
				}
			}
		}

		/* Save data to output stream. */
		try{
			off = 0;

			/* Check if we decoded the header yet. */
			if(this.header == null){
				/* Get header from data.
				 * NOTE(review): 167 bytes is assumed to cover the whole Spotify Ogg
				 * header — confirm against SpotifyOggHeader's expectations. */
				byte[] bytes = Arrays.copyOfRange(ciphertext, 0, 167);

				/* Decode header. */
				this.header = new SpotifyOggHeader(bytes);

				/* Send response headers. */
				System.out.format("Header: 0x%08x\n", (this.header.getLength() & 0xfffff000) - this.header.getHeaderLength());
				this.exchange.sendResponseHeaders(200, (this.header.getLength() & 0xfffff000) - this.header.getHeaderLength());

				off = this.header.getHeaderLength();
			}

			this.output.write(ciphertext, off, data.length - off);
			this.output.flush();

			/*
			 * Don't subtract 'off' here! Otherwise we would
			 * accidentially close the stream in channelEnd!
			 */
			this.channelTotal += data.length;
			this.total        += data.length;
		}
		catch(Exception e){
			/* Don't care. */
		}
	}

	public void channelEnd(Channel channel){
		/* Create cache hash. */
		String hash = this.cache.hash(this.track, this.channelOffset, this.channelTotal);

		/* Save to cache (cache is always non-null; redundant null-check removed). */
		if(!this.cache.contains("substream", hash)){
			this.cache.store("substream", hash, this.cacheData, this.channelTotal);
		}

		/* Send next substream request. */
		try{
			if(this.channelTotal < this.channelLength){
				/* Short substream means end of track: close the response body.
				 * NOTE(review): this early return skips Channel.unregister below —
				 * confirm whether the channel should also be unregistered here. */
				this.output.close();

				System.out.format("Stream: 0x%08x\n", this.total - this.header.getHeaderLength());

				return;
			}

			this.channelOffset += this.channelLength;

			hash = this.cache.hash(this.track, this.channelOffset, this.channelLength);

			if(this.cache.contains("substream", hash)){
				this.cache.load("substream", hash, this);
			}
			else{
				this.protocol.sendSubstreamRequest(this, this.track, this.channelOffset, this.channelLength);
			}
		}
		catch(IOException e){
			/* Ignore. */
		}
		catch(ProtocolException e){
			/* Ignore. */
		}

		Channel.unregister(channel.getId());
	}

	public void channelError(Channel channel){
		/* Ignore. */
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.twitter.distributedlog.service;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.twitter.common.zookeeper.ServerSet;
import com.twitter.distributedlog.client.ClientConfig;
import com.twitter.distributedlog.client.DistributedLogClientImpl;
import com.twitter.distributedlog.client.monitor.MonitorServiceClient;
import com.twitter.distributedlog.client.resolver.RegionResolver;
import com.twitter.distributedlog.client.resolver.DefaultRegionResolver;
import com.twitter.distributedlog.client.routing.RegionsRoutingService;
import com.twitter.distributedlog.client.routing.RoutingService;
import com.twitter.distributedlog.client.routing.RoutingUtils;
import com.twitter.finagle.builder.ClientBuilder;
import com.twitter.finagle.stats.NullStatsReceiver;
import com.twitter.finagle.stats.StatsReceiver;
import com.twitter.finagle.thrift.ClientId;
import org.apache.commons.lang.StringUtils;

import java.net.SocketAddress;
import java.net.URI;
import java.util.Random;

/**
 * Builder for {@link DistributedLogClient} / {@link MonitorServiceClient} instances.
 * <p>
 * The builder is copy-on-write: every setter returns a NEW builder carrying the
 * change, leaving the receiver untouched, so a partially-configured builder can be
 * safely shared and branched. {@code build()} requires at least a name, a client id,
 * and a routing target (server set, finagle name, uri or host).
 */
public final class DistributedLogClientBuilder {

    // Used only to pick one zk server at random in uri(); seeding with wall-clock
    // time is inherited behavior and adequate for load-spreading (not security).
    private static final Random random = new Random(System.currentTimeMillis());

    private String _name = null;
    private ClientId _clientId = null;
    private RoutingService.Builder _routingServiceBuilder = null;
    private ClientBuilder _clientBuilder = null;
    private StatsReceiver _statsReceiver = new NullStatsReceiver();
    private StatsReceiver _streamStatsReceiver = new NullStatsReceiver();
    private ClientConfig _clientConfig = new ClientConfig();
    private boolean _enableRegionStats = false;
    private final RegionResolver _regionResolver = new DefaultRegionResolver();

    /**
     * Create a client builder.
     *
     * @return client builder
     */
    public static DistributedLogClientBuilder newBuilder() {
        return new DistributedLogClientBuilder();
    }

    /**
     * Copy an existing builder. The {@link ClientConfig} is deep-copied so the
     * copies do not share mutable config state; the region resolver is NOT copied
     * (each builder owns a fresh {@link DefaultRegionResolver}).
     *
     * @param builder builder to copy
     * @return a new builder with the same settings
     */
    public static DistributedLogClientBuilder newBuilder(DistributedLogClientBuilder builder) {
        DistributedLogClientBuilder newBuilder = new DistributedLogClientBuilder();
        newBuilder._name = builder._name;
        newBuilder._clientId = builder._clientId;
        newBuilder._clientBuilder = builder._clientBuilder;
        newBuilder._routingServiceBuilder = builder._routingServiceBuilder;
        newBuilder._statsReceiver = builder._statsReceiver;
        newBuilder._streamStatsReceiver = builder._streamStatsReceiver;
        newBuilder._enableRegionStats = builder._enableRegionStats;
        newBuilder._clientConfig = ClientConfig.newConfig(builder._clientConfig);
        return newBuilder;
    }

    // private constructor — use newBuilder()
    private DistributedLogClientBuilder() {}

    /**
     * Client Name.
     *
     * @param name
     *          client name
     * @return client builder.
     */
    public DistributedLogClientBuilder name(String name) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._name = name;
        return newBuilder;
    }

    /**
     * Client ID.
     *
     * @param clientId
     *          client id
     * @return client builder.
     */
    public DistributedLogClientBuilder clientId(ClientId clientId) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientId = clientId;
        return newBuilder;
    }

    /**
     * Serverset to access proxy services.
     *
     * @param serverSet
     *          server set.
     * @return client builder.
     */
    public DistributedLogClientBuilder serverSet(ServerSet serverSet) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._routingServiceBuilder = RoutingUtils.buildRoutingService(serverSet);
        newBuilder._enableRegionStats = false;
        return newBuilder;
    }

    /**
     * Server Sets to access proxy services. The <i>local</i> server set will be tried first,
     * then <i>remotes</i>.
     *
     * @param local local server set.
     * @param remotes remote server sets.
     * @return client builder.
     */
    public DistributedLogClientBuilder serverSets(ServerSet local, ServerSet...remotes) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        RoutingService.Builder[] builders = new RoutingService.Builder[remotes.length + 1];
        builders[0] = RoutingUtils.buildRoutingService(local);
        for (int i = 1; i < builders.length; i++) {
            builders[i] = RoutingUtils.buildRoutingService(remotes[i - 1]);
        }
        newBuilder._routingServiceBuilder = RegionsRoutingService.newBuilder()
                .resolver(_regionResolver)
                .routingServiceBuilders(builders);
        // Per-region stats only make sense when there is more than one region.
        newBuilder._enableRegionStats = remotes.length > 0;
        return newBuilder;
    }

    /**
     * Name to access proxy services.
     *
     * @param finagleNameStr
     *          finagle name string.
     * @return client builder.
     */
    public DistributedLogClientBuilder finagleNameStr(String finagleNameStr) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._routingServiceBuilder = RoutingUtils.buildRoutingService(finagleNameStr);
        newBuilder._enableRegionStats = false;
        return newBuilder;
    }

    /**
     * Finagle name strs to access proxy services. The <i>local</i> finagle name str will be
     * tried first, then <i>remotes</i>.
     *
     * @param local local finagle name str.
     * @param remotes remote finagle name strs.
     * @return client builder.
     */
    public DistributedLogClientBuilder finagleNameStrs(String local, String...remotes) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        RoutingService.Builder[] builders = new RoutingService.Builder[remotes.length + 1];
        builders[0] = RoutingUtils.buildRoutingService(local);
        for (int i = 1; i < builders.length; i++) {
            builders[i] = RoutingUtils.buildRoutingService(remotes[i - 1]);
        }
        newBuilder._routingServiceBuilder = RegionsRoutingService.newBuilder()
                .routingServiceBuilders(builders)
                .resolver(_regionResolver);
        newBuilder._enableRegionStats = remotes.length > 0;
        return newBuilder;
    }

    /**
     * URI to access proxy services. Assuming the write proxies are announced under `.write_proxy` of
     * the provided namespace uri.
     * <p>
     * The builder will convert the dl uri (e.g. distributedlog://{zkserver}/path/to/namespace) to
     * zookeeper serverset based finagle name str (`zk!{zkserver}!/path/to/namespace/.write_proxy`)
     *
     * @param uri namespace uri to access the serverset of write proxies
     * @return distributedlog builder
     */
    public DistributedLogClientBuilder uri(URI uri) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        String zkServers = uri.getAuthority().replace(";", ",");
        String[] zkServerList = StringUtils.split(zkServers, ',');
        String finagleNameStr = String.format(
                "zk!%s!%s/.write_proxy",
                zkServerList[random.nextInt(zkServerList.length)], // zk server
                uri.getPath());
        newBuilder._routingServiceBuilder = RoutingUtils.buildRoutingService(finagleNameStr);
        newBuilder._enableRegionStats = false;
        return newBuilder;
    }

    /**
     * Address of write proxy to connect.
     *
     * @param address
     *          write proxy address.
     * @return client builder.
     */
    public DistributedLogClientBuilder host(SocketAddress address) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._routingServiceBuilder = RoutingUtils.buildRoutingService(address);
        newBuilder._enableRegionStats = false;
        return newBuilder;
    }

    private DistributedLogClientBuilder routingServiceBuilder(RoutingService.Builder builder) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._routingServiceBuilder = builder;
        newBuilder._enableRegionStats = false;
        return newBuilder;
    }

    /**
     * Routing Service to access proxy services.
     *
     * @param routingService
     *          routing service
     * @return client builder.
     */
    @VisibleForTesting
    public DistributedLogClientBuilder routingService(RoutingService routingService) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._routingServiceBuilder = RoutingUtils.buildRoutingService(routingService);
        newBuilder._enableRegionStats = false;
        return newBuilder;
    }

    /**
     * Stats receiver to expose client stats.
     *
     * @param statsReceiver
     *          stats receiver.
     * @return client builder.
     */
    public DistributedLogClientBuilder statsReceiver(StatsReceiver statsReceiver) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._statsReceiver = statsReceiver;
        return newBuilder;
    }

    /**
     * Stream Stats Receiver to expose per stream stats.
     *
     * @param streamStatsReceiver
     *          stream stats receiver
     * @return client builder.
     */
    public DistributedLogClientBuilder streamStatsReceiver(StatsReceiver streamStatsReceiver) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._streamStatsReceiver = streamStatsReceiver;
        return newBuilder;
    }

    /**
     * Set underlying finagle client builder.
     *
     * @param builder
     *          finagle client builder.
     * @return client builder.
     */
    public DistributedLogClientBuilder clientBuilder(ClientBuilder builder) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientBuilder = builder;
        return newBuilder;
    }

    /**
     * Backoff time when redirecting to an already retried host.
     *
     * @param ms
     *          backoff time.
     * @return client builder.
     */
    public DistributedLogClientBuilder redirectBackoffStartMs(int ms) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setRedirectBackoffStartMs(ms);
        return newBuilder;
    }

    /**
     * Max backoff time when redirecting to an already retried host.
     *
     * @param ms
     *          backoff time.
     * @return client builder.
     */
    public DistributedLogClientBuilder redirectBackoffMaxMs(int ms) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setRedirectBackoffMaxMs(ms);
        return newBuilder;
    }

    /**
     * Max redirects that is allowed per request. If <i>redirects</i> are
     * exhausted, fail the request immediately.
     *
     * @param redirects
     *          max redirects allowed before failing a request.
     * @return client builder.
     */
    public DistributedLogClientBuilder maxRedirects(int redirects) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setMaxRedirects(redirects);
        return newBuilder;
    }

    /**
     * Timeout per request in millis.
     *
     * @param timeoutMs
     *          timeout per request in millis.
     * @return client builder.
     */
    public DistributedLogClientBuilder requestTimeoutMs(int timeoutMs) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setRequestTimeoutMs(timeoutMs);
        return newBuilder;
    }

    /**
     * Set thriftmux enabled.
     *
     * @param enabled
     *          is thriftmux enabled
     * @return client builder.
     */
    public DistributedLogClientBuilder thriftmux(boolean enabled) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setThriftMux(enabled);
        return newBuilder;
    }

    /**
     * Set failfast stream exception handling enabled.
     *
     * @param enabled
     *          is failfast exception handling enabled
     * @return client builder.
     */
    public DistributedLogClientBuilder streamFailfast(boolean enabled) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setStreamFailfast(enabled);
        return newBuilder;
    }

    /**
     * Set the regex to match stream names that the client cares about.
     *
     * @param nameRegex
     *          stream name regex
     * @return client builder
     */
    public DistributedLogClientBuilder streamNameRegex(String nameRegex) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setStreamNameRegex(nameRegex);
        return newBuilder;
    }

    /**
     * Whether to use the new handshake endpoint to exchange ownership cache. Enable this
     * when the servers are updated to support handshaking with client info.
     *
     * @param enabled
     *          new handshake endpoint is enabled.
     * @return client builder.
     */
    public DistributedLogClientBuilder handshakeWithClientInfo(boolean enabled) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setHandshakeWithClientInfo(enabled);
        return newBuilder;
    }

    /**
     * Set the periodic handshake interval in milliseconds. Every <code>intervalMs</code>,
     * the DL client will handshake with existing proxies again. If the interval is less than
     * ownership sync interval, the handshake won't sync ownerships. Otherwise, it will.
     *
     * @see #periodicOwnershipSyncIntervalMs(long)
     * @param intervalMs
     *          handshake interval
     * @return client builder.
     */
    public DistributedLogClientBuilder periodicHandshakeIntervalMs(long intervalMs) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setPeriodicHandshakeIntervalMs(intervalMs);
        return newBuilder;
    }

    /**
     * Set the periodic ownership sync interval in milliseconds. If periodic handshake is enabled,
     * the handshake will sync ownership if the elapsed time is larger than sync interval.
     *
     * @see #periodicHandshakeIntervalMs(long)
     * @param intervalMs
     *          interval that handshake should sync ownerships.
     * @return client builder
     */
    public DistributedLogClientBuilder periodicOwnershipSyncIntervalMs(long intervalMs) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setPeriodicOwnershipSyncIntervalMs(intervalMs);
        return newBuilder;
    }

    /**
     * Enable/Disable periodic dumping ownership cache.
     *
     * @param enabled
     *          flag to enable/disable periodic dumping ownership cache
     * @return client builder.
     */
    public DistributedLogClientBuilder periodicDumpOwnershipCache(boolean enabled) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setPeriodicDumpOwnershipCacheEnabled(enabled);
        return newBuilder;
    }

    /**
     * Set periodic dumping ownership cache interval.
     *
     * @param intervalMs
     *          interval on dumping ownership cache, in millis.
     * @return client builder
     */
    public DistributedLogClientBuilder periodicDumpOwnershipCacheIntervalMs(long intervalMs) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setPeriodicDumpOwnershipCacheIntervalMs(intervalMs);
        return newBuilder;
    }

    /**
     * Enable handshake tracing.
     *
     * @param enabled
     *          flag to enable/disable handshake tracing
     * @return client builder
     */
    public DistributedLogClientBuilder handshakeTracing(boolean enabled) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setHandshakeTracingEnabled(enabled);
        return newBuilder;
    }

    /**
     * Enable checksum on requests to the proxy.
     *
     * @param enabled
     *          flag to enable/disable checksum
     * @return client builder
     */
    public DistributedLogClientBuilder checksum(boolean enabled) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig.setChecksumEnabled(enabled);
        return newBuilder;
    }

    DistributedLogClientBuilder clientConfig(ClientConfig clientConfig) {
        DistributedLogClientBuilder newBuilder = newBuilder(this);
        newBuilder._clientConfig = ClientConfig.newConfig(clientConfig);
        return newBuilder;
    }

    /**
     * Build distributedlog client.
     *
     * @return distributedlog client.
     */
    public DistributedLogClient build() {
        return buildClient();
    }

    /**
     * Build monitor service client.
     *
     * @return monitor service client.
     */
    public MonitorServiceClient buildMonitorClient() {
        return buildClient();
    }

    DistributedLogClientImpl buildClient() {
        Preconditions.checkNotNull(_name, "No name provided.");
        Preconditions.checkNotNull(_clientId, "No client id provided.");
        Preconditions.checkNotNull(_routingServiceBuilder, "No routing service builder provided.");
        Preconditions.checkNotNull(_statsReceiver, "No stats receiver provided.");
        // BUGFIX: previously this defaulted by assigning to the _streamStatsReceiver
        // FIELD, mutating the builder as a side effect of build() and violating the
        // copy-on-write contract every setter follows. Use a local default instead.
        StatsReceiver streamStatsReceiver = _streamStatsReceiver;
        if (null == streamStatsReceiver) {
            streamStatsReceiver = new NullStatsReceiver();
        }

        RoutingService routingService = _routingServiceBuilder
                .statsReceiver(_statsReceiver.scope("routing"))
                .build();
        DistributedLogClientImpl clientImpl =
                new DistributedLogClientImpl(
                        _name,
                        _clientId,
                        routingService,
                        _clientBuilder,
                        _clientConfig,
                        _statsReceiver,
                        streamStatsReceiver,
                        _regionResolver,
                        _enableRegionStats);
        routingService.startService();
        clientImpl.handshake();
        return clientImpl;
    }
}
/*
 * Copyright (c) 2016, Ronald DeSantis
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hauldata.dbpa.manage.resources;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.NoSuchFileException;
import java.sql.SQLException;
import java.util.List;

import com.hauldata.dbpa.manage.JobManager;
import com.hauldata.dbpa.manage_control.api.ScriptValidation;

import junit.framework.TestCase;

/**
 * Exercises ScriptsResource end to end: put/get/delete, name listing,
 * validation, and script execution, including the not-found error paths.
 */
public class ScriptsResourceTest extends TestCase {

	private static final String garbageName = "garbage";
	private static final String garbageBody = "TASK LOG 'It may be garbage, but it''s MY garbage' END TASK";
	private static final String differentName = "NotGarbage";
	private static final String bogusName = "DoesNotExist";
	private static final String invalidName = "invalid";
	private static final String invalidBody = "This is not a valid script";
	private static final String hasParamsName = "HasParams";
	private static final String hasParamsBody = "PARAMETERS one int, two VARCHAR(50), three DATE END PARAMETERS\n TASK GO END TASK\n";

	private ScriptsResource scriptsResource;

	public ScriptsResourceTest(String name) {
		super(name);
	}

	protected void setUp() throws SQLException {
		// Bring up a manager instance and a fresh resource for each test.
		JobManager.instantiate(false).startup();
		scriptsResource = new ScriptsResource();
	}

	protected void tearDown() throws InterruptedException {
		JobManager.getInstance().shutdown();
		JobManager.killInstance();
	}

	public void testPut() throws IOException {
		scriptsResource.put(garbageName, garbageBody);
	}

	public void testGetPostive() throws IOException {

		// Positive path: a stored script comes back with its exact body.

		testPut();

		String fetchedBody = scriptsResource.get(garbageName);

		assertEquals(garbageBody, fetchedBody);
	}

	public void testGetNegative() throws IOException {

		// Negative path: fetching a script that does not exist must throw.

		try { scriptsResource.delete(bogusName); } catch (Exception ex) {}

		boolean fetchedMissingScript;
		try {
			scriptsResource.get(bogusName);
			fetchedMissingScript = true;
		}
		catch (FileNotFoundException ex) {
			assertEquals(notFoundMessage(bogusName), ex.getMessage());
			fetchedMissingScript = false;
		}
		assertFalse(fetchedMissingScript);
	}

	public void testDeletePositive() throws IOException {

		// Positive path: a stored script can be deleted without error.

		testPut();

		scriptsResource.delete(garbageName);
	}

	public void testDeleteNegative() throws IOException {

		// Negative path: deleting a script that does not exist must throw.

		try { scriptsResource.delete(bogusName); } catch (Exception ex) {}

		boolean deletedMissingScript;
		try {
			scriptsResource.delete(bogusName);
			deletedMissingScript = true;
		}
		catch (NoSuchFileException ex) {
			assertEquals(notFoundMessage(bogusName), ex.getMessage());
			deletedMissingScript = false;
		}
		assertFalse(deletedMissingScript);
	}

	public void testGetNames() throws IOException {

		// Store several scripts sharing a name prefix plus one with a different name.

		final int scriptCount = 9;
		for (int i = 1; i <= scriptCount; i++) {
			scriptsResource.put(garbageName + i, garbageBody);
		}
		scriptsResource.put(differentName, garbageBody);

		// Every name matched by the wildcard must carry the prefix; other
		// similarly-named scripts may legitimately exist from earlier runs.

		List<String> matchingNames = scriptsResource.getNames(garbageName + "*");

		assertTrue(scriptCount <= matchingNames.size());
		for (String name : matchingNames) {
			assertTrue(name.startsWith(garbageName));
		}

		// The unfiltered listing must be strictly larger than the filtered one.

		List<String> everyName = scriptsResource.getNames(null);

		assertTrue(matchingNames.size() < everyName.size());
	}

	public void testValidatePositive() throws IOException {

		ScriptValidation result;

		// A parameterless script validates with an empty parameter list.

		testPut();

		result = scriptsResource.validate(garbageName);

		assertTrue(result.isValid());
		assertEquals(0, result.getParameters().size());

		// A parameterized script reports each declared parameter, in order,
		// with its resolved type name.

		scriptsResource.put(hasParamsName, hasParamsBody);

		result = scriptsResource.validate(hasParamsName);

		assertTrue(result.isValid());
		assertEquals(3, result.getParameters().size());
		assertEquals("ONE", result.getParameters().get(0).getName());
		assertEquals("INTEGER", result.getParameters().get(0).getTypeName());
		assertEquals("TWO", result.getParameters().get(1).getName());
		assertEquals("VARCHAR", result.getParameters().get(1).getTypeName());
		assertEquals("THREE", result.getParameters().get(2).getName());
		assertEquals("DATETIME", result.getParameters().get(2).getTypeName());
	}

	public void testValidateNegative() throws IOException {

		// A syntactically broken script is reported invalid with a message.

		scriptsResource.put(invalidName, invalidBody);

		ScriptValidation result = scriptsResource.validate(invalidName);

		assertFalse(result.isValid());
		assertNotNull(result.getValidationMessage());
	}

	public void testValidateNonExistent() throws IOException {

		// Validating a script that does not exist must throw.

		try { scriptsResource.delete(bogusName); } catch (Exception ex) {}

		boolean validatedMissingScript = true;
		try {
			scriptsResource.validate(bogusName);
		}
		catch (FileNotFoundException ex) {
			assertEquals(notFoundMessage(bogusName), ex.getMessage());
			validatedMissingScript = false;
		}
		assertFalse(validatedMissingScript);
	}

	public void testRun() throws Exception {

		// A parameterized script accepts positional arguments and runs.

		scriptsResource.put(hasParamsName, hasParamsBody);

		String[] args = { "123", "whatever" };

		scriptsResource.run(hasParamsName, args);
	}

	private String notFoundMessage(String name) {
		return ScriptsResource.scriptNotFoundMessageStem + name;
	}
}
/**
 *
 * Copyright (C) 2013 Nicolas Hernandez
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *   http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package fr.univnantes.lina.mlnlp.model.alphabet;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import fr.univnantes.lina.javautil.SecurityUtilities;
import fr.univnantes.lina.javautil.StringUtilities;

/**
 * @author hernandez
 *
 * Alphabet mapping arbitrary feature-value symbols to double-typed codes.
 * Distinct symbols must receive distinct codes; numeric and boolean symbols
 * are converted directly, while String/Character symbols are assigned a
 * sequential counter value on first sight (see {@link #encode(Object)}).
 *
 * TODO to remove one of the two maps
 *      http://stackoverflow.com/questions/1383797/java-hashmap-how-to-get-key-from-value
 */
public class SimpleAlphabet implements Alphabet {

	// Next sequential code handed out by encode(); starts at 1 so that 0.0
	// stays available as the Boolean-false code without collision.
	private double indice;

	// code -> symbol; used to detect when a freshly generated code collides
	// with one already assigned to a different symbol.
	private Map<Double,Object> code2SymbolMap;

	// symbol -> code; cache so a symbol is encoded only once.
	private Map<Object,Double> symbol2CodeMap;

	/**
	 * Create an empty alphabet.
	 */
	public SimpleAlphabet() {
		super();
		this.symbol2CodeMap = new HashMap<Object,Double>();
		this.code2SymbolMap = new HashMap<Double,Object>();
		this.indice = 1;
	}

	/**
	 * Unused alternative encoding kept for reference: weighted sum of code
	 * points. Known to clash (e.g. "zoomer" and "arment" both map to 13812.0),
	 * which is why it is not wired into {@link #addSymbol(Object)}.
	 */
	private Double encode1 (String symbolString) {
		int codePointArray[] = StringUtilities.string2CodePoint((String)symbolString);
		Double code = 0.0;
		// encoding algorithm too simple
		// Error: the code 13812.0 generated for symbol zoomer has already been assigned to another symbol arment
		for (int i = 0; i < codePointArray.length ; i++) {
			code = code + codePointArray[i] * (i+1);
		}
		code = code * codePointArray.length;
		return code ;
	}

	/**
	 * Unused alternative encoding kept for reference: concatenated code points.
	 * The surest way may be to concatenate each codePoint but takes some room.
	 */
	private Double encode2 (int codePointArray[]) {
		Double code = -1.0;
		int firstCharCode = (StringUtilities.string2CodePoint("!"))[0];
		String codeString = "";
		String codeStringMod = "";
		for (int i = 0; i < codePointArray.length ; i++) {
			code = code + (codePointArray[i] - firstCharCode) * (i+1);
			codeString = String.valueOf(codePointArray[i]) + codeString ;
			codeStringMod = String.valueOf(codePointArray[i] % firstCharCode) + codeStringMod;
		}
		Double codeDouble = Double.valueOf(codeString);
		System.err.println("Debug: codeString "+codeString+" code "+ code+" codeDouble "+codeDouble + " codeStringMod "+ codeStringMod);
		return code ;
	}

	/**
	 * Unused alternative encoding kept for reference: MD5 digest with hex
	 * letters substituted by digits so the result parses as a double.
	 */
	private Double encode3 (String symbol) {
		Double code = -1.0;
		String md5code = SecurityUtilities.encodeMD5((String)symbol);
		String md5codeAfterReplacement = md5code;
		md5codeAfterReplacement = md5codeAfterReplacement.replaceAll("a", "1");
		md5codeAfterReplacement = md5codeAfterReplacement.replaceAll("b", "2");
		md5codeAfterReplacement = md5codeAfterReplacement.replaceAll("c", "3");
		md5codeAfterReplacement = md5codeAfterReplacement.replaceAll("d", "4");
		md5codeAfterReplacement = md5codeAfterReplacement.replaceAll("e", "5");
		md5codeAfterReplacement = md5codeAfterReplacement.replaceAll("f", "6");
		code = Double.valueOf(md5codeAfterReplacement);
		return code ;
	}

	/**
	 * Active encoding: hand out the next sequential counter value.
	 * NOTE(review): not synchronized — the original comment says it assumes
	 * single-threaded ("asynchronous") access; confirm callers never share an
	 * instance across threads.
	 */
	private Double encode (Object symbol) {
		return this.indice++ ;
	}

	/**
	 * Convert a symbol to a double code when the conversion is direct
	 * (numeric and boolean types). Returns null for Character/String symbols
	 * (which require {@link #encode(Object)}) and for unsupported types
	 * (after logging an error).
	 */
	private Double symbol2code (Object symbol) {
		Double code = null;
		if (symbol instanceof Double) code = (Double) symbol;
		else if (symbol instanceof Integer) {
			code = Double.valueOf((Integer)symbol);
		} else if (symbol instanceof Float) {
			code = Double.valueOf((Float)symbol);
		} else if (symbol instanceof Short) {
			code = Double.valueOf((Short)symbol);
		} else if (symbol instanceof Long) {
			code = Double.valueOf((Long)symbol);
		} else if (symbol instanceof Boolean) {
			if ((Boolean)symbol) code = Double.valueOf(1.0);
			else code = Double.valueOf(0.0);
		} else if (symbol instanceof Character) {
			// handled by addSymbol() via encode()
		} else if (symbol instanceof String) {
			// handled by addSymbol() via encode()
		} else {
			System.err.println("Error: encoding fail since cant identify the type of " + symbol.toString());
		}
		return code;
	}

	/**
	 * Add a symbol to the alphabet and return its code, or null when no code
	 * could be assigned (unsupported type, or the generated code collides
	 * with one already assigned to another symbol).
	 *
	 * BUGFIX: this method was declared to return the primitive {@code double}
	 * while assigning {@code code = null} on collision, so the implicit
	 * unboxing at {@code return code;} threw a NullPointerException on
	 * exactly the error paths the collision check was meant to report.
	 * Returning the boxed {@link Double} lets null propagate to the caller.
	 * The early return below also prevents the maps from being polluted with
	 * a null key when the symbol type is unsupported.
	 */
	private Double addSymbol(Object symbol) {
		Double code = symbol2code(symbol);
		if (code == null) {
			if (symbol instanceof Character) {
				code = encode(String.valueOf(symbol));
			} else if (symbol instanceof String) {
				code = encode((String)symbol);
			}
			if (code == null) {
				// Unsupported symbol type: symbol2code() already logged the error.
				return null;
			}
			// if the generated code has already been assigned to a symbol
			if (this.code2SymbolMap.containsKey(code)) {
				System.err.println("Error: the code "+code +" generated for symbol "+symbol +" has already been assigned to another symbol "+ this.code2SymbolMap.get(code));
				code = null;
			} else {
				this.symbol2CodeMap.put(symbol, code);
				this.code2SymbolMap.put(code, symbol);
			}
		}
		return code;
	}

	/**
	 * Get the code of the given symbol, generating and registering it on
	 * first sight. May return null if the symbol could not be encoded
	 * (see {@link #addSymbol(Object)}).
	 */
	public Double getSymbolCode(Object symbol) {
		Double code = null;
		// if the code has already been generated for this symbol
		if (this.symbol2CodeMap.containsKey(symbol)) {
			code = this.symbol2CodeMap.get(symbol);
		} else {
			code = addSymbol(symbol);
		}
		return code;
	}

	/**
	 * Return the alphabet symbols
	 * @return set of all registered symbols
	 */
	public Set<Object> getSymbolSet() {
		return this.symbol2CodeMap.keySet();
	}

	/**
	 * Return the code used in the alphabet
	 * @return set of all assigned codes
	 */
	public Set<Double> getCodeSet() {
		return this.code2SymbolMap.keySet();
	}
}
package com.planet_ink.coffee_mud.Abilities.Properties; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2000-2010 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
// Property that teleports items placed on, given to, dropped in, or thrown at
// the affected object (room, mob, or container item) to a matching
// "Prop_ItemTransReceiver" elsewhere in the world whose text() code equals
// this property's text() code.
@SuppressWarnings("unchecked")
public class Prop_ItemTransporter extends Property
{
	public String ID() { return "Prop_ItemTransporter"; }
	public String name(){ return "Item Transporter";}
	// This property may be placed on mobs, items, or rooms.
	protected int canAffectCode(){return Ability.CAN_MOBS|Ability.CAN_ITEMS|Ability.CAN_ROOMS;}

	// Currently resolved delivery target: a room, a mob, and/or a receiver
	// item acting as the destination container.  At most one of
	// roomDestination/mobDestination is used per delivery.
	protected Room roomDestination=null;
	protected MOB mobDestination=null;
	protected Item nextDestination=null;

	// Static caches shared by all transporters, keyed by text() code:
	// possiblePossibilities maps code -> Vector of candidate receivers;
	// lastLooks maps code -> countdown used to age the cache.
	// NOTE(review): raw Hashtable/Vector are legacy collections; kept as-is
	// to preserve behavior and the file's conventions.
	protected static Hashtable possiblePossibilities=new Hashtable();
	protected static Hashtable lastLooks=new Hashtable();

	public String accountForYourself()
	{ return "Item Transporter"; }

	// Walks up the container chain of the given item and returns the
	// outermost container.  Along the way it breaks any container cycles it
	// detects (item contained in itself, or two items containing each other)
	// by nulling the offending container reference.
	public Item ultimateParent(Item item)
	{
		if(item==null) return null;
		if(item.container()==null) return item;
		if(item.container().container()==item)
			item.container().setContainer(null);
		if(item.container()==item)
			item.setContainer(null);
		return ultimateParent(item.container());
	}

	// Resolves roomDestination/mobDestination/nextDestination to a randomly
	// chosen Prop_ItemTransReceiver with a matching code.  Candidate
	// receivers are cached per code; the lastLooks countdown forces a full
	// world re-scan once it drops below zero.  Returns false when no
	// receiver could be found.
	private synchronized boolean setDestination()
	{
		Vector possibilities=(Vector)possiblePossibilities.get(text());
		Integer lastLook=(Integer)lastLooks.get(text());
		if((possibilities==null)||(lastLook==null)||(lastLook.intValue()<0))
		{
			// cache miss or cache expired: reset and re-scan below
			possibilities=new Vector();
			possiblePossibilities.put(text(),possibilities);
			lastLook=Integer.valueOf(10);
			lastLooks.put(text(),lastLook);
		}
		else
			lastLooks.put(text(),Integer.valueOf(lastLook.intValue()-1));
		if(possibilities.size()==0)
		{
			roomDestination=null;
			mobDestination=null;
			nextDestination=null;
			try
			{
				// Scan every room, its items, its inhabitants, and their
				// inventories for receivers with a matching code.
				for(Enumeration r=CMLib.map().rooms();r.hasMoreElements();)
				{
					Room room=(Room)r.nextElement();
					Ability A=room.fetchEffect("Prop_ItemTransReceiver");
					if((A!=null)&&(A.text().equalsIgnoreCase(text())))
						possibilities.addElement(room);
					for(int i=0;i<room.numItems();i++)
					{
						Item item=room.fetchItem(i);
						if((item!=null)&&(item!=affected))
						{
							A=item.fetchEffect("Prop_ItemTransReceiver");
							if((A!=null)&&(A.text().equalsIgnoreCase(text())))
								possibilities.addElement(item);
						}
					}
					for(int m=0;m<room.numInhabitants();m++)
					{
						MOB mob=room.fetchInhabitant(m);
						if((mob!=null)&&(mob!=affected))
						{
							A=mob.fetchEffect("Prop_ItemTransReceiver");
							if((A!=null)&&(A.text().equalsIgnoreCase(text())))
								possibilities.addElement(mob);
							for(int i=0;i<mob.inventorySize();i++)
							{
								Item item=mob.fetchInventory(i);
								if((item!=null)&&(item!=affected))
								{
									A=item.fetchEffect("Prop_ItemTransReceiver");
									if((A!=null)&&(A.text().equalsIgnoreCase(text())))
										possibilities.addElement(item);
								}
							}
						}
					}
				}
			// presumably guards against the map changing mid-enumeration;
			// the partial scan results are kept
			}catch(NoSuchElementException e){}
		}
		if(possibilities.size()>0)
		{
			// pick one receiver at random
			Environmental E=(Environmental)possibilities.elementAt(CMLib.dice().roll(1,possibilities.size(),-1));
			nextDestination=null;
			if(E instanceof Room)
				roomDestination=(Room)E;
			else
			if(E instanceof MOB)
				mobDestination=(MOB)E;
			else
			if(E instanceof Item)
			{
				// an item receiver delivers into that item; the owning room
				// or mob becomes the location destination
				nextDestination=(Item)E;
				if((nextDestination!=null)&&(nextDestination.owner()!=null))
				{
					if(nextDestination.owner() instanceof Room)
						roomDestination=(Room)nextDestination.owner();
					else
					if(nextDestination.owner() instanceof MOB)
						mobDestination=(MOB)nextDestination.owner();
				}
				else
					nextDestination=null;
			}
		}
		if((mobDestination==null)&&(roomDestination==null))
			return false;
		return true;
	}

	// Vetoes PUT/GIVE/GET/DROP/THROW messages involving the affected object
	// when no receiver with a matching code exists, telling the source why.
	public boolean okMessage(Environmental myHost, CMMsg msg)
	{
		if(!super.okMessage(myHost,msg))
			return false;
		if(affected==null)
			return true;
		if(((msg.amITarget(affected))
			&&((msg.targetMinor()==CMMsg.TYP_PUT)
			||(msg.targetMinor()==CMMsg.TYP_GIVE))
			&&(msg.tool()!=null)
			&&(msg.tool() instanceof Item))
		||((affected instanceof MOB)
			&&(msg.amISource((MOB)affected))
			&&(msg.targetMinor()==CMMsg.TYP_GET)
			&&(msg.target() !=null)
			&&(msg.target() instanceof Item))
		||((affected instanceof Room)
			&&(msg.targetMinor()==CMMsg.TYP_DROP)
			&&(msg.target()!=null)
			&&(msg.target() instanceof Item))
		||((affected instanceof Room)
			&&(msg.sourceMinor()==CMMsg.TYP_THROW)
			&&(affected==CMLib.map().roomLocation(msg.target()))
			&&(msg.tool() instanceof Item)))
		{
			if(!setDestination())
			{
				msg.source().tell("The transporter has no possible ItemTransReceiver with the code '"+text()+"'.");
				return false;
			}
		}
		return true;
	}

	// Moves every loose inventory item contained in (or ultimately inside)
	// the transporter to the previously resolved destination room or mob,
	// re-parenting moved items into nextDestination when the receiver is an
	// item.  Clears the destination fields once a batch is moved.
	public synchronized void tryToMoveStuff()
	{
		if((mobDestination!=null)||(roomDestination!=null))
		{
			Room room=roomDestination;
			MOB mob=mobDestination;
			Room roomMover=null;
			MOB mobMover=null;
			Item container=null;
			if(affected==null)
				return;
			// determine where the items are coming FROM
			if(affected instanceof Room)
				roomMover=(Room)affected;
			else
			if(affected instanceof MOB)
				mobMover=(MOB)affected;
			else
			if(affected instanceof Item)
			{
				container=(Item)affected;
				if((container.owner()!=null)&&(container.owner() instanceof Room))
					roomMover=(Room)container.owner();
				else
				if((container.owner()!=null)&&(container.owner() instanceof MOB))
					mobMover=(MOB)container.owner();
			}
			Vector itemsToMove=new Vector();
			if(roomMover!=null)
			{
				for(int i=0;i<roomMover.numItems();i++)
				{
					Item item=roomMover.fetchItem(i);
					if((item!=null)
					&&(item!=container)
					&&(item.amWearingAt(Wearable.IN_INVENTORY))
					&&((item.container()==container)||(ultimateParent(item)==container)))
						itemsToMove.addElement(item);
				}
				// collect first, then remove, to avoid disturbing iteration
				for(int i=0;i<itemsToMove.size();i++)
					roomMover.delItem((Item)itemsToMove.elementAt(i));
			}
			else
			if(mobMover!=null)
			{
				int oldNum=itemsToMove.size();
				for(int i=0;i<mobMover.inventorySize();i++)
				{
					Item item=mobMover.fetchInventory(i);
					if((item!=null)
					&&(item!=container)
					&&(item.amWearingAt(Wearable.IN_INVENTORY))
					&&((item.container()==container)||(ultimateParent(item)==container)))
						itemsToMove.addElement(item);
				}
				for(int i=oldNum;i<itemsToMove.size();i++)
					mobMover.delInventory((Item)itemsToMove.elementAt(i));
			}
			if(itemsToMove.size()>0)
			{
				// consume the destination so the next batch re-resolves
				mobDestination=null;
				roomDestination=null;
				if(room!=null)
				for(int i=0;i<itemsToMove.size();i++)
				{
					Item item=(Item)itemsToMove.elementAt(i);
					if((item.container()==null)||(item.container()==container))
						item.setContainer(nextDestination);
					room.addItemRefuse(item,CMProps.getIntVar(CMProps.SYSTEMI_EXPIRE_PLAYER_DROP));
				}
				if(mob!=null)
				for(int i=0;i<itemsToMove.size();i++)
				{
					Item item=(Item)itemsToMove.elementAt(i);
					if((item.container()==null)||(item.container()==container))
						item.setContainer(nextDestination);
					// a shopkeeper destination stocks the item for sale
					if(mob instanceof ShopKeeper)
						((ShopKeeper)mob).getShop().addStoreInventory(item);
					else
						mob.addInventory(item);
				}
				if(room!=null)
					room.recoverRoomStats();
				if(mob!=null)
				{
					mob.recoverCharStats();
					mob.recoverEnvStats();
					mob.recoverMaxState();
				}
			}
		}
	}

	public boolean tick(Tickable ticking, int tickID)
	{
		if(tickID==Tickable.TICKID_MOB)
			tryToMoveStuff();
		return true;
	}

	public void executeMsg(Environmental myHost, CMMsg msg)
	{
		// amazingly important that this happens first!
		super.executeMsg(myHost,msg);
		// any transfer-type message may have just landed items on us
		if((msg.targetMinor()==CMMsg.TYP_GET)
		||(msg.targetMinor()==CMMsg.TYP_GIVE)
		||(msg.targetMinor()==CMMsg.TYP_PUT)
		||(msg.sourceMinor()==CMMsg.TYP_THROW)
		||(msg.targetMinor()==CMMsg.TYP_DROP))
			tryToMoveStuff();
	}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive; import com.facebook.presto.Session; import com.facebook.presto.spi.security.Identity; import com.facebook.presto.spi.security.SelectedRole; import com.facebook.presto.spi.type.Type; import com.facebook.presto.testing.MaterializedResult; import com.facebook.presto.tests.AbstractTestQueryFramework; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.testng.annotations.AfterMethod; import org.testng.annotations.Test; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import static com.facebook.presto.spi.type.VarcharType.createUnboundedVarcharType; import static com.facebook.presto.tests.QueryAssertions.assertContains; import static com.facebook.presto.tests.QueryAssertions.assertEqualsIgnoreOrder; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static org.testng.Assert.assertEquals; @Test(singleThreaded = true) public class TestHiveRoles extends AbstractTestQueryFramework { protected TestHiveRoles() { super(HiveQueryRunner::createQueryRunner); } @AfterMethod public void afterMethod() { for (String role : listRoles()) { executeFromAdmin("DROP ROLE " + role); } } @Test public void testCreateRole() throws Exception { executeFromAdmin("CREATE ROLE role1"); 
assertEquals(listRoles(), ImmutableSet.of("role1")); assertEquals(listRoles(), ImmutableSet.of("role1")); } @Test public void testCreateDuplicateRole() throws Exception { executeFromAdmin("CREATE ROLE duplicate_role"); assertQueryFails(createAdminSession(), "CREATE ROLE duplicate_role", ".*?Role 'duplicate_role' already exists"); } @Test public void testCreateRoleWithAdminOption() throws Exception { assertQueryFails(createAdminSession(), "CREATE ROLE role1 WITH ADMIN admin", ".*?Hive Connector does not support WITH ADMIN statement"); } @Test public void testCreateReservedRole() throws Exception { assertQueryFails(createAdminSession(), "CREATE ROLE all", "Role name cannot be one of the reserved roles: \\[all, default, none\\]"); assertQueryFails(createAdminSession(), "CREATE ROLE default", "Role name cannot be one of the reserved roles: \\[all, default, none\\]"); assertQueryFails(createAdminSession(), "CREATE ROLE none", "Role name cannot be one of the reserved roles: \\[all, default, none\\]"); } @Test public void testCreateRoleByNonAdminUser() throws Exception { assertQueryFails(createUserSession("non_admin_user"), "CREATE ROLE role1", "Access Denied: Cannot create role role1"); } @Test public void testDropRole() throws Exception { executeFromAdmin("CREATE ROLE role1"); assertEquals(listRoles(), ImmutableSet.of("role1")); executeFromAdmin("DROP ROLE role1"); assertEquals(listRoles(), ImmutableSet.of()); } @Test public void testDropNonExistentRole() throws Exception { assertQueryFails(createAdminSession(), "DROP ROLE non_existent_role", ".*?Role 'non_existent_role' does not exist"); } @Test public void testDropRoleByNonAdminUser() throws Exception { assertQueryFails(createUserSession("non_admin_user"), "DROP ROLE role1", "Access Denied: Cannot drop role role1"); } @Test public void testListRolesByNonAdminUser() throws Exception { assertQueryFails(createUserSession("non_admin_user"), "SELECT * FROM hive.information_schema.roles", "Access Denied: Cannot select from 
table information_schema.roles"); } @Test public void testPublicRoleIsGrantedToAnyone() throws Exception { assertContains(listApplicableRoles("some_user"), applicableRoles("some_user", "USER", "public", "NO")); } @Test public void testAdminRoleIsGrantedToAdmin() throws Exception { assertContains(listApplicableRoles("admin"), applicableRoles("admin", "USER", "admin", "YES")); } @Test public void testGrantRoleToUser() throws Exception { executeFromAdmin("CREATE ROLE role1"); executeFromAdmin("GRANT role1 TO USER user"); assertContains(listApplicableRoles("user"), applicableRoles("user", "USER", "role1", "NO")); } @Test public void testGrantRoleToRole() throws Exception { executeFromAdmin("CREATE ROLE role1"); executeFromAdmin("CREATE ROLE role2"); executeFromAdmin("GRANT role1 TO USER user"); executeFromAdmin("GRANT role2 TO ROLE role1"); assertContains(listApplicableRoles("user"), applicableRoles( "user", "USER", "role1", "NO", "role1", "ROLE", "role2", "NO")); } @Test public void testGrantRoleWithAdminOption() throws Exception { executeFromAdmin("CREATE ROLE role1"); executeFromAdmin("CREATE ROLE role2"); executeFromAdmin("GRANT role1 TO USER user WITH ADMIN OPTION"); executeFromAdmin("GRANT role2 TO ROLE role1 WITH ADMIN OPTION"); assertContains(listApplicableRoles("user"), applicableRoles( "user", "USER", "role1", "YES", "role1", "ROLE", "role2", "YES")); } @Test public void testGrantRoleMultipleTimes() throws Exception { executeFromAdmin("CREATE ROLE role1"); executeFromAdmin("CREATE ROLE role2"); executeFromAdmin("GRANT role1 TO USER user"); executeFromAdmin("GRANT role1 TO USER user"); executeFromAdmin("GRANT role2 TO ROLE role1"); executeFromAdmin("GRANT role2 TO ROLE role1"); executeFromAdmin("GRANT role1 TO USER user WITH ADMIN OPTION"); executeFromAdmin("GRANT role1 TO USER user WITH ADMIN OPTION"); executeFromAdmin("GRANT role2 TO ROLE role1 WITH ADMIN OPTION"); executeFromAdmin("GRANT role2 TO ROLE role1 WITH ADMIN OPTION"); 
assertContains(listApplicableRoles("user"), applicableRoles( "user", "USER", "role1", "YES", "role1", "ROLE", "role2", "YES")); } @Test public void testGrantNonExistingRole() throws Exception { assertQueryFails("GRANT grant_revoke_role_existing_1 TO USER grant_revoke_existing_user_1", ".*?Role 'grant_revoke_role_existing_1' does not exist"); executeFromAdmin("CREATE ROLE grant_revoke_role_existing_1"); assertQueryFails("GRANT grant_revoke_role_existing_1 TO ROLE grant_revoke_role_existing_2", ".*?Role 'grant_revoke_role_existing_2' does not exist"); } @Test public void testRevokeRoleFromUser() throws Exception { executeFromAdmin("CREATE ROLE role1"); executeFromAdmin("GRANT role1 TO USER user"); assertContains(listApplicableRoles("user"), applicableRoles("user", "USER", "role1", "NO")); executeFromAdmin("REVOKE role1 FROM USER user"); assertEqualsIgnoreOrder(listApplicableRoles("user"), applicableRoles("user", "USER", "public", "NO")); } @Test public void testRevokeRoleFromRole() throws Exception { executeFromAdmin("CREATE ROLE role1"); executeFromAdmin("CREATE ROLE role2"); executeFromAdmin("GRANT role1 TO USER user"); executeFromAdmin("GRANT role2 TO ROLE role1"); assertContains(listApplicableRoles("user"), applicableRoles( "user", "USER", "role1", "NO", "role1", "ROLE", "role2", "NO")); executeFromAdmin("REVOKE role2 FROM ROLE role1"); assertEqualsIgnoreOrder(listApplicableRoles("user"), applicableRoles( "user", "USER", "public", "NO", "user", "USER", "role1", "NO")); } @Test public void testDropGrantedRole() throws Exception { executeFromAdmin("CREATE ROLE role1"); executeFromAdmin("GRANT role1 TO USER user"); assertContains(listApplicableRoles("user"), applicableRoles("user", "USER", "role1", "NO")); executeFromAdmin("DROP ROLE role1"); assertEqualsIgnoreOrder(listApplicableRoles("user"), applicableRoles("user", "USER", "public", "NO")); } @Test public void testRevokeTransitiveRoleFromUser() throws Exception { executeFromAdmin("CREATE ROLE role1"); 
executeFromAdmin("CREATE ROLE role2"); executeFromAdmin("CREATE ROLE role3"); executeFromAdmin("GRANT role1 TO USER user"); executeFromAdmin("GRANT role2 TO ROLE role1"); executeFromAdmin("GRANT role3 TO ROLE role2"); assertContains(listApplicableRoles("user"), applicableRoles( "user", "USER", "role1", "NO", "role1", "ROLE", "role2", "NO", "role2", "ROLE", "role3", "NO")); executeFromAdmin("REVOKE role1 FROM USER user"); assertEqualsIgnoreOrder(listApplicableRoles("user"), applicableRoles("user", "USER", "public", "NO")); } @Test public void testRevokeTransitiveRoleFromRole() throws Exception { executeFromAdmin("CREATE ROLE role1"); executeFromAdmin("CREATE ROLE role2"); executeFromAdmin("CREATE ROLE role3"); executeFromAdmin("GRANT role1 TO USER user"); executeFromAdmin("GRANT role2 TO ROLE role1"); executeFromAdmin("GRANT role3 TO ROLE role2"); assertContains(listApplicableRoles("user"), applicableRoles( "user", "USER", "role1", "NO", "role1", "ROLE", "role2", "NO", "role2", "ROLE", "role3", "NO")); executeFromAdmin("REVOKE role2 FROM ROLE role1"); assertEqualsIgnoreOrder(listApplicableRoles("user"), applicableRoles( "user", "USER", "public", "NO", "user", "USER", "role1", "NO")); } @Test public void testDropTransitiveRole() throws Exception { executeFromAdmin("CREATE ROLE role1"); executeFromAdmin("CREATE ROLE role2"); executeFromAdmin("CREATE ROLE role3"); executeFromAdmin("GRANT role1 TO USER user"); executeFromAdmin("GRANT role2 TO ROLE role1"); executeFromAdmin("GRANT role3 TO ROLE role2"); assertContains(listApplicableRoles("user"), applicableRoles( "user", "USER", "role1", "NO", "role1", "ROLE", "role2", "NO", "role2", "ROLE", "role3", "NO")); executeFromAdmin("DROP ROLE role2"); assertEqualsIgnoreOrder(listApplicableRoles("user"), applicableRoles( "user", "USER", "public", "NO", "user", "USER", "role1", "NO")); } @Test public void testRevokeAdminOption() throws Exception { executeFromAdmin("CREATE ROLE role1"); executeFromAdmin("CREATE ROLE role2"); 
executeFromAdmin("GRANT role1 TO USER user WITH ADMIN OPTION"); executeFromAdmin("GRANT role2 TO ROLE role1 WITH ADMIN OPTION"); assertContains(listApplicableRoles("user"), applicableRoles( "user", "USER", "role1", "YES", "role1", "ROLE", "role2", "YES")); executeFromAdmin("REVOKE ADMIN OPTION FOR role1 FROM USER user"); executeFromAdmin("REVOKE ADMIN OPTION FOR role2 FROM ROLE role1"); assertContains(listApplicableRoles("user"), applicableRoles( "user", "USER", "role1", "NO", "role1", "ROLE", "role2", "NO")); } @Test public void testRevokeRoleMultipleTimes() throws Exception { executeFromAdmin("CREATE ROLE role1"); executeFromAdmin("CREATE ROLE role2"); executeFromAdmin("GRANT role1 TO USER user WITH ADMIN OPTION"); executeFromAdmin("GRANT role2 TO ROLE role1 WITH ADMIN OPTION"); assertContains(listApplicableRoles("user"), applicableRoles( "user", "USER", "role1", "YES", "role1", "ROLE", "role2", "YES")); executeFromAdmin("REVOKE ADMIN OPTION FOR role1 FROM USER user"); executeFromAdmin("REVOKE ADMIN OPTION FOR role1 FROM USER user"); executeFromAdmin("REVOKE ADMIN OPTION FOR role2 FROM ROLE role1"); executeFromAdmin("REVOKE ADMIN OPTION FOR role2 FROM ROLE role1"); assertContains(listApplicableRoles("user"), applicableRoles( "user", "USER", "role1", "NO", "role1", "ROLE", "role2", "NO")); executeFromAdmin("REVOKE role1 FROM USER user"); executeFromAdmin("REVOKE role1 FROM USER user"); executeFromAdmin("REVOKE role2 FROM ROLE role1"); executeFromAdmin("REVOKE role2 FROM ROLE role1"); assertEqualsIgnoreOrder(listApplicableRoles("user"), applicableRoles("user", "USER", "public", "NO")); } @Test public void testRevokeNonExistingRole() throws Exception { assertQueryFails(createAdminSession(), "REVOKE grant_revoke_role_existing_1 FROM USER grant_revoke_existing_user_1", ".*?Role 'grant_revoke_role_existing_1' does not exist"); executeFromAdmin("CREATE ROLE grant_revoke_role_existing_1"); assertQueryFails(createAdminSession(), "REVOKE grant_revoke_role_existing_1 FROM 
ROLE grant_revoke_role_existing_2", ".*?Role 'grant_revoke_role_existing_2' does not exist"); } @Test public void testSetRole() throws Exception { executeFromAdmin("CREATE ROLE set_role_1"); executeFromAdmin("CREATE ROLE set_role_2"); executeFromAdmin("CREATE ROLE set_role_3"); executeFromAdmin("CREATE ROLE set_role_4"); executeFromAdmin("GRANT set_role_1 TO USER set_user_1"); executeFromAdmin("GRANT set_role_2 TO ROLE set_role_1"); executeFromAdmin("GRANT set_role_3 TO ROLE set_role_2"); Session unsetRole = Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(new Identity("set_user_1", Optional.empty())) .build(); Session setRoleAll = Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.ALL, Optional.empty())))) .build(); Session setRoleNone = Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.NONE, Optional.empty())))) .build(); Session setRole1 = Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.ROLE, Optional.of("set_role_1"))))) .build(); Session setRole2 = Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.ROLE, Optional.of("set_role_2"))))) .build(); Session setRole3 = Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.ROLE, Optional.of("set_role_3"))))) .build(); Session setRole4 = Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(new Identity("set_user_1", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.ROLE, 
Optional.of("set_role_4"))))) .build(); MaterializedResult actual = getQueryRunner().execute(unsetRole, "SELECT * FROM hive.information_schema.applicable_roles"); MaterializedResult expected = MaterializedResult.resultBuilder(unsetRole, createUnboundedVarcharType(), createUnboundedVarcharType(), createUnboundedVarcharType(), createUnboundedVarcharType()) .row("set_user_1", "USER", "public", "NO") .row("set_user_1", "USER", "set_role_1", "NO") .row("set_role_1", "ROLE", "set_role_2", "NO") .row("set_role_2", "ROLE", "set_role_3", "NO") .build(); assertEqualsIgnoreOrder(actual, expected); actual = getQueryRunner().execute(unsetRole, "SELECT * FROM hive.information_schema.enabled_roles"); expected = MaterializedResult.resultBuilder(unsetRole, createUnboundedVarcharType()) .row("public") .row("set_role_1") .row("set_role_2") .row("set_role_3") .build(); assertEqualsIgnoreOrder(actual, expected); actual = getQueryRunner().execute(setRoleAll, "SELECT * FROM hive.information_schema.enabled_roles"); expected = MaterializedResult.resultBuilder(setRoleAll, createUnboundedVarcharType()) .row("public") .row("set_role_1") .row("set_role_2") .row("set_role_3") .build(); assertEqualsIgnoreOrder(actual, expected); actual = getQueryRunner().execute(setRoleNone, "SELECT * FROM hive.information_schema.enabled_roles"); expected = MaterializedResult.resultBuilder(setRoleNone, createUnboundedVarcharType()) .row("public") .build(); assertEqualsIgnoreOrder(actual, expected); actual = getQueryRunner().execute(setRole1, "SELECT * FROM hive.information_schema.enabled_roles"); expected = MaterializedResult.resultBuilder(setRole1, createUnboundedVarcharType()) .row("public") .row("set_role_1") .row("set_role_2") .row("set_role_3") .build(); assertEqualsIgnoreOrder(actual, expected); actual = getQueryRunner().execute(setRole2, "SELECT * FROM hive.information_schema.enabled_roles"); expected = MaterializedResult.resultBuilder(setRole2, createUnboundedVarcharType()) .row("public") 
.row("set_role_2") .row("set_role_3") .build(); assertEqualsIgnoreOrder(actual, expected); actual = getQueryRunner().execute(setRole3, "SELECT * FROM hive.information_schema.enabled_roles"); expected = MaterializedResult.resultBuilder(setRole3, createUnboundedVarcharType()) .row("public") .row("set_role_3") .build(); assertEqualsIgnoreOrder(actual, expected); assertQueryFails(setRole4, "SELECT * FROM hive.information_schema.enabled_roles", ".*?Cannot set role set_role_4"); executeFromAdmin("DROP ROLE set_role_1"); executeFromAdmin("DROP ROLE set_role_2"); executeFromAdmin("DROP ROLE set_role_3"); executeFromAdmin("DROP ROLE set_role_4"); } private Set<String> listRoles() { return executeFromAdmin("SELECT * FROM hive.information_schema.roles") .getMaterializedRows() .stream() .map(row -> row.getField(0).toString()) .collect(Collectors.toSet()); } private MaterializedResult listApplicableRoles(String user) { return executeFromUser(user, "SELECT * FROM hive.information_schema.applicable_roles"); } private MaterializedResult applicableRoles(String... 
values) { List<Type> types = ImmutableList.of(createUnboundedVarcharType(), createUnboundedVarcharType(), createUnboundedVarcharType(), createUnboundedVarcharType()); int rowLength = types.size(); checkArgument(values.length % rowLength == 0); MaterializedResult.Builder result = MaterializedResult.resultBuilder(getQueryRunner().getDefaultSession(), types); Object[] row = null; for (int i = 0; i < values.length; i++) { if (i % rowLength == 0) { if (row != null) { result.row(row); } row = new Object[rowLength]; } checkState(row != null); row[i % rowLength] = values[i]; } if (row != null) { result.row(row); } return result.build(); } private MaterializedResult executeFromAdmin(String sql) { return getQueryRunner().execute(createAdminSession(), sql); } private MaterializedResult executeFromUser(String user, String sql) { return getQueryRunner().execute(createUserSession(user), sql); } private Session createAdminSession() { return Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(new Identity("admin", Optional.empty(), ImmutableMap.of("hive", new SelectedRole(SelectedRole.Type.ROLE, Optional.of("admin"))))) .build(); } private Session createUserSession(String user) { return Session.builder(getQueryRunner().getDefaultSession()) .setIdentity(new Identity(user, Optional.empty())) .build(); } }
package ref.sdfe.gpslogsheet2; //An attempt to handle all the information in the app as an SQL db. //This is noob coding by a physicist, be gentle. :D /oldjo@sdfe.dk import android.content.ContentValues; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.util.Log; import com.google.gson.Gson; import java.util.ArrayList; import java.util.List; public class DataBaseHandler extends SQLiteOpenHelper { // Singleton pattern private static DataBaseHandler sInstance; // Define Database structure // Database Version private static final int DATABASE_VERSION = 1; // Database Name private static final String DATABASE_NAME = "GPSLogSheet2.db"; // Table names private static final String TABLE_PROJECTS = "projects"; private static final String TABLE_IMAGES = "images"; private static final String TABLE_INSTRUMENTS = "instruments"; private static final String TABLE_ANTENNAE = "antennae"; private static final String TABLE_ALARMS = "alarms"; private static final String TABLE_RODS = "rods"; private static final String TABLE_FIXEDPOINTS = "fixedpoints"; private static final String TABLE_SETTINGS = "settings"; // Projects column names private static final String KEY_PROJ_ID = "id"; private static final String KEY_PROJ_NAME = "name"; private static final String KEY_PROJ_DATE_START = "date_start"; private static final String KEY_PROJ_DATE_MOD = "date_modified"; private static final String KEY_PROJ_CLOB = "clob"; // Images column names private static final String KEY_IMG_ID = "id"; private static final String KEY_IMG_PROJ = "project_id"; private static final String KEY_IMG_DATE_START = "date_start"; private static final String KEY_IMG_BLOB = "blob"; // Instruments column names private static final String KEY_INST_ID = "id"; private static final String KEY_INST_NAME = "instrument_name"; // Antennae column names private static final String KEY_ANT_ID = "id"; private 
static final String KEY_ANT_NAME = "antenna_name"; private static final String KEY_ANT_CODE = "antenna_code"; // Alarms column names private static final String KEY_ALRM_ID = "id"; private static final String KEY_ALRM_NAME = "alarm_name"; // Rods column names private static final String KEY_ROD_ID = "id"; private static final String KEY_ROD_NAME = "rod_name"; private static final String KEY_ROD_LENGTH = "rod_length"; // Fixedpoints column names private static final String KEY_FP_ID = "id"; private static final String KEY_FP_GPS_NAME = "gps_name"; private static final String KEY_FP_HS_NAME = "hs_name"; private static final String KEY_FP_EASTING = "easting"; private static final String KEY_FP_NORTHING = "northing"; // Settings column names private static final String KEY_SET_ID = "id"; private static final String KEY_SET_NAME = "setting_name"; private static final String KEY_SET_VALUE = "setting_val"; public static synchronized DataBaseHandler getInstance(Context context) { // Use the application context, which will ensure that you // don't accidentally leak an Activity's context. // See this article for more information: http://bit.ly/6LRzfx if (sInstance == null) { sInstance = new DataBaseHandler(context.getApplicationContext()); } return sInstance; } // Made the constructor private, use getInstance() instead. 
// NOTE(review): interior of DataBaseHandler (a SQLiteOpenHelper). The class header and
// the DATABASE_* / TABLE_* / KEY_* constants are declared above this chunk.
// Fixes applied throughout: cursors are now closed (resource leak), single-row getters
// return null instead of crashing when the row is absent, read-only methods use
// getReadableDatabase(), and updateAntennaEntry now also writes the antenna code
// (every other multi-field update writes all of its fields — the omission looked accidental).

    /** Private: instances are obtained through the enclosing accessor (not visible in this chunk). */
    private DataBaseHandler(Context context) {
        super(context, DATABASE_NAME, null, DATABASE_VERSION);
    }

    /** Creates every application table on first open of the database. */
    @Override
    public void onCreate(SQLiteDatabase db) {
        String CREATE_PROJECTS_TABLE = "CREATE TABLE " + TABLE_PROJECTS + "("
                + KEY_PROJ_ID + " INTEGER PRIMARY KEY," + KEY_PROJ_NAME + " TEXT,"
                + KEY_PROJ_DATE_START + " INTEGER," + KEY_PROJ_DATE_MOD + " INTEGER,"
                + KEY_PROJ_CLOB + " CLOB)";
        db.execSQL(CREATE_PROJECTS_TABLE);
        Log.i("SQL", "table created?");

        String CREATE_IMAGES_TABLE = "CREATE TABLE " + TABLE_IMAGES + "("
                + KEY_IMG_ID + " INTEGER PRIMARY KEY," + KEY_IMG_DATE_START + " INTEGER,"
                + KEY_IMG_PROJ + " INTEGER," + KEY_IMG_BLOB + " BLOB)";
        db.execSQL(CREATE_IMAGES_TABLE);

        String CREATE_INSTRUMENTS_TABLE = "CREATE TABLE " + TABLE_INSTRUMENTS + "("
                + KEY_INST_ID + " INTEGER PRIMARY KEY," + KEY_INST_NAME + " TEXT)";
        db.execSQL(CREATE_INSTRUMENTS_TABLE);

        String CREATE_ANTENNAE_TABLE = "CREATE TABLE " + TABLE_ANTENNAE + "("
                + KEY_ANT_ID + " INTEGER PRIMARY KEY," + KEY_ANT_NAME + " TEXT,"
                + KEY_ANT_CODE + " TEXT)";
        db.execSQL(CREATE_ANTENNAE_TABLE);

        String CREATE_ALARMS_TABLE = "CREATE TABLE " + TABLE_ALARMS + "("
                + KEY_ALRM_ID + " INTEGER PRIMARY KEY," + KEY_ALRM_NAME + " TEXT unique)";
        db.execSQL(CREATE_ALARMS_TABLE);

        String CREATE_RODS_TABLE = "CREATE TABLE " + TABLE_RODS + "("
                + KEY_ROD_ID + " INTEGER PRIMARY KEY," + KEY_ROD_NAME + " TEXT unique,"
                + KEY_ROD_LENGTH + " REAL)";
        db.execSQL(CREATE_RODS_TABLE);

        String CREATE_FIXEDPOINTS_TABLE = "CREATE TABLE " + TABLE_FIXEDPOINTS + "("
                + KEY_FP_ID + " INTEGER PRIMARY KEY," + KEY_FP_GPS_NAME + " TEXT unique,"
                + KEY_FP_HS_NAME + " TEXT," + KEY_FP_EASTING + " REAL,"
                + KEY_FP_NORTHING + " REAL)";
        db.execSQL(CREATE_FIXEDPOINTS_TABLE);

        String CREATE_SETTING_TABLE = "CREATE TABLE " + TABLE_SETTINGS + "("
                + KEY_SET_ID + " INTEGER PRIMARY KEY," + KEY_SET_NAME + " TEXT unique,"
                + KEY_SET_VALUE + " INTEGER)";
        db.execSQL(CREATE_SETTING_TABLE);
    }

    /**
     * Drops and recreates all tables on a schema version bump.
     * WARNING: this destroys all stored data — no migration is performed.
     */
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_PROJECTS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_IMAGES);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_INSTRUMENTS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_ANTENNAE);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_ALARMS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_RODS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_FIXEDPOINTS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_SETTINGS);
        // Create tables again
        onCreate(db);
    }

    // ------------------------------------------------------------------
    // CRUD (create, read, update, delete) operations
    // ------------------------------------------------------------------

    // ---- Projects ----

    /** Inserts a project row; the project body itself is stored as JSON in the CLOB column. */
    void addProjectEntry(ProjectEntry projectEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_PROJ_ID, projectEntry.getId());
        values.put(KEY_PROJ_NAME, projectEntry.getName());
        values.put(KEY_PROJ_DATE_START, projectEntry.getStartDate());
        values.put(KEY_PROJ_DATE_MOD, projectEntry.getModDate());
        values.put(KEY_PROJ_CLOB, projectEntry.getJsonString());
        db.insert(TABLE_PROJECTS, null, values);
        db.close();
        Log.i("SQL", "Project Entry added.");
    }

    /** Updates name, modification date and JSON body of an existing project. @return rows affected */
    public int updateProjectEntry(ProjectEntry projectEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_PROJ_NAME, projectEntry.getName());
        values.put(KEY_PROJ_DATE_MOD, projectEntry.getModDate());
        values.put(KEY_PROJ_CLOB, projectEntry.getJsonString());
        Log.i("SQL", "Project Entry updated.");
        return db.update(TABLE_PROJECTS, values, KEY_PROJ_ID + " = ?",
                new String[]{String.valueOf(projectEntry.getId())});
    }

    /**
     * Loads a project by primary key, deserialized from its stored JSON CLOB.
     * @return the project, or {@code null} if no row with this id exists
     */
    public ProjectEntry getProjectEntry(int id) {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.query(TABLE_PROJECTS, new String[]{KEY_PROJ_ID, KEY_PROJ_CLOB},
                KEY_PROJ_ID + "=?", new String[]{String.valueOf(id)}, null, null, null, null);
        try {
            if (!cursor.moveToFirst()) {
                return null; // fix: previously crashed with CursorIndexOutOfBoundsException
            }
            String json = cursor.getString(1);
            return new Gson().fromJson(json, ProjectEntry.class);
        } finally {
            cursor.close(); // fix: cursor was leaked
        }
    }

    /** @return all projects, each deserialized from its JSON CLOB */
    public List<ProjectEntry> getAllProjectEntries() {
        List<ProjectEntry> projectList = new ArrayList<ProjectEntry>();
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_PROJECTS, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    // Re-query per id so the entry comes from the JSON body,
                    // exactly as the single-row getter returns it.
                    projectList.add(getProjectEntry(cursor.getInt(0)));
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return projectList;
    }

    /** @return the names of all projects (taken from the deserialized JSON bodies) */
    public List<String> getAllProjectNames() {
        List<String> projectNamesList = new ArrayList<String>();
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_PROJECTS, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    ProjectEntry projectEntry = getProjectEntry(cursor.getInt(0));
                    projectNamesList.add(projectEntry.getName());
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return projectNamesList;
    }

    /** @return the ids of all projects (taken from the deserialized JSON bodies) */
    public List<Integer> getAllProjectIDs() {
        List<Integer> projectIDsList = new ArrayList<Integer>();
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_PROJECTS, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    ProjectEntry projectEntry = getProjectEntry(cursor.getInt(0));
                    projectIDsList.add(projectEntry.getId());
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return projectIDsList;
    }

    /** @return number of project rows */
    public int getProjectsCount() {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_PROJECTS, null);
        int count = cursor.getCount();
        cursor.close();
        return count;
    }

    /** Deletes one project row by id. */
    public void deleteProject(ProjectEntry projectEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_PROJECTS, KEY_PROJ_ID + " = ? ",
                new String[]{String.valueOf(projectEntry.getId())});
        db.close();
    }

    /** Deletes all project rows. */
    public void deleteProjectTable() {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_PROJECTS, null, null);
        db.close();
    }

    // ---- Alarms ----

    /** Inserts an alarm row. */
    void addAlarmEntry(AlarmEntry alarmEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_ALRM_ID, alarmEntry.getID());
        values.put(KEY_ALRM_NAME, alarmEntry.getName());
        db.insert(TABLE_ALARMS, null, values);
        db.close();
    }

    /** @return the alarm with this id, or {@code null} if absent */
    public AlarmEntry getAlarmEntry(int id) {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.query(TABLE_ALARMS, new String[]{KEY_ALRM_ID, KEY_ALRM_NAME},
                KEY_ALRM_ID + "=?", new String[]{String.valueOf(id)}, null, null, null, null);
        try {
            if (!cursor.moveToFirst()) {
                return null;
            }
            return new AlarmEntry(cursor.getInt(0), cursor.getString(1));
        } finally {
            cursor.close();
        }
    }

    /** @return all alarm rows */
    public List<AlarmEntry> getAllAlarmEntries() {
        List<AlarmEntry> alarmList = new ArrayList<AlarmEntry>();
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_ALARMS, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    alarmList.add(new AlarmEntry(cursor.getInt(0), cursor.getString(1)));
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return alarmList;
    }

    /** @return number of alarm rows */
    public int getAlarmsCount() {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_ALARMS, null);
        int count = cursor.getCount();
        cursor.close();
        return count;
    }

    /** Updates an alarm's name. @return rows affected */
    public int updateAlarmEntry(AlarmEntry alarmEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_ALRM_NAME, alarmEntry.getName());
        return db.update(TABLE_ALARMS, values, KEY_ALRM_ID + " = ?",
                new String[]{String.valueOf(alarmEntry.getID())});
    }

    /** Deletes one alarm row by id. */
    public void deleteAlarm(AlarmEntry alarmEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_ALARMS, KEY_ALRM_ID + " = ? ",
                new String[]{String.valueOf(alarmEntry.getID())});
        db.close();
    }

    /** Deletes all alarm rows. */
    public void deleteAlarmTable() {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_ALARMS, null, null);
        db.close();
    }

    // ---- Antennae ----

    /** Inserts an antenna row. */
    void addAntennaEntry(AntennaEntry antennaEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_ANT_ID, antennaEntry.getID());
        values.put(KEY_ANT_NAME, antennaEntry.getName());
        values.put(KEY_ANT_CODE, antennaEntry.getCode());
        db.insert(TABLE_ANTENNAE, null, values);
        db.close();
    }

    /** @return the antenna with this id, or {@code null} if absent */
    public AntennaEntry getAntennaEntry(int id) {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.query(TABLE_ANTENNAE,
                new String[]{KEY_ANT_ID, KEY_ANT_NAME, KEY_ANT_CODE},
                KEY_ANT_ID + "=?", new String[]{String.valueOf(id)}, null, null, null, null);
        try {
            if (!cursor.moveToFirst()) {
                return null;
            }
            return new AntennaEntry(cursor.getInt(0), cursor.getString(1), cursor.getString(2));
        } finally {
            cursor.close();
        }
    }

    /** @return all antenna rows */
    public List<AntennaEntry> getAllAntennaEntries() {
        List<AntennaEntry> antennaList = new ArrayList<AntennaEntry>();
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_ANTENNAE, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    antennaList.add(new AntennaEntry(cursor.getInt(0),
                            cursor.getString(1), cursor.getString(2)));
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return antennaList;
    }

    /** @return number of antenna rows */
    public int getAntennaeCount() {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_ANTENNAE, null);
        int count = cursor.getCount();
        cursor.close();
        return count;
    }

    /** Updates an antenna's name and code. @return rows affected */
    public int updateAntennaEntry(AntennaEntry antennaEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_ANT_NAME, antennaEntry.getName());
        // fix: the code column was never updated, unlike every other multi-field update
        values.put(KEY_ANT_CODE, antennaEntry.getCode());
        return db.update(TABLE_ANTENNAE, values, KEY_ANT_ID + " = ?",
                new String[]{String.valueOf(antennaEntry.getID())});
    }

    /** Deletes one antenna row by id. */
    public void deleteAntenna(AntennaEntry antennaEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_ANTENNAE, KEY_ANT_ID + " = ? ",
                new String[]{String.valueOf(antennaEntry.getID())});
        db.close();
    }

    /** Deletes all antenna rows. */
    public void deleteAntennaTable() {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_ANTENNAE, null, null);
        db.close();
    }

    // ---- Instruments ----

    /** Inserts an instrument row. */
    void addInstrumentEntry(InstrumentEntry instrumentEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_INST_ID, instrumentEntry.getID());
        values.put(KEY_INST_NAME, instrumentEntry.getName());
        db.insert(TABLE_INSTRUMENTS, null, values);
        db.close();
    }

    /** @return the instrument with this id, or {@code null} if absent */
    public InstrumentEntry getInstrumentEntry(int id) {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.query(TABLE_INSTRUMENTS, new String[]{KEY_INST_ID, KEY_INST_NAME},
                KEY_INST_ID + "=?", new String[]{String.valueOf(id)}, null, null, null, null);
        try {
            if (!cursor.moveToFirst()) {
                return null;
            }
            return new InstrumentEntry(cursor.getInt(0), cursor.getString(1));
        } finally {
            cursor.close();
        }
    }

    /** @return all instrument rows */
    public List<InstrumentEntry> getAllInstrumentEntries() {
        List<InstrumentEntry> instrumentList = new ArrayList<InstrumentEntry>();
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_INSTRUMENTS, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    instrumentList.add(new InstrumentEntry(cursor.getInt(0), cursor.getString(1)));
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return instrumentList;
    }

    /** @return number of instrument rows */
    public int getInstrumentsCount() {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_INSTRUMENTS, null);
        int count = cursor.getCount();
        cursor.close();
        return count;
    }

    /** Updates an instrument's name. @return rows affected */
    public int updateInstrumentEntry(InstrumentEntry instrumentEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_INST_NAME, instrumentEntry.getName());
        return db.update(TABLE_INSTRUMENTS, values, KEY_INST_ID + " = ?",
                new String[]{String.valueOf(instrumentEntry.getID())});
    }

    /** Deletes one instrument row by id. */
    public void deleteInstrument(InstrumentEntry instrumentEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_INSTRUMENTS, KEY_INST_ID + " = ? ",
                new String[]{String.valueOf(instrumentEntry.getID())});
        db.close();
    }

    /** Deletes all instrument rows. */
    public void deleteInstrumentTable() {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_INSTRUMENTS, null, null);
        db.close();
    }

    // ---- Rods ----

    /** Inserts a rod row. */
    void addRodEntry(RodEntry rodEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_ROD_ID, rodEntry.getID());
        values.put(KEY_ROD_NAME, rodEntry.getName());
        values.put(KEY_ROD_LENGTH, rodEntry.getLength());
        db.insert(TABLE_RODS, null, values);
        db.close();
    }

    /** @return the rod with this id, or {@code null} if absent */
    public RodEntry getRodEntry(int id) {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.query(TABLE_RODS,
                new String[]{KEY_ROD_ID, KEY_ROD_NAME, KEY_ROD_LENGTH},
                KEY_ROD_ID + "=?", new String[]{String.valueOf(id)}, null, null, null, null);
        try {
            if (!cursor.moveToFirst()) {
                return null;
            }
            return new RodEntry(cursor.getInt(0), cursor.getString(1), cursor.getDouble(2));
        } finally {
            cursor.close();
        }
    }

    /** @return all rod rows */
    public List<RodEntry> getAllRodEntries() {
        List<RodEntry> rodList = new ArrayList<RodEntry>();
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_RODS, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    rodList.add(new RodEntry(cursor.getInt(0),
                            cursor.getString(1), cursor.getDouble(2)));
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return rodList;
    }

    /** @return number of rod rows */
    public int getRodsCount() {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_RODS, null);
        int count = cursor.getCount();
        cursor.close();
        return count;
    }

    /** Updates a rod's name and length. @return rows affected */
    public int updateRodEntry(RodEntry rodEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_ROD_NAME, rodEntry.getName());
        values.put(KEY_ROD_LENGTH, rodEntry.getLength());
        return db.update(TABLE_RODS, values, KEY_ROD_ID + " = ?",
                new String[]{String.valueOf(rodEntry.getID())});
    }

    /** Deletes one rod row by id. */
    public void deleteRod(RodEntry rodEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_RODS, KEY_ROD_ID + " = ? ",
                new String[]{String.valueOf(rodEntry.getID())});
        db.close();
    }

    /** Deletes all rod rows. */
    public void deleteRodTable() {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_RODS, null, null);
        db.close();
    }

    // ---- Fixed points ----

    /** Inserts a fixed-point row. */
    void addFixedpointEntry(FixedpointEntry fixedpointEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_FP_ID, fixedpointEntry.getID());
        values.put(KEY_FP_GPS_NAME, fixedpointEntry.getGPSName());
        values.put(KEY_FP_HS_NAME, fixedpointEntry.getHSName());
        values.put(KEY_FP_EASTING, fixedpointEntry.getEasting());
        values.put(KEY_FP_NORTHING, fixedpointEntry.getNorthing());
        db.insert(TABLE_FIXEDPOINTS, null, values);
        db.close();
    }

    /** @return the fixed point with this id, or {@code null} if absent */
    public FixedpointEntry getFixedpointEntry(int id) {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.query(TABLE_FIXEDPOINTS,
                new String[]{KEY_FP_ID, KEY_FP_GPS_NAME, KEY_FP_HS_NAME,
                        KEY_FP_EASTING, KEY_FP_NORTHING},
                KEY_FP_ID + "=?", new String[]{String.valueOf(id)}, null, null, null, null);
        try {
            if (!cursor.moveToFirst()) {
                return null;
            }
            return new FixedpointEntry(cursor.getInt(0), cursor.getString(1),
                    cursor.getString(2), cursor.getDouble(3), cursor.getDouble(4));
        } finally {
            cursor.close();
        }
    }

    /** @return all fixed-point rows */
    public List<FixedpointEntry> getAllFixedpointEntries() {
        List<FixedpointEntry> fixedpointList = new ArrayList<FixedpointEntry>();
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_FIXEDPOINTS, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    fixedpointList.add(new FixedpointEntry(cursor.getInt(0),
                            cursor.getString(1), cursor.getString(2),
                            cursor.getDouble(3), cursor.getDouble(4)));
                } while (cursor.moveToNext());
            }
        } finally {
            cursor.close();
        }
        return fixedpointList;
    }

    /** @return number of fixed-point rows */
    public int getFixedpointsCount() {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor cursor = db.rawQuery("SELECT * FROM " + TABLE_FIXEDPOINTS, null);
        int count = cursor.getCount();
        cursor.close();
        return count;
    }

    /** Updates all mutable fields of a fixed point. @return rows affected */
    public int updateFixedpointEntry(FixedpointEntry fixedpointEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues values = new ContentValues();
        values.put(KEY_FP_GPS_NAME, fixedpointEntry.getGPSName());
        values.put(KEY_FP_HS_NAME, fixedpointEntry.getHSName());
        values.put(KEY_FP_EASTING, fixedpointEntry.getEasting());
        values.put(KEY_FP_NORTHING, fixedpointEntry.getNorthing());
        return db.update(TABLE_FIXEDPOINTS, values, KEY_FP_ID + " = ?",
                new String[]{String.valueOf(fixedpointEntry.getID())});
    }

    /** Deletes one fixed-point row by id. */
    public void deleteFixedpoint(FixedpointEntry fixedpointEntry) {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_FIXEDPOINTS, KEY_FP_ID + " = ? ",
                new String[]{String.valueOf(fixedpointEntry.getID())});
        db.close();
    }

    /** Deletes all fixed-point rows. */
    public void deleteFixedpointTable() {
        SQLiteDatabase db = this.getWritableDatabase();
        db.delete(TABLE_FIXEDPOINTS, null, null);
        db.close();
    }
}
/** EMF-generated file — do not hand-edit outside user-doc regions. */
package fr.unice.spinefm.ActionModel.SystemActionModel.impl;

import fr.unice.spinefm.ActionModel.SystemActionModel.ActionCreateConfiguration;
import fr.unice.spinefm.ActionModel.SystemActionModel.SystemActionModelPackage;
import fr.unice.spinefm.ConfigurationModel.CompositeConfiguration;
import fr.unice.spinefm.ProcessModel.ConfigurationProcessStep;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Action Create Configuration</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link fr.unice.spinefm.ActionModel.SystemActionModel.impl.ActionCreateConfigurationImpl#getCps <em>Cps</em>}</li>
 *   <li>{@link fr.unice.spinefm.ActionModel.SystemActionModel.impl.ActionCreateConfigurationImpl#getCompconf <em>Compconf</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public class ActionCreateConfigurationImpl extends SystemActionImplDelegate implements ActionCreateConfiguration {
    /**
     * The cached value of the '{@link #getCps() <em>Cps</em>}' reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getCps()
     * @generated
     * @ordered
     */
    protected ConfigurationProcessStep cps;

    /**
     * The cached value of the '{@link #getCompconf() <em>Compconf</em>}' reference.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @see #getCompconf()
     * @generated
     * @ordered
     */
    protected CompositeConfiguration compconf;

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    protected ActionCreateConfigurationImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return SystemActionModelPackage.Literals.ACTION_CREATE_CONFIGURATION;
    }

    /**
     * Returns the Cps reference, resolving it first if it is still an EMF proxy.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public ConfigurationProcessStep getCps() {
        if (cps != null && cps.eIsProxy()) {
            InternalEObject oldCps = (InternalEObject)cps;
            cps = (ConfigurationProcessStep)eResolveProxy(oldCps);
            if (cps != oldCps) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__CPS, oldCps, cps));
            }
        }
        return cps;
    }

    /**
     * Returns the Cps reference without proxy resolution.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public ConfigurationProcessStep basicGetCps() {
        return cps;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public void setCps(ConfigurationProcessStep newCps) {
        ConfigurationProcessStep oldCps = cps;
        cps = newCps;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__CPS, oldCps, cps));
    }

    /**
     * Returns the Compconf reference, resolving it first if it is still an EMF proxy.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public CompositeConfiguration getCompconf() {
        if (compconf != null && compconf.eIsProxy()) {
            InternalEObject oldCompconf = (InternalEObject)compconf;
            compconf = (CompositeConfiguration)eResolveProxy(oldCompconf);
            if (compconf != oldCompconf) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__COMPCONF, oldCompconf, compconf));
            }
        }
        return compconf;
    }

    /**
     * Returns the Compconf reference without proxy resolution.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public CompositeConfiguration basicGetCompconf() {
        return compconf;
    }

    /**
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    public void setCompconf(CompositeConfiguration newCompconf) {
        CompositeConfiguration oldCompconf = compconf;
        compconf = newCompconf;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__COMPCONF, oldCompconf, compconf));
    }

    /**
     * Reflective feature getter used by the EMF framework.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__CPS:
                if (resolve) return getCps();
                return basicGetCps();
            case SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__COMPCONF:
                if (resolve) return getCompconf();
                return basicGetCompconf();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * Reflective feature setter used by the EMF framework.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__CPS:
                setCps((ConfigurationProcessStep)newValue);
                return;
            case SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__COMPCONF:
                setCompconf((CompositeConfiguration)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * Reflective feature unsetter used by the EMF framework.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__CPS:
                setCps((ConfigurationProcessStep)null);
                return;
            case SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__COMPCONF:
                setCompconf((CompositeConfiguration)null);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * Reflective "is set" check used by the EMF framework.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__CPS:
                return cps != null;
            case SystemActionModelPackage.ACTION_CREATE_CONFIGURATION__COMPCONF:
                return compconf != null;
        }
        return super.eIsSet(featureID);
    }

} //ActionCreateConfigurationImpl
package org.wiztools.restclient; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.net.URL; import java.nio.charset.Charset; import java.nio.charset.IllegalCharsetNameException; import java.nio.charset.UnsupportedCharsetException; import java.security.KeyStore; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.cert.CertificateException; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpEntityEnclosingRequest; import org.apache.http.HttpException; import org.apache.http.HttpHost; import org.apache.http.HttpRequest; import org.apache.http.HttpRequestInterceptor; import org.apache.http.HttpResponse; import org.apache.http.ProtocolVersion; import org.apache.http.auth.AuthScheme; import org.apache.http.auth.AuthScope; import org.apache.http.auth.AuthState; import org.apache.http.auth.Credentials; import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.CredentialsProvider; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpHead; import org.apache.http.client.methods.HttpOptions; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.client.methods.HttpTrace; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.client.params.AuthPolicy; import org.apache.http.client.params.ClientPNames; import org.apache.http.client.protocol.ClientContext; import org.apache.http.conn.ClientConnectionManager; import org.apache.http.conn.params.ConnRoutePNames; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.ssl.SSLSocketFactory; import 
org.apache.http.conn.ssl.X509HostnameVerifier; import org.apache.http.entity.AbstractHttpEntity; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.impl.auth.BasicScheme; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.impl.client.DefaultHttpRequestRetryHandler; import org.apache.http.message.AbstractHttpMessage; import org.apache.http.message.BasicHeader; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.CoreProtocolPNames; import org.apache.http.params.HttpConnectionParams; import org.apache.http.protocol.BasicHttpContext; import org.apache.http.protocol.ExecutionContext; import org.apache.http.protocol.HttpContext; import org.wiztools.commons.Implementation; import org.wiztools.commons.MultiValueMap; import org.wiztools.commons.StreamUtil; import org.wiztools.commons.StringUtil; /** * * @author subwiz */ public class HTTPClientRequestExecuter implements RequestExecuter { private static final Logger LOG = Logger.getLogger(HTTPClientRequestExecuter.class.getName()); private DefaultHttpClient httpclient; private boolean interruptedShutdown = false; private boolean isRequestCompleted = false; /* * This instance variable is for avoiding multiple execution of requests * on the same RequestExecuter object. We know it is not the perfect solution * (as it does not synchronize access to shared variable), but is * fine for finding this type of error during development phase. */ private boolean isRequestStarted = false; @Override public void execute(Request request, View... 
views) { // Verify if this is the first call to this object: if(isRequestStarted){ throw new MultipleRequestInSameRequestExecuterException( "A RequestExecuter object can be used only once!"); } isRequestStarted = true; // Proceed with execution: for(View view: views){ view.doStart(request); } final URL url = request.getUrl(); final String urlHost = url.getHost(); final int urlPort = url.getPort()==-1?url.getDefaultPort():url.getPort(); final String urlProtocol = url.getProtocol(); final String urlStr = url.toString(); // Needed for specifying HTTP pre-emptive authentication HttpContext httpContext = null; httpclient = new DefaultHttpClient(); // Set HTTP version HTTPVersion httpVersion = request.getHttpVersion(); ProtocolVersion protocolVersion = httpVersion==HTTPVersion.HTTP_1_1? new ProtocolVersion("HTTP", 1, 1): new ProtocolVersion("HTTP", 1, 0); httpclient.getParams().setParameter(CoreProtocolPNames.PROTOCOL_VERSION, protocolVersion); // Set request timeout (default 1 minute--60000 milliseconds) IGlobalOptions options = Implementation.of(IGlobalOptions.class); options.acquire(); HttpConnectionParams.setConnectionTimeout(httpclient.getParams(), Integer.parseInt(options.getProperty("request-timeout-in-millis"))); options.release(); // Set proxy ProxyConfig proxy = ProxyConfig.getInstance(); proxy.acquire(); if (proxy.isEnabled()) { final HttpHost proxyHost = new HttpHost(proxy.getHost(), proxy.getPort(), "http"); if (proxy.isAuthEnabled()) { httpclient.getCredentialsProvider().setCredentials( new AuthScope(proxy.getHost(), proxy.getPort()), new UsernamePasswordCredentials(proxy.getUsername(), new String(proxy.getPassword()))); } httpclient.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxyHost); } proxy.release(); // HTTP Authentication boolean authEnabled = request.getAuthMethods().size() > 0 ? 
true : false; if (authEnabled) { String uid = request.getAuthUsername(); String pwd = new String(request.getAuthPassword()); String host = StringUtil.isStrEmpty(request.getAuthHost()) ? urlHost : request.getAuthHost(); String realm = StringUtil.isStrEmpty(request.getAuthRealm()) ? AuthScope.ANY_REALM : request.getAuthRealm(); // Type of authentication List<String> authPrefs = new ArrayList<String>(2); List<HTTPAuthMethod> authMethods = request.getAuthMethods(); for(HTTPAuthMethod authMethod: authMethods){ switch(authMethod){ case BASIC: authPrefs.add(AuthPolicy.BASIC); break; case DIGEST: authPrefs.add(AuthPolicy.DIGEST); break; } } httpclient.getParams().setParameter("http.auth.scheme-pref", authPrefs); httpclient.getCredentialsProvider().setCredentials( new AuthScope(host, urlPort, realm), new UsernamePasswordCredentials(uid, pwd)); // preemptive mode // http://svn.apache.org/repos/asf/httpcomponents/httpclient/trunk/module-client/src/examples/org/apache/http/examples/client/ClientPreemptiveBasicAuthentication.java if (request.isAuthPreemptive()) { BasicHttpContext localcontext = new BasicHttpContext(); BasicScheme basicAuth = new BasicScheme(); localcontext.setAttribute("preemptive-auth", basicAuth); httpclient.addRequestInterceptor(new PreemptiveAuth(), 0); httpContext = localcontext; } } AbstractHttpMessage method = null; final HTTPMethod httpMethod = request.getMethod(); try { switch(httpMethod){ case GET: method = new HttpGet(urlStr); break; case POST: method = new HttpPost(urlStr); break; case PUT: method = new HttpPut(urlStr); break; case DELETE: method = new HttpDelete(urlStr); break; case HEAD: method = new HttpHead(urlStr); break; case OPTIONS: method = new HttpOptions(urlStr); break; case TRACE: method = new HttpTrace(urlStr); break; } method.setParams(new BasicHttpParams().setParameter(urlStr, url)); // Get request headers MultiValueMap<String, String> header_data = request.getHeaders(); for (String key : header_data.keySet()) { for(String value: 
header_data.get(key)) { Header header = new BasicHeader(key, value); method.addHeader(header); } } // POST/PUT method specific logic if (method instanceof HttpEntityEnclosingRequest) { HttpEntityEnclosingRequest eeMethod = (HttpEntityEnclosingRequest) method; // Create and set RequestEntity ReqEntity bean = request.getBody(); if (bean != null) { try { AbstractHttpEntity entity = new ByteArrayEntity(bean.getBody().getBytes(bean.getCharSet())); entity.setContentType(bean.getContentTypeCharsetFormatted()); eeMethod.setEntity(entity); } catch (UnsupportedEncodingException ex) { for(View view: views){ view.doError(Util.getStackTrace(ex)); view.doEnd(); } return; } } } // SSL // Set the hostname verifier: SSLHostnameVerifier verifier = request.getSslHostNameVerifier(); final X509HostnameVerifier hcVerifier; switch(verifier){ case STRICT: hcVerifier = SSLSocketFactory.STRICT_HOSTNAME_VERIFIER; break; case BROWSER_COMPATIBLE: hcVerifier = SSLSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER; break; case ALLOW_ALL: hcVerifier = SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER; break; default: hcVerifier = SSLSocketFactory.STRICT_HOSTNAME_VERIFIER; break; } // Register the SSL Scheme: if(urlProtocol.equalsIgnoreCase("https")){ final String trustStorePath = request.getSslTrustStore(); final KeyStore trustStore = StringUtil.isStrEmpty(trustStorePath)? 
null: getTrustStore(trustStorePath, request.getSslTrustStorePassword()); SSLSocketFactory socketFactory = new SSLSocketFactory( "TLS", // Algorithm null, // Keystore null, // Keystore password trustStore, null, // Secure Random hcVerifier); Scheme sch = new Scheme(urlProtocol, urlPort, socketFactory); httpclient.getConnectionManager().getSchemeRegistry().register(sch); } // How to handle retries and redirects: httpclient.setHttpRequestRetryHandler(new DefaultHttpRequestRetryHandler()); httpclient.getParams().setParameter(ClientPNames.HANDLE_REDIRECTS, request.isFollowRedirect()); // Now Execute: long startTime = System.currentTimeMillis(); HttpResponse http_res = httpclient.execute((HttpUriRequest) method, httpContext); long endTime = System.currentTimeMillis(); ResponseBean response = new ResponseBean(); response.setExecutionTime(endTime - startTime); response.setStatusCode(http_res.getStatusLine().getStatusCode()); response.setStatusLine(http_res.getStatusLine().toString()); final Header[] responseHeaders = http_res.getAllHeaders(); String contentType = null; for (Header header : responseHeaders) { response.addHeader(header.getName(), header.getValue()); if(header.getName().equalsIgnoreCase("content-type")) { contentType = header.getValue(); } } // find out the charset: final Charset charset; { Charset c; if(contentType != null) { final String charsetStr = Util.getCharsetFromContentType(contentType); try{ c = Charset.forName(charsetStr); } catch(IllegalCharsetNameException ex) { LOG.log(Level.WARNING, "Charset name is illegal: {0}", charsetStr); c = Charset.defaultCharset(); } catch(UnsupportedCharsetException ex) { LOG.log(Level.WARNING, "Charset {0} is not supported in this JVM.", charsetStr); c = Charset.defaultCharset(); } catch(IllegalArgumentException ex) { LOG.log(Level.WARNING, "Charset parameter is not available in Content-Type header!"); c = Charset.defaultCharset(); } } else { c = Charset.defaultCharset(); LOG.log(Level.WARNING, "Content-Type header 
not available in response. Using platform default encoding: {0}", c.name()); } charset = c; } final HttpEntity entity = http_res.getEntity(); if(entity != null){ InputStream is = entity.getContent(); try{ String responseBody = StreamUtil.inputStream2String(is, charset); if (responseBody != null) { response.setResponseBody(responseBody); } } catch(IOException ex) { final String msg = "Response body conversion to string using " + charset.displayName() + " encoding failed. Response body not set!"; for(View view: views) { view.doError(msg); } LOG.log(Level.WARNING, msg); } } // Now execute tests: try { junit.framework.TestSuite suite = TestUtil.getTestSuite(request, response); if (suite != null) { // suite will be null if there is no associated script TestResult testResult = TestUtil.execute(suite); response.setTestResult(testResult); } } catch (TestException ex) { for(View view: views){ view.doError(Util.getStackTrace(ex)); } } for(View view: views){ view.doResponse(response); } } catch (IOException ex) { if(!interruptedShutdown){ for(View view: views){ view.doError(Util.getStackTrace(ex)); } } else{ for(View view: views){ view.doCancelled(); } } } catch (Exception ex) { if(!interruptedShutdown){ for(View view: views){ view.doError(Util.getStackTrace(ex)); } } else{ for(View view: views){ view.doCancelled(); } } } finally { if (method != null && !interruptedShutdown) { httpclient.getConnectionManager().shutdown(); } for(View view: views){ view.doEnd(); } isRequestCompleted = true; } } private KeyStore getTrustStore(String trustStorePath, char[] trustStorePassword) throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException { KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType()); if(!StringUtil.isStrEmpty(trustStorePath)) { FileInputStream instream = new FileInputStream(new File(trustStorePath)); try{ trustStore.load(instream, trustStorePassword); } finally{ instream.close(); } } return trustStore; } @Override public void 
abortExecution(){ if(!isRequestCompleted){ ClientConnectionManager conMgr = httpclient.getConnectionManager(); interruptedShutdown = true; conMgr.shutdown(); } else{ LOG.info("Request already completed. Doing nothing."); } } private static final class PreemptiveAuth implements HttpRequestInterceptor { @Override public void process( final HttpRequest request, final HttpContext context) throws HttpException, IOException { AuthState authState = (AuthState) context.getAttribute( ClientContext.TARGET_AUTH_STATE); // If no auth scheme avaialble yet, try to initialize it preemptively if (authState.getAuthScheme() == null) { AuthScheme authScheme = (AuthScheme) context.getAttribute( "preemptive-auth"); CredentialsProvider credsProvider = (CredentialsProvider) context.getAttribute( ClientContext.CREDS_PROVIDER); HttpHost targetHost = (HttpHost) context.getAttribute( ExecutionContext.HTTP_TARGET_HOST); if (authScheme != null) { Credentials creds = credsProvider.getCredentials( new AuthScope( targetHost.getHostName(), targetHost.getPort())); if (creds == null) { throw new HttpException("No credentials for preemptive authentication"); } authState.setAuthScheme(authScheme); authState.setCredentials(creds); } } // if ends } // process() method ends } // Inner class ends }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.jfoenix.controls;

import com.jfoenix.assets.JFoenixResources;
import com.jfoenix.skins.JFXCheckBoxSkin;
// NOTE(review): com.sun.javafx.css.converters.* are JDK-internal APIs; they break on
// newer JavaFX releases where javafx.css.converter.* is the public replacement.
import com.sun.javafx.css.converters.BooleanConverter;
import com.sun.javafx.css.converters.PaintConverter;
import javafx.css.*;
import javafx.scene.control.CheckBox;
import javafx.scene.control.Skin;
import javafx.scene.paint.Color;
import javafx.scene.paint.Paint;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * JFXCheckBox is the material design implementation of a checkbox.
 * it shows ripple effect and a custom selection animation.
 *
 * @author Shadi Shaheen
 * @version 1.0
 * @since 2016-03-09
 */
public class JFXCheckBox extends CheckBox {

    /**
     * {@inheritDoc}
     */
    public JFXCheckBox(String text) {
        super(text);
        initialize();
    }

    /**
     * {@inheritDoc}
     */
    public JFXCheckBox() {
        initialize();
        // init in scene builder workaround ( TODO : remove when JFoenix is well integrated in scenebuilder by gluon )
        // Walk (at most 15 frames of) the current stack; if we were instantiated from
        // SceneBuilder's FXOM loader, give the control a visible default label.
        StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
        for (int i = 0; i < stackTraceElements.length && i < 15; i++) {
            if (stackTraceElements[i].getClassName().toLowerCase().contains(".scenebuilder.kit.fxom.")) {
                this.setText("CheckBox");
                break;
            }
        }
    }

    // Shared constructor logic: register the CSS selector class.
    private void initialize() {
        this.getStyleClass().add(DEFAULT_STYLE_CLASS);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected Skin<?> createDefaultSkin() {
        return new JFXCheckBoxSkin(this);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getUserAgentStylesheet() {
        return USER_AGENT_STYLESHEET;
    }

    /***************************************************************************
     *                                                                         *
     * Stylesheet Handling                                                     *
     *                                                                         *
     **************************************************************************/

    /**
     * Initialize the style class to 'jfx-check-box'.
     * <p>
     * This is the selector class from which CSS can be used to style
     * this control.
     */
    private static final String DEFAULT_STYLE_CLASS = "jfx-check-box";
    private static final String USER_AGENT_STYLESHEET = JFoenixResources.load("css/controls/jfx-check-box.css").toExternalForm();

    /**
     * checkbox color property when selected
     */
    private StyleableObjectProperty<Paint> checkedColor = new SimpleStyleableObjectProperty<>(StyleableProperties.CHECKED_COLOR,
        JFXCheckBox.this,
        "checkedColor",
        Color.valueOf("#0F9D58"));

    // NOTE(review): checkedColor is initialized inline and never reassigned, so the
    // null branch below looks unreachable; kept for defensive symmetry with the setters.
    public Paint getCheckedColor() {
        return checkedColor == null ? Color.valueOf("#0F9D58") : checkedColor.get();
    }

    public StyleableObjectProperty<Paint> checkedColorProperty() {
        return this.checkedColor;
    }

    public void setCheckedColor(Paint color) {
        this.checkedColor.set(color);
    }

    /**
     * checkbox color property when not selected
     */
    private StyleableObjectProperty<Paint> unCheckedColor = new SimpleStyleableObjectProperty<>(StyleableProperties.UNCHECKED_COLOR,
        JFXCheckBox.this,
        "unCheckedColor",
        Color.valueOf("#5A5A5A"));

    public Paint getUnCheckedColor() {
        return unCheckedColor == null ? Color.valueOf("#5A5A5A") : unCheckedColor.get();
    }

    public StyleableObjectProperty<Paint> unCheckedColorProperty() {
        return this.unCheckedColor;
    }

    public void setUnCheckedColor(Paint color) {
        this.unCheckedColor.set(color);
    }

    /**
     * Disable the visual indicator for focus.
     */
    private StyleableBooleanProperty disableVisualFocus = new SimpleStyleableBooleanProperty(StyleableProperties.DISABLE_VISUAL_FOCUS,
        JFXCheckBox.this,
        "disableVisualFocus",
        false);

    /**
     * Setting this property disables this {@link JFXCheckBox} from showing keyboard focus.
     * @return A property that will disable visual focus if true and enable it if false.
     */
    public final StyleableBooleanProperty disableVisualFocusProperty() {
        return this.disableVisualFocus;
    }

    /**
     * Indicates whether or not this {@link JFXCheckBox} will show focus when it receives keyboard focus.
     * @return False if this {@link JFXCheckBox} will show visual focus and true if it will not.
     */
    public final Boolean isDisableVisualFocus() {
        return disableVisualFocus != null && this.disableVisualFocusProperty().get();
    }

    /**
     * Setting this to true will disable this {@link JFXCheckBox} from showing focus when it receives keyboard focus.
     * @param disabled True to disable visual focus and false to enable it.
     */
    public final void setDisableVisualFocus(final Boolean disabled) {
        this.disableVisualFocusProperty().set(disabled);
    }

    /**
     * CSS metadata declarations that bind the "-jfx-*" CSS properties above to
     * their styleable Java properties. Each isSettable guard refuses to style a
     * property that the application code has bound.
     */
    private static class StyleableProperties {
        private static final CssMetaData<JFXCheckBox, Paint> CHECKED_COLOR =
            new CssMetaData<JFXCheckBox, Paint>("-jfx-checked-color",
                PaintConverter.getInstance(),
                Color.valueOf("#0F9D58")) {
                @Override
                public boolean isSettable(JFXCheckBox control) {
                    return control.checkedColor == null || !control.checkedColor.isBound();
                }

                @Override
                public StyleableProperty<Paint> getStyleableProperty(JFXCheckBox control) {
                    return control.checkedColorProperty();
                }
            };

        private static final CssMetaData<JFXCheckBox, Paint> UNCHECKED_COLOR =
            new CssMetaData<JFXCheckBox, Paint>("-jfx-unchecked-color",
                PaintConverter.getInstance(),
                Color.valueOf("#5A5A5A")) {
                @Override
                public boolean isSettable(JFXCheckBox control) {
                    return control.unCheckedColor == null || !control.unCheckedColor.isBound();
                }

                @Override
                public StyleableProperty<Paint> getStyleableProperty(JFXCheckBox control) {
                    return control.unCheckedColorProperty();
                }
            };

        private static final CssMetaData<JFXCheckBox, Boolean> DISABLE_VISUAL_FOCUS =
            new CssMetaData<JFXCheckBox, Boolean>("-jfx-disable-visual-focus",
                BooleanConverter.getInstance(),
                false) {
                @Override
                public boolean isSettable(JFXCheckBox control) {
                    return control.disableVisualFocus == null || !control.disableVisualFocus.isBound();
                }

                @Override
                public StyleableBooleanProperty getStyleableProperty(JFXCheckBox control) {
                    return control.disableVisualFocusProperty();
                }
            };

        // Parent metadata first, then this control's additions; exposed immutably.
        private static final List<CssMetaData<? extends Styleable, ?>> CHILD_STYLEABLES;

        static {
            final List<CssMetaData<? extends Styleable, ?>> styleables =
                new ArrayList<>(CheckBox.getClassCssMetaData());
            Collections.addAll(styleables,
                CHECKED_COLOR,
                UNCHECKED_COLOR,
                DISABLE_VISUAL_FOCUS
            );
            CHILD_STYLEABLES = Collections.unmodifiableList(styleables);
        }
    }

    @Override
    public List<CssMetaData<? extends Styleable, ?>> getControlCssMetaData() {
        return getClassCssMetaData();
    }

    public static List<CssMetaData<? extends Styleable, ?>> getClassCssMetaData() {
        return StyleableProperties.CHILD_STYLEABLES;
    }
}
package org.wikipedia.readinglist;

import android.content.DialogInterface;
import android.os.Bundle;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;

import org.wikipedia.R;
import org.wikipedia.WikipediaApp;
import org.wikipedia.analytics.ReadingListsFunnel;
import org.wikipedia.model.EnumCode;
import org.wikipedia.model.EnumCodeMap;
import org.wikipedia.page.ExtendedBottomSheetDialogFragment;
import org.wikipedia.page.PageActivity;
import org.wikipedia.page.PageTitle;
import org.wikipedia.util.FeedbackUtil;

import java.util.ArrayList;
import java.util.List;

/**
 * Bottom-sheet dialog that lets the user add the current page to one of their
 * reading lists, or create a new list on the fly. Shows a one-time onboarding
 * panel the first time the feature is used.
 */
public class AddToReadingListDialog extends ExtendedBottomSheetDialogFragment {

    /** Where the dialog was invoked from; the int code is persisted in the fragment args. */
    public enum InvokeSource implements EnumCode {
        BOOKMARK_BUTTON(0),
        CONTEXT_MENU(1),
        LINK_PREVIEW_MENU(2);

        private static final EnumCodeMap<InvokeSource> MAP = new EnumCodeMap<>(InvokeSource.class);

        private final int code;

        public static InvokeSource of(int code) {
            return MAP.get(code);
        }

        @Override
        public int code() {
            return code;
        }

        InvokeSource(int code) {
            this.code = code;
        }
    }

    private PageTitle pageTitle;
    private ReadingListAdapter adapter;
    private View listsContainer;
    private View onboardingContainer;
    private View onboardingButton;
    private InvokeSource invokeSource;
    private boolean isOnboarding;
    private CreateButtonClickListener createClickListener = new CreateButtonClickListener();
    private List<ReadingList> readingLists = new ArrayList<>();
    private DialogInterface.OnDismissListener dismissListener;

    /**
     * Factory for a new dialog instance.
     *
     * @param title  the page to be added to a list
     * @param source where the dialog is being launched from (for analytics)
     */
    public static AddToReadingListDialog newInstance(PageTitle title, InvokeSource source) {
        AddToReadingListDialog dialog = new AddToReadingListDialog();
        Bundle args = new Bundle();
        args.putParcelable("title", title);
        args.putInt("source", source.code());
        dialog.setArguments(args);
        return dialog;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        pageTitle = getArguments().getParcelable("title");
        invokeSource = InvokeSource.of(getArguments().getInt("source"));
        adapter = new ReadingListAdapter();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // NOTE(review): the two-arg inflate(res, root) attaches to (and returns) the
        // root when it is non-null; presumably container is null for this bottom
        // sheet — confirm, otherwise this should be inflate(res, container, false).
        View rootView = inflater.inflate(R.layout.dialog_add_to_reading_list, container);
        listsContainer = rootView.findViewById(R.id.lists_container);
        onboardingContainer = rootView.findViewById(R.id.onboarding_container);
        onboardingButton = rootView.findViewById(R.id.onboarding_button);
        checkAndShowOnboarding();
        RecyclerView readingListView = (RecyclerView) rootView.findViewById(R.id.list_of_lists);
        readingListView.setLayoutManager(new LinearLayoutManager(getActivity()));
        readingListView.setAdapter(adapter);

        View createButton = rootView.findViewById(R.id.create_button);
        createButton.setOnClickListener(createClickListener);

        View closeButton = rootView.findViewById(R.id.close_button);
        FeedbackUtil.setToolbarButtonLongPressToast(closeButton);
        closeButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dismiss();
            }
        });

        updateLists();
        return rootView;
    }

    @Override
    public void dismiss() {
        super.dismiss();
        if (dismissListener != null) {
            // The listener is invoked without a DialogInterface; callers must
            // tolerate a null argument.
            dismissListener.onDismiss(null);
        }
    }

    public void setOnDismissListener(DialogInterface.OnDismissListener listener) {
        dismissListener = listener;
    }

    /** Show either the onboarding panel or the list of lists, depending on tutorial state. */
    private void checkAndShowOnboarding() {
        isOnboarding = WikipediaApp.getInstance().getOnboardingStateMachine().isReadingListTutorialEnabled();
        onboardingButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Dismiss onboarding permanently and reveal the lists.
                onboardingContainer.setVisibility(View.GONE);
                listsContainer.setVisibility(View.VISIBLE);
                WikipediaApp.getInstance().getOnboardingStateMachine().setReadingListTutorial();
            }
        });
        listsContainer.setVisibility(isOnboarding ? View.GONE : View.VISIBLE);
        onboardingContainer.setVisibility(isOnboarding ? View.VISIBLE : View.GONE);
    }

    /** Reload the lists in most-recently-used order and refresh the adapter. */
    private void updateLists() {
        readingLists = ReadingList.DAO.queryMruLists();
        adapter.notifyDataSetChanged();
    }

    /** Opens the "create new list" edit dialog and adds the page to the new list. */
    private class CreateButtonClickListener implements View.OnClickListener {
        @Override
        public void onClick(View v) {
            final ReadingList readingList = new ReadingList();
            readingList.setTitle(getString(R.string.reading_list_name_sample));
            AlertDialog dialog = ReadingListDialogs.createEditDialog(getContext(), readingList, false,
                new Runnable() {
                    @Override
                    public void run() {
                        addAndDismiss(readingList);
                    }
                }, null);
            dialog.show();
        }
    }

    /**
     * Add the page to the given list (unless already present), show the
     * confirmation snackbar, log analytics, bump the list's recency, and close
     * the dialog.
     */
    private void addAndDismiss(ReadingList readingList) {
        if (ReadingList.DAO.listContainsTitle(readingList, pageTitle)) {
            ((PageActivity) getActivity())
                .showReadingListAddedSnackbar(getString(R.string.reading_list_already_exists), isOnboarding);
        } else {
            ((PageActivity) getActivity())
                .showReadingListAddedSnackbar(TextUtils.isEmpty(readingList.getTitle())
                        ? getString(R.string.reading_list_added_to_unnamed)
                        : String.format(getString(R.string.reading_list_added_to_named), readingList.getTitle()),
                    isOnboarding);
            new ReadingListsFunnel(pageTitle.getSite()).logAddToList(readingList, readingLists.size(), invokeSource);
            ReadingList.DAO.addTitleToList(readingList, pageTitle);
        }
        ReadingList.DAO.makeListMostRecent(readingList);
        dismiss();
    }

    private class ReadingListItemHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
        // Renamed from "itemView": the old name shadowed the inherited public
        // RecyclerView.ViewHolder.itemView field, inviting subtle bugs.
        private ReadingListItemView listItemView;
        private ReadingList readingList;

        ReadingListItemHolder(ReadingListItemView itemView) {
            super(itemView);
            this.listItemView = itemView;
            itemView.setOnClickListener(this);
        }

        public void bindItem(ReadingList readingList) {
            this.readingList = readingList;
            listItemView.setReadingList(readingList);
        }

        @Override
        public void onClick(View v) {
            addAndDismiss(readingList);
        }
    }

    private final class ReadingListAdapter extends RecyclerView.Adapter<ReadingListItemHolder> {
        @Override
        public int getItemCount() {
            return readingLists.size();
        }

        @Override
        public ReadingListItemHolder onCreateViewHolder(ViewGroup parent, int pos) {
            ReadingListItemView view = new ReadingListItemView(getContext());
            return new ReadingListItemHolder(view);
        }

        @Override
        public void onBindViewHolder(ReadingListItemHolder holder, int pos) {
            holder.bindItem(readingLists.get(pos));
        }
    }
}
package com.qburst.android.twitter;

import java.io.IOException;
import java.net.MalformedURLException;

import oauth.signpost.commonshttp.CommonsHttpOAuthConsumer;
import oauth.signpost.commonshttp.CommonsHttpOAuthProvider;
import twitter4j.TwitterException;
import twitter4j.TwitterFactory;
import twitter4j.auth.AccessToken;
import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.media.AudioManager;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.Window;
import android.webkit.CookieSyncManager;

import com.qburst.android.facebook.Util;
import com.qburst.share.R;

/**
 * Thin Twitter client wrapper: performs the OAuth 1.0a dance through an
 * embedded WebView dialog ({@link TwDialog}), stores the resulting access and
 * secret tokens, and posts status updates via twitter4j.
 */
public class Twitter {

    public static final String TAG = "twitter";

    private Activity _activity;

    // public static final String CALLBACK_URI = "twitter://callback";
    public static final String CANCEL_URI = "twitter://cancel";
    public static final String ACCESS_TOKEN = "access_token";
    public static final String SECRET_TOKEN = "secret_token";
    public static final String REQUEST = "request";
    public static final String AUTHORIZE = "authorize";

    protected static String REQUEST_ENDPOINT = "https://api.twitter.com/1";
    protected static String OAUTH_REQUEST_TOKEN = "https://api.twitter.com/oauth/request_token";
    protected static String OAUTH_ACCESS_TOKEN = "https://api.twitter.com/oauth/access_token";
    protected static String OAUTH_AUTHORIZE = "https://api.twitter.com/oauth/authorize";

    private String mAccessToken = null;
    private String mSecretToken = null;
    private ProgressDialog _progressDialog;
    private int mIcon;

    private CommonsHttpOAuthConsumer mHttpOauthConsumer;
    private CommonsHttpOAuthProvider mHttpOauthProvider;

    /** @param icon resource id of the icon shown in the OAuth dialog title. */
    public Twitter(int icon) {
        mIcon = icon;
    }

    /**
     * Run the OAuth authorization flow in a dialog. On success the access and
     * secret tokens are stored on this instance before {@code listener.onComplete}
     * is invoked.
     *
     * @param ctx            context used to host the dialog
     * @param handler        UI handler (currently unused by the dialog itself)
     * @param consumerKey    Twitter app consumer key
     * @param consumerSecret Twitter app consumer secret
     * @param listener       receives completion, error, and cancel callbacks
     */
    public void authorize(final Context ctx, Handler handler, String consumerKey,
                          String consumerSecret, final TwitterDialogListener listener) {
        mHttpOauthConsumer = new CommonsHttpOAuthConsumer(consumerKey, consumerSecret);
        mHttpOauthProvider = new CommonsHttpOAuthProvider(
            OAUTH_REQUEST_TOKEN, OAUTH_ACCESS_TOKEN, OAUTH_AUTHORIZE);
        CookieSyncManager.createInstance(ctx);
        dialog(ctx, handler, new TwitterDialogListener() {
            @Override
            public void onComplete(Bundle values) {
                CookieSyncManager.getInstance().sync();
                setAccessToken(values.getString(ACCESS_TOKEN));
                setSecretToken(values.getString(SECRET_TOKEN));
                if (isSessionValid()) {
                    Log.d(TAG, "token " + getAccessToken() + " " + getSecretToken());
                    listener.onComplete(values);
                } else {
                    onTwitterError(new TwitterError(ctx.getString(R.string.Oauth_failed_received)));
                }
            }

            @Override
            public void onTwitterError(TwitterError e) {
                Log.d(TAG, "Login failed: " + e);
                listener.onTwitterError(e);
            }

            @Override
            public void onError(DialogError e) {
                Log.d(TAG, "Login failed: " + e);
                listener.onError(e);
            }

            @Override
            public void onCancel() {
                Log.d(TAG, "Login cancelled");
                listener.onCancel();
            }
        });
    }

    /**
     * Clears cookies and forgets the access token. The secret token is left
     * untouched (presumably intentional — verify against callers).
     *
     * @return always {@code null}
     */
    public String logout(Context context) throws MalformedURLException, IOException {
        Util.clearCookies(context);
        Bundle b = new Bundle();
        b.putString("method", "auth.expireSession");
        //String response = request(b);
        setAccessToken(null);
        //setAccessExpires(0);
        return null;
        // return "true";
    }

    /**
     * Show the OAuth WebView dialog, after verifying INTERNET permission.
     */
    public void dialog(final Context ctx, Handler handler, final TwitterDialogListener listener) {
        if (ctx.checkCallingOrSelfPermission(Manifest.permission.INTERNET)
            != PackageManager.PERMISSION_GRANTED) {
            Util.showAlert(ctx, "Error", ctx.getString(R.string.App_requires_internet));
            return;
        }
        new TwDialog(ctx, mHttpOauthProvider, mHttpOauthConsumer, listener, mIcon).show();
    }

    /** @return true when both the access and secret tokens are present. */
    public boolean isSessionValid() {
        return getAccessToken() != null && getSecretToken() != null;
    }

    public String getAccessToken() {
        return mAccessToken;
    }

    public void setAccessToken(String accessToken) {
        mAccessToken = accessToken;
    }

    public String getSecretToken() {
        return mSecretToken;
    }

    public void setSecretToken(String secretToken) {
        mSecretToken = secretToken;
    }

    /** Callbacks for the OAuth dialog flow. */
    public static interface TwitterDialogListener {
        public void onComplete(Bundle values);

        public void onTwitterError(TwitterError e);

        public void onError(DialogError e);

        public void onCancel();
    }

    /**
     * Post a status update using the stored tokens, showing a progress dialog
     * while the network call runs and a result dialog afterwards.
     * <p>
     * NOTE(review): this runs the network call on the calling thread; on modern
     * Android it must be invoked off the main thread.
     */
    public void tweetToTwitter(String message, Activity activity) {
        _activity = activity;
        AccessToken accessToken = new AccessToken(getAccessToken(), getSecretToken());
        twitter4j.Twitter twitter = new TwitterFactory().getInstance();
        twitter.setOAuthConsumer(mHttpOauthConsumer.getConsumerKey(),
            mHttpOauthConsumer.getConsumerSecret());
        twitter.setOAuthAccessToken(accessToken);
        try {
            showProgressDialog();
            twitter.updateStatus(message);
            dismissProgressDialog();
            showDialogOkWithGoBack("", activity.getString(R.string.Twitter_tweet_success), activity);
        } catch (TwitterException e) {
            // Fix: the non-cancelable progress dialog was previously left showing
            // on failure, blocking the error dialog underneath it.
            dismissProgressDialog();
            Log.e(TAG, "tweet failed", e);
            showDialogOkWithGoBack("", activity.getString(R.string.Twitter_tweet_failed), activity);
        }
    }

    /** Show a modal, non-cancelable "please wait" spinner tied to {@code _activity}. */
    private void showProgressDialog() {
        _progressDialog = new ProgressDialog(_activity);
        _progressDialog.setMessage(_activity.getString(R.string.wait));
        _progressDialog.setCancelable(false);
        _progressDialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
        _progressDialog.show();
    }

    /** Dismiss the spinner, tolerating an already-detached window. */
    private void dismissProgressDialog() {
        if (_progressDialog != null) {
            try {
                _progressDialog.dismiss();
            } catch (Exception e) {
                Log.e(e.getClass().getName() + ": dismissProgressDialog", e.getMessage(), e);
            }
        }
    }

    /**
     * Show a modal OK dialog; pressing OK navigates back in the given activity.
     */
    public static void showDialogOkWithGoBack(String title, String message, final Activity activity) {
        if (activity.getApplicationContext() != null) {
            AlertDialog.Builder adb = new AlertDialog.Builder(activity);
            adb.setTitle(title);
            adb.setMessage(message);
            adb.setCancelable(false);
            adb.setNeutralButton("OK", new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    dialog.cancel();
                    activity.onBackPressed();
                }
            });
            AlertDialog ad = adb.create();
            ad.setVolumeControlStream(AudioManager.STREAM_MUSIC);
            ad.show();
        }
    }
}
/*L
 * Copyright Oracle inc, SAIC-F
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/cadsr-util/LICENSE.txt for details.
 */

package gov.nih.nci.ncicb.cadsr.common.persistence.dao.jdbc;

import gov.nih.nci.ncicb.cadsr.common.dto.DataElementTransferObject;
import gov.nih.nci.ncicb.cadsr.common.dto.ValueDomainTransferObject;
import gov.nih.nci.ncicb.cadsr.common.exception.DMLException;
import gov.nih.nci.ncicb.cadsr.common.persistence.dao.CDECartDAO;
import gov.nih.nci.ncicb.cadsr.common.persistence.dao.ValueDomainDAO;
import gov.nih.nci.ncicb.cadsr.common.resource.DataElement;
import gov.nih.nci.ncicb.cadsr.common.resource.ValueDomain;
import gov.nih.nci.ncicb.cadsr.common.servicelocator.ServiceLocator;
import gov.nih.nci.ncicb.cadsr.common.servicelocator.SimpleServiceLocator;
import gov.nih.nci.ncicb.cadsr.objectCart.CDECart;
import gov.nih.nci.ncicb.cadsr.objectCart.CDECartItem;
import gov.nih.nci.ncicb.cadsr.objectCart.CDECartItemTransferObject;
import gov.nih.nci.ncicb.cadsr.objectCart.impl.CDECartImpl;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import javax.sql.DataSource;

import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.jdbc.core.SqlParameter;
import org.springframework.jdbc.object.MappingSqlQuery;
import org.springframework.jdbc.object.SqlUpdate;

//import gov.nih.nci.ncicb.cadsr.common.dto.CDECartTransferObject;

/**
 * JDBC-backed DAO for the CDE shopping cart: loads a user's cart from the
 * FB_CART_DE_VIEW view and inserts/deletes rows in cde_cart_items.
 * User names are upper-cased before being used as keys against the view.
 */
public class JDBCCDECartDAO extends JDBCBaseDAO implements CDECartDAO {

    private DataElementsInCartQuery deCartQuery;
    private FormsInCartQuery frmCartQuery;
    private ValueDomainDAO vdDAO;
    private DeleteCartItem deleteItemQuery;
    private InsertCartItem insertItemQuery;

    public JDBCCDECartDAO(ServiceLocator locator) {
        super(locator);
        // Pre-compile the parameterized queries against the shared DataSource.
        deCartQuery = new DataElementsInCartQuery(this.getDataSource());
        frmCartQuery = new FormsInCartQuery(this.getDataSource());
        vdDAO = this.getDAOFactory().getValueDomainDAO();
        deleteItemQuery = new DeleteCartItem(this.getDataSource());
        insertItemQuery = new InsertCartItem(this.getDataSource());
    }

    /**
     * Load the cart for the given user (case-insensitive: name is upper-cased).
     * Only data elements are loaded; the form query is currently disabled.
     */
    public CDECart findCDECart(String username) {
        //CDECart cart = new CDECartTransferObject();
        CDECart cart = new CDECartImpl();
        List deList = deCartQuery.execute(username.toUpperCase());
        cart.setDataElements(deList);
        //List formList = frmCartQuery.execute(username);
        //cart.setForms(formList);
        return cart;
    }

    /**
     * Insert a cart item with a freshly generated GUID.
     *
     * @return number of rows inserted; 0 when the unique constraint fired
     *         (duplicate items are deliberately tolerated, only logged).
     */
    public int insertCartItem(CDECartItem item) {
        int res = 0;
        try {
            String ccmIdseq = generateGUID();
            res = insertItemQuery.createItem(item, ccmIdseq);
        } catch (DataIntegrityViolationException dex) {
            log.info("Unique constraint voilated in creating cart item", dex);
        }
        return res;
    }

    /**
     * Delete one cart item for the given user.
     *
     * @return always 1 on success
     * @throws DMLException when exactly one row was not deleted
     */
    public int deleteCartItem(String itemId, String username) {
        int res = deleteItemQuery.deleteItem(itemId, username.toUpperCase());
        if (res != 1) {
            DMLException dmlExp = new DMLException("Did not succeed in deleting the " +
                " cde_cart_items table.");
            dmlExp.setErrorCode(ERROR_DELETING_CART_ITEM);
            throw dmlExp;
        }
        return 1;
    }

    /*
    public static void main(String[] args) {
        ServiceLocator locator = new SimpleServiceLocator();
        JDBCCDECartDAO cTest = new JDBCCDECartDAO(locator);
        try {
            CDECartItem newItem = new CDECartItemTransferObject();
            int res = cTest.deleteCartItem("D5378537-60EA-2B2C-E034-0003BA0B1A09","SBREXT");
            System.out.println("\n*****Create Item Result 1: " + res);
        } catch (DMLException de) {
            System.out.println("******Printing DMLException*******");
            de.printStackTrace();
            System.out.println("******Finishing printing DMLException*******");
        }
        try {
            int res = formTest.deleteForm("D4700045-2FD0-0DAA-E034-0003BA0B1A09");
            System.out.println("\n*****Delete Form Result 1: " + res);
        } catch (DMLException de) {
            System.out.println("******Printing DMLException*******");
            de.printStackTrace();
            System.out.println("******Finishing printing DMLException*******");
        }
    }*/

    /**
     * Inner class: maps each FB_CART_DE_VIEW row for a user to a populated
     * CDECartItem (data element plus its value domain, including valid values).
     * NOTE(review): column positions (3..17, 27) are tied to the view's column
     * order — verify against the FB_CART_DE_VIEW definition before changing.
     */
    class DataElementsInCartQuery extends MappingSqlQuery {
        DataElementsInCartQuery(DataSource ds) {
            super(ds, "SELECT * FROM FB_CART_DE_VIEW where UA_NAME = ? ");
            declareParameter(new SqlParameter("ua_name", Types.VARCHAR));
        }

        protected Object mapRow(ResultSet rs, int rownum) throws SQLException {
            CDECartItem item = new CDECartItemTransferObject();
            DataElement de = new DataElementTransferObject();
            //Getting Value Domain Information
            ValueDomain vd = new ValueDomainTransferObject();
            // Column 13 appears to be the value-domain IDSEQ; a per-row DAO call
            // fetches its valid values (one query per row — potential N+1).
            Collection vdColl = new ArrayList(1);
            vdColl.add(rs.getString(13));
            Map hm = vdDAO.getValidValues(vdColl);
            List values = (List) hm.get(rs.getString(13));
            vd.setValidValues(values);
            vd.setVdIdseq(rs.getString(13));
            vd.setPublicId(rs.getInt(14));
            vd.setPreferredName(rs.getString(16));
            vd.setLongName(rs.getString(17));
            //Getting Data Element Information
            de.setDeIdseq((String) rs.getString(3));
            de.setPublicId(rs.getInt(4));
            de.setVersion(new Float(rs.getFloat(5)));
            de.setContextName(rs.getString(6));
            de.setAslName(rs.getString(8));
            de.setPreferredName(rs.getString(9));
            de.setLongName(rs.getString(10));
            de.setPreferredDefinition(rs.getString(11));
            de.setLongCDEName(rs.getString(12));
            de.setValueDomain(vd);
            de.setRegistrationStatus(rs.getString(27));
            // Rows loaded from the view are, by definition, already persisted.
            item.setPersistedInd(true);
            item.setDeletedInd(false);
            item.setItem(de);
            return item;
        }
    }

    /**
     * Inner class: form-cart query. Currently a stub — mapRow returns an empty
     * item and the only caller of this query is commented out in findCDECart.
     */
    class FormsInCartQuery extends MappingSqlQuery {
        FormsInCartQuery(DataSource ds) {
            super(ds, "SELECT * FROM FB_FORM_CART_VIEW where UA_NAME = ? ");
            declareParameter(new SqlParameter("ua_name", Types.VARCHAR));
        }

        protected Object mapRow(ResultSet rs, int rownum) throws SQLException {
            CDECartItem item = new CDECartItemTransferObject();
            return item;
        }
    }

    /**
     * Inner class that accesses database to create a cart item
     */
    private class InsertCartItem extends SqlUpdate {
        public InsertCartItem(DataSource ds) {
            String itemInsertSql = " INSERT INTO cde_cart_items " +
                " (ccm_idseq, ac_idseq, ua_name, actl_name,created_by,date_created) " +
                " VALUES " +
                " (?, ?, ?, ?, ?, ?) ";
            this.setDataSource(ds);
            this.setSql(itemInsertSql);
            declareParameter(new SqlParameter("p_ccm_idseq", Types.VARCHAR));
            declareParameter(new SqlParameter("p_ac_idseq", Types.VARCHAR));
            declareParameter(new SqlParameter("p_ua_name", Types.VARCHAR));
            declareParameter(new SqlParameter("p_actl_name", Types.VARCHAR));
            declareParameter(new SqlParameter("p_created_by", Types.VARCHAR));
            declareParameter(new SqlParameter("p_date_created", Types.TIMESTAMP));
            compile();
        }

        // ua_name is the upper-cased creator; created_by keeps the original case.
        protected int createItem(CDECartItem sm, String ccmIdseq) {
            Object[] obj = new Object[] {
                ccmIdseq,
                sm.getId(),
                sm.getCreatedBy().toUpperCase(),
                sm.getType(),
                sm.getCreatedBy(),
                sm.getCreatedDate()
            };
            int res = update(obj);
            return res;
        }
    }

    /**
     * Inner class that accesses database to delete an item.
     */
    private class DeleteCartItem extends SqlUpdate {
        public DeleteCartItem(DataSource ds) {
            String itemDeleteSql = " DELETE FROM cde_cart_items " +
                " WHERE ac_idseq = ? " +
                " AND ua_name = ? ";
            this.setDataSource(ds);
            this.setSql(itemDeleteSql);
            declareParameter(new SqlParameter("p_ac_idseq", Types.VARCHAR));
            declareParameter(new SqlParameter("p_user", Types.VARCHAR));
            compile();
        }

        protected int deleteItem(String ccmIdseq, String username) {
            Object[] obj = new Object[] {ccmIdseq, username};
            int res = update(obj);
            return res;
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.zookeeper;

import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Exchanger;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseZKTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.ZKTests;
import org.apache.zookeeper.AsyncCallback;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.Code;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.ZooKeeper;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Tests for {@code ReadOnlyZKClient}: lazy connection, reads, missing nodes,
 * session-expiry recovery, and keep-alive close behavior against a mini ZK
 * cluster. Keep-alive is configured to 3000 ms in setUp, so the timing
 * assertions below (sleep 6000, waitFor 10000) depend on that value.
 */
@Category({ ZKTests.class, MediumTests.class })
public class TestReadOnlyZKClient {

    @ClassRule
    public static final HBaseClassTestRule CLASS_RULE =
        HBaseClassTestRule.forClass(TestReadOnlyZKClient.class);

    private static HBaseZKTestingUtility UTIL = new HBaseZKTestingUtility();

    // Fixture: one node at PATH holding 10 random bytes, with CHILDREN children.
    private static String PATH = "/test";

    private static byte[] DATA;

    private static int CHILDREN = 5;

    private static ReadOnlyZKClient RO_ZK;

    @BeforeClass
    public static void setUp() throws Exception {
        final int port = UTIL.startMiniZKCluster().getClientPort();
        // Seed the fixture nodes with a throwaway direct ZK connection.
        ZooKeeper zk = ZooKeeperHelper.getConnectedZooKeeper("localhost:" + port, 10000);
        DATA = new byte[10];
        ThreadLocalRandom.current().nextBytes(DATA);
        zk.create(PATH, DATA, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
        for (int i = 0; i < CHILDREN; i++) {
            zk.create(PATH + "/c" + i, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE,
                CreateMode.PERSISTENT);
        }
        zk.close();
        Configuration conf = UTIL.getConfiguration();
        conf.set(HConstants.ZOOKEEPER_QUORUM, "localhost:" + port);
        conf.setInt(ReadOnlyZKClient.RECOVERY_RETRY, 3);
        conf.setInt(ReadOnlyZKClient.RECOVERY_RETRY_INTERVAL_MILLIS, 100);
        conf.setInt(ReadOnlyZKClient.KEEPALIVE_MILLIS, 3000);
        RO_ZK = new ReadOnlyZKClient(conf);
        // only connect when necessary
        assertNull(RO_ZK.zookeeper);
    }

    @AfterClass
    public static void tearDown() throws IOException {
        RO_ZK.close();
        UTIL.shutdownMiniZKCluster();
        UTIL.cleanupTestDir();
    }

    // Blocks until the client drops its idle connection (zookeeper field nulled).
    private void waitForIdleConnectionClosed() throws Exception {
        // The zookeeper client should be closed finally after the keep alive time elapsed
        UTIL.waitFor(10000, new ExplainingPredicate<Exception>() {

            @Override
            public boolean evaluate() {
                return RO_ZK.zookeeper == null;
            }

            @Override
            public String explainFailure() {
                return "Connection to zookeeper is still alive";
            }
        });
    }

    /** get/exists/list all return the fixture data; connection closes when idle. */
    @Test
    public void testRead() throws Exception {
        assertArrayEquals(DATA, RO_ZK.get(PATH).get());
        assertEquals(CHILDREN, RO_ZK.exists(PATH).get().getNumChildren());
        List<String> children = RO_ZK.list(PATH).get();
        assertEquals(CHILDREN, children.size());
        Collections.sort(children);
        for (int i = 0; i < CHILDREN; i++) {
            assertEquals("c" + i, children.get(i));
        }
        assertNotNull(RO_ZK.zookeeper);
        waitForIdleConnectionClosed();
    }

    /** get/list on a missing node fail with NONODE; exists returns null instead. */
    @Test
    public void testNoNode() throws InterruptedException, ExecutionException {
        String pathNotExists = PATH + "_whatever";
        try {
            RO_ZK.get(pathNotExists).get();
            fail("should fail because of " + pathNotExists + " does not exist");
        } catch (ExecutionException e) {
            assertThat(e.getCause(), instanceOf(KeeperException.class));
            KeeperException ke = (KeeperException) e.getCause();
            assertEquals(Code.NONODE, ke.code());
            assertEquals(pathNotExists, ke.getPath());
        }
        try {
            RO_ZK.list(pathNotExists).get();
            fail("should fail because of " + pathNotExists + " does not exist");
        } catch (ExecutionException e) {
            assertThat(e.getCause(), instanceOf(KeeperException.class));
            KeeperException ke = (KeeperException) e.getCause();
            assertEquals(Code.NONODE, ke.code());
            assertEquals(pathNotExists, ke.getPath());
        }
        // exists will not throw exception.
        assertNull(RO_ZK.exists(pathNotExists).get());
    }

    /** Expiring the session forces a reconnect with a new session id; reads still work. */
    @Test
    public void testSessionExpire() throws Exception {
        assertArrayEquals(DATA, RO_ZK.get(PATH).get());
        ZooKeeper zk = RO_ZK.zookeeper;
        long sessionId = zk.getSessionId();
        // Kill the session server-side to simulate expiry.
        UTIL.getZkCluster().getZooKeeperServers().get(0).closeSession(sessionId);
        // should not reach keep alive so still the same instance
        assertSame(zk, RO_ZK.zookeeper);
        byte[] got = RO_ZK.get(PATH).get();
        assertArrayEquals(DATA, got);
        assertNotNull(RO_ZK.zookeeper);
        assertNotSame(zk, RO_ZK.zookeeper);
        assertNotEquals(sessionId, RO_ZK.zookeeper.getSessionId());
    }

    /**
     * A connection with an in-flight request must survive the keep-alive window;
     * the mock's getData callback is parked in an Exchanger until we release it.
     */
    @Test
    public void testNotCloseZkWhenPending() throws Exception {
        ZooKeeper mockedZK = mock(ZooKeeper.class);
        Exchanger<AsyncCallback.DataCallback> exchanger = new Exchanger<>();
        doAnswer(i -> {
            // Hand the callback to the test thread instead of completing the read.
            exchanger.exchange(i.getArgument(2));
            return null;
        }).when(mockedZK).getData(anyString(), anyBoolean(),
            any(AsyncCallback.DataCallback.class), any());
        doAnswer(i -> null).when(mockedZK).close();
        when(mockedZK.getState()).thenReturn(ZooKeeper.States.CONNECTED);
        RO_ZK.zookeeper = mockedZK;
        CompletableFuture<byte[]> future = RO_ZK.get(PATH);
        AsyncCallback.DataCallback callback = exchanger.exchange(null);
        // 2 * keep alive time to ensure that we will not close the zk when there are pending requests
        Thread.sleep(6000);
        assertNotNull(RO_ZK.zookeeper);
        verify(mockedZK, never()).close();
        // Complete the pending read successfully.
        callback.processResult(Code.OK.intValue(), PATH, null, DATA, null);
        assertArrayEquals(DATA, future.get());
        // now we will close the idle connection.
        waitForIdleConnectionClosed();
        verify(mockedZK, times(1)).close();
    }
}
package com.fasterxml.jackson.dataformat.yaml; import java.io.*; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Arrays; import java.util.Collections; import org.yaml.snakeyaml.DumperOptions; import org.yaml.snakeyaml.emitter.Emitter; import org.yaml.snakeyaml.events.*; import com.fasterxml.jackson.core.*; import com.fasterxml.jackson.core.base.GeneratorBase; import com.fasterxml.jackson.core.json.JsonWriteContext; import com.fasterxml.jackson.core.io.IOContext; public class YAMLGenerator extends GeneratorBase { /** * Enumeration that defines all togglable features for YAML generators */ public enum Feature { BOGUS(false) // placeholder ; protected final boolean _defaultState; protected final int _mask; /** * Method that calculates bit set (flags) of all features that * are enabled by default. */ public static int collectDefaults() { int flags = 0; for (Feature f : values()) { if (f.enabledByDefault()) { flags |= f.getMask(); } } return flags; } private Feature(boolean defaultState) { _defaultState = defaultState; _mask = (1 << ordinal()); } public boolean enabledByDefault() { return _defaultState; } public int getMask() { return _mask; } }; protected final static long MIN_INT_AS_LONG = (long) Integer.MIN_VALUE; protected final static long MAX_INT_AS_LONG = (long) Integer.MAX_VALUE; /* /********************************************************** /* Configuration /********************************************************** */ final protected IOContext _ioContext; /** * Bit flag composed of bits that indicate which * {@link org.codehaus.jackson.smile.SmileGenerator.Feature}s * are enabled. 
*/ protected int _yamlFeatures; protected Writer _writer; protected DumperOptions _outputOptions; // for field names, leave out quotes private final static Character STYLE_NAME = null; // numbers, booleans, should use implicit private final static Character STYLE_SCALAR = null; // Strings quoted for fun private final static Character STYLE_STRING = Character.valueOf('"'); // Which flow style to use for Base64? Maybe basic quoted? private final static Character STYLE_BASE64 = Character.valueOf('"'); /* /********************************************************** /* Output state /********************************************************** */ protected Emitter _emitter; /* /********************************************************** /* Life-cycle /********************************************************** */ public YAMLGenerator(IOContext ctxt, int jsonFeatures, int yamlFeatures, ObjectCodec codec, Writer out, DumperOptions outputOptions, Integer[] version ) throws IOException { super(jsonFeatures, codec); _ioContext = ctxt; _yamlFeatures = yamlFeatures; _writer = out; _emitter = new Emitter(_writer, outputOptions); _outputOptions = outputOptions; // should we start output now, or try to defer? _emitter.emit(new StreamStartEvent(null, null)); _emitter.emit(new DocumentStartEvent(null, null, /*explicit start*/ false, version, /*tags*/ Collections.<String,String>emptyMap())); } /* /********************************************************** /* Versioned /********************************************************** */ @Override public Version version() { return ModuleVersion.instance.version(); } /* /********************************************************** /* Overridden methods, configuration /********************************************************** */ /** * Not sure what to do here; could reset indentation to some value maybe? 
*/ @Override public YAMLGenerator useDefaultPrettyPrinter() { return this; } /** * Not sure what to do here; will always indent, but uses * YAML-specific settings etc. */ @Override public YAMLGenerator setPrettyPrinter(PrettyPrinter pp) { return this; } @Override public Object getOutputTarget() { return _writer; } @Override public boolean canUseSchema(FormatSchema schema) { return false; } //@Override public void setSchema(FormatSchema schema) /* /********************************************************************** /* Overridden methods; writing field names /********************************************************************** */ /* And then methods overridden to make final, streamline some * aspects... */ @Override public final void writeFieldName(String name) throws IOException, JsonGenerationException { if (_writeContext.writeFieldName(name) == JsonWriteContext.STATUS_EXPECT_VALUE) { _reportError("Can not write a field name, expecting a value"); } _writeFieldName(name); } @Override public final void writeFieldName(SerializableString name) throws IOException, JsonGenerationException { // Object is a value, need to verify it's allowed if (_writeContext.writeFieldName(name.getValue()) == JsonWriteContext.STATUS_EXPECT_VALUE) { _reportError("Can not write a field name, expecting a value"); } _writeFieldName(name.getValue()); } @Override public final void writeStringField(String fieldName, String value) throws IOException, JsonGenerationException { if (_writeContext.writeFieldName(fieldName) == JsonWriteContext.STATUS_EXPECT_VALUE) { _reportError("Can not write a field name, expecting a value"); } _writeFieldName(fieldName); writeString(value); } private final void _writeFieldName(String name) throws IOException, JsonGenerationException { _writeScalar(name, "string", STYLE_NAME); } /* /********************************************************** /* Extended API, configuration /********************************************************** */ public YAMLGenerator 
enable(Feature f) { _yamlFeatures |= f.getMask(); return this; } public YAMLGenerator disable(Feature f) { _yamlFeatures &= ~f.getMask(); return this; } public final boolean isEnabled(Feature f) { return (_yamlFeatures & f.getMask()) != 0; } public YAMLGenerator configure(Feature f, boolean state) { if (state) { enable(f); } else { disable(f); } return this; } /* /********************************************************** /* Public API: low-level I/O /********************************************************** */ @Override public final void flush() throws IOException { _writer.flush(); } @Override public void close() throws IOException { _emitter.emit(new DocumentEndEvent(null, null, false)); _emitter.emit(new StreamEndEvent(null, null)); super.close(); _writer.close(); } /* /********************************************************** /* Public API: structural output /********************************************************** */ @Override public final void writeStartArray() throws IOException, JsonGenerationException { _verifyValueWrite("start an array"); _writeContext = _writeContext.createChildArrayContext(); Boolean style = _outputOptions.getDefaultFlowStyle().getStyleBoolean(); // note: can NOT be implicit, to avoid having to specify tag _emitter.emit(new SequenceStartEvent(/*anchor*/null, /*tag*/null, /*implicit*/ true, null, null, style)); } @Override public final void writeEndArray() throws IOException, JsonGenerationException { if (!_writeContext.inArray()) { _reportError("Current context not an ARRAY but "+_writeContext.getTypeDesc()); } _writeContext = _writeContext.getParent(); _emitter.emit(new SequenceEndEvent(null, null)); } @Override public final void writeStartObject() throws IOException, JsonGenerationException { _verifyValueWrite("start an object"); _writeContext = _writeContext.createChildObjectContext(); Boolean style = _outputOptions.getDefaultFlowStyle().getStyleBoolean(); // note: can NOT be implicit, to avoid having to specify tag 
_emitter.emit(new MappingStartEvent(/* anchor */null, null, //TAG_OBJECT, /*implicit*/true, null, null, style)); } @Override public final void writeEndObject() throws IOException, JsonGenerationException { if (!_writeContext.inObject()) { _reportError("Current context not an object but "+_writeContext.getTypeDesc()); } _writeContext = _writeContext.getParent(); _emitter.emit(new MappingEndEvent(null, null)); } /* /********************************************************** /* Output method implementations, textual /********************************************************** */ @Override public void writeString(String text) throws IOException,JsonGenerationException { if (text == null) { writeNull(); return; } _verifyValueWrite("write String value"); _writeScalar(text, "string", STYLE_STRING); } @Override public void writeString(char[] text, int offset, int len) throws IOException, JsonGenerationException { writeString(new String(text, offset, len)); } @Override public final void writeString(SerializableString sstr) throws IOException, JsonGenerationException { writeString(sstr.toString()); } @Override public void writeRawUTF8String(byte[] text, int offset, int len) throws IOException, JsonGenerationException { _reportUnsupportedOperation(); } @Override public final void writeUTF8String(byte[] text, int offset, int len) throws IOException, JsonGenerationException { writeString(new String(text, offset, len, "UTF-8")); } /* /********************************************************** /* Output method implementations, unprocessed ("raw") /********************************************************** */ @Override public void writeRaw(String text) throws IOException, JsonGenerationException { _reportUnsupportedOperation(); } @Override public void writeRaw(String text, int offset, int len) throws IOException, JsonGenerationException { _reportUnsupportedOperation(); } @Override public void writeRaw(char[] text, int offset, int len) throws IOException, JsonGenerationException { 
_reportUnsupportedOperation(); } @Override public void writeRaw(char c) throws IOException, JsonGenerationException { _reportUnsupportedOperation(); } @Override public void writeRawValue(String text) throws IOException, JsonGenerationException { _reportUnsupportedOperation(); } @Override public void writeRawValue(String text, int offset, int len) throws IOException, JsonGenerationException { _reportUnsupportedOperation(); } @Override public void writeRawValue(char[] text, int offset, int len) throws IOException, JsonGenerationException { _reportUnsupportedOperation(); } /* /********************************************************** /* Output method implementations, base64-encoded binary /********************************************************** */ @Override public void writeBinary(Base64Variant b64variant, byte[] data, int offset, int len) throws IOException, JsonGenerationException { if (data == null) { writeNull(); return; } _verifyValueWrite("write Binary value"); // ok, better just Base64 encode as a String... if (offset > 0 || (offset+len) != data.length) { data = copyOfRange(data, offset, offset+len); } String encoded = b64variant.encode(data); _writeScalar(encoded, "byte[]", STYLE_BASE64); } // JDK5 does not contain Arrays.copyOfRange private static byte[] copyOfRange(byte[] original, int from, int to) { int newLength = to - from; if (newLength < 0) throw new IllegalArgumentException(from + " > " + to); byte[] copy = new byte[newLength]; System.arraycopy(original, from, copy, 0, Math.min(original.length - from, newLength)); return copy; } /* /********************************************************** /* Output method implementations, primitive /********************************************************** */ @Override public void writeBoolean(boolean state) throws IOException, JsonGenerationException { _verifyValueWrite("write boolean value"); _writeScalar(state ? 
"true" : "false", "bool", STYLE_SCALAR); } @Override public void writeNull() throws IOException, JsonGenerationException { _verifyValueWrite("write null value"); // no real type for this, is there? _writeScalar("null", "object", STYLE_SCALAR); } @Override public void writeNumber(int i) throws IOException, JsonGenerationException { _verifyValueWrite("write number"); _writeScalar(String.valueOf(i), "int", STYLE_SCALAR); } @Override public void writeNumber(long l) throws IOException, JsonGenerationException { // First: maybe 32 bits is enough? if (l <= MAX_INT_AS_LONG && l >= MIN_INT_AS_LONG) { writeNumber((int) l); return; } _verifyValueWrite("write number"); _writeScalar(String.valueOf(l), "long", STYLE_SCALAR); } @Override public void writeNumber(BigInteger v) throws IOException, JsonGenerationException { if (v == null) { writeNull(); return; } _verifyValueWrite("write number"); _writeScalar(String.valueOf(v.toString()), "java.math.BigInteger", STYLE_SCALAR); } @Override public void writeNumber(double d) throws IOException, JsonGenerationException { _verifyValueWrite("write number"); _writeScalar(String.valueOf(d), "double", STYLE_SCALAR); } @Override public void writeNumber(float f) throws IOException, JsonGenerationException { _verifyValueWrite("write number"); _writeScalar(String.valueOf(f), "float", STYLE_SCALAR); } @Override public void writeNumber(BigDecimal dec) throws IOException, JsonGenerationException { if (dec == null) { writeNull(); return; } _verifyValueWrite("write number"); _writeScalar(dec.toString(), "java.math.BigDecimal", STYLE_SCALAR); } @Override public void writeNumber(String encodedValue) throws IOException,JsonGenerationException, UnsupportedOperationException { if (encodedValue == null) { writeNull(); return; } _verifyValueWrite("write number"); _writeScalar(encodedValue, "number", STYLE_SCALAR); } /* /********************************************************** /* Implementations for methods from base class 
/********************************************************** */ @Override protected final void _verifyValueWrite(String typeMsg) throws IOException, JsonGenerationException { int status = _writeContext.writeValue(); if (status == JsonWriteContext.STATUS_EXPECT_NAME) { _reportError("Can not "+typeMsg+", expecting field name"); } } @Override protected void _releaseBuffers() { // nothing special to do... } /* /********************************************************** /* Internal methods /********************************************************** */ // Implicit means that (type) tags won't be shown, right? private final static ImplicitTuple DEFAULT_IMPLICIT = new ImplicitTuple(true, true); protected void _writeScalar(String value, String type, Character style) throws IOException { _emitter.emit(_scalarEvent(value, type, style)); } protected ScalarEvent _scalarEvent(String value, String tag, Character style) { // 'type' can be used as 'tag'... but should we? return new ScalarEvent(null, null, DEFAULT_IMPLICIT, value, null, null, style); } }
/*-
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2002-2010 Oracle.  All rights reserved.
 *
 * $Id: SampleDatabase.java,v 1.35 2010/01/04 15:50:34 cwl Exp $
 */

package collections.ship.index;

import java.io.File;

import com.sleepycat.bind.serial.ClassCatalog;
import com.sleepycat.bind.serial.SerialSerialKeyCreator;
import com.sleepycat.bind.serial.StoredClassCatalog;
import com.sleepycat.je.Database;
import com.sleepycat.je.DatabaseConfig;
import com.sleepycat.je.DatabaseException;
import com.sleepycat.je.Environment;
import com.sleepycat.je.EnvironmentConfig;
import com.sleepycat.je.ForeignKeyDeleteAction;
import com.sleepycat.je.SecondaryConfig;
import com.sleepycat.je.SecondaryDatabase;

/**
 * SampleDatabase defines the storage containers, indices and foreign keys
 * for the sample database.
 *
 * @author Mark Hayes
 */
public class SampleDatabase {

    // Physical store/index names within the Berkeley DB environment.
    private static final String CLASS_CATALOG = "java_class_catalog";
    private static final String SUPPLIER_STORE = "supplier_store";
    private static final String PART_STORE = "part_store";
    private static final String SHIPMENT_STORE = "shipment_store";
    private static final String SHIPMENT_PART_INDEX = "shipment_part_index";
    private static final String SHIPMENT_SUPPLIER_INDEX =
        "shipment_supplier_index";
    private static final String SUPPLIER_CITY_INDEX = "supplier_city_index";

    private Environment env;
    private Database partDb;
    private Database supplierDb;
    private Database shipmentDb;
    private SecondaryDatabase supplierByCityDb;
    private SecondaryDatabase shipmentByPartDb;
    private SecondaryDatabase shipmentBySupplierDb;
    private StoredClassCatalog javaCatalog;

    /**
     * Open all storage containers, indices, and catalogs.
     *
     * @param homeDirectory directory for the Berkeley DB environment files;
     *        the environment and all stores are created if absent.
     * @throws DatabaseException if any store or index cannot be opened.
     */
    public SampleDatabase(String homeDirectory)
        throws DatabaseException {

        // Open the Berkeley DB environment in transactional mode.
        //
        System.out.println("Opening environment in: " + homeDirectory);
        EnvironmentConfig envConfig = new EnvironmentConfig();
        envConfig.setTransactional(true);
        envConfig.setAllowCreate(true);
        env = new Environment(new File(homeDirectory), envConfig);

        // Set the Berkeley DB config for opening all stores.
        //
        DatabaseConfig dbConfig = new DatabaseConfig();
        dbConfig.setTransactional(true);
        dbConfig.setAllowCreate(true);

        // Create the Serial class catalog.  This holds the serialized class
        // format for all database records of serial format.
        //
        Database catalogDb = env.openDatabase(null, CLASS_CATALOG,
                                              dbConfig);
        javaCatalog = new StoredClassCatalog(catalogDb);

        // Open the Berkeley DB database for the part, supplier and shipment
        // stores.  The stores are opened with no duplicate keys allowed.
        //
        partDb = env.openDatabase(null, PART_STORE, dbConfig);

        supplierDb = env.openDatabase(null, SUPPLIER_STORE, dbConfig);

        shipmentDb = env.openDatabase(null, SHIPMENT_STORE, dbConfig);

        // Open the SecondaryDatabase for the city index of the supplier store,
        // and for the part and supplier indices of the shipment store.
        // Duplicate keys are allowed since more than one supplier may be in
        // the same city, and more than one shipment may exist for the same
        // supplier or part.  A foreign key constraint is defined for the
        // supplier and part indices to ensure that a shipment only refers to
        // existing part and supplier keys.  The CASCADE delete action means
        // that shipments will be deleted if their associated part or supplier
        // is deleted.
        //
        // NOTE: the single SecondaryConfig instance below is deliberately
        // mutated and reused across the three openSecondaryDatabase() calls;
        // the order of the setKeyCreator/setForeignKey* calls relative to
        // each open is therefore significant.  In particular, the city index
        // is opened BEFORE any foreign-key settings are applied, so it has
        // no foreign key constraint.
        SecondaryConfig secConfig = new SecondaryConfig();
        secConfig.setTransactional(true);
        secConfig.setAllowCreate(true);
        secConfig.setSortedDuplicates(true);

        secConfig.setKeyCreator(
            new SupplierByCityKeyCreator(javaCatalog,
                                         SupplierKey.class,
                                         SupplierData.class,
                                         String.class));
        supplierByCityDb = env.openSecondaryDatabase(null, SUPPLIER_CITY_INDEX,
                                                     supplierDb, secConfig);

        // Shipment-by-part: foreign key into the part store, CASCADE delete.
        secConfig.setForeignKeyDatabase(partDb);
        secConfig.setForeignKeyDeleteAction(ForeignKeyDeleteAction.CASCADE);
        secConfig.setKeyCreator(
            new ShipmentByPartKeyCreator(javaCatalog,
                                         ShipmentKey.class,
                                         ShipmentData.class,
                                         PartKey.class));
        shipmentByPartDb = env.openSecondaryDatabase(null, SHIPMENT_PART_INDEX,
                                                     shipmentDb, secConfig);

        // Shipment-by-supplier: foreign key into the supplier store,
        // CASCADE delete (overrides the part settings on the shared config).
        secConfig.setForeignKeyDatabase(supplierDb);
        secConfig.setForeignKeyDeleteAction(ForeignKeyDeleteAction.CASCADE);
        secConfig.setKeyCreator(
            new ShipmentBySupplierKeyCreator(javaCatalog,
                                             ShipmentKey.class,
                                             ShipmentData.class,
                                             SupplierKey.class));
        shipmentBySupplierDb = env.openSecondaryDatabase(null,
                                                     SHIPMENT_SUPPLIER_INDEX,
                                                     shipmentDb, secConfig);
    }

    /**
     * Return the storage environment for the database.
     */
    public final Environment getEnvironment() {

        return env;
    }

    /**
     * Return the class catalog.
     */
    public final StoredClassCatalog getClassCatalog() {

        return javaCatalog;
    }

    /**
     * Return the part storage container.
     */
    public final Database getPartDatabase() {

        return partDb;
    }

    /**
     * Return the supplier storage container.
     */
    public final Database getSupplierDatabase() {

        return supplierDb;
    }

    /**
     * Return the shipment storage container.
     */
    public final Database getShipmentDatabase() {

        return shipmentDb;
    }

    /**
     * Return the shipment-by-part index.
     */
    public final SecondaryDatabase getShipmentByPartDatabase() {

        return shipmentByPartDb;
    }

    /**
     * Return the shipment-by-supplier index.
     */
    public final SecondaryDatabase getShipmentBySupplierDatabase() {

        return shipmentBySupplierDb;
    }

    /**
     * Return the supplier-by-city index.
     */
    public final SecondaryDatabase getSupplierByCityDatabase() {

        return supplierByCityDb;
    }

    /**
     * Close all stores (closing a store automatically closes its indices).
     *
     * @throws DatabaseException if a close fails; note that a failure part
     *         way through leaves the remaining handles open.
     */
    public void close()
        throws DatabaseException {

        // Close secondary databases, then primary databases.
        supplierByCityDb.close();
        shipmentByPartDb.close();
        shipmentBySupplierDb.close();
        partDb.close();
        supplierDb.close();
        shipmentDb.close();
        // And don't forget to close the catalog and the environment.
        javaCatalog.close();
        env.close();
    }

    /**
     * The SecondaryKeyCreator for the SupplierByCity index.  This is an
     * extension of the abstract class SerialSerialKeyCreator, which implements
     * SecondaryKeyCreator for the case where the data keys and value are all
     * of the serial format.
     */
    private static class SupplierByCityKeyCreator
        extends SerialSerialKeyCreator {

        /**
         * Construct the city key extractor.
         * @param catalog is the class catalog.
         * @param primaryKeyClass is the supplier key class.
         * @param valueClass is the supplier value class.
         * @param indexKeyClass is the city key class.
         */
        private SupplierByCityKeyCreator(ClassCatalog catalog,
                                         Class primaryKeyClass,
                                         Class valueClass,
                                         Class indexKeyClass) {

            super(catalog, primaryKeyClass, valueClass, indexKeyClass);
        }

        /**
         * Extract the city key from a supplier key/value pair.  The city key
         * is stored in the supplier value, so the supplier key is not used.
         */
        public Object createSecondaryKey(Object primaryKeyInput,
                                         Object valueInput) {

            SupplierData supplierData = (SupplierData) valueInput;
            return supplierData.getCity();
        }
    }

    /**
     * The SecondaryKeyCreator for the ShipmentByPart index.  This is an
     * extension of the abstract class SerialSerialKeyCreator, which implements
     * SecondaryKeyCreator for the case where the data keys and value are all
     * of the serial format.
     */
    private static class ShipmentByPartKeyCreator
        extends SerialSerialKeyCreator {

        /**
         * Construct the part key extractor.
         * @param catalog is the class catalog.
         * @param primaryKeyClass is the shipment key class.
         * @param valueClass is the shipment value class.
         * @param indexKeyClass is the part key class.
         */
        private ShipmentByPartKeyCreator(ClassCatalog catalog,
                                         Class primaryKeyClass,
                                         Class valueClass,
                                         Class indexKeyClass) {

            super(catalog, primaryKeyClass, valueClass, indexKeyClass);
        }

        /**
         * Extract the part key from a shipment key/value pair.  The part key
         * is stored in the shipment key, so the shipment value is not used.
         */
        public Object createSecondaryKey(Object primaryKeyInput,
                                         Object valueInput) {

            ShipmentKey shipmentKey = (ShipmentKey) primaryKeyInput;
            return new PartKey(shipmentKey.getPartNumber());
        }
    }

    /**
     * The SecondaryKeyCreator for the ShipmentBySupplier index.  This is an
     * extension of the abstract class SerialSerialKeyCreator, which implements
     * SecondaryKeyCreator for the case where the data keys and value are all
     * of the serial format.
     */
    private static class ShipmentBySupplierKeyCreator
        extends SerialSerialKeyCreator {

        /**
         * Construct the supplier key extractor.
         * @param catalog is the class catalog.
         * @param primaryKeyClass is the shipment key class.
         * @param valueClass is the shipment value class.
         * @param indexKeyClass is the supplier key class.
         */
        private ShipmentBySupplierKeyCreator(ClassCatalog catalog,
                                             Class primaryKeyClass,
                                             Class valueClass,
                                             Class indexKeyClass) {

            super(catalog, primaryKeyClass, valueClass, indexKeyClass);
        }

        /**
         * Extract the supplier key from a shipment key/value pair.  The part
         * key is stored in the shipment key, so the shipment value is not
         * used.
         */
        public Object createSecondaryKey(Object primaryKeyInput,
                                         Object valueInput) {

            ShipmentKey shipmentKey = (ShipmentKey) primaryKeyInput;
            return new SupplierKey(shipmentKey.getSupplierNumber());
        }
    }
}
/* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.internal.telephony.cdma; import android.app.ActivityManagerNative; import android.content.ContentValues; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.database.SQLException; import android.net.Uri; import android.os.AsyncResult; import android.os.Handler; import android.os.Message; import android.os.PowerManager; import android.os.PowerManager.WakeLock; import android.os.Registrant; import android.os.RegistrantList; import android.os.SystemProperties; import android.os.UserHandle; import android.preference.PreferenceManager; import android.provider.Settings; import android.provider.Telephony; import android.telephony.CellLocation; import android.telephony.PhoneNumberUtils; import android.telephony.ServiceState; import android.telephony.cdma.CdmaCellLocation; import android.text.TextUtils; import android.telephony.Rlog; import com.android.internal.telephony.CallStateException; import com.android.internal.telephony.CallTracker; import com.android.internal.telephony.CommandException; import com.android.internal.telephony.CommandsInterface; import com.android.internal.telephony.Connection; import com.android.internal.telephony.IccPhoneBookInterfaceManager; import com.android.internal.telephony.IccSmsInterfaceManager; import com.android.internal.telephony.MccTable; import 
com.android.internal.telephony.MmiCode; import com.android.internal.telephony.OperatorInfo; import com.android.internal.telephony.PhoneBase; import com.android.internal.telephony.PhoneConstants; import com.android.internal.telephony.PhoneNotifier; import com.android.internal.telephony.PhoneProxy; import com.android.internal.telephony.PhoneSubInfo; import com.android.internal.telephony.ServiceStateTracker; import com.android.internal.telephony.SmsBroadcastUndelivered; import com.android.internal.telephony.TelephonyIntents; import com.android.internal.telephony.TelephonyProperties; import com.android.internal.telephony.UUSInfo; import com.android.internal.telephony.dataconnection.DcTracker; import com.android.internal.telephony.uicc.IccException; import com.android.internal.telephony.uicc.IccRecords; import com.android.internal.telephony.uicc.RuimRecords; import com.android.internal.telephony.uicc.UiccCardApplication; import com.android.internal.telephony.uicc.UiccController; import java.io.FileDescriptor; import java.io.PrintWriter; import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import static com.android.internal.telephony.TelephonyProperties.PROPERTY_ICC_OPERATOR_ALPHA; import static com.android.internal.telephony.TelephonyProperties.PROPERTY_ICC_OPERATOR_ISO_COUNTRY; import static com.android.internal.telephony.TelephonyProperties.PROPERTY_ICC_OPERATOR_NUMERIC; /** * {@hide} */ public class CDMAPhone extends PhoneBase { static final String LOG_TAG = "CDMAPhone"; private static final boolean DBG = true; private static final boolean VDBG = false; /* STOP SHIP if true */ // Default Emergency Callback Mode exit timer private static final int DEFAULT_ECM_EXIT_TIMER_VALUE = 300000; static final String VM_COUNT_CDMA = "vm_count_key_cdma"; private static final String VM_NUMBER_CDMA = "vm_number_key_cdma"; private String mVmNumber = null; static final int RESTART_ECM_TIMER = 0; // restart Ecm timer static 
final int CANCEL_ECM_TIMER = 1; // cancel Ecm timer // Instance Variables CdmaCallTracker mCT; CdmaServiceStateTracker mSST; CdmaSubscriptionSourceManager mCdmaSSM; ArrayList <CdmaMmiCode> mPendingMmis = new ArrayList<CdmaMmiCode>(); RuimPhoneBookInterfaceManager mRuimPhoneBookInterfaceManager; int mCdmaSubscriptionSource = CdmaSubscriptionSourceManager.SUBSCRIPTION_SOURCE_UNKNOWN; PhoneSubInfo mSubInfo; EriManager mEriManager; WakeLock mWakeLock; // mEriFileLoadedRegistrants are informed after the ERI text has been loaded private final RegistrantList mEriFileLoadedRegistrants = new RegistrantList(); // mEcmTimerResetRegistrants are informed after Ecm timer is canceled or re-started private final RegistrantList mEcmTimerResetRegistrants = new RegistrantList(); // mEcmExitRespRegistrant is informed after the phone has been exited //the emergency callback mode //keep track of if phone is in emergency callback mode private boolean mIsPhoneInEcmState; private Registrant mEcmExitRespRegistrant; protected String mImei; protected String mImeiSv; private String mEsn; private String mMeid; // string to define how the carrier specifies its own ota sp number private String mCarrierOtaSpNumSchema; // A runnable which is used to automatically exit from Ecm after a period of time. 
    private Runnable mExitEcmRunnable = new Runnable() {
        @Override
        public void run() {
            exitEmergencyCallbackMode();
        }
    };

    Registrant mPostDialHandler;

    // Build-time system property holding the home operator numeric (MCC+MNC).
    static String PROPERTY_CDMA_HOME_OPERATOR_NUMERIC = "ro.cdma.home.operator.numeric";

    // Constructors

    public CDMAPhone(Context context, CommandsInterface ci, PhoneNotifier notifier) {
        super("CDMA", notifier, context, ci, false);
        initSstIcc();
        init(context, notifier);
    }

    public CDMAPhone(Context context, CommandsInterface ci, PhoneNotifier notifier,
            boolean unitTestMode) {
        super("CDMA", notifier, context, ci, unitTestMode);
        initSstIcc();
        init(context, notifier);
    }

    // Creates the service state tracker; split out so subclasses can override.
    protected void initSstIcc() {
        mSST = new CdmaServiceStateTracker(this);
    }

    /**
     * Common initialization for both constructors: creates the trackers and
     * managers, registers for radio/network events, acquires the ECM wake lock
     * object, restores ECM state after a process crash, and publishes operator
     * system properties.
     */
    protected void init(Context context, PhoneNotifier notifier) {
        mCi.setPhoneType(PhoneConstants.PHONE_TYPE_CDMA);
        mCT = new CdmaCallTracker(this);
        mCdmaSSM = CdmaSubscriptionSourceManager.getInstance(context, mCi, this,
                EVENT_CDMA_SUBSCRIPTION_SOURCE_CHANGED, null);
        mDcTracker = new DcTracker(this);
        mRuimPhoneBookInterfaceManager = new RuimPhoneBookInterfaceManager(this);
        mSubInfo = new PhoneSubInfo(this);
        mEriManager = new EriManager(this, context, EriManager.ERI_FROM_XML);

        mCi.registerForAvailable(this, EVENT_RADIO_AVAILABLE, null);
        mCi.registerForOffOrNotAvailable(this, EVENT_RADIO_OFF_OR_NOT_AVAILABLE, null);
        mCi.registerForOn(this, EVENT_RADIO_ON, null);
        mCi.setOnSuppServiceNotification(this, EVENT_SSN, null);
        mSST.registerForNetworkAttached(this, EVENT_REGISTERED_TO_NETWORK, null);
        mCi.setEmergencyCallbackMode(this, EVENT_EMERGENCY_CALLBACK_MODE_ENTER, null);
        mCi.registerForExitEmergencyCallbackMode(this, EVENT_EXIT_EMERGENCY_CALLBACK_RESPONSE,
                null);

        PowerManager pm = (PowerManager) context.getSystemService(Context.POWER_SERVICE);
        mWakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, LOG_TAG);

        //Change the system setting
        SystemProperties.set(TelephonyProperties.CURRENT_ACTIVE_PHONE,
                Integer.toString(PhoneConstants.PHONE_TYPE_CDMA));

        // This is needed to handle phone process crashes: re-read persisted ECM state.
        String inEcm = SystemProperties.get(TelephonyProperties.PROPERTY_INECM_MODE, "false");
        mIsPhoneInEcmState = inEcm.equals("true");
        if (mIsPhoneInEcmState) {
            // Send a message which will invoke handleExitEmergencyCallbackMode
            mCi.exitEmergencyCallbackMode(obtainMessage(EVENT_EXIT_EMERGENCY_CALLBACK_RESPONSE));
        }

        // get the string that specifies the carrier OTA Sp number
        mCarrierOtaSpNumSchema = SystemProperties.get(
                TelephonyProperties.PROPERTY_OTASP_NUM_SCHEMA, "");

        // Sets operator properties by retrieving from build-time system property
        String operatorAlpha = SystemProperties.get("ro.cdma.home.operator.alpha");
        String operatorNumeric = SystemProperties.get(PROPERTY_CDMA_HOME_OPERATOR_NUMERIC);
        log("init: operatorAlpha='" + operatorAlpha
                + "' operatorNumeric='" + operatorNumeric + "'");
        // Only publish the build-time operator if no 3GPP (SIM) application is present.
        if (mUiccController.getUiccCardApplication(UiccController.APP_FAM_3GPP) == null) {
            log("init: APP_FAM_3GPP == NULL");
            if (!TextUtils.isEmpty(operatorAlpha)) {
                log("init: set 'gsm.sim.operator.alpha' to operator='" + operatorAlpha + "'");
                setSystemProperty(PROPERTY_ICC_OPERATOR_ALPHA, operatorAlpha);
            }
            if (!TextUtils.isEmpty(operatorNumeric)) {
                log("init: set 'gsm.sim.operator.numeric' to operator='" + operatorNumeric + "'");
                setSystemProperty(PROPERTY_ICC_OPERATOR_NUMERIC, operatorNumeric);
            }
            setIsoCountryProperty(operatorNumeric);
        }

        // Sets current entry in the telephony carrier table
        updateCurrentCarrierInProvider(operatorNumeric);

        // Notify voicemails.
        notifier.notifyMessageWaitingChanged(this);
    }

    /**
     * Tears down this phone during a radio-technology switch: unregisters all
     * event handlers and disposes every owned tracker/manager. Synchronized on
     * the PhoneProxy lock so the switch is atomic with respect to callers.
     */
    @Override
    public void dispose() {
        synchronized (PhoneProxy.lockForRadioTechnologyChange) {
            super.dispose();
            log("dispose");

            //Unregister from all former registered events
            unregisterForRuimRecordEvents();
            mCi.unregisterForAvailable(this); //EVENT_RADIO_AVAILABLE
            mCi.unregisterForOffOrNotAvailable(this); //EVENT_RADIO_OFF_OR_NOT_AVAILABLE
            mCi.unregisterForOn(this); //EVENT_RADIO_ON
            mSST.unregisterForNetworkAttached(this); //EVENT_REGISTERED_TO_NETWORK
            mCi.unSetOnSuppServiceNotification(this);
            mCi.unregisterForExitEmergencyCallbackMode(this);
            removeCallbacks(mExitEcmRunnable);

            mPendingMmis.clear();

            //Force all referenced classes to unregister their former registered events
            mCT.dispose();
            mDcTracker.dispose();
            mSST.dispose();
            mCdmaSSM.dispose(this);
            mRuimPhoneBookInterfaceManager.dispose();
            mSubInfo.dispose();
            mEriManager.dispose();
        }
    }

    // Drops references so the disposed phone can be garbage collected.
    @Override
    public void removeReferences() {
        log("removeReferences");
        mRuimPhoneBookInterfaceManager = null;
        mSubInfo = null;
        mCT = null;
        mSST = null;
        mEriManager = null;
        mExitEcmRunnable = null;
        super.removeReferences();
    }

    // Safety net: release the ECM wake lock if it is still held at finalization.
    @Override
    protected void finalize() {
        if (DBG) Rlog.d(LOG_TAG, "CDMAPhone finalized");
        if (mWakeLock.isHeld()) {
            Rlog.e(LOG_TAG, "UNEXPECTED; mWakeLock is held when finalizing.");
            mWakeLock.release();
        }
    }

    @Override
    public ServiceState getServiceState() {
        return mSST.mSS;
    }

    @Override
    public CallTracker getCallTracker() {
        return mCT;
    }

    @Override
    public PhoneConstants.State getState() {
        return mCT.mState;
    }

    @Override
    public ServiceStateTracker getServiceStateTracker() {
        return mSST;
    }

    @Override
    public int getPhoneType() {
        return PhoneConstants.PHONE_TYPE_CDMA;
    }

    // Call transfer is not a CDMA feature.
    @Override
    public boolean canTransfer() {
        Rlog.e(LOG_TAG, "canTransfer: not possible in CDMA");
        return false;
    }

    @Override
    public CdmaCall getRingingCall() {
        return mCT.mRingingCall;
    }

    @Override
    public void setMute(boolean muted) {
        mCT.setMute(muted);
    }

    @Override
    public boolean getMute() {
        return mCT.getMute();
    }

    @Override
    public void conference() {
        // three way calls in CDMA will be handled by feature codes
        Rlog.e(LOG_TAG, "conference: not possible in CDMA");
    }

    @Override
    public void enableEnhancedVoicePrivacy(boolean enable, Message onComplete) {
        mCi.setPreferredVoicePrivacy(enable, onComplete);
    }

    @Override
    public void getEnhancedVoicePrivacy(Message onComplete) {
        mCi.getPreferredVoicePrivacy(onComplete);
    }

    @Override
    public void clearDisconnected() {
        mCT.clearDisconnected();
    }

    /**
     * Maps the data connection tracker's activity onto DataActivityState.
     * Reports NONE whenever the data connection is out of service.
     */
    @Override
    public DataActivityState getDataActivityState() {
        DataActivityState ret = DataActivityState.NONE;

        if (mSST.getCurrentDataConnectionState() == ServiceState.STATE_IN_SERVICE) {
            switch (mDcTracker.getActivity()) {
                case DATAIN:
                    ret = DataActivityState.DATAIN;
                    break;

                case DATAOUT:
                    ret = DataActivityState.DATAOUT;
                    break;

                case DATAINANDOUT:
                    ret = DataActivityState.DATAINANDOUT;
                    break;

                case DORMANT:
                    ret = DataActivityState.DORMANT;
                    break;

                default:
                    ret = DataActivityState.NONE;
                    break;
            }
        }
        return ret;
    }

    @Override
    public Connection dial(String dialString) throws CallStateException {
        // Need to make sure dialString gets parsed properly
        String newDialString = PhoneNumberUtils.stripSeparators(dialString);
        return mCT.dial(newDialString);
    }

    // UUS (user-to-user signaling) is a GSM feature; always rejected here.
    @Override
    public Connection dial(String dialString, UUSInfo uusInfo) throws CallStateException {
        throw new CallStateException("Sending UUS information NOT supported in CDMA!");
    }

    @Override
    public boolean getMessageWaitingIndicator() {
        return (getVoiceMessageCount() > 0);
    }

    @Override
    public List<? extends MmiCode> getPendingMmiCodes() {
        return mPendingMmis;
    }

    @Override
    public void registerForSuppServiceNotification(Handler h, int what, Object obj) {
        Rlog.e(LOG_TAG, "method registerForSuppServiceNotification is NOT supported in CDMA!");
    }

    @Override
    public CdmaCall getBackgroundCall() {
        return mCT.mBackgroundCall;
    }

    @Override
    public boolean handleInCallMmiCommands(String dialString) {
        Rlog.e(LOG_TAG, "method handleInCallMmiCommands is NOT supported in CDMA!");
        return false;
    }

    // True if any of the three calls (foreground/background/ringing) is alive.
    boolean isInCall() {
        CdmaCall.State foregroundCallState = getForegroundCall().getState();
        CdmaCall.State backgroundCallState = getBackgroundCall().getState();
        CdmaCall.State ringingCallState = getRingingCall().getState();

        return (foregroundCallState.isAlive() || backgroundCallState.isAlive()
                || ringingCallState.isAlive());
    }

    @Override
    public void setNetworkSelectionModeAutomatic(Message response) {
        Rlog.e(LOG_TAG, "method setNetworkSelectionModeAutomatic is NOT supported in CDMA!");
    }

    @Override
    public void unregisterForSuppServiceNotification(Handler h) {
        Rlog.e(LOG_TAG, "method unregisterForSuppServiceNotification is NOT supported in CDMA!");
    }

    @Override
    public void acceptCall() throws CallStateException {
        mCT.acceptCall();
    }

    @Override
    public void rejectCall() throws CallStateException {
        mCT.rejectCall();
    }

    @Override
    public void switchHoldingAndActive() throws CallStateException {
        mCT.switchWaitingOrHoldingAndActive();
    }

    @Override
    public String getIccSerialNumber() {
        IccRecords r = mIccRecords.get();
        if (r == null) {
            // to get ICCID from SIMRecords because it is on MF.
            r = mUiccController.getIccRecords(UiccController.APP_FAM_3GPP);
        }
        return (r != null) ? r.getIccId() : null;
    }

    @Override
    public String getLine1Number() {
        return mSST.getMdnNumber();
    }

    @Override
    public String getCdmaPrlVersion() {
        return mSST.getPrlVersion();
    }

    @Override
    public String getCdmaMin() {
        return mSST.getCdmaMin();
    }

    @Override
    public boolean isMinInfoReady() {
        return mSST.isMinInfoReady();
    }

    @Override
    public void getCallWaiting(Message onComplete) {
        mCi.queryCallWaiting(CommandsInterface.SERVICE_CLASS_VOICE, onComplete);
    }

    @Override
    public void setRadioPower(boolean power) {
        mSST.setRadioPower(power);
    }

    @Override
    public String getEsn() {
        return mEsn;
    }

    @Override
    public String getMeid() {
        return mMeid;
    }

    //returns MEID or ESN in CDMA
    @Override
    public String getDeviceId() {
        String id = getMeid();
        // MEID of all zeros means it was never provisioned; fall back to ESN.
        if ((id == null) || id.matches("^0*$")) {
            Rlog.d(LOG_TAG, "getDeviceId(): MEID is not initialized use ESN");
            id = getEsn();
        }
        return id;
    }

    @Override
    public String getDeviceSvn() {
        Rlog.d(LOG_TAG, "getDeviceSvn(): return 0");
        return "0";
    }

    @Override
    public String getSubscriberId() {
        return mSST.getImsi();
    }

    @Override
    public String getGroupIdLevel1() {
        Rlog.e(LOG_TAG, "GID1 is not available in CDMA");
        return null;
    }

    @Override
    public String getImei() {
        Rlog.e(LOG_TAG, "IMEI is not available in CDMA");
        return null;
    }

    @Override
    public boolean canConference() {
        Rlog.e(LOG_TAG, "canConference: not possible in CDMA");
        return false;
    }

    /**
     * Returns the current cell location, with latitude/longitude blanked out
     * when the user has location access turned off.
     */
    @Override
    public CellLocation getCellLocation() {
        CdmaCellLocation loc = mSST.mCellLoc;

        int mode = Settings.Secure.getInt(getContext().getContentResolver(),
                Settings.Secure.LOCATION_MODE, Settings.Secure.LOCATION_MODE_OFF);
        if (mode == Settings.Secure.LOCATION_MODE_OFF) {
            // clear lat/long values for location privacy
            CdmaCellLocation privateLoc = new CdmaCellLocation();
            privateLoc.setCellLocationData(loc.getBaseStationId(),
                    CdmaCellLocation.INVALID_LAT_LONG,
                    CdmaCellLocation.INVALID_LAT_LONG,
                    loc.getSystemId(), loc.getNetworkId());
            loc = privateLoc;
        }
        return loc;
    }

    @Override
    public CdmaCall getForegroundCall() {
        return
                mCT.mForegroundCall;
    }

    @Override
    public void selectNetworkManually(OperatorInfo network, Message response) {
        Rlog.e(LOG_TAG, "selectNetworkManually: not possible in CDMA");
    }

    @Override
    public void setOnPostDialCharacter(Handler h, int what, Object obj) {
        mPostDialHandler = new Registrant(h, what, obj);
    }

    /**
     * Handles PIN/PUK MMI codes dialed by the user. Only PIN/PUK commands are
     * recognized in CDMA; anything else returns false ("not an MMI").
     */
    @Override
    public boolean handlePinMmi(String dialString) {
        CdmaMmiCode mmi = CdmaMmiCode.newFromDialString(dialString, this, mUiccApplication.get());

        if (mmi == null) {
            Rlog.e(LOG_TAG, "Mmi is NULL!");
            return false;
        } else if (mmi.isPinPukCommand()) {
            mPendingMmis.add(mmi);
            mMmiRegistrants.notifyRegistrants(new AsyncResult(null, mmi, null));
            mmi.processCode();
            return true;
        }
        Rlog.e(LOG_TAG, "Unrecognized mmi!");
        return false;
    }

    /**
     * Removes the given MMI from the pending list and notifies registrants that
     * it is complete.
     *
     * @param mmi MMI that is done
     */
    void onMMIDone(CdmaMmiCode mmi) {
        /*
         * Only notify complete if it's on the pending list. Otherwise, it's
         * already been handled (eg, previously canceled).
         */
        if (mPendingMmis.remove(mmi)) {
            mMmiCompleteRegistrants.notifyRegistrants(new AsyncResult(null, mmi, null));
        }
    }

    @Override
    public void setLine1Number(String alphaTag, String number, Message onComplete) {
        Rlog.e(LOG_TAG, "setLine1Number: not possible in CDMA");
    }

    @Override
    public void setCallWaiting(boolean enable, Message onComplete) {
        Rlog.e(LOG_TAG, "method setCallWaiting is NOT supported in CDMA!");
    }

    @Override
    public void updateServiceLocation() {
        mSST.enableSingleLocationUpdate();
    }

    @Override
    public void setDataRoamingEnabled(boolean enable) {
        mDcTracker.setDataOnRoamingEnabled(enable);
    }

    @Override
    public void registerForCdmaOtaStatusChange(Handler h, int what, Object obj) {
        mCi.registerForCdmaOtaProvision(h, what, obj);
    }

    @Override
    public void unregisterForCdmaOtaStatusChange(Handler h) {
        mCi.unregisterForCdmaOtaProvision(h);
    }

    @Override
    public void registerForSubscriptionInfoReady(Handler h, int what, Object obj) {
        mSST.registerForSubscriptionInfoReady(h, what, obj);
    }

    @Override
    public void unregisterForSubscriptionInfoReady(Handler h) {
        mSST.unregisterForSubscriptionInfoReady(h);
    }

    // Single registrant notified when ECM exit completes (see handleExitEmergencyCallbackMode).
    @Override
    public void setOnEcbModeExitResponse(Handler h, int what, Object obj) {
        mEcmExitRespRegistrant = new Registrant(h, what, obj);
    }

    @Override
    public void unsetOnEcbModeExitResponse(Handler h) {
        mEcmExitRespRegistrant.clear();
    }

    @Override
    public void registerForCallWaiting(Handler h, int what, Object obj) {
        mCT.registerForCallWaiting(h, what, obj);
    }

    @Override
    public void unregisterForCallWaiting(Handler h) {
        mCT.unregisterForCallWaiting(h);
    }

    @Override
    public void getNeighboringCids(Message response) {
        /*
         * This is currently not implemented. At least as of June
         * 2009, there is no neighbor cell information available for
         * CDMA because some party is resisting making this
         * information readily available. Consequently, calling this
         * function can have no useful effect. This situation may
         * (and hopefully will) change in the future.
         */
        if (response != null) {
            CommandException ce = new CommandException(
                    CommandException.Error.REQUEST_NOT_SUPPORTED);
            AsyncResult.forMessage(response).exception = ce;
            response.sendToTarget();
        }
    }

    /**
     * Computes the data connection state for the given APN type from the
     * service-state tracker and data connection tracker. Data is reported
     * SUSPENDED while a voice call is active unless the network supports
     * concurrent voice and data.
     */
    @Override
    public PhoneConstants.DataState getDataConnectionState(String apnType) {
        PhoneConstants.DataState ret = PhoneConstants.DataState.DISCONNECTED;

        if (mSST == null) {
            // Radio Technology Change is ongoing, dispose() and removeReferences() have
            // already been called
            ret = PhoneConstants.DataState.DISCONNECTED;
        } else if (mSST.getCurrentDataConnectionState() != ServiceState.STATE_IN_SERVICE) {
            // If we're out of service, open TCP sockets may still work
            // but no data will flow
            ret = PhoneConstants.DataState.DISCONNECTED;
        } else if (mDcTracker.isApnTypeEnabled(apnType) == false
                || mDcTracker.isApnTypeActive(apnType) == false) {
            ret = PhoneConstants.DataState.DISCONNECTED;
        } else {
            switch (mDcTracker.getState(apnType)) {
                case RETRYING:
                case FAILED:
                case IDLE:
                    ret = PhoneConstants.DataState.DISCONNECTED;
                    break;

                case CONNECTED:
                case DISCONNECTING:
                    if (mCT.mState != PhoneConstants.State.IDLE
                            && !mSST.isConcurrentVoiceAndDataAllowed()) {
                        ret = PhoneConstants.DataState.SUSPENDED;
                    } else {
                        ret = PhoneConstants.DataState.CONNECTED;
                    }
                    break;

                case CONNECTING:
                case SCANNING:
                    ret = PhoneConstants.DataState.CONNECTING;
                    break;
            }
        }

        log("getDataConnectionState apnType=" + apnType + " ret=" + ret);
        return ret;
    }

    @Override
    public void sendUssdResponse(String ussdMessge) {
        Rlog.e(LOG_TAG, "sendUssdResponse: not possible in CDMA");
    }

    // Sends a DTMF tone only while off-hook and only for valid 12-key characters.
    @Override
    public void sendDtmf(char c) {
        if (!PhoneNumberUtils.is12Key(c)) {
            Rlog.e(LOG_TAG, "sendDtmf called with invalid character '" + c + "'");
        } else {
            if (mCT.mState == PhoneConstants.State.OFFHOOK) {
                mCi.sendDtmf(c, null);
            }
        }
    }

    @Override
    public void startDtmf(char c) {
        if (!PhoneNumberUtils.is12Key(c)) {
            Rlog.e(LOG_TAG, "startDtmf called with invalid character '" + c + "'");
        } else {
            mCi.startDtmf(c, null);
        }
    }

    @Override
    public void stopDtmf() {
        mCi.stopDtmf(null);
    }

    /**
     * Sends a burst of DTMF tones. The whole string is validated first; if any
     * character is not a valid 12-key digit the burst is not sent at all.
     */
    @Override
    public void sendBurstDtmf(String dtmfString, int on, int off, Message onComplete) {
        boolean check = true;
        for (int itr = 0; itr < dtmfString.length(); itr++) {
            if (!PhoneNumberUtils.is12Key(dtmfString.charAt(itr))) {
                Rlog.e(LOG_TAG,
                        "sendDtmf called with invalid character '" + dtmfString.charAt(itr) + "'");
                check = false;
                break;
            }
        }
        if ((mCT.mState == PhoneConstants.State.OFFHOOK) && (check)) {
            mCi.sendBurstDtmf(dtmfString, on, off, onComplete);
        }
    }

    @Override
    public void getAvailableNetworks(Message response) {
        Rlog.e(LOG_TAG, "getAvailableNetworks: not possible in CDMA");
    }

    @Override
    public void setOutgoingCallerIdDisplay(int commandInterfaceCLIRMode, Message onComplete) {
        Rlog.e(LOG_TAG, "setOutgoingCallerIdDisplay: not possible in CDMA");
    }

    @Override
    public void enableLocationUpdates() {
        mSST.enableLocationUpdates();
    }

    @Override
    public void disableLocationUpdates() {
        mSST.disableLocationUpdates();
    }

    @Override
    public void getDataCallList(Message response) {
        mCi.getDataCallList(response);
    }

    @Override
    public boolean getDataRoamingEnabled() {
        return mDcTracker.getDataOnRoamingEnabled();
    }

    /**
     * Stores the voicemail number and asks the RUIM to persist it; the result
     * comes back via EVENT_SET_VM_NUMBER_DONE in handleMessage().
     */
    @Override
    public void setVoiceMailNumber(String alphaTag, String voiceMailNumber, Message onComplete) {
        Message resp;
        mVmNumber = voiceMailNumber;
        resp = obtainMessage(EVENT_SET_VM_NUMBER_DONE, 0, 0, onComplete);
        IccRecords r = mIccRecords.get();
        if (r != null) {
            r.setVoiceMailNumber(alphaTag, mVmNumber, resp);
        }
    }

    /**
     * Returns the voicemail number from SharedPreferences, defaulting to the
     * subscriber's own number or "*86" depending on platform configuration.
     */
    @Override
    public String getVoiceMailNumber() {
        String number = null;
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getContext());
        // TODO: The default value of voicemail number should be read from a system property

        // Read platform settings for dynamic voicemail number
        if (getContext().getResources().getBoolean(com.android.internal
                .R.bool.config_telephony_use_own_number_for_voicemail)) {
            number = sp.getString(VM_NUMBER_CDMA, getLine1Number());
        } else {
            number = sp.getString(VM_NUMBER_CDMA, "*86");
        }
        return number;
    }

    /*
     Returns Number of Voicemails
     * @hide
     */
    @Override
    public int getVoiceMessageCount() {
        IccRecords r = mIccRecords.get();
        int voicemailCount = (r != null) ? r.getVoiceMessageCount() : 0;
        // If mRuimRecords.getVoiceMessageCount returns zero, then there is possibility
        // that phone was power cycled and would have lost the voicemail count.
        // So get the count from preferences.
        if (voicemailCount == 0) {
            SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getContext());
            voicemailCount = sp.getInt(VM_COUNT_CDMA, 0);
        }
        return voicemailCount;
    }

    @Override
    public String getVoiceMailAlphaTag() {
        // TODO: Where can we get this value has to be clarified with QC.
        String ret = "";//TODO: Remove = "", if we know where to get this value.

        //ret = mSIMRecords.getVoiceMailAlphaTag();

        if (ret == null || ret.length() == 0) {
            return mContext.getText(
                    com.android.internal.R.string.defaultVoiceMailAlphaTag).toString();
        }

        return ret;
    }

    @Override
    public void getCallForwardingOption(int commandInterfaceCFReason, Message onComplete) {
        Rlog.e(LOG_TAG, "getCallForwardingOption: not possible in CDMA");
    }

    @Override
    public void setCallForwardingOption(int commandInterfaceCFAction,
            int commandInterfaceCFReason,
            String dialingNumber,
            int timerSeconds,
            Message onComplete) {
        Rlog.e(LOG_TAG, "setCallForwardingOption: not possible in CDMA");
    }

    @Override
    public void getOutgoingCallerIdDisplay(Message onComplete) {
        Rlog.e(LOG_TAG, "getOutgoingCallerIdDisplay: not possible in CDMA");
    }

    @Override
    public boolean getCallForwardingIndicator() {
        Rlog.e(LOG_TAG, "getCallForwardingIndicator: not possible in CDMA");
        return false;
    }

    @Override
    public void explicitCallTransfer() {
        Rlog.e(LOG_TAG, "explicitCallTransfer: not possible in CDMA");
    }

    @Override
    public String getLine1AlphaTag() {
        Rlog.e(LOG_TAG, "getLine1AlphaTag: not possible in CDMA");
        return null;
    }

    /**
     * Notify any interested party of a Phone state change
     * {@link com.android.internal.telephony.PhoneConstants.State}
     */
    /*package*/ void notifyPhoneStateChanged() {
        mNotifier.notifyPhoneState(this);
    }

    /**
     * Notify registrants of a change in the call state. This notifies changes in
     * {@link com.android.internal.telephony.Call.State}. Use this when changes
     * in the precise call state are needed, else use notifyPhoneStateChanged.
     */
    /*package*/ void notifyPreciseCallStateChanged() {
        /* we'd love it if this was package-scoped*/
        super.notifyPreciseCallStateChangedP();
    }

    void notifyServiceStateChanged(ServiceState ss) {
        super.notifyServiceStateChangedP(ss);
    }

    void notifyLocationChanged() {
        mNotifier.notifyCellLocation(this);
    }

    /*package*/ void notifyNewRingingConnection(Connection c) {
        /* we'd love it if this was package-scoped*/
        super.notifyNewRingingConnectionP(c);
    }

    /*package*/ void notifyDisconnect(Connection cn) {
        mDisconnectRegistrants.notifyResult(cn);
    }

    void notifyUnknownConnection() {
        mUnknownConnectionRegistrants.notifyResult(this);
    }

    @Override
    public boolean isInEmergencyCall() {
        return mCT.isInEmergencyCall();
    }

    @Override
    public boolean isInEcm() {
        return mIsPhoneInEcmState;
    }

    // Broadcasts the current ECM state as a sticky intent to all users.
    void sendEmergencyCallbackModeChange() {
        //Send an Intent
        Intent intent = new Intent(TelephonyIntents.ACTION_EMERGENCY_CALLBACK_MODE_CHANGED);
        intent.putExtra(PhoneConstants.PHONE_IN_ECM_STATE, mIsPhoneInEcmState);
        ActivityManagerNative.broadcastStickyIntent(intent, null, UserHandle.USER_ALL);
        if (DBG) Rlog.d(LOG_TAG, "sendEmergencyCallbackModeChange");
    }

    /**
     * Requests ECM exit from the radio; the actual state change happens in
     * handleExitEmergencyCallbackMode() when the response arrives.
     */
    @Override
    public void exitEmergencyCallbackMode() {
        if (mWakeLock.isHeld()) {
            mWakeLock.release();
        }
        // Send a message which will invoke handleExitEmergencyCallbackMode
        mCi.exitEmergencyCallbackMode(obtainMessage(EVENT_EXIT_EMERGENCY_CALLBACK_RESPONSE));
    }

    /**
     * Enters ECM: persists the state, broadcasts the change, schedules the
     * automatic-exit runnable and holds a wake lock for the duration.
     */
    private void handleEnterEmergencyCallbackMode(Message msg) {
        if (DBG) {
            Rlog.d(LOG_TAG, "handleEnterEmergencyCallbackMode,mIsPhoneInEcmState= "
                    + mIsPhoneInEcmState);
        }
        // if phone is not in Ecm mode, and it's changed to Ecm mode
        if (mIsPhoneInEcmState == false) {
            mIsPhoneInEcmState = true;
            // notify change
            sendEmergencyCallbackModeChange();
            setSystemProperty(TelephonyProperties.PROPERTY_INECM_MODE, "true");

            // Post this runnable so we will automatically exit
            // if no one invokes exitEmergencyCallbackMode() directly.
            long delayInMillis = SystemProperties.getLong(
                    TelephonyProperties.PROPERTY_ECM_EXIT_TIMER, DEFAULT_ECM_EXIT_TIMER_VALUE);
            postDelayed(mExitEcmRunnable, delayInMillis);
            // We don't want to go to sleep while in Ecm
            mWakeLock.acquire();
        }
    }

    /**
     * Handles the radio's response to an ECM exit request: notifies the
     * registrant, and on success clears the persisted state, broadcasts the
     * change and re-enables data.
     */
    private void handleExitEmergencyCallbackMode(Message msg) {
        AsyncResult ar = (AsyncResult) msg.obj;
        if (DBG) {
            Rlog.d(LOG_TAG, "handleExitEmergencyCallbackMode,ar.exception , mIsPhoneInEcmState "
                    + ar.exception + mIsPhoneInEcmState);
        }
        // Remove pending exit Ecm runnable, if any
        removeCallbacks(mExitEcmRunnable);

        if (mEcmExitRespRegistrant != null) {
            mEcmExitRespRegistrant.notifyRegistrant(ar);
        }
        // if exiting ecm success
        if (ar.exception == null) {
            if (mIsPhoneInEcmState) {
                mIsPhoneInEcmState = false;
                setSystemProperty(TelephonyProperties.PROPERTY_INECM_MODE, "false");
            }
            // send an Intent
            sendEmergencyCallbackModeChange();
            // Re-initiate data connection
            mDcTracker.setInternalDataEnabled(true);
        }
    }

    /**
     * Handle to cancel or restart Ecm timer in emergency call back mode
     * if action is CANCEL_ECM_TIMER, cancel Ecm timer and notify apps the timer is canceled;
     * otherwise, restart Ecm timer and notify apps the timer is restarted.
     */
    void handleTimerInEmergencyCallbackMode(int action) {
        switch (action) {
            case CANCEL_ECM_TIMER:
                removeCallbacks(mExitEcmRunnable);
                mEcmTimerResetRegistrants.notifyResult(Boolean.TRUE);
                break;
            case RESTART_ECM_TIMER:
                long delayInMillis = SystemProperties.getLong(
                        TelephonyProperties.PROPERTY_ECM_EXIT_TIMER, DEFAULT_ECM_EXIT_TIMER_VALUE);
                postDelayed(mExitEcmRunnable, delayInMillis);
                mEcmTimerResetRegistrants.notifyResult(Boolean.FALSE);
                break;
            default:
                Rlog.e(LOG_TAG, "handleTimerInEmergencyCallbackMode, unsupported action " + action);
        }
    }

    /**
     * Registration point for Ecm timer reset
     * @param h handler to notify
     * @param what User-defined message code
     * @param obj placed in Message.obj
     */
    @Override
    public void registerForEcmTimerReset(Handler h, int what, Object obj) {
        mEcmTimerResetRegistrants.addUnique(h, what, obj);
    }

    @Override
    public void unregisterForEcmTimerReset(Handler h) {
        mEcmTimerResetRegistrants.remove(h);
    }

    /**
     * Central dispatcher for radio and internal events registered in init().
     * Messages arriving after dispose() (stale phone) are dropped.
     */
    @Override
    public void handleMessage(Message msg) {
        AsyncResult ar;
        Message onComplete;

        if (!mIsTheCurrentActivePhone) {
            Rlog.e(LOG_TAG, "Received message " + msg +
                    "[" + msg.what + "] while being destroyed. Ignoring.");
            return;
        }
        switch (msg.what) {
            case EVENT_RADIO_AVAILABLE: {
                mCi.getBasebandVersion(obtainMessage(EVENT_GET_BASEBAND_VERSION_DONE));

                mCi.getDeviceIdentity(obtainMessage(EVENT_GET_DEVICE_IDENTITY_DONE));
            }
            break;

            case EVENT_GET_BASEBAND_VERSION_DONE: {
                ar = (AsyncResult) msg.obj;

                if (ar.exception != null) {
                    break;
                }

                if (DBG) Rlog.d(LOG_TAG, "Baseband version: " + ar.result);
                setSystemProperty(TelephonyProperties.PROPERTY_BASEBAND_VERSION,
                        (String) ar.result);
            }
            break;

            case EVENT_GET_DEVICE_IDENTITY_DONE: {
                ar = (AsyncResult) msg.obj;

                if (ar.exception != null) {
                    break;
                }
                // Result order fixed by the RIL: IMEI, IMEISV, ESN, MEID.
                String[] respId = (String[]) ar.result;
                mImei = respId[0];
                mImeiSv = respId[1];
                mEsn = respId[2];
                mMeid = respId[3];
            }
            break;

            case EVENT_EMERGENCY_CALLBACK_MODE_ENTER: {
                handleEnterEmergencyCallbackMode(msg);
            }
            break;

            case EVENT_ICC_RECORD_EVENTS:
                ar = (AsyncResult) msg.obj;
                processIccRecordEvents((Integer) ar.result);
                break;

            case EVENT_EXIT_EMERGENCY_CALLBACK_RESPONSE: {
                handleExitEmergencyCallbackMode(msg);
            }
            break;

            case EVENT_RUIM_RECORDS_LOADED: {
                Rlog.d(LOG_TAG, "Event EVENT_RUIM_RECORDS_LOADED Received");
                updateCurrentCarrierInProvider();
            }
            break;

            case EVENT_RADIO_OFF_OR_NOT_AVAILABLE: {
                Rlog.d(LOG_TAG, "Event EVENT_RADIO_OFF_OR_NOT_AVAILABLE Received");
            }
            break;

            case EVENT_RADIO_ON: {
                Rlog.d(LOG_TAG, "Event EVENT_RADIO_ON Received");
                handleCdmaSubscriptionSource(mCdmaSSM.getCdmaSubscriptionSource());
            }
            break;

            case EVENT_CDMA_SUBSCRIPTION_SOURCE_CHANGED: {
                Rlog.d(LOG_TAG, "EVENT_CDMA_SUBSCRIPTION_SOURCE_CHANGED");
                handleCdmaSubscriptionSource(mCdmaSSM.getCdmaSubscriptionSource());
            }
            break;

            case EVENT_SSN: {
                Rlog.d(LOG_TAG, "Event EVENT_SSN Received");
            }
            break;

            case EVENT_REGISTERED_TO_NETWORK: {
                Rlog.d(LOG_TAG, "Event EVENT_REGISTERED_TO_NETWORK Received");
            }
            break;

            case EVENT_NV_READY: {
                Rlog.d(LOG_TAG, "Event EVENT_NV_READY Received");
                prepareEri();
            }
            break;

            case EVENT_SET_VM_NUMBER_DONE: {
                ar = (AsyncResult) msg.obj;
                // RUIM rejected the write (e.g. no EF); persist locally instead.
                if (IccException.class.isInstance(ar.exception)) {
                    storeVoiceMailNumber(mVmNumber);
                    ar.exception = null;
                }
                onComplete = (Message) ar.userObj;
                if (onComplete != null) {
                    AsyncResult.forMessage(onComplete, ar.result, ar.exception);
                    onComplete.sendToTarget();
                }
            }
            break;

            default: {
                super.handleMessage(msg);
            }
        }
    }

    /**
     * Reconciles this phone's UICC application/record references with the
     * current card state, preferring the 3GPP2 (CSIM/RUIM) application and
     * falling back to 3GPP. Swaps registrations when the application changes.
     */
    @Override
    protected void onUpdateIccAvailability() {
        if (mUiccController == null) {
            return;
        }

        UiccCardApplication newUiccApplication =
                mUiccController.getUiccCardApplication(UiccController.APP_FAM_3GPP2);
        if (newUiccApplication == null) {
            log("can't find 3GPP2 application; trying APP_FAM_3GPP");
            newUiccApplication =
                    mUiccController.getUiccCardApplication(UiccController.APP_FAM_3GPP);
        }

        UiccCardApplication app = mUiccApplication.get();
        if (app != newUiccApplication) {
            if (app != null) {
                log("Removing stale icc objects.");
                if (mIccRecords.get() != null) {
                    unregisterForRuimRecordEvents();
                    mRuimPhoneBookInterfaceManager.updateIccRecords(null);
                }
                mIccRecords.set(null);
                mUiccApplication.set(null);
            }
            if (newUiccApplication != null) {
                log("New Uicc application found");
                mUiccApplication.set(newUiccApplication);
                mIccRecords.set(newUiccApplication.getIccRecords());
                registerForRuimRecordEvents();
                mRuimPhoneBookInterfaceManager.updateIccRecords(mIccRecords.get());
            }
        }
    }

    // Dispatches record-level events delivered via EVENT_ICC_RECORD_EVENTS.
    private void processIccRecordEvents(int eventCode) {
        switch (eventCode) {
            case RuimRecords.EVENT_MWI:
                notifyMessageWaitingIndicator();
                break;

            default:
                Rlog.e(LOG_TAG, "Unknown icc records event code " + eventCode);
                break;
        }
    }

    /**
     * Handles the call to get the subscription source
     *
     * @param newSubscriptionSource holds the new CDMA subscription source value
     */
    private void handleCdmaSubscriptionSource(int newSubscriptionSource) {
        if (newSubscriptionSource != mCdmaSubscriptionSource) {
            mCdmaSubscriptionSource = newSubscriptionSource;
            if (newSubscriptionSource == CDMA_SUBSCRIPTION_NV) {
                // NV is ready when subscription source is NV
                sendMessage(obtainMessage(EVENT_NV_READY));
            }
        }
    }

    /**
     * Retrieves the PhoneSubInfo of the CDMAPhone
     */
    @Override
    public PhoneSubInfo getPhoneSubInfo() {
        return
mSubInfo; } /** * Retrieves the IccPhoneBookInterfaceManager of the CDMAPhone */ @Override public IccPhoneBookInterfaceManager getIccPhoneBookInterfaceManager() { return mRuimPhoneBookInterfaceManager; } public void registerForEriFileLoaded(Handler h, int what, Object obj) { Registrant r = new Registrant (h, what, obj); mEriFileLoadedRegistrants.add(r); } public void unregisterForEriFileLoaded(Handler h) { mEriFileLoadedRegistrants.remove(h); } // override for allowing access from other classes of this package /** * {@inheritDoc} */ @Override public final void setSystemProperty(String property, String value) { super.setSystemProperty(property, value); } /** * Activate or deactivate cell broadcast SMS. * * @param activate 0 = activate, 1 = deactivate * @param response Callback message is empty on completion */ @Override public void activateCellBroadcastSms(int activate, Message response) { Rlog.e(LOG_TAG, "[CDMAPhone] activateCellBroadcastSms() is obsolete; use SmsManager"); response.sendToTarget(); } /** * Query the current configuration of cdma cell broadcast SMS. * * @param response Callback message is empty on completion */ @Override public void getCellBroadcastSmsConfig(Message response) { Rlog.e(LOG_TAG, "[CDMAPhone] getCellBroadcastSmsConfig() is obsolete; use SmsManager"); response.sendToTarget(); } /** * Configure cdma cell broadcast SMS. * * @param response Callback message is empty on completion */ @Override public void setCellBroadcastSmsConfig(int[] configValuesArray, Message response) { Rlog.e(LOG_TAG, "[CDMAPhone] setCellBroadcastSmsConfig() is obsolete; use SmsManager"); response.sendToTarget(); } /** * Returns true if OTA Service Provisioning needs to be performed. 
*/ @Override public boolean needsOtaServiceProvisioning() { return mSST.getOtasp() != ServiceStateTracker.OTASP_NOT_NEEDED; } private static final String IS683A_FEATURE_CODE = "*228"; private static final int IS683A_FEATURE_CODE_NUM_DIGITS = 4; private static final int IS683A_SYS_SEL_CODE_NUM_DIGITS = 2; private static final int IS683A_SYS_SEL_CODE_OFFSET = 4; private static final int IS683_CONST_800MHZ_A_BAND = 0; private static final int IS683_CONST_800MHZ_B_BAND = 1; private static final int IS683_CONST_1900MHZ_A_BLOCK = 2; private static final int IS683_CONST_1900MHZ_B_BLOCK = 3; private static final int IS683_CONST_1900MHZ_C_BLOCK = 4; private static final int IS683_CONST_1900MHZ_D_BLOCK = 5; private static final int IS683_CONST_1900MHZ_E_BLOCK = 6; private static final int IS683_CONST_1900MHZ_F_BLOCK = 7; private static final int INVALID_SYSTEM_SELECTION_CODE = -1; private static boolean isIs683OtaSpDialStr(String dialStr) { int sysSelCodeInt; boolean isOtaspDialString = false; int dialStrLen = dialStr.length(); if (dialStrLen == IS683A_FEATURE_CODE_NUM_DIGITS) { if (dialStr.equals(IS683A_FEATURE_CODE)) { isOtaspDialString = true; } } else { sysSelCodeInt = extractSelCodeFromOtaSpNum(dialStr); switch (sysSelCodeInt) { case IS683_CONST_800MHZ_A_BAND: case IS683_CONST_800MHZ_B_BAND: case IS683_CONST_1900MHZ_A_BLOCK: case IS683_CONST_1900MHZ_B_BLOCK: case IS683_CONST_1900MHZ_C_BLOCK: case IS683_CONST_1900MHZ_D_BLOCK: case IS683_CONST_1900MHZ_E_BLOCK: case IS683_CONST_1900MHZ_F_BLOCK: isOtaspDialString = true; break; default: break; } } return isOtaspDialString; } /** * This function extracts the system selection code from the dial string. 
*/ private static int extractSelCodeFromOtaSpNum(String dialStr) { int dialStrLen = dialStr.length(); int sysSelCodeInt = INVALID_SYSTEM_SELECTION_CODE; if ((dialStr.regionMatches(0, IS683A_FEATURE_CODE, 0, IS683A_FEATURE_CODE_NUM_DIGITS)) && (dialStrLen >= (IS683A_FEATURE_CODE_NUM_DIGITS + IS683A_SYS_SEL_CODE_NUM_DIGITS))) { // Since we checked the condition above, the system selection code // extracted from dialStr will not cause any exception sysSelCodeInt = Integer.parseInt ( dialStr.substring (IS683A_FEATURE_CODE_NUM_DIGITS, IS683A_FEATURE_CODE_NUM_DIGITS + IS683A_SYS_SEL_CODE_NUM_DIGITS)); } if (DBG) Rlog.d(LOG_TAG, "extractSelCodeFromOtaSpNum " + sysSelCodeInt); return sysSelCodeInt; } /** * This function checks if the system selection code extracted from * the dial string "sysSelCodeInt' is the system selection code specified * in the carrier ota sp number schema "sch". */ private static boolean checkOtaSpNumBasedOnSysSelCode (int sysSelCodeInt, String sch[]) { boolean isOtaSpNum = false; try { // Get how many number of system selection code ranges int selRc = Integer.parseInt(sch[1]); for (int i = 0; i < selRc; i++) { if (!TextUtils.isEmpty(sch[i+2]) && !TextUtils.isEmpty(sch[i+3])) { int selMin = Integer.parseInt(sch[i+2]); int selMax = Integer.parseInt(sch[i+3]); // Check if the selection code extracted from the dial string falls // within any of the range pairs specified in the schema. if ((sysSelCodeInt >= selMin) && (sysSelCodeInt <= selMax)) { isOtaSpNum = true; break; } } } } catch (NumberFormatException ex) { // If the carrier ota sp number schema is not correct, we still allow dial // and only log the error: Rlog.e(LOG_TAG, "checkOtaSpNumBasedOnSysSelCode, error", ex); } return isOtaSpNum; } // Define the pattern/format for carrier specified OTASP number schema. // It separates by comma and/or whitespace. 
// Compiled once: splits the carrier schema string on commas and/or whitespace.
private static Pattern pOtaSpNumSchema = Pattern.compile("[,\\s]+");

/**
 * The following function checks if a dial string is a carrier specified
 * OTASP number or not by checking against the OTASP number schema stored
 * in PROPERTY_OTASP_NUM_SCHEMA.
 *
 * Currently, there are 2 schemas for carriers to specify the OTASP number:
 * 1) Use system selection code:
 *    The schema is:
 *    SELC,the # of code pairs,min1,max1,min2,max2,...
 *    e.g "SELC,3,10,20,30,40,60,70" indicates that there are 3 pairs of
 *    selection codes, and they are {10,20}, {30,40} and {60,70} respectively.
 *
 * 2) Use feature code:
 *    The schema is:
 *    "FC,length of feature code,feature code".
 *    e.g "FC,2,*2" indicates that the length of the feature code is 2,
 *    and the code itself is "*2".
 */
private boolean isCarrierOtaSpNum(String dialStr) {
    boolean isOtaSpNum = false;
    int sysSelCodeInt = extractSelCodeFromOtaSpNum(dialStr);
    // Early out: dial string does not even carry a "*228xx" selection code.
    if (sysSelCodeInt == INVALID_SYSTEM_SELECTION_CODE) {
        return isOtaSpNum;
    }
    // mCarrierOtaSpNumSchema is retrieved from PROPERTY_OTASP_NUM_SCHEMA:
    if (!TextUtils.isEmpty(mCarrierOtaSpNumSchema)) {
        Matcher m = pOtaSpNumSchema.matcher(mCarrierOtaSpNumSchema);
        if (DBG) {
            Rlog.d(LOG_TAG, "isCarrierOtaSpNum,schema" + mCarrierOtaSpNumSchema);
        }
        // find() is only used to confirm the schema contains a separator at
        // all; the actual tokenization is done by split() below.
        if (m.find()) {
            String sch[] = pOtaSpNumSchema.split(mCarrierOtaSpNumSchema);
            // If carrier uses system selection code mechanism
            if (!TextUtils.isEmpty(sch[0]) && sch[0].equals("SELC")) {
                if (sysSelCodeInt != INVALID_SYSTEM_SELECTION_CODE) {
                    isOtaSpNum = checkOtaSpNumBasedOnSysSelCode(sysSelCodeInt, sch);
                } else {
                    if (DBG) {
                        Rlog.d(LOG_TAG, "isCarrierOtaSpNum,sysSelCodeInt is invalid");
                    }
                }
            } else if (!TextUtils.isEmpty(sch[0]) && sch[0].equals("FC")) {
                // Feature-code schema: match the dial string's prefix against
                // the carrier-declared feature code of length sch[1].
                int fcLen = Integer.parseInt(sch[1]);
                String fc = sch[2];
                if (dialStr.regionMatches(0, fc, 0, fcLen)) {
                    isOtaSpNum = true;
                } else {
                    if (DBG) Rlog.d(LOG_TAG, "isCarrierOtaSpNum,not otasp number");
                }
            } else {
                if (DBG) {
                    Rlog.d(LOG_TAG, "isCarrierOtaSpNum,ota schema not supported" + sch[0]);
                }
            }
        } else {
            if (DBG) {
                Rlog.d(LOG_TAG, "isCarrierOtaSpNum,ota schema pattern not right" +
                        mCarrierOtaSpNumSchema);
            }
        }
    } else {
        if (DBG) Rlog.d(LOG_TAG, "isCarrierOtaSpNum,ota schema pattern empty");
    }
    return isOtaSpNum;
}

/**
 * isOTASPNumber: checks a given number against the IS-683A OTASP dial string and carrier
 * OTASP dial string.
 *
 * @param dialStr the number to look up.
 * @return true if the number is in IS-683A OTASP dial string or carrier OTASP dial string
 */
@Override
public boolean isOtaSpNumber(String dialStr) {
    boolean isOtaSpNum = false;
    // Strip post-dial characters etc. before matching; may return null for
    // strings with no dialable network portion.
    String dialableStr = PhoneNumberUtils.extractNetworkPortionAlt(dialStr);
    if (dialableStr != null) {
        // Check the standard IS-683A form first, then the carrier schema.
        isOtaSpNum = isIs683OtaSpDialStr(dialableStr);
        if (isOtaSpNum == false) {
            isOtaSpNum = isCarrierOtaSpNum(dialableStr);
        }
    }
    if (DBG) Rlog.d(LOG_TAG, "isOtaSpNumber " + isOtaSpNum);
    return isOtaSpNum;
}

// Delegates to the current ServiceState for the roaming-indicator icon index.
@Override
public int getCdmaEriIconIndex() {
    return getServiceState().getCdmaEriIconIndex();
}

/**
 * Returns the CDMA ERI icon mode,
 * 0 - ON
 * 1 - FLASHING
 */
@Override
public int getCdmaEriIconMode() {
    return getServiceState().getCdmaEriIconMode();
}

/**
 * Returns the CDMA ERI text,
 */
@Override
public String getCdmaEriText() {
    // The ERI manager maps the (current, default) roaming indicator pair to
    // the display text.
    int roamInd = getServiceState().getCdmaRoamingIndicator();
    int defRoamInd = getServiceState().getCdmaDefaultRoamingIndicator();
    return mEriManager.getCdmaEriText(roamInd, defRoamInd);
}

/**
 * Store the voicemail number in preferences
 */
private void storeVoiceMailNumber(String number) {
    // Update the preference value of voicemail number
    SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getContext());
    SharedPreferences.Editor editor = sp.edit();
    editor.putString(VM_NUMBER_CDMA, number);
    editor.apply();
}

/**
 * Sets PROPERTY_ICC_OPERATOR_ISO_COUNTRY property
 *
 * Derives the ISO country code from the first three digits (the MCC) of
 * the operator numeric; clears the property when the numeric is empty.
 */
private void setIsoCountryProperty(String operatorNumeric) {
    if (TextUtils.isEmpty(operatorNumeric)) {
        log("setIsoCountryProperty: clear 'gsm.sim.operator.iso-country'");
        setSystemProperty(PROPERTY_ICC_OPERATOR_ISO_COUNTRY, "");
    } else {
        String iso = "";
        try {
            iso = MccTable.countryCodeForMcc(Integer.parseInt(
                    operatorNumeric.substring(0, 3)));
        } catch (NumberFormatException ex) {
            // Non-numeric MCC: leave iso empty and log.
            loge("setIsoCountryProperty: countryCodeForMcc error", ex);
        } catch (StringIndexOutOfBoundsException ex) {
            // Numeric shorter than 3 digits: leave iso empty and log.
            loge("setIsoCountryProperty: countryCodeForMcc error", ex);
        }
        log("setIsoCountryProperty: set 'gsm.sim.operator.iso-country' to iso=" + iso);
        setSystemProperty(PROPERTY_ICC_OPERATOR_ISO_COUNTRY, iso);
    }
}

/**
 * Sets the "current" field in the telephony provider according to the
 * build-time operator numeric property
 *
 * @return true for success; false otherwise.
 */
boolean updateCurrentCarrierInProvider(String operatorNumeric) {
    log("CDMAPhone: updateCurrentCarrierInProvider called");
    if (!TextUtils.isEmpty(operatorNumeric)) {
        try {
            Uri uri = Uri.withAppendedPath(Telephony.Carriers.CONTENT_URI, "current");
            ContentValues map = new ContentValues();
            map.put(Telephony.Carriers.NUMERIC, operatorNumeric);
            log("updateCurrentCarrierInProvider from system: numeric=" + operatorNumeric);
            getContext().getContentResolver().insert(uri, map);

            // Updates MCC MNC device configuration information
            MccTable.updateMccMncConfiguration(mContext, operatorNumeric, false);

            return true;
        } catch (SQLException e) {
            Rlog.e(LOG_TAG, "Can't store current operator", e);
        }
    }
    return false;
}

/**
 * Sets the "current" field in the telephony provider according to the SIM's operator.
 * Implemented in {@link CDMALTEPhone} for CDMA/LTE devices.
 *
 * @return true for success; false otherwise.
 */
boolean updateCurrentCarrierInProvider() {
    // No-op success in the base CDMA phone; CDMALTEPhone overrides this.
    return true;
}

// Loads the ERI file and notifies registrants once it is available.
public void prepareEri() {
    mEriManager.loadEriFile();
    if (mEriManager.isEriFileLoaded()) {
        // when the ERI file is loaded
        log("ERI read, notify registrants");
        mEriFileLoadedRegistrants.notifyRegistrants();
    }
}

public boolean isEriFileLoaded() {
    return mEriManager.isEriFileLoaded();
}

// Subscribes this handler to RUIM record events; safe no-op when no ICC
// records are present.
protected void registerForRuimRecordEvents() {
    IccRecords r = mIccRecords.get();
    if (r == null) {
        return;
    }
    r.registerForRecordsEvents(this, EVENT_ICC_RECORD_EVENTS, null);
    r.registerForRecordsLoaded(this, EVENT_RUIM_RECORDS_LOADED, null);
}

// Mirror of registerForRuimRecordEvents(); unhooks both registrations.
protected void unregisterForRuimRecordEvents() {
    IccRecords r = mIccRecords.get();
    if (r == null) {
        return;
    }
    r.unregisterForRecordsEvents(this);
    r.unregisterForRecordsLoaded(this);
}

// Debug-gated logging helpers shared by the methods above.
protected void log(String s) {
    if (DBG) Rlog.d(LOG_TAG, s);
}

protected void loge(String s, Exception e) {
    if (DBG) Rlog.e(LOG_TAG, s, e);
}

// Dumps phone state for "adb shell dumpsys"; IMEI/ESN/MEID identifiers are
// only printed when verbose debugging (VDBG) is enabled.
@Override
public void dump(FileDescriptor fd, PrintWriter pw, String[] args) {
    pw.println("CDMAPhone extends:");
    super.dump(fd, pw, args);
    pw.println(" mVmNumber=" + mVmNumber);
    pw.println(" mCT=" + mCT);
    pw.println(" mSST=" + mSST);
    pw.println(" mCdmaSSM=" + mCdmaSSM);
    pw.println(" mPendingMmis=" + mPendingMmis);
    pw.println(" mRuimPhoneBookInterfaceManager=" + mRuimPhoneBookInterfaceManager);
    pw.println(" mCdmaSubscriptionSource=" + mCdmaSubscriptionSource);
    pw.println(" mSubInfo=" + mSubInfo);
    pw.println(" mEriManager=" + mEriManager);
    pw.println(" mWakeLock=" + mWakeLock);
    pw.println(" mIsPhoneInEcmState=" + mIsPhoneInEcmState);
    if (VDBG) pw.println(" mImei=" + mImei);
    if (VDBG) pw.println(" mImeiSv=" + mImeiSv);
    if (VDBG) pw.println(" mEsn=" + mEsn);
    if (VDBG) pw.println(" mMeid=" + mMeid);
    pw.println(" mCarrierOtaSpNumSchema=" + mCarrierOtaSpNumSchema);
    pw.println(" getCdmaEriIconIndex()=" + getCdmaEriIconIndex());
    pw.println(" getCdmaEriIconMode()=" + getCdmaEriIconMode());
    pw.println(" getCdmaEriText()=" + getCdmaEriText());
    pw.println(" isMinInfoReady()=" + isMinInfoReady());
    pw.println(" isCspPlmnEnabled()=" + isCspPlmnEnabled());
}
}
/* * Copyright 2013 The Regents of The University California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.berkeley.sparrow.examples; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Random; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicInteger; import edu.berkeley.sparrow.daemon.util.Network; import joptsimple.OptionParser; import joptsimple.OptionSet; import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.thrift.TException; import com.google.common.collect.Lists; import edu.berkeley.sparrow.daemon.nodemonitor.NodeMonitorThrift; import edu.berkeley.sparrow.daemon.util.TClients; import edu.berkeley.sparrow.daemon.util.TServers; import edu.berkeley.sparrow.thrift.BackendService; import edu.berkeley.sparrow.thrift.NodeMonitorService.Client; import edu.berkeley.sparrow.thrift.TFullTaskId; import edu.berkeley.sparrow.thrift.TUserGroupInfo; /** * A prototype Sparrow backend. * * This backend is capable of performing a number of benchmark tasks, each representing * distinct resource consumption profiles. 
It initiates a thrift server with a bounded * size thread pool (of at most {@code WORKER_THREADS} threads). To makes sure that * we never queue tasks, we additionally spawn a new thread each time a task is launched. * In the future, we will have launchTask() directly execute the task and rely on queuing * in the underlying thread pool to queue if task launches exceed capacity. */ public class ProtoBackend implements BackendService.Iface { /** Benchmark which, on each iteration, runs 1 million random floating point * multiplications.*/ public static int BENCHMARK_TYPE_FP_CPU = 1; /** Benchmark which allocates a heap buffer of 200 million bytes, then on each iteration * accesses 1 million contiguous bytes of the buffer, starting at a random offset.*/ public static int BENCHMARK_TYPE_RANDOM_MEMACCESS = 2; // NOTE: we do not use an enum for the above because it is not possible to serialize // an enum with our current simple serialization technique. /** Tracks the total number of tasks launched since execution began. Updated on * each task launch. This is helpful for diagnosing unwanted queuing in various parts * of the system (i.e. if we notice the backend is launching fewer tasks than we expect * based on the frontend task launch rate). */ public static AtomicInteger numTasks = new AtomicInteger(0); public static long startTime = -1; private static final int DEFAULT_LISTEN_PORT = 20101; private static final String NIC_NAME ="nic_name"; private static final String DEFAULT_NIC_NAME = "eth2"; /** * This indicates how many threads can concurrently be answering function calls * from the NM. Each task is launched in a new thread from one of these threads. If tasks * launches arrive fast enough that all worker threads are concurrently executing * a task, this will queue. We currently launch new threads for each task to prevent * this from happening. 
*/ private static final int THRIFT_WORKER_THREADS = 16; private static final int TASK_WORKER_THREADS = 16; private static final String APP_ID = "testApp"; /** We assume we are speaking to local Node Manager. */ private static final String NM_HOST = "localhost"; private static int NM_PORT; private static Client client; private static final Logger LOG = Logger.getLogger(ProtoBackend.class); private static final ExecutorService executor = Executors.newFixedThreadPool(TASK_WORKER_THREADS); /** * Keeps track of finished tasks. * * A single thread pulls items off of this queue and uses * the client to notify the node monitor that tasks have finished. */ private final BlockingQueue<TFullTaskId> finishedTasks = new LinkedBlockingQueue<TFullTaskId>(); /** * Thread that sends taskFinished() RPCs to the node monitor. * * We do this in a single thread so that we just need a single client to the node monitor * and don't need to create a new client for each task. */ private class TasksFinishedRpcRunnable implements Runnable { @Override public void run() { while (true) { try { TFullTaskId task = finishedTasks.take(); client.tasksFinished(Lists.newArrayList(task)); } catch (InterruptedException e) { LOG.error("Error taking a task from the queue: " + e.getMessage()); } catch (TException e) { LOG.error("Error with tasksFinished() RPC:" + e.getMessage()); } } } } /** * Thread spawned for each task. It runs for a given amount of time (and adds * its resources to the total resources for that time) then stops. It updates * the NodeMonitor when it launches and again when it finishes. 
*/ private class TaskRunnable implements Runnable { private int benchmarkId; private int benchmarkIterations; private TFullTaskId taskId; public TaskRunnable(String requestId, TFullTaskId taskId, ByteBuffer message) { this.benchmarkId = message.getInt(); this.benchmarkIterations = message.getInt(); this.taskId = taskId; } @Override public void run() { if (startTime == -1) { startTime = System.currentTimeMillis(); } long taskStart = System.currentTimeMillis(); int tasks = numTasks.addAndGet(1); double taskRate = ((double) tasks) * 1000 / (System.currentTimeMillis() - startTime); LOG.debug("Aggregate task rate: " + taskRate + " (" + tasks + " launched)"); Random r = new Random(); long benchmarkStart = System.currentTimeMillis(); runBenchmark(benchmarkId, benchmarkIterations, r); LOG.debug("Benchmark runtime: " + (System.currentTimeMillis() - benchmarkStart)); finishedTasks.add(taskId); LOG.debug("Task running for " + (System.currentTimeMillis() - taskStart) + " ms"); } } /** * Run the benchmark identified by {@code benchmarkId} for {@code iterations} * iterations using random generator {@code r}. Return true if benchmark is recognized * and false otherwise. */ public static boolean runBenchmark(int benchmarkId, int iterations, Random r) { if (benchmarkId == BENCHMARK_TYPE_RANDOM_MEMACCESS) { LOG.debug("Running random access benchmark for " + iterations + " iterations."); runRandomMemAcessBenchmark(iterations, r); } else if (benchmarkId == BENCHMARK_TYPE_FP_CPU) { LOG.debug("Running CPU benchmark for " + iterations + " iterations."); runFloatingPointBenchmark(iterations, r); } else { LOG.error("Received unrecognized benchmark type"); return false; } return true; } /** * Benchmark that runs random floating point multiplications for the specified amount of * "iterations", where each iteration is one millisecond. 
*/ public static void runFloatingPointBenchmark(int iterations, Random r) { int runtimeMillis = iterations; long startTime = System.nanoTime(); int opsPerIteration = 1000; /* We keep a running result here and print it out so that the JVM doesn't * optimize all this computation away. */ float result = r.nextFloat(); while ((System.nanoTime() - startTime) / (1000.0 * 1000.0) < runtimeMillis) { for (int j = 0; j < opsPerIteration; j++) { // On each iteration, perform a floating point multiplication float x = r.nextFloat(); float y = r.nextFloat(); result += (x * y); } } LOG.debug("Benchmark result " + result); } /** Benchmark which allocates a heap buffer of 200 million bytes, then on each iteration * accesses 1 million contiguous bytes of the buffer, starting at a random offset.*/ public static void runRandomMemAcessBenchmark(int iterations, Random r) { // 2 hundred million byte buffer int buffSize = 1000 * 1000 * 200; byte[] buff = new byte[buffSize]; // scan 1 million bytes at a time int runLength = 1000 * 1000; // We keep a running result here and print it out so that the JVM doesn't // optimize all this computation away. byte result = 1; for (int i = 0; i < iterations; i++) { // On each iteration, start at a random index, and scan runLength contiguous // bytes, potentially wrapping if we hit the end of the buffer. int start = r.nextInt(buff.length); for (int j = 0; j < runLength; j++) { result = (byte) (result ^ buff[(start + j) % (buff.length - 1)]); } } LOG.debug("Benchmark result " + result); } /** * Initializes the backend by registering with the node monitor. * * Also starts a thread that handles finished tasks (by sending an RPC to the node monitor). */ public void initialize(String nicName, int listenPort) { // Register server. 
try { client = TClients.createBlockingNmClient(NM_HOST, NM_PORT); } catch (IOException e) { LOG.debug("Error creating Thrift client: " + e.getMessage()); } try { String localIp = Network.getIPAddressByNICName(nicName); client.registerBackend(APP_ID, localIp + listenPort); LOG.debug("Client successfully registered"); } catch (TException e) { LOG.debug("Error while registering backend: " + e.getMessage()); } new Thread(new TasksFinishedRpcRunnable()).start(); } @Override public void launchTask(ByteBuffer message, TFullTaskId taskId, TUserGroupInfo user) throws TException { LOG.debug("Submitting task " + taskId.getTaskId() + " at " + System.currentTimeMillis()); // Note we ignore user here executor.submit(new TaskRunnable( taskId.requestId, taskId, message)); } public static void main(String[] args) throws IOException, TException { OptionParser parser = new OptionParser(); parser.accepts("c", "configuration file"). withRequiredArg().ofType(String.class); parser.accepts("help", "print help statement"); OptionSet options = parser.parse(args); if (options.has("help")) { parser.printHelpOn(System.out); System.exit(-1); } // Logger configuration: log to the console BasicConfigurator.configure(); LOG.setLevel(Level.DEBUG); LOG.debug("debug logging on"); Configuration conf = new PropertiesConfiguration(); if (options.has("c")) { String configFile = (String) options.valueOf("c"); try { conf = new PropertiesConfiguration(configFile); } catch (ConfigurationException e) {} } // Start backend server ProtoBackend protoBackend = new ProtoBackend(); BackendService.Processor<BackendService.Iface> processor = new BackendService.Processor<BackendService.Iface>(protoBackend); int listenPort = conf.getInt("listen_port", DEFAULT_LISTEN_PORT); NM_PORT = conf.getInt("node_monitor_port", NodeMonitorThrift.DEFAULT_NM_THRIFT_PORT); TServers.launchThreadedThriftServer(listenPort, THRIFT_WORKER_THREADS, processor); String nicName = conf.getString(NIC_NAME,DEFAULT_NIC_NAME); 
protoBackend.initialize(nicName, listenPort); } }
/* * Copyright 2012-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.actuate.quartz; import java.time.Duration; import java.time.Instant; import java.time.LocalTime; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; import java.util.Date; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.TimeZone; import java.util.stream.Stream; import org.assertj.core.api.InstanceOfAssertFactories; import org.assertj.core.api.InstanceOfAssertFactory; import org.assertj.core.api.MapAssert; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.quartz.CalendarIntervalScheduleBuilder; import org.quartz.CalendarIntervalTrigger; import org.quartz.CronScheduleBuilder; import org.quartz.CronTrigger; import org.quartz.DailyTimeIntervalScheduleBuilder; import org.quartz.DailyTimeIntervalTrigger; import org.quartz.DateBuilder.IntervalUnit; import org.quartz.Job; import org.quartz.JobBuilder; import org.quartz.JobDetail; import org.quartz.JobKey; import org.quartz.Scheduler; import org.quartz.SchedulerException; import org.quartz.SimpleScheduleBuilder; import org.quartz.SimpleTrigger; import org.quartz.TimeOfDay; import 
org.quartz.Trigger; import org.quartz.Trigger.TriggerState; import org.quartz.TriggerBuilder; import org.quartz.TriggerKey; import org.quartz.impl.matchers.GroupMatcher; import org.quartz.spi.OperableTrigger; import org.springframework.boot.actuate.endpoint.Sanitizer; import org.springframework.boot.actuate.quartz.QuartzEndpoint.QuartzJobDetails; import org.springframework.boot.actuate.quartz.QuartzEndpoint.QuartzJobGroupSummary; import org.springframework.boot.actuate.quartz.QuartzEndpoint.QuartzJobSummary; import org.springframework.boot.actuate.quartz.QuartzEndpoint.QuartzReport; import org.springframework.boot.actuate.quartz.QuartzEndpoint.QuartzTriggerGroupSummary; import org.springframework.scheduling.quartz.DelegatingJob; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.entry; import static org.mockito.BDDMockito.given; import static org.mockito.BDDMockito.then; import static org.mockito.Mockito.mock; /** * Tests for {@link QuartzEndpoint}. 
* * @author Vedran Pavic * @author Stephane Nicoll */ class QuartzEndpointTests { private static final JobDetail jobOne = JobBuilder.newJob(Job.class).withIdentity("jobOne").build(); private static final JobDetail jobTwo = JobBuilder.newJob(DelegatingJob.class).withIdentity("jobTwo").build(); private static final JobDetail jobThree = JobBuilder.newJob(Job.class).withIdentity("jobThree", "samples").build(); private static final Trigger triggerOne = TriggerBuilder.newTrigger().forJob(jobOne).withIdentity("triggerOne") .build(); private static final Trigger triggerTwo = TriggerBuilder.newTrigger().forJob(jobOne).withIdentity("triggerTwo") .build(); private static final Trigger triggerThree = TriggerBuilder.newTrigger().forJob(jobThree) .withIdentity("triggerThree", "samples").build(); private final Scheduler scheduler; private final QuartzEndpoint endpoint; QuartzEndpointTests() { this.scheduler = mock(Scheduler.class); this.endpoint = new QuartzEndpoint(this.scheduler); } @Test void quartzReport() throws SchedulerException { given(this.scheduler.getJobGroupNames()).willReturn(Arrays.asList("jobSamples", "DEFAULT")); given(this.scheduler.getTriggerGroupNames()).willReturn(Collections.singletonList("triggerSamples")); QuartzReport quartzReport = this.endpoint.quartzReport(); assertThat(quartzReport.getJobs().getGroups()).containsOnly("jobSamples", "DEFAULT"); assertThat(quartzReport.getTriggers().getGroups()).containsOnly("triggerSamples"); then(this.scheduler).should().getJobGroupNames(); then(this.scheduler).should().getTriggerGroupNames(); then(this.scheduler).shouldHaveNoMoreInteractions(); } @Test void quartzReportWithNoJob() throws SchedulerException { given(this.scheduler.getJobGroupNames()).willReturn(Collections.emptyList()); given(this.scheduler.getTriggerGroupNames()).willReturn(Arrays.asList("triggerSamples", "DEFAULT")); QuartzReport quartzReport = this.endpoint.quartzReport(); assertThat(quartzReport.getJobs().getGroups()).isEmpty(); 
assertThat(quartzReport.getTriggers().getGroups()).containsOnly("triggerSamples", "DEFAULT"); } @Test void quartzReportWithNoTrigger() throws SchedulerException { given(this.scheduler.getJobGroupNames()).willReturn(Collections.singletonList("jobSamples")); given(this.scheduler.getTriggerGroupNames()).willReturn(Collections.emptyList()); QuartzReport quartzReport = this.endpoint.quartzReport(); assertThat(quartzReport.getJobs().getGroups()).containsOnly("jobSamples"); assertThat(quartzReport.getTriggers().getGroups()).isEmpty(); } @Test void quartzJobGroupsWithExistingGroups() throws SchedulerException { mockJobs(jobOne, jobTwo, jobThree); Map<String, Object> jobGroups = this.endpoint.quartzJobGroups().getGroups(); assertThat(jobGroups).containsOnlyKeys("DEFAULT", "samples"); assertThat(jobGroups).extractingByKey("DEFAULT", nestedMap()) .containsOnly(entry("jobs", Arrays.asList("jobOne", "jobTwo"))); assertThat(jobGroups).extractingByKey("samples", nestedMap()) .containsOnly(entry("jobs", Collections.singletonList("jobThree"))); } @Test void quartzJobGroupsWithNoGroup() throws SchedulerException { given(this.scheduler.getJobGroupNames()).willReturn(Collections.emptyList()); Map<String, Object> jobGroups = this.endpoint.quartzJobGroups().getGroups(); assertThat(jobGroups).isEmpty(); } @Test void quartzTriggerGroupsWithExistingGroups() throws SchedulerException { mockTriggers(triggerOne, triggerTwo, triggerThree); given(this.scheduler.getPausedTriggerGroups()).willReturn(Collections.singleton("samples")); Map<String, Object> triggerGroups = this.endpoint.quartzTriggerGroups().getGroups(); assertThat(triggerGroups).containsOnlyKeys("DEFAULT", "samples"); assertThat(triggerGroups).extractingByKey("DEFAULT", nestedMap()).containsOnly(entry("paused", false), entry("triggers", Arrays.asList("triggerOne", "triggerTwo"))); assertThat(triggerGroups).extractingByKey("samples", nestedMap()).containsOnly(entry("paused", true), entry("triggers", 
Collections.singletonList("triggerThree"))); } @Test void quartzTriggerGroupsWithNoGroup() throws SchedulerException { given(this.scheduler.getTriggerGroupNames()).willReturn(Collections.emptyList()); Map<String, Object> triggerGroups = this.endpoint.quartzTriggerGroups().getGroups(); assertThat(triggerGroups).isEmpty(); } @Test void quartzJobGroupSummaryWithInvalidGroup() throws SchedulerException { given(this.scheduler.getJobGroupNames()).willReturn(Collections.singletonList("DEFAULT")); QuartzJobGroupSummary summary = this.endpoint.quartzJobGroupSummary("unknown"); assertThat(summary).isNull(); } @Test void quartzJobGroupSummaryWithEmptyGroup() throws SchedulerException { given(this.scheduler.getJobGroupNames()).willReturn(Collections.singletonList("samples")); given(this.scheduler.getJobKeys(GroupMatcher.jobGroupEquals("samples"))).willReturn(Collections.emptySet()); QuartzJobGroupSummary summary = this.endpoint.quartzJobGroupSummary("samples"); assertThat(summary).isNotNull(); assertThat(summary.getGroup()).isEqualTo("samples"); assertThat(summary.getJobs()).isEmpty(); } @Test void quartzJobGroupSummaryWithJobs() throws SchedulerException { mockJobs(jobOne, jobTwo); QuartzJobGroupSummary summary = this.endpoint.quartzJobGroupSummary("DEFAULT"); assertThat(summary).isNotNull(); assertThat(summary.getGroup()).isEqualTo("DEFAULT"); Map<String, QuartzJobSummary> jobSummaries = summary.getJobs(); assertThat(jobSummaries).containsOnlyKeys("jobOne", "jobTwo"); assertThat(jobSummaries.get("jobOne").getClassName()).isEqualTo(Job.class.getName()); assertThat(jobSummaries.get("jobTwo").getClassName()).isEqualTo(DelegatingJob.class.getName()); } @Test void quartzTriggerGroupSummaryWithInvalidGroup() throws SchedulerException { given(this.scheduler.getTriggerGroupNames()).willReturn(Collections.singletonList("DEFAULT")); QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("unknown"); assertThat(summary).isNull(); } @Test void 
quartzTriggerGroupSummaryWithEmptyGroup() throws SchedulerException { given(this.scheduler.getTriggerGroupNames()).willReturn(Collections.singletonList("samples")); given(this.scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals("samples"))) .willReturn(Collections.emptySet()); QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("samples"); assertThat(summary).isNotNull(); assertThat(summary.getGroup()).isEqualTo("samples"); assertThat(summary.isPaused()).isFalse(); assertThat(summary.getTriggers().getCron()).isEmpty(); assertThat(summary.getTriggers().getSimple()).isEmpty(); assertThat(summary.getTriggers().getDailyTimeInterval()).isEmpty(); assertThat(summary.getTriggers().getCalendarInterval()).isEmpty(); assertThat(summary.getTriggers().getCustom()).isEmpty(); } @Test void quartzTriggerGroupSummaryWithCronTrigger() throws SchedulerException { CronTrigger cronTrigger = TriggerBuilder.newTrigger().withIdentity("3am-every-day", "samples") .withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0)).build(); mockTriggers(cronTrigger); QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("samples"); assertThat(summary.getGroup()).isEqualTo("samples"); assertThat(summary.isPaused()).isFalse(); assertThat(summary.getTriggers().getCron()).containsOnlyKeys("3am-every-day"); assertThat(summary.getTriggers().getSimple()).isEmpty(); assertThat(summary.getTriggers().getDailyTimeInterval()).isEmpty(); assertThat(summary.getTriggers().getCalendarInterval()).isEmpty(); assertThat(summary.getTriggers().getCustom()).isEmpty(); } @Test void quartzTriggerGroupSummaryWithCronTriggerDetails() throws SchedulerException { Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z")); Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z")); TimeZone timeZone = TimeZone.getTimeZone("Europe/Paris"); CronTrigger cronTrigger = TriggerBuilder.newTrigger().withIdentity("3am-every-day", "samples").withPriority(3) 
                // Continuation of the cron-trigger-details test: the trigger builder chain and
                // variables (cronTrigger, timeZone, previousFireTime, nextFireTime) open above this chunk.
                .withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0).inTimeZone(timeZone)).build();
        // Fire times are not settable through the builder, so cast down to OperableTrigger.
        ((OperableTrigger) cronTrigger).setPreviousFireTime(previousFireTime);
        ((OperableTrigger) cronTrigger).setNextFireTime(nextFireTime);
        mockTriggers(cronTrigger);
        QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("samples");
        Map<String, Object> triggers = summary.getTriggers().getCron();
        assertThat(triggers).containsOnlyKeys("3am-every-day");
        assertThat(triggers).extractingByKey("3am-every-day", nestedMap()).containsOnly(
                entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
                entry("priority", 3), entry("expression", "0 0 3 ? * *"), entry("timeZone", timeZone));
    }

    // A simple trigger must appear only in the "simple" bucket of the group summary.
    @Test
    void quartzTriggerGroupSummaryWithSimpleTrigger() throws SchedulerException {
        SimpleTrigger simpleTrigger = TriggerBuilder.newTrigger().withIdentity("every-hour", "samples")
                .withSchedule(SimpleScheduleBuilder.repeatHourlyForever(1)).build();
        mockTriggers(simpleTrigger);
        QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("samples");
        assertThat(summary.getGroup()).isEqualTo("samples");
        assertThat(summary.isPaused()).isFalse();
        assertThat(summary.getTriggers().getCron()).isEmpty();
        assertThat(summary.getTriggers().getSimple()).containsOnlyKeys("every-hour");
        assertThat(summary.getTriggers().getDailyTimeInterval()).isEmpty();
        assertThat(summary.getTriggers().getCalendarInterval()).isEmpty();
        assertThat(summary.getTriggers().getCustom()).isEmpty();
    }

    // Detail payload for a simple trigger: fire times, priority and interval in millis.
    @Test
    void quartzTriggerGroupSummaryWithSimpleTriggerDetails() throws SchedulerException {
        Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
        Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        SimpleTrigger simpleTrigger = TriggerBuilder.newTrigger().withIdentity("every-hour", "samples").withPriority(7)
                .withSchedule(SimpleScheduleBuilder.repeatHourlyForever(1)).build();
        ((OperableTrigger) simpleTrigger).setPreviousFireTime(previousFireTime);
        ((OperableTrigger) simpleTrigger).setNextFireTime(nextFireTime);
        mockTriggers(simpleTrigger);
        QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("samples");
        Map<String, Object> triggers = summary.getTriggers().getSimple();
        assertThat(triggers).containsOnlyKeys("every-hour");
        assertThat(triggers).extractingByKey("every-hour", nestedMap()).containsOnly(
                entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
                entry("priority", 7), entry("interval", 3600000L));
    }

    // A daily-time-interval trigger must appear only in its own bucket.
    @Test
    void quartzTriggerGroupSummaryWithDailyIntervalTrigger() throws SchedulerException {
        DailyTimeIntervalTrigger trigger = TriggerBuilder.newTrigger().withIdentity("every-hour-9am", "samples")
                .withSchedule(DailyTimeIntervalScheduleBuilder.dailyTimeIntervalSchedule()
                        .startingDailyAt(TimeOfDay.hourAndMinuteOfDay(9, 0)).withInterval(1, IntervalUnit.HOUR))
                .build();
        mockTriggers(trigger);
        QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("samples");
        assertThat(summary.getGroup()).isEqualTo("samples");
        assertThat(summary.isPaused()).isFalse();
        assertThat(summary.getTriggers().getCron()).isEmpty();
        assertThat(summary.getTriggers().getSimple()).isEmpty();
        assertThat(summary.getTriggers().getDailyTimeInterval()).containsOnlyKeys("every-hour-9am");
        assertThat(summary.getTriggers().getCalendarInterval()).isEmpty();
        assertThat(summary.getTriggers().getCustom()).isEmpty();
    }

    // Detail payload for a daily-time-interval trigger: start/end time of day and
    // daysOfWeek rendered as java.util.Calendar day numbers (TUESDAY=3, THURSDAY=5).
    @Test
    void quartzTriggerGroupSummaryWithDailyIntervalTriggerDetails() throws SchedulerException {
        Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
        Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        DailyTimeIntervalTrigger trigger = TriggerBuilder.newTrigger().withIdentity("every-hour-tue-thu", "samples")
                .withPriority(4)
                .withSchedule(DailyTimeIntervalScheduleBuilder.dailyTimeIntervalSchedule()
                        .onDaysOfTheWeek(Calendar.TUESDAY, Calendar.THURSDAY)
                        .startingDailyAt(TimeOfDay.hourAndMinuteOfDay(9, 0))
                        .endingDailyAt(TimeOfDay.hourAndMinuteOfDay(18, 0)).withInterval(1, IntervalUnit.HOUR))
                .build();
        ((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
        ((OperableTrigger) trigger).setNextFireTime(nextFireTime);
        mockTriggers(trigger);
        QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("samples");
        Map<String, Object> triggers = summary.getTriggers().getDailyTimeInterval();
        assertThat(triggers).containsOnlyKeys("every-hour-tue-thu");
        assertThat(triggers).extractingByKey("every-hour-tue-thu", nestedMap()).containsOnly(
                entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
                entry("priority", 4), entry("interval", 3600000L), entry("startTimeOfDay", LocalTime.of(9, 0)),
                entry("endTimeOfDay", LocalTime.of(18, 0)),
                entry("daysOfWeek", new LinkedHashSet<>(Arrays.asList(3, 5))));
    }

    // A calendar-interval trigger must appear only in its own bucket.
    @Test
    void quartzTriggerGroupSummaryWithCalendarIntervalTrigger() throws SchedulerException {
        CalendarIntervalTrigger trigger = TriggerBuilder.newTrigger().withIdentity("once-a-week", "samples")
                .withSchedule(CalendarIntervalScheduleBuilder.calendarIntervalSchedule().withIntervalInWeeks(1))
                .build();
        mockTriggers(trigger);
        QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("samples");
        assertThat(summary.getGroup()).isEqualTo("samples");
        assertThat(summary.isPaused()).isFalse();
        assertThat(summary.getTriggers().getCron()).isEmpty();
        assertThat(summary.getTriggers().getSimple()).isEmpty();
        assertThat(summary.getTriggers().getDailyTimeInterval()).isEmpty();
        assertThat(summary.getTriggers().getCalendarInterval()).containsOnlyKeys("once-a-week");
        assertThat(summary.getTriggers().getCustom()).isEmpty();
    }

    // Detail payload for a calendar-interval trigger: one week = 604800000 ms, plus time zone.
    @Test
    void quartzTriggerGroupSummaryWithCalendarIntervalTriggerDetails() throws SchedulerException {
        TimeZone timeZone = TimeZone.getTimeZone("Europe/Paris");
        Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
        Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        CalendarIntervalTrigger trigger = TriggerBuilder.newTrigger().withIdentity("once-a-week", "samples")
                .withPriority(8).withSchedule(CalendarIntervalScheduleBuilder.calendarIntervalSchedule()
                        .withIntervalInWeeks(1).inTimeZone(timeZone))
                .build();
        ((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
        ((OperableTrigger) trigger).setNextFireTime(nextFireTime);
        mockTriggers(trigger);
        QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("samples");
        Map<String, Object> triggers = summary.getTriggers().getCalendarInterval();
        assertThat(triggers).containsOnlyKeys("once-a-week");
        assertThat(triggers).extractingByKey("once-a-week", nestedMap()).containsOnly(
                entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
                entry("priority", 8), entry("interval", 604800000L), entry("timeZone", timeZone));
    }

    // A trigger of an unknown concrete type (a plain mock) lands in the "custom" bucket.
    @Test
    void quartzTriggerGroupSummaryWithCustomTrigger() throws SchedulerException {
        Trigger trigger = mock(Trigger.class);
        given(trigger.getKey()).willReturn(TriggerKey.triggerKey("custom", "samples"));
        mockTriggers(trigger);
        QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("samples");
        assertThat(summary.getGroup()).isEqualTo("samples");
        assertThat(summary.isPaused()).isFalse();
        assertThat(summary.getTriggers().getCron()).isEmpty();
        assertThat(summary.getTriggers().getSimple()).isEmpty();
        assertThat(summary.getTriggers().getDailyTimeInterval()).isEmpty();
        assertThat(summary.getTriggers().getCalendarInterval()).isEmpty();
        assertThat(summary.getTriggers().getCustom()).containsOnlyKeys("custom");
    }

    // Custom triggers are summarised via their toString() since no schedule is recognised.
    @Test
    void quartzTriggerGroupSummaryWithCustomTriggerDetails() throws SchedulerException {
        Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
        Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        Trigger trigger = mock(Trigger.class);
        given(trigger.getKey()).willReturn(TriggerKey.triggerKey("custom", "samples"));
        given(trigger.getPreviousFireTime()).willReturn(previousFireTime);
        given(trigger.getNextFireTime()).willReturn(nextFireTime);
        given(trigger.getPriority()).willReturn(9);
        mockTriggers(trigger);
        QuartzTriggerGroupSummary summary = this.endpoint.quartzTriggerGroupSummary("samples");
        Map<String, Object> triggers = summary.getTriggers().getCustom();
        assertThat(triggers).containsOnlyKeys("custom");
        assertThat(triggers).extractingByKey("custom", nestedMap()).containsOnly(
                entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
                entry("priority", 9), entry("trigger", trigger.toString()));
    }

    // Single-trigger endpoint, cron flavour: common keys plus a nested "cron" map only.
    @Test
    void quartzTriggerWithCronTrigger() throws SchedulerException {
        Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
        Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        TimeZone timeZone = TimeZone.getTimeZone("Europe/Paris");
        CronTrigger trigger = TriggerBuilder.newTrigger().withIdentity("3am-every-day", "samples").withPriority(3)
                .withDescription("Sample description")
                .withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0).inTimeZone(timeZone)).build();
        ((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
        ((OperableTrigger) trigger).setNextFireTime(nextFireTime);
        mockTriggers(trigger);
        given(this.scheduler.getTriggerState(TriggerKey.triggerKey("3am-every-day", "samples")))
                .willReturn(TriggerState.NORMAL);
        Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "3am-every-day");
        assertThat(triggerDetails).contains(entry("group", "samples"), entry("name", "3am-every-day"),
                entry("description", "Sample description"), entry("type", "cron"),
                entry("state", TriggerState.NORMAL), entry("priority", 3));
        assertThat(triggerDetails).contains(entry("previousFireTime", previousFireTime),
                entry("nextFireTime", nextFireTime));
        assertThat(triggerDetails).doesNotContainKeys("simple", "dailyTimeInterval", "calendarInterval", "custom");
        assertThat(triggerDetails).extractingByKey("cron", nestedMap())
                .containsOnly(entry("expression", "0 0 3 ? * *"), entry("timeZone", timeZone));
    }

    // Single-trigger endpoint, simple flavour: also covers start/end time and repeat count.
    @Test
    void quartzTriggerWithSimpleTrigger() throws SchedulerException {
        Date startTime = Date.from(Instant.parse("2020-01-01T09:00:00Z"));
        Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
        Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        Date endTime = Date.from(Instant.parse("2020-01-31T09:00:00Z"));
        SimpleTrigger trigger = TriggerBuilder.newTrigger().withIdentity("every-hour", "samples").withPriority(20)
                .withDescription("Every hour").startAt(startTime).endAt(endTime)
                .withSchedule(SimpleScheduleBuilder.simpleSchedule().withIntervalInHours(1).withRepeatCount(2000))
                .build();
        ((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
        ((OperableTrigger) trigger).setNextFireTime(nextFireTime);
        mockTriggers(trigger);
        given(this.scheduler.getTriggerState(TriggerKey.triggerKey("every-hour", "samples")))
                .willReturn(TriggerState.COMPLETE);
        Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "every-hour");
        assertThat(triggerDetails).contains(entry("group", "samples"), entry("name", "every-hour"),
                entry("description", "Every hour"), entry("type", "simple"),
                entry("state", TriggerState.COMPLETE), entry("priority", 20));
        assertThat(triggerDetails).contains(entry("startTime", startTime),
                entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
                entry("endTime", endTime));
        assertThat(triggerDetails).doesNotContainKeys("cron", "dailyTimeInterval", "calendarInterval", "custom");
        assertThat(triggerDetails).extractingByKey("simple", nestedMap()).containsOnly(entry("interval", 3600000L),
                entry("repeatCount", 2000), entry("timesTriggered", 0));
    }

    // Single-trigger endpoint, daily-time-interval flavour (MONDAY=2, WEDNESDAY=4).
    @Test
    void quartzTriggerWithDailyTimeIntervalTrigger() throws SchedulerException {
        Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
        Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        DailyTimeIntervalTrigger trigger = TriggerBuilder.newTrigger().withIdentity("every-hour-mon-wed", "samples")
                .withDescription("Every working hour Mon Wed").withPriority(4)
                .withSchedule(DailyTimeIntervalScheduleBuilder.dailyTimeIntervalSchedule()
                        .onDaysOfTheWeek(Calendar.MONDAY, Calendar.WEDNESDAY)
                        .startingDailyAt(TimeOfDay.hourAndMinuteOfDay(9, 0))
                        .endingDailyAt(TimeOfDay.hourAndMinuteOfDay(18, 0)).withInterval(1, IntervalUnit.HOUR))
                .build();
        ((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
        ((OperableTrigger) trigger).setNextFireTime(nextFireTime);
        mockTriggers(trigger);
        given(this.scheduler.getTriggerState(TriggerKey.triggerKey("every-hour-mon-wed", "samples")))
                .willReturn(TriggerState.NORMAL);
        Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "every-hour-mon-wed");
        assertThat(triggerDetails).contains(entry("group", "samples"), entry("name", "every-hour-mon-wed"),
                entry("description", "Every working hour Mon Wed"), entry("type", "dailyTimeInterval"),
                entry("state", TriggerState.NORMAL), entry("priority", 4));
        assertThat(triggerDetails).contains(entry("previousFireTime", previousFireTime),
                entry("nextFireTime", nextFireTime));
        assertThat(triggerDetails).doesNotContainKeys("cron", "simple", "calendarInterval", "custom");
        assertThat(triggerDetails).extractingByKey("dailyTimeInterval", nestedMap()).containsOnly(
                entry("interval", 3600000L), entry("startTimeOfDay", LocalTime.of(9, 0)),
                entry("endTimeOfDay", LocalTime.of(18, 0)),
                entry("daysOfWeek", new LinkedHashSet<>(Arrays.asList(2, 4))), entry("repeatCount", -1),
                entry("timesTriggered", 0));
    }

    // Single-trigger endpoint, calendar-interval flavour, including DST-handling flags.
    @Test
    void quartzTriggerWithCalendarTimeIntervalTrigger() throws SchedulerException {
        TimeZone timeZone = TimeZone.getTimeZone("Europe/Paris");
        Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
        Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        CalendarIntervalTrigger trigger = TriggerBuilder.newTrigger().withIdentity("once-a-week", "samples")
                .withDescription("Once a week").withPriority(8)
                .withSchedule(CalendarIntervalScheduleBuilder.calendarIntervalSchedule().withIntervalInWeeks(1)
                        .inTimeZone(timeZone).preserveHourOfDayAcrossDaylightSavings(true))
                .build();
        ((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
        ((OperableTrigger) trigger).setNextFireTime(nextFireTime);
        mockTriggers(trigger);
        given(this.scheduler.getTriggerState(TriggerKey.triggerKey("once-a-week", "samples")))
                .willReturn(TriggerState.BLOCKED);
        Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "once-a-week");
        assertThat(triggerDetails).contains(entry("group", "samples"), entry("name", "once-a-week"),
                entry("description", "Once a week"), entry("type", "calendarInterval"),
                entry("state", TriggerState.BLOCKED), entry("priority", 8));
        assertThat(triggerDetails).contains(entry("previousFireTime", previousFireTime),
                entry("nextFireTime", nextFireTime));
        assertThat(triggerDetails).doesNotContainKeys("cron", "simple", "dailyTimeInterval", "custom");
        assertThat(triggerDetails).extractingByKey("calendarInterval", nestedMap()).containsOnly(
                entry("interval", 604800000L), entry("timeZone", timeZone),
                entry("preserveHourOfDayAcrossDaylightSavings", true),
                entry("skipDayIfHourDoesNotExist", false), entry("timesTriggered", 0));
    }

    // Single-trigger endpoint, custom flavour: nested "custom" map only carries toString().
    @Test
    void quartzTriggerWithCustomTrigger() throws SchedulerException {
        Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
        Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        Trigger trigger = mock(Trigger.class);
        given(trigger.getKey()).willReturn(TriggerKey.triggerKey("custom", "samples"));
        given(trigger.getPreviousFireTime()).willReturn(previousFireTime);
        given(trigger.getNextFireTime()).willReturn(nextFireTime);
        given(trigger.getPriority()).willReturn(9);
        mockTriggers(trigger);
        given(this.scheduler.getTriggerState(TriggerKey.triggerKey("custom", "samples")))
                .willReturn(TriggerState.ERROR);
        Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "custom");
        assertThat(triggerDetails).contains(entry("group", "samples"), entry("name", "custom"),
                entry("type", "custom"), entry("state", TriggerState.ERROR), entry("priority", 9));
        assertThat(triggerDetails).contains(entry("previousFireTime", previousFireTime),
                entry("nextFireTime", nextFireTime));
        assertThat(triggerDetails).doesNotContainKeys("cron", "simple", "calendarInterval", "dailyTimeInterval");
        assertThat(triggerDetails).extractingByKey("custom", nestedMap())
                .containsOnly(entry("trigger", trigger.toString()));
    }

    // Job-data map values must be sanitized: secrets masked, credentials in URLs redacted.
    @Test
    void quartzTriggerWithDataMap() throws SchedulerException {
        CronTrigger trigger = TriggerBuilder.newTrigger().withIdentity("3am-every-day", "samples")
                .withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0)).usingJobData("user", "user")
                .usingJobData("password", "secret").usingJobData("url", "https://user:secret@example.com").build();
        mockTriggers(trigger);
        given(this.scheduler.getTriggerState(TriggerKey.triggerKey("3am-every-day", "samples")))
                .willReturn(TriggerState.NORMAL);
        Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "3am-every-day");
        assertThat(triggerDetails).extractingByKey("data", nestedMap()).containsOnly(entry("user", "user"),
                entry("password", "******"), entry("url", "https://user:******@example.com"));
    }

    // Every Quartz IntervalUnit must convert to the expected millisecond duration.
    @ParameterizedTest(name = "unit {1}")
    @MethodSource("intervalUnitParameters")
    void canConvertIntervalUnit(int amount, IntervalUnit unit, Duration expectedDuration) throws SchedulerException {
        CalendarIntervalTrigger trigger = TriggerBuilder.newTrigger().withIdentity("trigger", "samples")
                .withSchedule(CalendarIntervalScheduleBuilder.calendarIntervalSchedule().withInterval(amount, unit))
                .build();
        mockTriggers(trigger);
        Map<String, Object> triggerDetails = this.endpoint.quartzTrigger("samples", "trigger");
        assertThat(triggerDetails).extractingByKey("calendarInterval", nestedMap())
                .contains(entry("interval", expectedDuration.toMillis()));
    }

    // Parameter source for canConvertIntervalUnit: MONTH/WEEK/YEAR use ChronoUnit's
    // estimated durations, matching the endpoint's conversion.
    static Stream<Arguments> intervalUnitParameters() {
        return Stream.of(Arguments.of(3, IntervalUnit.DAY, Duration.ofDays(3)),
                Arguments.of(2, IntervalUnit.HOUR, Duration.ofHours(2)),
                Arguments.of(5, IntervalUnit.MINUTE, Duration.ofMinutes(5)),
                Arguments.of(1, IntervalUnit.MONTH, ChronoUnit.MONTHS.getDuration()),
                Arguments.of(30, IntervalUnit.SECOND, Duration.ofSeconds(30)),
                Arguments.of(100, IntervalUnit.MILLISECOND, Duration.ofMillis(100)),
                Arguments.of(1, IntervalUnit.WEEK, ChronoUnit.WEEKS.getDuration()),
                Arguments.of(1, IntervalUnit.YEAR, ChronoUnit.YEARS.getDuration()));
    }

    // Job detail view with no triggers attached.
    @Test
    void quartzJobWithoutTrigger() throws SchedulerException {
        JobDetail job = JobBuilder.newJob(Job.class).withIdentity("hello", "samples").withDescription("A sample job")
                .storeDurably().requestRecovery(false).build();
        mockJobs(job);
        QuartzJobDetails jobDetails = this.endpoint.quartzJob("samples", "hello");
        assertThat(jobDetails.getGroup()).isEqualTo("samples");
        assertThat(jobDetails.getName()).isEqualTo("hello");
        assertThat(jobDetails.getDescription()).isEqualTo("A sample job");
        assertThat(jobDetails.getClassName()).isEqualTo(Job.class.getName());
        assertThat(jobDetails.isDurable()).isTrue();
        assertThat(jobDetails.isRequestRecovery()).isFalse();
        assertThat(jobDetails.getData()).isEmpty();
        assertThat(jobDetails.getTriggers()).isEmpty();
    }

    // Job detail view lists each attached trigger with its key fire-time fields.
    @Test
    void quartzJobWithTrigger() throws SchedulerException {
        Date previousFireTime = Date.from(Instant.parse("2020-11-30T03:00:00Z"));
        Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        JobDetail job = JobBuilder.newJob(Job.class).withIdentity("hello", "samples").build();
        TimeZone timeZone = TimeZone.getTimeZone("Europe/Paris");
        Trigger trigger = TriggerBuilder.newTrigger().withIdentity("3am-every-day", "samples").withPriority(4)
                .withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0).inTimeZone(timeZone)).build();
        ((OperableTrigger) trigger).setPreviousFireTime(previousFireTime);
        ((OperableTrigger) trigger).setNextFireTime(nextFireTime);
        mockJobs(job);
        mockTriggers(trigger);
        given(this.scheduler.getTriggersOfJob(JobKey.jobKey("hello", "samples")))
                .willAnswer((invocation) -> Collections.singletonList(trigger));
        QuartzJobDetails jobDetails = this.endpoint.quartzJob("samples", "hello");
        assertThat(jobDetails.getTriggers()).hasSize(1);
        Map<String, Object> triggerDetails = jobDetails.getTriggers().get(0);
        assertThat(triggerDetails).containsOnly(entry("group", "samples"), entry("name", "3am-every-day"),
                entry("previousFireTime", previousFireTime), entry("nextFireTime", nextFireTime),
                entry("priority", 4));
    }

    // Triggers are sorted by next fire time: "two" (02:00) before "one" (03:00).
    @Test
    void quartzJobOrdersTriggersAccordingToNextFireTime() throws SchedulerException {
        JobDetail job = JobBuilder.newJob(Job.class).withIdentity("hello", "samples").build();
        mockJobs(job);
        Date triggerOneNextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        CronTrigger triggerOne = TriggerBuilder.newTrigger().withIdentity("one", "samples").withPriority(5)
                .withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0)).build();
        ((OperableTrigger) triggerOne).setNextFireTime(triggerOneNextFireTime);
        Date triggerTwoNextFireTime = Date.from(Instant.parse("2020-12-01T02:00:00Z"));
        CronTrigger triggerTwo = TriggerBuilder.newTrigger().withIdentity("two", "samples").withPriority(10)
                .withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(2, 0)).build();
        ((OperableTrigger) triggerTwo).setNextFireTime(triggerTwoNextFireTime);
        mockTriggers(triggerOne, triggerTwo);
        given(this.scheduler.getTriggersOfJob(JobKey.jobKey("hello", "samples")))
                .willAnswer((invocation) -> Arrays.asList(triggerOne, triggerTwo));
        QuartzJobDetails jobDetails = this.endpoint.quartzJob("samples", "hello");
        assertThat(jobDetails.getTriggers()).hasSize(2);
        assertThat(jobDetails.getTriggers().get(0)).containsEntry("name", "two");
        assertThat(jobDetails.getTriggers().get(1)).containsEntry("name", "one");
    }

    // When fire times tie, the higher-priority trigger ("two", priority 7) comes first.
    @Test
    void quartzJobOrdersTriggersAccordingNextFireTimeAndPriority() throws SchedulerException {
        JobDetail job = JobBuilder.newJob(Job.class).withIdentity("hello", "samples").build();
        mockJobs(job);
        Date nextFireTime = Date.from(Instant.parse("2020-12-01T03:00:00Z"));
        CronTrigger triggerOne = TriggerBuilder.newTrigger().withIdentity("one", "samples").withPriority(3)
                .withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0)).build();
        ((OperableTrigger) triggerOne).setNextFireTime(nextFireTime);
        CronTrigger triggerTwo = TriggerBuilder.newTrigger().withIdentity("two", "samples").withPriority(7)
                .withSchedule(CronScheduleBuilder.dailyAtHourAndMinute(3, 0)).build();
        ((OperableTrigger) triggerTwo).setNextFireTime(nextFireTime);
        mockTriggers(triggerOne, triggerTwo);
        given(this.scheduler.getTriggersOfJob(JobKey.jobKey("hello", "samples")))
                .willAnswer((invocation) -> Arrays.asList(triggerOne, triggerTwo));
        QuartzJobDetails jobDetails = this.endpoint.quartzJob("samples", "hello");
        assertThat(jobDetails.getTriggers()).hasSize(2);
        assertThat(jobDetails.getTriggers().get(0)).containsEntry("name", "two");
        assertThat(jobDetails.getTriggers().get(1)).containsEntry("name", "one");
    }

    // Default sanitizer masks job-data secrets the same way as trigger data.
    @Test
    void quartzJobWithSensitiveDataMap() throws SchedulerException {
        JobDetail job = JobBuilder.newJob(Job.class).withIdentity("hello", "samples").usingJobData("user", "user")
                .usingJobData("password", "secret").usingJobData("url", "https://user:secret@example.com").build();
        mockJobs(job);
        QuartzJobDetails jobDetails = this.endpoint.quartzJob("samples", "hello");
        assertThat(jobDetails.getData()).containsOnly(entry("user", "user"), entry("password", "******"),
                entry("url", "https://user:******@example.com"));
    }

    // A user-supplied Sanitizer is invoked exactly once per job-data entry.
    @Test
    void quartzJobWithSensitiveDataMapAndCustomSanitizier() throws SchedulerException {
        JobDetail job = JobBuilder.newJob(Job.class).withIdentity("hello", "samples").usingJobData("test", "value")
                .usingJobData("secret", "value").build();
        mockJobs(job);
        Sanitizer sanitizer = mock(Sanitizer.class);
        given(sanitizer.sanitize("test", "value")).willReturn("value");
        given(sanitizer.sanitize("secret", "value")).willReturn("----");
        QuartzJobDetails jobDetails = new QuartzEndpoint(this.scheduler, sanitizer).quartzJob("samples", "hello");
        assertThat(jobDetails.getData()).containsOnly(entry("test", "value"), entry("secret", "----"));
        then(sanitizer).should().sanitize("test", "value");
        then(sanitizer).should().sanitize("secret", "value");
        then(sanitizer).shouldHaveNoMoreInteractions();
    }

    // Stubs the scheduler mock so the given jobs are resolvable by group and key.
    private void mockJobs(JobDetail... jobs) throws SchedulerException {
        MultiValueMap<String, JobKey> jobKeys = new LinkedMultiValueMap<>();
        for (JobDetail jobDetail : jobs) {
            JobKey key = jobDetail.getKey();
            given(this.scheduler.getJobDetail(key)).willReturn(jobDetail);
            jobKeys.add(key.getGroup(), key);
        }
        given(this.scheduler.getJobGroupNames()).willReturn(new ArrayList<>(jobKeys.keySet()));
        for (Entry<String, List<JobKey>> entry : jobKeys.entrySet()) {
            given(this.scheduler.getJobKeys(GroupMatcher.jobGroupEquals(entry.getKey())))
                    .willReturn(new LinkedHashSet<>(entry.getValue()));
        }
    }

    // Stubs the scheduler mock so the given triggers are resolvable by group and key.
    private void mockTriggers(Trigger... triggers) throws SchedulerException {
        MultiValueMap<String, TriggerKey> triggerKeys = new LinkedMultiValueMap<>();
        for (Trigger trigger : triggers) {
            TriggerKey key = trigger.getKey();
            given(this.scheduler.getTrigger(key)).willReturn(trigger);
            triggerKeys.add(key.getGroup(), key);
        }
        given(this.scheduler.getTriggerGroupNames()).willReturn(new ArrayList<>(triggerKeys.keySet()));
        for (Entry<String, List<TriggerKey>> entry : triggerKeys.entrySet()) {
            given(this.scheduler.getTriggerKeys(GroupMatcher.triggerGroupEquals(entry.getKey())))
                    .willReturn(new LinkedHashSet<>(entry.getValue()));
        }
    }

    // AssertJ factory for asserting on nested Map<String, Object> payloads.
    @SuppressWarnings("rawtypes")
    private static InstanceOfAssertFactory<Map, MapAssert<String, Object>> nestedMap() {
        return InstanceOfAssertFactories.map(String.class, Object.class);
    }

}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ram.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * Result of the <code>ListResourceTypes</code> operation: the resource types that can be shared using RAM, together
 * with an optional pagination token.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ram-2018-01-04/ListResourceTypes" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListResourceTypesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    // Resource types that can be shared using RAM; null until set.
    private java.util.List<ServiceNameAndResourceType> resourceTypes;

    // Pagination token. Non-null means more pages are available; pass it back as NextToken
    // on the next call. Null indicates the last page of results.
    private String nextToken;

    /**
     * Returns the resource types that can be shared using RAM.
     *
     * @return The resource types, or {@code null} if none have been set.
     */
    public java.util.List<ServiceNameAndResourceType> getResourceTypes() {
        return this.resourceTypes;
    }

    /**
     * Replaces the resource types with a copy of the given collection.
     *
     * @param resourceTypes
     *        The resource types that can be shared using RAM; {@code null} clears the field.
     */
    public void setResourceTypes(java.util.Collection<ServiceNameAndResourceType> resourceTypes) {
        // Defensive copy so later mutation of the caller's collection cannot leak in.
        this.resourceTypes = (resourceTypes == null) ? null
                : new java.util.ArrayList<ServiceNameAndResourceType>(resourceTypes);
    }

    /**
     * Appends the given resource types to the existing list (if any). Use
     * {@link #setResourceTypes(java.util.Collection)} or {@link #withResourceTypes(java.util.Collection)} to replace
     * the existing values instead.
     *
     * @param resourceTypes
     *        The resource types to append.
     * @return This object, for call chaining.
     */
    public ListResourceTypesResult withResourceTypes(ServiceNameAndResourceType... resourceTypes) {
        if (this.resourceTypes == null) {
            this.resourceTypes = new java.util.ArrayList<ServiceNameAndResourceType>(resourceTypes.length);
        }
        java.util.Collections.addAll(this.resourceTypes, resourceTypes);
        return this;
    }

    /**
     * Replaces the resource types with a copy of the given collection.
     *
     * @param resourceTypes
     *        The resource types that can be shared using RAM; {@code null} clears the field.
     * @return This object, for call chaining.
     */
    public ListResourceTypesResult withResourceTypes(java.util.Collection<ServiceNameAndResourceType> resourceTypes) {
        setResourceTypes(resourceTypes);
        return this;
    }

    /**
     * Sets the pagination token. A non-null value indicates more output is available; supply it as the
     * <code>NextToken</code> request parameter on a subsequent call. {@code null} marks the last page of results.
     *
     * @param nextToken
     *        The pagination token, or {@code null} on the last page.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token. A non-null value indicates more output is available; supply it as the
     * <code>NextToken</code> request parameter on a subsequent call. {@code null} marks the last page of results.
     *
     * @return The pagination token, or {@code null} on the last page.
     */
    public String getNextToken() {
        return nextToken;
    }

    /**
     * Sets the pagination token (see {@link #setNextToken(String)}).
     *
     * @param nextToken
     *        The pagination token, or {@code null} on the last page.
     * @return This object, for call chaining.
     */
    public ListResourceTypesResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. Useful for testing and debugging; sensitive data would be
     * redacted with a placeholder value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format is fixed by the SDK: "{Field: value,...}" with only non-null fields.
        StringBuilder sb = new StringBuilder("{");
        if (getResourceTypes() != null) {
            sb.append("ResourceTypes: ").append(getResourceTypes()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof also rejects null, matching the original null/type checks.
        if (!(obj instanceof ListResourceTypesResult)) {
            return false;
        }
        ListResourceTypesResult that = (ListResourceTypesResult) obj;
        return java.util.Objects.equals(getResourceTypes(), that.getResourceTypes())
                && java.util.Objects.equals(getNextToken(), that.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes the same 31-based accumulation as the generated prime loop,
        // so hash values are unchanged.
        return java.util.Objects.hash(getResourceTypes(), getNextToken());
    }

    @Override
    public ListResourceTypesResult clone() {
        try {
            return (ListResourceTypesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable in practice: the class implements Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

}
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.daemon.impl.quickfix; import com.intellij.codeInsight.*; import com.intellij.codeInsight.completion.proc.VariablesProcessor; import com.intellij.codeInsight.daemon.JavaErrorMessages; import com.intellij.codeInsight.daemon.QuickFixBundle; import com.intellij.codeInsight.generation.OverrideImplementUtil; import com.intellij.codeInsight.generation.PsiGenerationInfo; import com.intellij.codeInsight.intention.impl.CreateClassDialog; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.codeInsight.lookup.LookupElementBuilder; import com.intellij.codeInsight.template.*; import com.intellij.codeInsight.template.ExpressionUtil; import com.intellij.ide.fileTemplates.FileTemplate; import com.intellij.ide.fileTemplates.FileTemplateManager; import com.intellij.ide.fileTemplates.FileTemplateUtil; import com.intellij.ide.fileTemplates.JavaTemplateUtil; import com.intellij.lang.java.JavaLanguage; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.application.WriteAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.EditorModificationUtil; import com.intellij.openapi.editor.ScrollType; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.fileTypes.FileTypeManager; 
import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.codeStyle.JavaCodeStyleManager; import com.intellij.psi.codeStyle.SuggestedNameInfo; import com.intellij.psi.codeStyle.VariableKind; import com.intellij.psi.impl.source.PostprocessReformattingAspect; import com.intellij.psi.scope.util.PsiScopesUtil; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.PsiShortNamesCache; import com.intellij.psi.search.searches.ClassInheritorsSearch; import com.intellij.psi.statistics.JavaStatisticsManager; import com.intellij.psi.util.ProximityLocation; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiTypesUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.psi.util.proximity.PsiProximityComparator; import com.intellij.refactoring.util.RefactoringUtil; import com.intellij.util.ArrayUtil; import com.intellij.util.IncorrectOperationException; import com.intellij.util.ObjectUtils; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; /** * @author mike */ public class CreateFromUsageUtils { private static final Logger LOG = Logger.getInstance( "#com.intellij.codeInsight.daemon.impl.quickfix.CreateFromUsageUtils"); private static final int MAX_GUESSED_MEMBERS_COUNT = 10; private static final int 
MAX_RAW_GUESSED_MEMBERS_COUNT = 2 * MAX_GUESSED_MEMBERS_COUNT;

  /**
   * Checks that {@code reference} is a Java reference with at least one resolve result.
   * Unless {@code unresolvedOnly}, additionally requires every result to be valid and
   * not a package (a package is not a usable value/method target here).
   */
  static boolean isValidReference(PsiReference reference, boolean unresolvedOnly) {
    if (!(reference instanceof PsiJavaReference)) return false;
    JavaResolveResult[] results = ((PsiJavaReference)reference).multiResolve(true);
    if(results.length == 0) return false;
    if (!unresolvedOnly) {
      for (JavaResolveResult result : results) {
        if (!result.isValidResult()) return false;
        if (result.getElement() instanceof PsiPackage) return false;
      }
    }
    return true;
  }

  /** Returns true if {@code reference} resolves to a method applicable to {@code call}'s argument list. */
  public static boolean isValidMethodReference(PsiReference reference, PsiMethodCallExpression call) {
    if (!(reference instanceof PsiJavaReference)) return false;
    try {
      JavaResolveResult candidate = ((PsiJavaReference) reference).advancedResolve(true);
      PsiElement result = candidate.getElement();
      return result instanceof PsiMethod &&
             PsiUtil.isApplicable((PsiMethod)result, candidate.getSubstitutor(), call.getArgumentList());
    }
    catch (ClassCastException cce) {
      // rare case
      return false;
    }
  }

  /**
   * Decides whether a constructor should be created for {@code targetClass}.
   * With no {@code candidate}: only for a real class (not interface / type parameter), and not when
   * the argument list is empty and the class has no declared constructors (the implicit default fits).
   * With a candidate: only when the candidate is not applicable to the argument list.
   */
  static boolean shouldCreateConstructor(PsiClass targetClass, PsiExpressionList argList, PsiMethod candidate) {
    if (argList == null) return false;
    if (candidate == null) {
      return targetClass != null && !targetClass.isInterface() && !(targetClass instanceof PsiTypeParameter) &&
             !(argList.isEmpty() && targetClass.getConstructors().length == 0);
    }
    else {
      return !PsiUtil.isApplicable(candidate, PsiSubstitutor.EMPTY, argList);
    }
  }

  /** Fills in the body of {@code method} from the "from usage" method-body file template. */
  public static void setupMethodBody(@NotNull PsiMethod method) throws IncorrectOperationException {
    PsiClass aClass = method.getContainingClass();
    setupMethodBody(method, aClass);
  }

  public static void setupMethodBody(final PsiMethod method, final PsiClass aClass) throws IncorrectOperationException {
    FileTemplate template = FileTemplateManager.getInstance(method.getProject()).getCodeTemplate(JavaTemplateUtil.TEMPLATE_FROM_USAGE_METHOD_BODY);
    setupMethodBody(method, aClass, template);
  }

  /**
   * Generates a method body from {@code template} (return type, default return value and
   * class/method name properties filled in) and installs it into {@code method},
   * replacing any existing body, then reformats the method.
   */
  public static void setupMethodBody(final PsiMethod method, final PsiClass aClass, final FileTemplate template) throws IncorrectOperationException {
    PsiType returnType = method.getReturnType();
    if (returnType == null) {
      returnType = PsiType.VOID;
    }

    JVMElementFactory factory = JVMElementFactories.getFactory(aClass.getLanguage(), aClass.getProject());

    LOG.assertTrue(!aClass.isInterface() || PsiUtil.isLanguageLevel8OrHigher(method) || method.getLanguage() != JavaLanguage.INSTANCE,
                   "Interface bodies should be already set up");

    FileType fileType = FileTypeManager.getInstance().getFileTypeByExtension(template.getExtension());
    Properties properties = new Properties();
    properties.setProperty(FileTemplate.ATTRIBUTE_RETURN_TYPE, returnType.getPresentableText());
    properties.setProperty(FileTemplate.ATTRIBUTE_DEFAULT_RETURN_VALUE, PsiTypesUtil.getDefaultValueOfType(returnType));
    JavaTemplateUtil.setClassAndMethodNameProperties(properties, aClass, method);

    @NonNls String methodText;
    CodeStyleManager csManager = CodeStyleManager.getInstance(method.getProject());
    try {
      String bodyText = template.getText(properties);
      if (!bodyText.isEmpty()) bodyText += "\n";
      // wrap the template text in a synthetic method so the factory can parse a body out of it
      methodText = returnType.getPresentableText() + " foo () {\n" + bodyText + "}";
      methodText = FileTemplateUtil.indent(methodText, method.getProject(), fileType);
    }
    catch (ProcessCanceledException e) {
      throw e;
    }
    catch (Exception e) {
      throw new IncorrectOperationException("Failed to parse file template", (Throwable)e);
    }

    if (methodText != null) {
      PsiMethod m;
      try {
        m = factory.createMethodFromText(methodText, aClass);
      }
      catch (IncorrectOperationException e) {
        // the template produced unparsable code: report asynchronously instead of failing the whole fix
        ApplicationManager.getApplication().invokeLater(
          () -> Messages.showErrorDialog(QuickFixBundle.message("new.method.body.template.error.text"),
                                         QuickFixBundle.message("new.method.body.template.error.title")));
        return;
      }
      PsiElement newBody = m.getBody();
      LOG.assertTrue(newBody != null);

      PsiElement oldBody = method.getBody();
      if (oldBody == null) {
        // a body-less method may end with the "'{' or ';' expected" error element; replace that instead
        PsiElement last = method.getLastChild();
        if (last instanceof PsiErrorElement &&
            JavaErrorMessages.message("expected.lbrace.or.semicolon").equals(((PsiErrorElement)last).getErrorDescription())) {
          oldBody = last;
        }
      }
      if (oldBody != null) {
        oldBody.replace(newBody);
      }
      else {
        method.add(newBody);
      }
      csManager.reformat(method);
    }
  }

  /** Moves the caret of {@code newEditor} into the body of {@code method}, if it has one. */
  public static void setupEditor(@NotNull PsiMethod method, @NotNull Editor newEditor) {
    PsiCodeBlock body = method.getBody();
    if (body != null) {
      setupEditor(body, newEditor);
    }
  }

  /**
   * Positions the caret inside {@code body}. For an empty body (nothing between the braces,
   * so {@code end < start}) a properly indented blank line is inserted to type into;
   * otherwise the caret is positioned via the generation info.
   */
  public static void setupEditor(@NotNull PsiCodeBlock body, @NotNull Editor newEditor) {
    PsiElement l = PsiTreeUtil.skipWhitespacesForward(body.getLBrace());
    PsiElement r = PsiTreeUtil.skipWhitespacesBackward(body.getRBrace());
    if (l != null && r != null) {
      int start = l.getTextRange().getStartOffset();
      int end = r.getTextRange().getEndOffset();
      newEditor.getCaretModel().moveToOffset(Math.max(start, end));
      if (end < start) {
        newEditor.getCaretModel().moveToOffset(end + 1);
        CodeStyleManager styleManager = CodeStyleManager.getInstance(body.getProject());
        PsiFile containingFile = body.getContainingFile();
        final String lineIndent = styleManager.getLineIndent(containingFile, Math.min(start, end));
        PsiDocumentManager manager = PsiDocumentManager.getInstance(body.getProject());
        // flush pending PSI changes before editing the document directly
        manager.doPostponedOperationsAndUnblockDocument(manager.getDocument(containingFile));
        EditorModificationUtil.insertStringAtCaret(newEditor, lineIndent);
        EditorModificationUtil.insertStringAtCaret(newEditor, "\n", false, false);
      }
      else {
        //correct position caret for groovy and java methods
        if (body.getParent() instanceof PsiMethod) {
          final PsiGenerationInfo<PsiMethod> info = OverrideImplementUtil.createGenerationInfo((PsiMethod)body.getParent());
          info.positionCaret(newEditor, true);
        }
      }
      newEditor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
    }
  }

  static void setupMethodParameters(PsiMethod method, TemplateBuilder builder, PsiExpressionList argumentList,
                                    PsiSubstitutor substitutor) throws IncorrectOperationException {
    if (argumentList == null) return;
    PsiExpression[] args =
argumentList.getExpressions();
    setupMethodParameters(method, builder, argumentList, substitutor, args);
  }

  public static void setupMethodParameters(final PsiMethod method, final TemplateBuilder builder, final PsiElement contextElement,
                                           final PsiSubstitutor substitutor, final PsiExpression[] arguments) {
    setupMethodParameters(method, builder, contextElement, substitutor, ContainerUtil.map2List(arguments, Pair.createFunction(null)));
  }

  /**
   * Adds one template-editable parameter to {@code method} per argument: a type element whose
   * candidates are guessed from the argument's type, and a name element with suggested names.
   * Each pair carries either the argument expression or (when the expression is null) an explicit type.
   */
  static void setupMethodParameters(final PsiMethod method, final TemplateBuilder builder, final PsiElement contextElement,
                                    final PsiSubstitutor substitutor,
                                    final List<Pair<PsiExpression, PsiType>> arguments) throws IncorrectOperationException {
    final PsiManager psiManager = method.getManager();
    final Project project = psiManager.getProject();

    JVMElementFactory factory = JVMElementFactories.getFactory(method.getLanguage(), project);
    if (factory == null) return;

    PsiParameterList parameterList = method.getParameterList();
    GlobalSearchScope resolveScope = method.getResolveScope();

    GuessTypeParameters guesser = new GuessTypeParameters(project, JavaPsiFacade.getElementFactory(project), builder, substitutor);
    PostprocessReformattingAspect postprocessReformattingAspect = PostprocessReformattingAspect.getInstance(project);

    final PsiClass containingClass = method.getContainingClass();
    final boolean isInterface = containingClass != null && containingClass.isInterface();
    //255 is the maximum number of method parameters
    for (int i = 0; i < Math.min(arguments.size(), 255); i++) {
      Pair<PsiExpression, PsiType> arg = arguments.get(i);
      PsiExpression exp = arg.first;

      PsiType argType = exp == null ? arg.second : RefactoringUtil.getTypeByExpression(exp);
      SuggestedNameInfo suggestedInfo = JavaCodeStyleManager.getInstance(project).suggestVariableName(
        VariableKind.PARAMETER, null, exp, argType);
      @NonNls String[] names = suggestedInfo.names; //TODO: callback about used name
      if (names.length == 0) {
        names = new String[]{"p" + i};
      }

      // normalize types that cannot be used as a declared parameter type
      if (argType == null || PsiType.NULL.equals(argType) || LambdaUtil.notInferredType(argType)) {
        argType = PsiType.getJavaLangObject(psiManager, resolveScope);
      }
      else if (argType instanceof PsiDisjunctionType) {
        argType = ((PsiDisjunctionType)argType).getLeastUpperBound();
      }
      else if (argType instanceof PsiWildcardType) {
        argType = ((PsiWildcardType)argType).isBounded()
                  ? ((PsiWildcardType)argType).getBound()
                  : PsiType.getJavaLangObject(psiManager, resolveScope);
      }

      PsiParameter parameter;
      if (parameterList.getParametersCount() <= i) {
        // not enough declared parameters: append a new one (formatting postponed until the template finishes)
        PsiParameter param = factory.createParameter(names[0], argType);
        if (isInterface) {
          PsiUtil.setModifierProperty(param, PsiModifier.FINAL, false);
        }
        parameter = postprocessReformattingAspect.postponeFormattingInside(() -> (PsiParameter) parameterList.add(param));
      }
      else {
        parameter = parameterList.getParameters()[i];
      }

      ExpectedTypeInfo info = ExpectedTypesProvider.createInfo(argType, ExpectedTypeInfo.TYPE_OR_SUPERTYPE, argType, TailType.NONE);
      PsiElement context = PsiTreeUtil.getParentOfType(contextElement, PsiClass.class, PsiMethod.class);
      guesser.setupTypeElement(parameter.getTypeElement(), new ExpectedTypeInfo[]{info}, context, containingClass);

      Expression expression = new ParameterNameExpression(names);
      builder.replaceElement(parameter.getNameIdentifier(), expression);
    }
  }

  /**
   * Creates a class for an unresolved reference. A class-qualified reference creates a nested
   * class in the qualifier; otherwise the user is asked for a target directory (skipped in
   * unit-test mode). Must NOT be called from inside a write action.
   */
  @Nullable
  public static PsiClass createClass(final PsiJavaCodeReferenceElement referenceElement,
                                     final CreateClassKind classKind,
                                     final String superClassName) {
    assert !ApplicationManager.getApplication().isWriteAccessAllowed() : "You must not run createClass() from under write action";
    final String name =
referenceElement.getReferenceName();

    String qualifierName;
    final PsiElement qualifierElement;
    PsiElement qualifier = referenceElement.getQualifier();
    if (qualifier instanceof PsiJavaCodeReferenceElement) {
      qualifierName = ((PsiJavaCodeReferenceElement)qualifier).getQualifiedName();
      qualifierElement = ((PsiJavaCodeReferenceElement)qualifier).resolve();
      if (qualifierElement instanceof PsiClass) {
        // qualified by a class: create a nested class inside it, under a write action
        if (!FileModificationService.getInstance().preparePsiElementForWrite(qualifierElement)) return null;
        return WriteAction.compute(() -> createClassInQualifier((PsiClass)qualifierElement, classKind, name, referenceElement));
      }
    }
    else {
      qualifierName = null;
      qualifierElement = null;
    }

    final PsiManager manager = referenceElement.getManager();
    final PsiFile sourceFile = referenceElement.getContainingFile();
    final Module module = ModuleUtilCore.findModuleForPsiElement(sourceFile);
    if (qualifierName == null) {
      // unqualified reference: default the package to the source file's own package
      PsiPackage aPackage = findTargetPackage(qualifierElement, manager, sourceFile);
      if (aPackage == null) return null;
      qualifierName = aPackage.getQualifiedName();
    }

    final PsiDirectory targetDirectory;
    if (!ApplicationManager.getApplication().isUnitTestMode()) {
      Project project = manager.getProject();
      String title = QuickFixBundle.message("create.class.title", StringUtil.capitalize(classKind.getDescription()));

      CreateClassDialog dialog = new CreateClassDialog(project, title, name, qualifierName, classKind, false, module){
        @Override
        protected boolean reportBaseInSourceSelectionInTest() {
          return true;
        }
      };
      dialog.show();
      if (dialog.getExitCode() != DialogWrapper.OK_EXIT_CODE) return null;

      targetDirectory = dialog.getTargetDirectory();
      if (targetDirectory == null) return null;
    }
    else {
      targetDirectory = null;
    }
    return createClass(classKind, targetDirectory, name, manager, referenceElement, sourceFile, superClassName);
  }

  /**
   * Resolves the package to create the class in: the qualifier itself when it is a package,
   * otherwise the source file's directory package, falling back to the default package.
   */
  @Nullable
  private static PsiPackage findTargetPackage(PsiElement qualifierElement, PsiManager manager, PsiFile sourceFile) {
    PsiPackage aPackage = null;
    if (qualifierElement instanceof PsiPackage) {
      aPackage = (PsiPackage)qualifierElement;
    }
    else {
      final PsiDirectory directory = sourceFile.getContainingDirectory();
      if (directory != null) {
        aPackage = JavaDirectoryService.getInstance().getPackage(directory);
      }
      if (aPackage == null) {
        aPackage = JavaPsiFacade.getInstance(manager.getProject()).findPackage("");
      }
    }
    if (aPackage == null) return null;
    return aPackage;
  }

  /** Creates a reformatted nested class/interface/enum/annotation of {@code classKind} inside {@code psiClass}. */
  private static PsiClass createClassInQualifier(PsiClass psiClass,
                                                 CreateClassKind classKind,
                                                 String name,
                                                 PsiJavaCodeReferenceElement referenceElement) {
    PsiManager manager = psiClass.getManager();
    PsiElementFactory elementFactory = JavaPsiFacade.getInstance(manager.getProject()).getElementFactory();
    PsiClass result = classKind == CreateClassKind.INTERFACE ? elementFactory.createInterface(name) :
                      classKind == CreateClassKind.CLASS ? elementFactory.createClass(name) :
                      classKind == CreateClassKind.ANNOTATION ? elementFactory.createAnnotationType(name) :
                      elementFactory.createEnum(name);
    CreateFromUsageBaseFix.setupGenericParameters(result, referenceElement);
    result = (PsiClass)CodeStyleManager.getInstance(manager.getProject()).reformat(result);
    return (PsiClass) psiClass.add(result);
  }

  /**
   * Creates the class under a write action: in {@code directory} when given, otherwise
   * (tests) directly in {@code sourceFile}. Sets up the super reference and generic
   * parameters, and widens visibility to public when the created class would not be
   * accessible from {@code contextElement}.
   */
  public static PsiClass createClass(final CreateClassKind classKind,
                                     final PsiDirectory directory,
                                     final String name,
                                     final PsiManager manager,
                                     @NotNull final PsiElement contextElement,
                                     final PsiFile sourceFile,
                                     final String superClassName) {
    final JavaPsiFacade facade = JavaPsiFacade.getInstance(manager.getProject());
    final PsiElementFactory factory = facade.getElementFactory();

    return WriteAction.compute(() -> {
      try {
        PsiClass targetClass;
        if (directory != null) {
          try {
            if (classKind == CreateClassKind.INTERFACE) {
              targetClass = JavaDirectoryService.getInstance().createInterface(directory, name);
            }
            else if (classKind == CreateClassKind.CLASS) {
              targetClass = JavaDirectoryService.getInstance().createClass(directory, name);
            }
            else if (classKind == CreateClassKind.ENUM) {
targetClass = JavaDirectoryService.getInstance().createEnum(directory, name);
            }
            else if (classKind == CreateClassKind.ANNOTATION) {
              targetClass = JavaDirectoryService.getInstance().createAnnotationType(directory, name);
            }
            else {
              LOG.error("Unknown kind of a class to create");
              return null;
            }
          }
          catch (final IncorrectOperationException e) {
            scheduleFileOrPackageCreationFailedMessageBox(e, name, directory, false);
            return null;
          }
          if (!facade.getResolveHelper().isAccessible(targetClass, contextElement, null)) {
            // the new class would not be visible from the usage: widen it to public
            PsiUtil.setModifierProperty(targetClass, PsiModifier.PUBLIC, true);
          }
        }
        else { //tests
          PsiClass aClass;
          if (classKind == CreateClassKind.INTERFACE) {
            aClass = factory.createInterface(name);
          }
          else if (classKind == CreateClassKind.CLASS) {
            aClass = factory.createClass(name);
          }
          else if (classKind == CreateClassKind.ENUM) {
            aClass = factory.createEnum(name);
          }
          else if (classKind == CreateClassKind.ANNOTATION) {
            aClass = factory.createAnnotationType(name);
          }
          else {
            LOG.error("Unknown kind of a class to create");
            return null;
          }
          targetClass = (PsiClass)sourceFile.add(aClass);
        }

        // an enum already implicitly extends java.lang.Enum — don't add it explicitly
        if (superClassName != null &&
            (classKind != CreateClassKind.ENUM || !superClassName.equals(CommonClassNames.JAVA_LANG_ENUM))) {
          setupSuperClassReference(targetClass, superClassName);
        }
        if (contextElement instanceof PsiJavaCodeReferenceElement) {
          CreateFromUsageBaseFix.setupGenericParameters(targetClass, (PsiJavaCodeReferenceElement)contextElement);
        }
        return targetClass;
      }
      catch (IncorrectOperationException e) {
        LOG.error(e);
        return null;
      }
    });
  }

  /**
   * Adds {@code superClassName} to the created class: into the implements list when a class
   * implements an interface, otherwise into the extends list.
   */
  public static void setupSuperClassReference(PsiClass targetClass, String superClassName) {
    JavaPsiFacade facade = JavaPsiFacade.getInstance(targetClass.getProject());
    PsiElementFactory factory = facade.getElementFactory();
    final PsiClass superClass = facade.findClass(superClassName, targetClass.getResolveScope());
    final PsiJavaCodeReferenceElement superClassReference =
      factory.createReferenceElementByFQClassName(superClassName, targetClass.getResolveScope());
    final PsiReferenceList list = targetClass.isInterface() || superClass == null || !superClass.isInterface()
                                  ? targetClass.getExtendsList()
                                  : targetClass.getImplementsList();
    list.add(superClassReference);
  }

  /** Shows (asynchronously, on the EDT) an error dialog for a failed file or package creation. */
  public static void scheduleFileOrPackageCreationFailedMessageBox(final IncorrectOperationException e,
                                                                   final String name,
                                                                   final PsiDirectory directory,
                                                                   final boolean isPackage) {
    ApplicationManager.getApplication().invokeLater(() -> Messages.showErrorDialog(QuickFixBundle.message(
      isPackage ? "cannot.create.java.package.error.text" : "cannot.create.java.file.error.text",
      name,
      directory.getVirtualFile().getName(),
      e.getLocalizedMessage()), QuickFixBundle.message(
        isPackage ? "cannot.create.java.package.error.title" : "cannot.create.java.file.error.title")));
  }

  /**
   * Collects, inside the nearest enclosing scope of one of {@code scopes}, all unresolved
   * reference expressions with the same name (and call-vs-non-call shape) as {@code expression}.
   */
  @SafeVarargs
  @NotNull
  public static PsiReferenceExpression[] collectExpressions(final PsiExpression expression,
                                                            @NotNull Class<? extends PsiElement>... scopes) {
    PsiElement parent = PsiTreeUtil.getParentOfType(expression, scopes);

    final List<PsiReferenceExpression> result = new ArrayList<>();
    JavaRecursiveElementWalkingVisitor visitor = new JavaRecursiveElementWalkingVisitor() {
      @Override
      public void visitReferenceExpression(PsiReferenceExpression expr) {
        if (expression instanceof PsiReferenceExpression &&
            (expr.getParent() instanceof PsiMethodCallExpression == expression.getParent() instanceof PsiMethodCallExpression)) {
          if (Comparing.equal(expr.getReferenceName(), ((PsiReferenceExpression)expression).getReferenceName()) &&
              !isValidReference(expr, false)) {
            result.add(expr);
          }
        }
        visitElement(expr);
      }

      @Override
      public void visitMethodCallExpression(PsiMethodCallExpression expr) {
        if (expression instanceof PsiMethodCallExpression) {
          PsiReferenceExpression methodExpression = expr.getMethodExpression();
          if (Comparing.equal(methodExpression.getReferenceName(),
                              ((PsiMethodCallExpression) expression).getMethodExpression().getReferenceName())) {
result.add(expr.getMethodExpression());
          }
        }
        visitElement(expr);
      }
    };
    if (parent != null) {
      parent.accept(visitor);
    }
    return result.toArray(new PsiReferenceExpression[0]);
  }

  /**
   * Finds accessible in-scope variables whose type matches the expected-type information
   * gathered for {@code expression}, and — when member names are expected — whose class
   * actually declares all of the expected fields and methods.
   */
  @NotNull
  static PsiVariable[] guessMatchingVariables(final PsiExpression expression) {
    List<ExpectedTypeInfo[]> typesList = new ArrayList<>();
    List<String> expectedMethodNames = new ArrayList<>();
    List<String> expectedFieldNames = new ArrayList<>();

    getExpectedInformation(expression, typesList, expectedMethodNames, expectedFieldNames);

    final List<PsiVariable> list = new ArrayList<>();
    VariablesProcessor varproc = new VariablesProcessor("", true, list){
      @Override
      public boolean execute(@NotNull PsiElement element, @NotNull ResolveState state) {
        // skip fields that are not accessible from the expression; keep walking
        if(!(element instanceof PsiField) ||
           JavaPsiFacade.getInstance(element.getProject()).getResolveHelper().isAccessible((PsiField)element, expression, null)) {
          return super.execute(element, state);
        }
        return true;
      }
    };
    PsiScopesUtil.treeWalkUp(varproc, expression, null);
    PsiVariable[] allVars = varproc.getResultsAsArray();

    ExpectedTypeInfo[] infos = ExpectedTypeUtil.intersect(typesList);

    List<PsiVariable> result = new ArrayList<>();
    nextVar:
    for (PsiVariable variable : allVars) {
      PsiType varType = variable.getType();
      boolean matched = infos.length == 0;
      for (ExpectedTypeInfo info : infos) {
        if (ExpectedTypeUtil.matches(varType, info)) {
          matched = true;
          break;
        }
      }

      if (matched) {
        if (!expectedFieldNames.isEmpty() && !expectedMethodNames.isEmpty()) {
          if (!(varType instanceof PsiClassType)) continue;
          PsiClass aClass = ((PsiClassType)varType).resolve();
          if (aClass == null) continue;
          for (String name : expectedFieldNames) {
            if (aClass.findFieldByName(name, true) == null) continue nextVar;
          }
          for (String name : expectedMethodNames) {
            PsiMethod[] methods = aClass.findMethodsByName(name, true);
            if (methods.length == 0) continue nextVar;
          }
        }
        result.add(variable);
      }
    }

    return result.toArray(new PsiVariable[0]);
  }

  /**
   * Scans all same-named occurrences of {@code expression} and records, per occurrence,
   * either its expected types (sorted by relevance) or the member name that is accessed
   * on it (into {@code expectedMethodNames}/{@code expectedFieldNames}).
   */
  private static void getExpectedInformation(final PsiExpression expression,
                                             List<ExpectedTypeInfo[]> types,
                                             List<String> expectedMethodNames,
                                             List<String> expectedFieldNames) {
    Comparator<ExpectedTypeInfo> expectedTypesComparator = (o1, o2) -> compareExpectedTypes(o1, o2, expression);
    for (PsiExpression expr : collectExpressions(expression, PsiMember.class, PsiFile.class)) {
      PsiElement parent = expr.getParent();
      if (!(parent instanceof PsiReferenceExpression)) {
        // occurrence used as a value: gather its expected types
        boolean isAssignmentToFunctionalExpression =
          PsiUtil.isOnAssignmentLeftHand(expr) &&
          ((PsiAssignmentExpression)PsiUtil.skipParenthesizedExprUp(parent)).getRExpression() instanceof PsiFunctionalExpression;
        PsiExpressionList expressionList = ObjectUtils
          .tryCast(PsiUtil.skipParenthesizedExprUp(isAssignmentToFunctionalExpression ? parent.getParent() : parent),
                   PsiExpressionList.class);
        boolean forCompletion = expressionList != null || parent.getParent() instanceof PsiPolyadicExpression;
        ExpectedTypeInfo[] someExpectedTypes = ExpectedTypesProvider.getExpectedTypes(expr, forCompletion);
        if (someExpectedTypes.length > 0) {
          Comparator<ExpectedTypeInfo> comparator = expectedTypesComparator;
          if (expressionList != null) {
            // prefer overloads whose parameter count matches the call's argument count
            int argCount = expressionList.getExpressionCount();
            Comparator<ExpectedTypeInfo> mostSuitableMethodComparator =
              Comparator.comparingInt(typeInfo -> typeInfo.getCalledMethod().getParameterList().getParametersCount() == argCount ? 0 : 1);
            comparator = mostSuitableMethodComparator.thenComparing(comparator);
          }
          Arrays.sort(someExpectedTypes, comparator);
          types.add(someExpectedTypes);
        }
        continue;
      }

      String refName = ((PsiReferenceExpression)parent).getReferenceName();
      if (refName == null) {
        continue;
      }

      PsiElement pparent = parent.getParent();
      if (pparent instanceof PsiMethodCallExpression) {
        expectedMethodNames.add(refName);
        if (refName.equals("equals")) {
          // x.equals(arg) pins x's type to arg's (boxed) type
          ExpectedTypeInfo[] someExpectedTypes = equalsExpectedTypes((PsiMethodCallExpression)pparent);
          if (someExpectedTypes.length > 0) {
            Arrays.sort(someExpectedTypes, expectedTypesComparator);
            types.add(someExpectedTypes);
          }
        }
        continue;
      }

      if (pparent instanceof PsiReferenceExpression || pparent instanceof PsiVariable ||
          pparent instanceof PsiExpression) {
        expectedFieldNames.add(refName);
      }
    }
  }

  /** Orders expected types by the relevance of their default types' classes (see compareMembers). */
  private static int compareExpectedTypes(ExpectedTypeInfo o1, ExpectedTypeInfo o2, PsiExpression expression) {
    PsiClass c1 = PsiUtil.resolveClassInType(o1.getDefaultType());
    PsiClass c2 = PsiUtil.resolveClassInType(o2.getDefaultType());
    if (c1 == null && c2 == null) return 0;
    if (c1 == null || c2 == null) return c1 == null ?
-1 : 1;
    return compareMembers(c1, c2, expression);
  }

  /** For an {@code equals(...)} call, the receiver's expected type is the single argument's (boxed) type. */
  @NotNull
  private static ExpectedTypeInfo[] equalsExpectedTypes(PsiMethodCallExpression methodCall) {
    final PsiType[] argumentTypes = methodCall.getArgumentList().getExpressionTypes();
    if (argumentTypes.length != 1) {
      return ExpectedTypeInfo.EMPTY_ARRAY;
    }
    PsiType type = argumentTypes[0];
    if (type instanceof PsiPrimitiveType) {
      type = ((PsiPrimitiveType)type).getBoxedType(methodCall);
    }
    if (type == null) return ExpectedTypeInfo.EMPTY_ARRAY;
    return new ExpectedTypeInfo[]{ExpectedTypesProvider.createInfo(type, ExpectedTypeInfo.TYPE_STRICTLY, type, TailType.NONE)};
  }

  /**
   * Guesses expected types for {@code expression} from its usages; when the usages only give a
   * bare java.lang.Object constraint (or none) but member names are expected, candidate types
   * are derived from classes declaring those members instead.
   */
  @NotNull
  public static ExpectedTypeInfo[] guessExpectedTypes(@NotNull PsiExpression expression, boolean allowVoidType) {
    PsiManager manager = expression.getManager();
    GlobalSearchScope resolveScope = expression.getResolveScope();

    List<ExpectedTypeInfo[]> typesList = new ArrayList<>();
    List<String> expectedMethodNames = new ArrayList<>();
    List<String> expectedFieldNames = new ArrayList<>();

    getExpectedInformation(expression, typesList, expectedMethodNames, expectedFieldNames);

    if (typesList.size() == 1 && (!expectedFieldNames.isEmpty() || !expectedMethodNames.isEmpty())) {
      // a lone "subtype of Object" constraint says nothing — drop it and guess from member names
      ExpectedTypeInfo[] infos = typesList.get(0);
      if (infos.length == 1 && infos[0].getKind() == ExpectedTypeInfo.TYPE_OR_SUBTYPE &&
          infos[0].getType().equals(PsiType.getJavaLangObject(manager, resolveScope))) {
        typesList.clear();
      }
    }

    if (typesList.isEmpty()) {
      final JavaPsiFacade facade = JavaPsiFacade.getInstance(expression.getProject());
      final PsiShortNamesCache cache = PsiShortNamesCache.getInstance(expression.getProject());
      PsiElementFactory factory = facade.getElementFactory();
      for (String fieldName : expectedFieldNames) {
        PsiField[] fields = cache.getFieldsByNameIfNotMoreThan(fieldName, resolveScope, MAX_RAW_GUESSED_MEMBERS_COUNT);
        addMemberInfo(fields, expression, typesList, factory);
      }

      for (String methodName : expectedMethodNames) {
        // query project and library scopes separately so the caps apply to each independently
        PsiMethod[] projectMethods =
          cache.getMethodsByNameIfNotMoreThan(methodName,
                                              resolveScope.intersectWith(GlobalSearchScope.projectScope(manager.getProject())),
                                              MAX_RAW_GUESSED_MEMBERS_COUNT);
        PsiMethod[] libraryMethods =
          cache.getMethodsByNameIfNotMoreThan(methodName,
                                              resolveScope.intersectWith(GlobalSearchScope.notScope(GlobalSearchScope.projectScope(manager.getProject()))),
                                              MAX_RAW_GUESSED_MEMBERS_COUNT);
        PsiMethod[] methods = ArrayUtil.mergeArrays(projectMethods, libraryMethods);
        addMemberInfo(methods, expression, typesList, factory);
      }
    }

    ExpectedTypeInfo[] expectedTypes = ExpectedTypeUtil.intersect(typesList);
    if (expectedTypes.length == 0 && !typesList.isEmpty()) {
      // no common intersection: fall back to the union of all gathered constraints
      List<ExpectedTypeInfo> union = new ArrayList<>();
      for (ExpectedTypeInfo[] aTypesList : typesList) {
        ContainerUtil.addAll(union, (ExpectedTypeInfo[])aTypesList);
      }
      expectedTypes = union.toArray(ExpectedTypeInfo.EMPTY_ARRAY);
    }

    if (expectedTypes.length == 0) {
      PsiType t = allowVoidType ? PsiType.VOID : PsiType.getJavaLangObject(manager, resolveScope);
      expectedTypes = new ExpectedTypeInfo[] {ExpectedTypesProvider.createInfo(t, ExpectedTypeInfo.TYPE_OR_SUBTYPE, t, TailType.NONE)};
    }

    return expectedTypes;
  }

  /**
   * Like {@link #guessExpectedTypes}, but returns concrete candidate types, filtered so each
   * class-typed candidate actually declares all expected fields and methods.
   */
  @NotNull
  static PsiType[] guessType(PsiExpression expression, final boolean allowVoidType) {
    final PsiManager manager = expression.getManager();
    final GlobalSearchScope resolveScope = expression.getResolveScope();

    List<ExpectedTypeInfo[]> typesList = new ArrayList<>();
    final List<String> expectedMethodNames = new ArrayList<>();
    final List<String> expectedFieldNames = new ArrayList<>();

    getExpectedInformation(expression, typesList, expectedMethodNames, expectedFieldNames);

    if (typesList.size() == 1 && (!expectedFieldNames.isEmpty() || !expectedMethodNames.isEmpty())) {
      ExpectedTypeInfo[] infos = typesList.get(0);
      if (infos.length == 1 && infos[0].getKind() == ExpectedTypeInfo.TYPE_OR_SUBTYPE &&
          infos[0].getType().equals(PsiType.getJavaLangObject(manager, resolveScope))) {
        typesList.clear();
      }
    }

    if (typesList.isEmpty()) {
      final JavaPsiFacade facade = JavaPsiFacade.getInstance(manager.getProject());
      final PsiShortNamesCache cache = PsiShortNamesCache.getInstance(expression.getProject());
      PsiElementFactory factory = facade.getElementFactory();
      for (String fieldName : expectedFieldNames) {
        PsiField[] fields = cache.getFieldsByNameIfNotMoreThan(fieldName, resolveScope, MAX_RAW_GUESSED_MEMBERS_COUNT);
        addMemberInfo(fields, expression, typesList, factory);
      }

      for (String methodName : expectedMethodNames) {
        PsiMethod[] methods = cache.getMethodsByNameIfNotMoreThan(methodName, resolveScope, MAX_RAW_GUESSED_MEMBERS_COUNT);
        addMemberInfo(methods, expression, typesList, factory);
      }
    }

    ExpectedTypeInfo[] expectedTypes = ExpectedTypeUtil.intersect(typesList);
    if (expectedTypes.length == 0 && !typesList.isEmpty()) {
      List<ExpectedTypeInfo> union = new ArrayList<>();
      for (ExpectedTypeInfo[] aTypesList : typesList) {
        ContainerUtil.addAll(union, (ExpectedTypeInfo[])aTypesList);
      }
      expectedTypes = union.toArray(ExpectedTypeInfo.EMPTY_ARRAY);
    }

    if (expectedTypes.length == 0) {
      return allowVoidType ? new PsiType[]{PsiType.VOID} : new PsiType[]{PsiType.getJavaLangObject(manager, resolveScope)};
    }
    else {
      //Double check to avoid expensive operations on PsiClassTypes
      final Set<PsiType> typesSet = new HashSet<>();

      PsiTypeVisitor<PsiType> visitor = new PsiTypeVisitor<PsiType>() {
        @Override
        @Nullable
        public PsiType visitType(PsiType type) {
          if (PsiType.NULL.equals(type) || PsiType.VOID.equals(type) && !allowVoidType) {
            type = PsiType.getJavaLangObject(manager, resolveScope);
          }

          if (!typesSet.contains(type)) {
            if (type instanceof PsiClassType && (!expectedFieldNames.isEmpty() || !expectedMethodNames.isEmpty())) {
              PsiClass aClass = ((PsiClassType) type).resolve();
              if (aClass != null) {
                // reject candidates missing any expected member
                for (String fieldName : expectedFieldNames) {
                  if (aClass.findFieldByName(fieldName, true) == null) return null;
                }

                for (String methodName : expectedMethodNames) {
                  PsiMethod[] methods = aClass.findMethodsByName(methodName, true);
                  if (methods.length == 0) return null;
                }
              }
            }
            typesSet.add(type);
            return type;
          }

          return null;
        }

        @Override
        public PsiType visitCapturedWildcardType(PsiCapturedWildcardType capturedWildcardType) {
          return capturedWildcardType.getUpperBound().accept(this);
        }
      };

      PsiType[] types = ExpectedTypesProvider.processExpectedTypes(expectedTypes, visitor, manager.getProject());
      if (types.length == 0) {
        return Arrays.stream(expectedTypes).map(type -> type.getType()).toArray(PsiType[]::new);
      }
      return types;
    }
  }

  /**
   * Converts the containing classes of the most relevant {@code members} (capped at
   * MAX_GUESSED_MEMBERS_COUNT accessible ones) into expected-type constraints.
   */
  private static void addMemberInfo(PsiMember[] members,
                                    final PsiExpression expression,
                                    List<ExpectedTypeInfo[]> types,
                                    PsiElementFactory factory) {
    Arrays.sort(members, (m1, m2) -> compareMembers(m1, m2, expression));

    List<ExpectedTypeInfo> l = new ArrayList<>();
    PsiManager manager = expression.getManager();
    JavaPsiFacade facade = JavaPsiFacade.getInstance(manager.getProject());
    for (PsiMember member : members) {
      ProgressManager.checkCanceled();
      PsiClass aClass = member.getContainingClass();
      if (aClass instanceof PsiAnonymousClass || aClass == null) continue;
      if
(facade.getResolveHelper().isAccessible(member, expression, null)) {
        PsiClassType type;
        final PsiElement pparent = expression.getParent().getParent();
        if (pparent instanceof PsiMethodCallExpression && member instanceof PsiMethod) {
          // for a call site, try to infer a substitutor so the class type is properly parameterized
          PsiSubstitutor substitutor = ExpectedTypeUtil.inferSubstitutor((PsiMethod)member, (PsiMethodCallExpression)pparent, false);
          if (substitutor == null) {
            type = factory.createType(aClass);
          }
          else {
            type = factory.createType(aClass, substitutor);
          }
        }
        else {
          type = factory.createType(aClass);
        }
        l.add(ExpectedTypesProvider.createInfo(type, ExpectedTypeInfo.TYPE_OR_SUBTYPE, type, TailType.NONE));
        if (l.size() == MAX_GUESSED_MEMBERS_COUNT) break;
      }
    }

    if (!l.isEmpty()) {
      types.add(l.toArray(ExpectedTypeInfo.EMPTY_ARRAY));
    }
  }

  /**
   * Ranks members for guessing: by member usage statistics, then containing-class usage
   * statistics, then proximity to {@code context}, then qualified name.
   */
  private static int compareMembers(PsiMember m1, PsiMember m2, PsiExpression context) {
    ProgressManager.checkCanceled();
    int result = JavaStatisticsManager.createInfo(null, m2).getUseCount() - JavaStatisticsManager.createInfo(null, m1).getUseCount();
    if (result != 0) return result;
    final PsiClass aClass = m1.getContainingClass();
    final PsiClass bClass = m2.getContainingClass();
    if (aClass != null && bClass != null) {
      result = JavaStatisticsManager.createInfo(null, bClass).getUseCount() - JavaStatisticsManager.createInfo(null, aClass).getUseCount();
      if (result != 0) return result;
    }

    WeighingComparable<PsiElement,ProximityLocation> proximity1 = PsiProximityComparator.getProximity(m1, context);
    WeighingComparable<PsiElement,ProximityLocation> proximity2 = PsiProximityComparator.getProximity(m2, context);
    if (proximity1 != null && proximity2 != null) {
      result = proximity2.compareTo(proximity1);
      if (result != 0) return result;
    }

    String name1 = PsiUtil.getMemberQualifiedName(m1);
    String name2 = PsiUtil.getMemberQualifiedName(m2);
    return Comparing.compare(name1, name2);
  }

  /** Returns true if any valid occurrence in {@code expressionOccurences} is a write access. */
  public static boolean isAccessedForWriting(final PsiExpression[] expressionOccurences) {
    for (PsiExpression expression : expressionOccurences) {
      if(expression.isValid() && PsiUtil.isAccessedForWriting(expression)) return true;
    }

    return false;
  }

  /** Returns true when {@code offset} falls inside {@code namedElement} or just after {@code element}. */
  static boolean shouldShowTag(int offset, PsiElement namedElement, PsiElement element) {
    if (namedElement == null) return false;
    TextRange range = namedElement.getTextRange();
    if (range.getLength() == 0) return false;
    boolean isInNamedElement = range.contains(offset);
    return isInNamedElement || element.getTextRange().contains(offset-1);
  }

  public static void addClassesWithMember(final String memberName, final PsiFile file, final Set<String> possibleClassNames,
                                          final boolean method,
                                          final boolean staticAccess) {
    addClassesWithMember(memberName, file, possibleClassNames, method, staticAccess, true);
  }

  /**
   * Collects qualified names of classes (and their inheritors in the module scope) declaring a
   * member named {@code memberName} with the required static-ness. When the member exists on
   * java.lang.Object, the handling is delegated to {@link #handleObjectMethod}.
   */
  public static void addClassesWithMember(final String memberName, final PsiFile file, final Set<String> possibleClassNames,
                                          final boolean method,
                                          final boolean staticAccess,
                                          final boolean addObjectInheritors) {
    final Project project = file.getProject();
    final Module moduleForFile = ModuleUtilCore.findModuleForPsiElement(file);
    if (moduleForFile == null) return;

    final GlobalSearchScope searchScope = ReadAction.compute(file::getResolveScope);
    GlobalSearchScope descendantsSearchScope = GlobalSearchScope.moduleWithDependenciesScope(moduleForFile);
    final JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
    final PsiShortNamesCache cache = PsiShortNamesCache.getInstance(project);

    if (handleObjectMethod(possibleClassNames, facade, searchScope, method, memberName, staticAccess, addObjectInheritors)) {
      return;
    }

    final PsiMember[] members = ReadAction.compute(
      () -> method ? cache.getMethodsByName(memberName, searchScope) : cache.getFieldsByName(memberName, searchScope));
    for (int i = 0; i < members.length; ++i) {
      final PsiMember member = members[i];
      if (hasCorrectModifiers(member, staticAccess)) {
        final PsiClass containingClass = member.getContainingClass();

        if (containingClass != null) {
          final String qName = getQualifiedName(containingClass);
          if (qName == null) continue;

          ClassInheritorsSearch.search(containingClass, descendantsSearchScope, true, true, false).forEach(psiClass -> {
            ContainerUtil.addIfNotNull(possibleClassNames, getQualifiedName(psiClass));
            return true;
          });

          possibleClassNames.add(qName);
        }
      }
      // release the element reference as we go — the array can be large
      members[i] = null;
    }
  }

  /**
   * If the member is declared on java.lang.Object, every class matches: adds Object (and,
   * when {@code addInheritors}, every class name in scope) and returns true; otherwise false.
   */
  private static boolean handleObjectMethod(Set<String> possibleClassNames,
                                            final JavaPsiFacade facade,
                                            final GlobalSearchScope searchScope,
                                            final boolean method,
                                            final String memberName,
                                            final boolean staticAccess,
                                            boolean addInheritors) {
    final PsiShortNamesCache cache = PsiShortNamesCache.getInstance(facade.getProject());
    final boolean[] allClasses = {false};
    ReadAction.run(() -> {
      final PsiClass objectClass = facade.findClass(CommonClassNames.JAVA_LANG_OBJECT, searchScope);
      if (objectClass != null) {
        if (method && objectClass.findMethodsByName(memberName, false).length > 0) {
          allClasses[0] = true;
        }
        else if (!method) {
          final PsiField field = objectClass.findFieldByName(memberName, false);
          if (hasCorrectModifiers(field, staticAccess)) {
            allClasses[0] = true;
          }
        }
      }
    });
    if (allClasses[0]) {
      possibleClassNames.add(CommonClassNames.JAVA_LANG_OBJECT);

      if (!addInheritors) {
        return true;
      }

      final String[] strings = ReadAction.compute(cache::getAllClassNames);
      for (final String className : strings) {
        final PsiClass[] classes = ReadAction.compute(() -> cache.getClassesByName(className, searchScope));
        for (final PsiClass aClass : classes) {
          final String qname = getQualifiedName(aClass);
          ContainerUtil.addIfNotNull(possibleClassNames, qname);
        }
      }
      return true;
    }
    return false;
  }

  @Nullable
  private static String
getQualifiedName(final PsiClass aClass) { return ReadAction.compute(aClass::getQualifiedName); } private static boolean hasCorrectModifiers(@Nullable final PsiMember member, final boolean staticAccess) { if (member == null) { return false; } return ReadAction.compute(() -> !member.hasModifierProperty(PsiModifier.PRIVATE) && member.hasModifierProperty(PsiModifier.STATIC) == staticAccess).booleanValue(); } public static class ParameterNameExpression extends Expression { private final String[] myNames; public ParameterNameExpression(String[] names) { myNames = names; } @Override public Result calculateResult(ExpressionContext context) { LookupElement[] lookupItems = calculateLookupItems(context); if (lookupItems.length == 0) return new TextResult(""); return new TextResult(lookupItems[0].getLookupString()); } @Override public Result calculateQuickResult(ExpressionContext context) { return null; } @Override @NotNull public LookupElement[] calculateLookupItems(ExpressionContext context) { Project project = context.getProject(); int offset = context.getStartOffset(); PsiDocumentManager.getInstance(project).commitAllDocuments(); PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(context.getEditor().getDocument()); assert file != null; PsiElement elementAt = file.findElementAt(offset); PsiParameterList parameterList = PsiTreeUtil.getParentOfType(elementAt, PsiParameterList.class); if (parameterList == null) { if (elementAt == null) return LookupElement.EMPTY_ARRAY; final PsiElement parent = elementAt.getParent(); if (parent instanceof PsiMethod) { parameterList = ((PsiMethod)parent).getParameterList(); } else { return LookupElement.EMPTY_ARRAY; } } PsiParameter parameter = PsiTreeUtil.getParentOfType(elementAt, PsiParameter.class); Set<String> parameterNames = new HashSet<>(); for (PsiParameter psiParameter : parameterList.getParameters()) { if (psiParameter == parameter) continue; parameterNames.add(psiParameter.getName()); } Set<LookupElement> set = new 
LinkedHashSet<>(); for (String name : myNames) { if (parameterNames.contains(name)) { int j = 1; while (parameterNames.contains(name + j)) j++; name += j; } set.add(LookupElementBuilder.create(name)); } String[] suggestedNames = ExpressionUtil.getNames(context); if (suggestedNames != null) { for (String name : suggestedNames) { if (parameterNames.contains(name)) { int j = 1; while (parameterNames.contains(name + j)) j++; name += j; } set.add(LookupElementBuilder.create(name)); } } return set.toArray(LookupElement.EMPTY_ARRAY); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.cql3; import java.nio.ByteBuffer; import java.util.*; import com.google.common.base.Joiner; import org.apache.cassandra.config.ColumnDefinition; import org.apache.cassandra.db.ColumnFamily; import org.apache.cassandra.db.composites.CellName; import org.apache.cassandra.db.composites.Composite; import org.apache.cassandra.db.marshal.AbstractType; import org.apache.cassandra.db.marshal.MapType; import org.apache.cassandra.db.marshal.SetType; import org.apache.cassandra.exceptions.InvalidRequestException; import org.apache.cassandra.serializers.CollectionSerializer; import org.apache.cassandra.serializers.MarshalException; import org.apache.cassandra.serializers.SetSerializer; import org.apache.cassandra.transport.Server; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.cassandra.utils.FBUtilities; /** * Static helper methods and classes for sets. 
 */
public abstract class Sets
{
    // Static utility holder: not instantiable.
    private Sets() {}

    /**
     * Builds the {@link ColumnSpecification} describing a single element of the given
     * set column, named {@code value(<column name>)}.
     */
    public static ColumnSpecification valueSpecOf(ColumnSpecification column)
    {
        return new ColumnSpecification(column.ksName, column.cfName, new ColumnIdentifier("value(" + column.name + ")", true), ((SetType)column.type).getElementsType());
    }

    /** A parsed-but-unprepared set literal, e.g. {@code {1, 2, 3}}. */
    public static class Literal implements Term.Raw
    {
        private final List<Term.Raw> elements;

        public Literal(List<Term.Raw> elements)
        {
            this.elements = elements;
        }

        /**
         * Validates the literal against {@code receiver} and prepares each element.
         * Returns a terminal {@link Value} when all elements are terminal, otherwise a
         * {@link DelayedValue} to be bound at execution time.
         *
         * @throws InvalidRequestException if the literal is not assignable to the receiver
         *         or contains bind markers.
         */
        public Term prepare(String keyspace, ColumnSpecification receiver) throws InvalidRequestException
        {
            validateAssignableTo(keyspace, receiver);

            // We've parsed empty maps as a set literal to break the ambiguity so
            // handle that case now
            if (receiver.type instanceof MapType && elements.isEmpty())
                return new Maps.Value(Collections.<ByteBuffer, ByteBuffer>emptyMap());

            ColumnSpecification valueSpec = Sets.valueSpecOf(receiver);
            Set<Term> values = new HashSet<Term>(elements.size());
            boolean allTerminal = true;
            for (Term.Raw rt : elements)
            {
                Term t = rt.prepare(keyspace, valueSpec);

                if (t.containsBindMarker())
                    throw new InvalidRequestException(String.format("Invalid set literal for %s: bind variables are not supported inside collection literals", receiver.name));

                if (t instanceof Term.NonTerminal)
                    allTerminal = false;

                values.add(t);
            }
            DelayedValue value = new DelayedValue(((SetType)receiver.type).getElementsType(), values);
            // No bind markers involved, so binding with default options is safe here.
            return allTerminal ? value.bind(QueryOptions.DEFAULT) : value;
        }

        /**
         * Ensures the receiver is a set column (or a map column receiving an empty literal)
         * and that each element is assignable to the set's element type.
         */
        private void validateAssignableTo(String keyspace, ColumnSpecification receiver) throws InvalidRequestException
        {
            if (!(receiver.type instanceof SetType))
            {
                // We've parsed empty maps as a set literal to break the ambiguity so
                // handle that case now
                if ((receiver.type instanceof MapType) && elements.isEmpty())
                    return;

                throw new InvalidRequestException(String.format("Invalid set literal for %s of type %s", receiver.name, receiver.type.asCQL3Type()));
            }

            ColumnSpecification valueSpec = Sets.valueSpecOf(receiver);
            for (Term.Raw rt : elements)
            {
                if (!rt.testAssignment(keyspace, valueSpec).isAssignable())
                    throw new InvalidRequestException(String.format("Invalid set literal for %s: value %s is not of type %s", receiver.name, rt, valueSpec.type.asCQL3Type()));
            }
        }

        /** Non-throwing assignability check mirroring {@link #validateAssignableTo}. */
        public AssignmentTestable.TestResult testAssignment(String keyspace, ColumnSpecification receiver)
        {
            if (!(receiver.type instanceof SetType))
            {
                // We've parsed empty maps as a set literal to break the ambiguity so handle that case now
                if (receiver.type instanceof MapType && elements.isEmpty())
                    return AssignmentTestable.TestResult.WEAKLY_ASSIGNABLE;
                return AssignmentTestable.TestResult.NOT_ASSIGNABLE;
            }

            // If there is no elements, we can't say it's an exact match (an empty set if fundamentally polymorphic).
            if (elements.isEmpty())
                return AssignmentTestable.TestResult.WEAKLY_ASSIGNABLE;

            ColumnSpecification valueSpec = Sets.valueSpecOf(receiver);
            return AssignmentTestable.TestResult.testAll(keyspace, valueSpec, elements);
        }

        @Override
        public String toString()
        {
            return "{" + Joiner.on(", ").join(elements) + "}";
        }
    }

    /** A fully-bound set value: serialized elements kept sorted by the element type's comparator. */
    public static class Value extends Term.Terminal
    {
        public final SortedSet<ByteBuffer> elements;

        public Value(SortedSet<ByteBuffer> elements)
        {
            this.elements = elements;
        }

        /**
         * Deserializes a set received over the native protocol (e.g. a bound variable)
         * into a {@link Value}, translating marshalling errors into invalid-request errors.
         */
        public static Value fromSerialized(ByteBuffer value, SetType type, int version) throws InvalidRequestException
        {
            try
            {
                // Collections have this small hack that validate cannot be called on a serialized object,
                // but compose does the validation (so we're fine).
                Set<?> s = (Set<?>)type.getSerializer().deserializeForNativeProtocol(value, version);
                SortedSet<ByteBuffer> elements = new TreeSet<ByteBuffer>(type.getElementsType());
                for (Object element : s)
                    elements.add(type.getElementsType().decompose(element));
                return new Value(elements);
            }
            catch (MarshalException e)
            {
                throw new InvalidRequestException(e.getMessage());
            }
        }

        /** Packs the elements into a single serialized buffer for the given protocol version. */
        public ByteBuffer get(int protocolVersion)
        {
            return CollectionSerializer.pack(elements, elements.size(), protocolVersion);
        }

        /**
         * Element-wise equality using the set type's comparator.
         * NOTE(review): relies on both sets being sorted with the same comparator so that
         * paired iteration lines elements up.
         */
        public boolean equals(SetType st, Value v)
        {
            if (elements.size() != v.elements.size())
                return false;

            Iterator<ByteBuffer> thisIter = elements.iterator();
            Iterator<ByteBuffer> thatIter = v.elements.iterator();
            AbstractType elementsType = st.getElementsType();
            while (thisIter.hasNext())
                if (elementsType.compare(thisIter.next(), thatIter.next()) != 0)
                    return false;

            return true;
        }
    }

    // See Lists.DelayedValue
    /** A set literal containing non-terminal elements; bound to concrete bytes at execution. */
    public static class DelayedValue extends Term.NonTerminal
    {
        private final Comparator<ByteBuffer> comparator;
        private final Set<Term> elements;

        public DelayedValue(Comparator<ByteBuffer> comparator, Set<Term> elements)
        {
            this.comparator = comparator;
            this.elements = elements;
        }

        public boolean containsBindMarker()
        {
            // False since we don't support them in collection
            return false;
        }

        public void collectMarkerSpecification(VariableSpecifications boundNames)
        {
        }

        /**
         * Binds every element and returns the resulting terminal {@link Value}.
         *
         * @throws InvalidRequestException on a null element or an element over 64K
         *         (the serialization format encodes lengths as unsigned shorts).
         */
        public Value bind(QueryOptions options) throws InvalidRequestException
        {
            SortedSet<ByteBuffer> buffers = new TreeSet<>(comparator);
            for (Term t : elements)
            {
                ByteBuffer bytes = t.bindAndGet(options);

                if (bytes == null)
                    throw new InvalidRequestException("null is not supported inside collections");

                // We don't support value > 64K because the serialization format encode the length as an unsigned short.
                if (bytes.remaining() > FBUtilities.MAX_UNSIGNED_SHORT)
                    throw new InvalidRequestException(String.format("Set value is too long. Set values are limited to %d bytes but %d bytes value provided", FBUtilities.MAX_UNSIGNED_SHORT, bytes.remaining()));

                buffers.add(bytes);
            }
            return new Value(buffers);
        }
    }

    /** A bind marker (?) standing for a whole set. */
    public static class Marker extends AbstractMarker
    {
        protected Marker(int bindIndex, ColumnSpecification receiver)
        {
            super(bindIndex, receiver);
            assert receiver.type instanceof SetType;
        }

        /** Resolves the bound bytes for this marker; null when the client bound null. */
        public Value bind(QueryOptions options) throws InvalidRequestException
        {
            ByteBuffer value = options.getValues().get(bindIndex);
            return value == null ? null : Value.fromSerialized(value, (SetType)receiver.type, options.getProtocolVersion());
        }
    }

    /** {@code s = {...}}: overwrite the whole set column. */
    public static class Setter extends Operation
    {
        public Setter(ColumnDefinition column, Term t)
        {
            super(column, t);
        }

        public void execute(ByteBuffer rowKey, ColumnFamily cf, Composite prefix, UpdateParameters params) throws InvalidRequestException
        {
            if (column.type.isMultiCell())
            {
                // delete + add
                CellName name = cf.getComparator().create(prefix, column);
                cf.addAtom(params.makeTombstoneForOverwrite(name.slice()));
            }
            Adder.doAdd(t, cf, prefix, column, params);
        }
    }

    /** {@code s = s + {...}}: add elements to a non-frozen set. */
    public static class Adder extends Operation
    {
        public Adder(ColumnDefinition column, Term t)
        {
            super(column, t);
        }

        public void execute(ByteBuffer rowKey, ColumnFamily cf, Composite prefix, UpdateParameters params) throws InvalidRequestException
        {
            assert column.type.isMultiCell() : "Attempted to add items to a frozen set";
            doAdd(t, cf, prefix, column, params);
        }

        /**
         * Writes the bound set into {@code cf}: one empty-valued cell per element for
         * multi-cell sets, or a single packed cell (or tombstone for null) for frozen sets.
         */
        static void doAdd(Term t, ColumnFamily cf, Composite prefix, ColumnDefinition column, UpdateParameters params) throws InvalidRequestException
        {
            Term.Terminal value = t.bind(params.options);
            if (column.type.isMultiCell())
            {
                if (value == null)
                    return;

                for (ByteBuffer bb : ((Value) value).elements)
                {
                    // The element lives in the cell name; the cell value is empty.
                    CellName cellName = cf.getComparator().create(prefix, column, bb);
                    cf.addColumn(params.makeColumn(cellName, ByteBufferUtil.EMPTY_BYTE_BUFFER));
                }
            }
            else
            {
                // for frozen sets, we're overwriting the whole cell
                CellName cellName = cf.getComparator().create(prefix, column);
                if (value == null)
                    cf.addAtom(params.makeTombstone(cellName));
                else
                    cf.addColumn(params.makeColumn(cellName, value.get(Server.CURRENT_VERSION)));
            }
        }
    }

    // Note that this is reused for Map subtraction too (we subtract a set from a map)
    /** {@code s = s - {...}}: tombstone each discarded element's cell. */
    public static class Discarder extends Operation
    {
        public Discarder(ColumnDefinition column, Term t)
        {
            super(column, t);
        }

        public void execute(ByteBuffer rowKey, ColumnFamily cf, Composite prefix, UpdateParameters params) throws InvalidRequestException
        {
            assert column.type.isMultiCell() : "Attempted to remove items from a frozen set";

            Term.Terminal value = t.bind(params.options);
            if (value == null)
                return;

            // This can be either a set or a single element
            Set<ByteBuffer> toDiscard = value instanceof Sets.Value
                                      ? ((Sets.Value)value).elements
                                      : Collections.singleton(value.get(params.options.getProtocolVersion()));

            for (ByteBuffer bb : toDiscard)
                cf.addColumn(params.makeTombstone(cf.getComparator().create(prefix, column, bb)));
        }
    }

    /** {@code DELETE s[k]}: tombstone a single element of a non-frozen set. */
    public static class ElementDiscarder extends Operation
    {
        public ElementDiscarder(ColumnDefinition column, Term k)
        {
            super(column, k);
        }

        public void execute(ByteBuffer rowKey, ColumnFamily cf, Composite prefix, UpdateParameters params) throws InvalidRequestException
        {
            assert column.type.isMultiCell() : "Attempted to delete a single element in a frozen set";
            Term.Terminal elt = t.bind(params.options);
            if (elt == null)
                throw new InvalidRequestException("Invalid null set element");

            CellName cellName = cf.getComparator().create(prefix, column, elt.get(params.options.getProtocolVersion()));
            cf.addColumn(params.makeTombstone(cellName));
        }
    }
}
package com.tyrfing.games.tyrlib3.view.graphics.text;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;

import com.tyrfing.games.tyrlib3.view.graphics.TyrGL;
import com.tyrfing.games.tyrlib3.view.graphics.renderer.OpenGLRenderer;

/**
 * Fixed-capacity vertex/index buffer pair for 2D text rendering.
 *
 * Each vertex is position (2 floats) + texture coordinate (2 floats) + MVP matrix
 * index (1 float). Float data is staged through an IntBuffer via raw bit conversion.
 * When {@code TyrGL.GL_USE_VBO == 1} the data is mirrored into two GL buffer objects,
 * otherwise client-side buffers are passed directly to the attribute pointers.
 */
public class Vertices {

	//--Constants--//
	final static int POSITION_CNT_2D = 2;              // Number of Components in Vertex Position for 2D
	final static int POSITION_CNT_3D = 3;              // Number of Components in Vertex Position for 3D
	final static int COLOR_CNT = 4;                    // Number of Components in Vertex Color
	final static int TEXCOORD_CNT = 2;                 // Number of Components in Vertex Texture Coords
	final static int NORMAL_CNT = 3;                   // Number of Components in Vertex Normal
	private static final int MVP_MATRIX_INDEX_CNT = 1; // Number of Components in MVP matrix index
	final static int INDEX_SIZE = Short.SIZE / 8;      // Index Byte Size (Short.SIZE = bits)

	//--Members--//
	// NOTE: all members are constant, and initialized in constructor!
	public final int positionCnt;   // Number of Position Components (2=2D, 3=3D)
	public final int vertexStride;  // Vertex Stride (Element Size of a Single Vertex)
	public final int vertexSize;    // Bytesize of a Single Vertex
	final IntBuffer vertices;       // Vertex Buffer
	final ShortBuffer indices;      // Index Buffer
	public int numVertices;         // Number of Vertices in Buffer
	public int numIndices;          // Number of Indices in Buffer
	final int[] tmpBuffer;          // Temp Buffer for Vertex Conversion

	private int mTextureCoordinateHandle;
	private int mPositionHandle;
	private int mMVPIndexHandle;
	private int[] buffers = new int[2]; // [0] = vertex VBO, [1] = index VBO

	/**
	 * Creates the vertex/index storage (2D layout).
	 *
	 * @param maxVertices maximum vertices allowed in buffer
	 * @param maxIndices  maximum indices allowed in buffer; 0 for no index buffer
	 */
	public Vertices(int maxVertices, int maxIndices) {
		this.positionCnt = POSITION_CNT_2D;
		this.vertexStride = this.positionCnt + TEXCOORD_CNT + MVP_MATRIX_INDEX_CNT;
		this.vertexSize = this.vertexStride * 4; // 4 bytes per float component

		// Direct, native-ordered buffer so GL can read it without copying.
		ByteBuffer buffer = ByteBuffer.allocateDirect(maxVertices * vertexSize);
		buffer.order(ByteOrder.nativeOrder());
		this.vertices = buffer.asIntBuffer();

		if (maxIndices > 0) {
			buffer = ByteBuffer.allocateDirect(maxIndices * INDEX_SIZE);
			buffer.order(ByteOrder.nativeOrder());
			this.indices = buffer.asShortBuffer();
		}
		else {
			indices = null; // no index buffer requested
		}

		numVertices = 0;
		numIndices = 0;
		this.tmpBuffer = new int[maxVertices * vertexSize / 4];

		// Shader attribute handles (locations are fixed by AttribVariable).
		mTextureCoordinateHandle = AttribVariable.A_TexCoordinate.getHandle();
		mMVPIndexHandle = AttribVariable.A_MVPMatrixIndex.getHandle();
		mPositionHandle = AttribVariable.A_Position.getHandle();

		if (TyrGL.GL_USE_VBO == 1) {
			TyrGL.glGenBuffers(2, buffers, 0);
			TyrGL.glBindBuffer(TyrGL.GL_ARRAY_BUFFER, buffers[0]);
			TyrGL.glBufferData(TyrGL.GL_ARRAY_BUFFER, maxVertices * vertexStride * OpenGLRenderer.BYTES_PER_FLOAT,
					vertices, TyrGL.GL_STREAM_DRAW);
			// NOTE(review): the index buffer is allocated through the GL_ARRAY_BUFFER
			// binding point. This works (buffer objects are target-agnostic once created)
			// but GL_ELEMENT_ARRAY_BUFFER would be the conventional target; draw() binds
			// it as GL_ELEMENT_ARRAY_BUFFER.
			TyrGL.glBindBuffer(TyrGL.GL_ARRAY_BUFFER, buffers[1]);
			TyrGL.glBufferData(TyrGL.GL_ARRAY_BUFFER, maxIndices * 2, indices, TyrGL.GL_STREAM_DRAW);
		}
	}

	/**
	 * Sets the specified vertices in the vertex buffer.
	 * NOTE: optimized to use integer buffer!
	 *
	 * @param vertices array of vertices (floats) to set
	 * @param offset   offset to first vertex in array
	 * @param length   number of floats in the vertex array (total);
	 *                 for easy setting use: vtx_cnt * (this.vertexSize / 4)
	 */
	public void setVertices(float[] vertices, int offset, int length) {
		this.vertices.clear(); // remove existing vertices
		int last = offset + length;
		// Stage floats as raw int bits so a single bulk put() suffices.
		for (int i = offset, j = 0; i < last; i++, j++)
			tmpBuffer[j] = Float.floatToRawIntBits(vertices[i]);
		this.vertices.put(tmpBuffer, 0, length);
		this.vertices.flip();
		this.numVertices = length / this.vertexStride;

		if (TyrGL.GL_USE_VBO == 1) {
			TyrGL.glBindBuffer(TyrGL.GL_ARRAY_BUFFER, buffers[0]);
			TyrGL.glBufferSubData(TyrGL.GL_ARRAY_BUFFER, 0,
					this.numVertices * OpenGLRenderer.BYTES_PER_FLOAT * this.vertexStride, this.vertices);
		}
	}

	/**
	 * Sets the specified indices in the index buffer.
	 *
	 * @param indices array of indices (shorts) to set
	 * @param offset  offset to first index in array
	 * @param length  number of indices in array (from offset)
	 */
	public void setIndices(short[] indices, int offset, int length) {
		this.indices.clear();
		this.indices.put(indices, offset, length);
		this.indices.flip();
		this.numIndices = length;

		if (TyrGL.GL_USE_VBO == 1) {
			// See constructor note: index VBO is updated via the GL_ARRAY_BUFFER target.
			TyrGL.glBindBuffer(TyrGL.GL_ARRAY_BUFFER, buffers[1]);
			TyrGL.glBufferSubData(TyrGL.GL_ARRAY_BUFFER, 0, numIndices * 2, this.indices);
		}
	}

	/**
	 * Performs all required binding/state changes before rendering batches:
	 * enables and points the position, texture-coordinate and MVP-index attributes.
	 * USAGE: call once before calling draw() multiple times for this buffer.
	 */
	public void bind() {
		if (TyrGL.GL_USE_VBO == 1) {
			TyrGL.glBindBuffer(TyrGL.GL_ARRAY_BUFFER, buffers[0]);

			// Attribute offsets are byte offsets into the interleaved VBO.
			TyrGL.glVertexAttribPointer(mPositionHandle, positionCnt, TyrGL.GL_FLOAT, false, vertexSize, 0);
			TyrGL.glEnableVertexAttribArray(mPositionHandle);

			TyrGL.glVertexAttribPointer(mTextureCoordinateHandle, TEXCOORD_CNT, TyrGL.GL_FLOAT, false, vertexSize,
					positionCnt * OpenGLRenderer.BYTES_PER_FLOAT);
			TyrGL.glEnableVertexAttribArray(mTextureCoordinateHandle);

			TyrGL.glVertexAttribPointer(mMVPIndexHandle, MVP_MATRIX_INDEX_CNT, TyrGL.GL_FLOAT, false, vertexSize,
					(positionCnt + TEXCOORD_CNT) * OpenGLRenderer.BYTES_PER_FLOAT);
			TyrGL.glEnableVertexAttribArray(mMVPIndexHandle);
		} else {
			// Client-side arrays: reposition the shared buffer before each pointer call.
			vertices.position(0);
			TyrGL.glVertexAttribPointer(mPositionHandle, positionCnt, TyrGL.GL_FLOAT, false, vertexSize, vertices);
			TyrGL.glEnableVertexAttribArray(mPositionHandle);

			vertices.position(positionCnt);
			TyrGL.glVertexAttribPointer(mTextureCoordinateHandle, TEXCOORD_CNT, TyrGL.GL_FLOAT, false, vertexSize, vertices);
			TyrGL.glEnableVertexAttribArray(mTextureCoordinateHandle);

			vertices.position(positionCnt + TEXCOORD_CNT);
			TyrGL.glVertexAttribPointer(mMVPIndexHandle, MVP_MATRIX_INDEX_CNT, TyrGL.GL_FLOAT, false, vertexSize, vertices);
			TyrGL.glEnableVertexAttribArray(mMVPIndexHandle);
		}
	}

	/**
	 * Draws the currently bound vertices in the vertex/index buffers.
	 * USAGE: can only be called after calling bind() for this buffer.
	 *
	 * @param primitiveType the type of primitive to draw
	 * @param offset        the offset in the vertex/index buffer to start at
	 * @param numVertices   the number of vertices (indices) to draw
	 */
	public void draw(int primitiveType, int offset, int numVertices) {
		if (indices != null) { // indexed draw
			if (TyrGL.GL_USE_VBO == 1) {
				TyrGL.glBindBuffer(TyrGL.GL_ELEMENT_ARRAY_BUFFER, buffers[1]);
				TyrGL.glDrawElements(primitiveType, numVertices, TyrGL.GL_UNSIGNED_SHORT, offset);
			} else {
				indices.position(offset);
				TyrGL.glDrawElements(primitiveType, numVertices, TyrGL.GL_UNSIGNED_SHORT, indices);
			}
		} else { // direct draw
			TyrGL.glDrawArrays(primitiveType, offset, numVertices);
		}
	}

	/**
	 * Clears binding states when done rendering batches.
	 * USAGE: call once after the draw() calls for this buffer.
	 *
	 * FIX: bind() enables three vertex attribute arrays (position, texture coordinate,
	 * MVP index) but this method previously disabled only the texture-coordinate one,
	 * leaking enabled-attribute state into subsequent draws that use different layouts.
	 * All three are now disabled symmetrically.
	 */
	public void unbind() {
		TyrGL.glDisableVertexAttribArray(mPositionHandle);
		TyrGL.glDisableVertexAttribArray(mTextureCoordinateHandle);
		TyrGL.glDisableVertexAttribArray(mMVPIndexHandle);
	}
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.jsmpp.bean;

import java.util.Arrays;
import java.util.Objects;

import org.jsmpp.bean.OptionalParameter.Tag;

/**
 * Value object for the SMPP {@code submit_multi} PDU: a short message submitted
 * to several destination addresses at once, with the usual SMPP addressing,
 * scheduling and encoding attributes plus optional TLV parameters.
 *
 * @author uudashr
 *
 */
public class SubmitMulti extends Command {
    private static final long serialVersionUID = -6800953916456361473L;

    private String serviceType;
    private byte sourceAddrTon;
    private byte sourceAddrNpi;
    private String sourceAddr;
    private DestinationAddress[] destAddresses;
    private byte esmClass;
    private byte protocolId;
    private byte priorityFlag;
    private String scheduleDeliveryTime;
    private String validityPeriod;
    private byte registeredDelivery;
    private byte replaceIfPresentFlag;
    private byte dataCoding;
    private byte smDefaultMsgId;
    private byte[] shortMessage;
    private OptionalParameter[] optionalParameters;

    public SubmitMulti() {
        super();
    }

    /** @return the SMPP service_type field. */
    public String getServiceType() {
        return serviceType;
    }

    public void setServiceType(String value) {
        this.serviceType = value;
    }

    /** @return the source address type-of-number. */
    public byte getSourceAddrTon() {
        return sourceAddrTon;
    }

    public void setSourceAddrTon(byte value) {
        this.sourceAddrTon = value;
    }

    /** @return the source address numbering-plan-indicator. */
    public byte getSourceAddrNpi() {
        return sourceAddrNpi;
    }

    public void setSourceAddrNpi(byte value) {
        this.sourceAddrNpi = value;
    }

    /** @return the originating address. */
    public String getSourceAddr() {
        return sourceAddr;
    }

    public void setSourceAddr(String value) {
        this.sourceAddr = value;
    }

    /** @return the list of destination addresses (SME addresses and/or distribution lists). */
    public DestinationAddress[] getDestAddresses() {
        return destAddresses;
    }

    public void setDestAddresses(DestinationAddress[] value) {
        this.destAddresses = value;
    }

    /** @return the esm_class messaging-mode/type flags. */
    public byte getEsmClass() {
        return esmClass;
    }

    public void setEsmClass(byte value) {
        this.esmClass = value;
    }

    /** @return the protocol identifier. */
    public byte getProtocolId() {
        return protocolId;
    }

    public void setProtocolId(byte value) {
        this.protocolId = value;
    }

    /** @return the priority flag. */
    public byte getPriorityFlag() {
        return priorityFlag;
    }

    public void setPriorityFlag(byte value) {
        this.priorityFlag = value;
    }

    /** @return the scheduled delivery time, in SMPP time format. */
    public String getScheduleDeliveryTime() {
        return scheduleDeliveryTime;
    }

    public void setScheduleDeliveryTime(String value) {
        this.scheduleDeliveryTime = value;
    }

    /** @return the validity period, in SMPP time format. */
    public String getValidityPeriod() {
        return validityPeriod;
    }

    public void setValidityPeriod(String value) {
        this.validityPeriod = value;
    }

    /** @return the registered_delivery receipt-request flags. */
    public byte getRegisteredDelivery() {
        return registeredDelivery;
    }

    public void setRegisteredDelivery(byte value) {
        this.registeredDelivery = value;
    }

    /** @return the replace_if_present flag. */
    public byte getReplaceIfPresentFlag() {
        return replaceIfPresentFlag;
    }

    public void setReplaceIfPresentFlag(byte value) {
        this.replaceIfPresentFlag = value;
    }

    /** @return the data_coding scheme. */
    public byte getDataCoding() {
        return dataCoding;
    }

    public void setDataCoding(byte value) {
        this.dataCoding = value;
    }

    /** @return the sm_default_msg_id (canned-message id). */
    public byte getSmDefaultMsgId() {
        return smDefaultMsgId;
    }

    public void setSmDefaultMsgId(byte value) {
        this.smDefaultMsgId = value;
    }

    /** @return the short message payload bytes. */
    public byte[] getShortMessage() {
        return shortMessage;
    }

    public void setShortMessage(byte[] value) {
        this.shortMessage = value;
    }

    /**
     * Looks up an optional parameter by its concrete class.
     *
     * @param tagClass the optional-parameter class to search for
     * @return the matching parameter, or {@code null} if absent
     */
    public <U extends OptionalParameter> U getOptionalParameter(Class<U> tagClass) {
        return OptionalParameters.get(tagClass, optionalParameters);
    }

    /**
     * Looks up an optional parameter by its TLV tag.
     *
     * @param tagEnum the tag to search for
     * @return the matching parameter, or {@code null} if absent
     */
    public OptionalParameter getOptionalParameter(Tag tagEnum) {
        return OptionalParameters.get(tagEnum.code(), optionalParameters);
    }

    /** @return all optional TLV parameters. */
    public OptionalParameter[] getOptionalParameters() {
        return optionalParameters;
    }

    public void setOptionalParameters(OptionalParameter[] value) {
        this.optionalParameters = value;
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof SubmitMulti) || !super.equals(o)) {
            return false;
        }
        final SubmitMulti other = (SubmitMulti) o;
        // Compare the cheap scalar fields first, then the object and array fields.
        final boolean scalarsEqual =
            sourceAddrTon == other.sourceAddrTon
                && sourceAddrNpi == other.sourceAddrNpi
                && esmClass == other.esmClass
                && protocolId == other.protocolId
                && priorityFlag == other.priorityFlag
                && registeredDelivery == other.registeredDelivery
                && replaceIfPresentFlag == other.replaceIfPresentFlag
                && dataCoding == other.dataCoding
                && smDefaultMsgId == other.smDefaultMsgId;
        if (!scalarsEqual) {
            return false;
        }
        return Objects.equals(serviceType, other.serviceType)
            && Objects.equals(sourceAddr, other.sourceAddr)
            && Arrays.equals(destAddresses, other.destAddresses)
            && Objects.equals(scheduleDeliveryTime, other.scheduleDeliveryTime)
            && Objects.equals(validityPeriod, other.validityPeriod)
            && Arrays.equals(shortMessage, other.shortMessage)
            && Arrays.equals(optionalParameters, other.optionalParameters);
    }

    @Override
    public int hashCode() {
        // Scalar and reference fields in declaration order, then the array fields
        // folded in one at a time (same scheme and values as before).
        int hash = Objects.hash(super.hashCode(), serviceType, sourceAddrTon, sourceAddrNpi, sourceAddr,
            esmClass, protocolId, priorityFlag, scheduleDeliveryTime, validityPeriod,
            registeredDelivery, replaceIfPresentFlag, dataCoding, smDefaultMsgId);
        hash = 31 * hash + Arrays.hashCode(destAddresses);
        hash = 31 * hash + Arrays.hashCode(shortMessage);
        hash = 31 * hash + Arrays.hashCode(optionalParameters);
        return hash;
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.common.util;

import java.io.File;
import java.util.Properties;

/**
 * A utility class for replacing properties in strings.
 *
 * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
 * @author <a href="Scott.Stark@jboss.org">Scott Stark</a>
 * @author <a href="claudio.vesco@previnet.it">Claudio Vesco</a>
 * @author <a href="mailto:adrian@jboss.com">Adrian Brock</a>
 * @author <a href="mailto:dimitris@jboss.org">Dimitris Andreadis</a>
 * @version <tt>$Revision: 2898 $</tt>
 */
public final class StringPropertyReplacer
{
    /** New line string constant */
    public static final String NEWLINE = System.getProperty("line.separator", "\n");

    /** File separator value */
    private static final String FILE_SEPARATOR = File.separator;

    /** Path separator value */
    private static final String PATH_SEPARATOR = File.pathSeparator;

    /** File separator alias: ${/} expands to the platform file separator */
    private static final String FILE_SEPARATOR_ALIAS = "/";

    /** Path separator alias: ${:} expands to the platform path separator */
    private static final String PATH_SEPARATOR_ALIAS = ":";

    // States used in property parsing (simple 3-state scanner over the input)
    private static final int NORMAL = 0;       // outside any ${...} reference
    private static final int SEEN_DOLLAR = 1;  // just saw '$', expecting '{'
    private static final int IN_BRACKET = 2;   // inside ${...}, collecting the key

    /**
     * Go through the input string and replace any occurance of ${p} with
     * the System.getProperty(p) value. If there is no such property p defined,
     * then the ${p} reference will remain unchanged.
     *
     * If the property reference is of the form ${p:v} and there is no such property p,
     * then the default value v will be returned.
     *
     * If the property reference is of the form ${p1,p2} or ${p1,p2:v} then
     * the primary and the secondary properties will be tried in turn, before
     * returning either the unchanged input, or the default value.
     *
     * The property ${/} is replaced with System.getProperty("file.separator")
     * value and the property ${:} is replaced with System.getProperty("path.separator").
     *
     * @param string - the string with possible ${} references
     * @return the input string with all property references replaced if any.
     * If there are no valid references the input string will be returned.
     */
    public static String replaceProperties(final String string)
    {
        // Delegates to the Properties-aware overload with system properties as the source.
        return replaceProperties(string, null);
    }

    /**
     * Go through the input string and replace any occurance of ${p} with
     * the props.getProperty(p) value. If there is no such property p defined,
     * then the ${p} reference will remain unchanged.
     *
     * If the property reference is of the form ${p:v} and there is no such property p,
     * then the default value v will be returned.
     *
     * If the property reference is of the form ${p1,p2} or ${p1,p2:v} then
     * the primary and the secondary properties will be tried in turn, before
     * returning either the unchanged input, or the default value.
     *
     * The property ${/} is replaced with System.getProperty("file.separator")
     * value and the property ${:} is replaced with System.getProperty("path.separator").
     *
     * @param string - the string with possible ${} references
     * @param props - the source for ${x} property ref values, null means use System.getProperty()
     * @return the input string with all property references replaced if any.
     * If there are no valid references the input string will be returned.
*/ public static String replaceProperties(final String string, final Properties props) { if(string == null) { return null; } final char[] chars = string.toCharArray(); StringBuilder buffer = new StringBuilder(); boolean properties = false; int state = NORMAL; int start = 0; for (int i = 0; i < chars.length; ++i) { char c = chars[i]; // Dollar sign outside brackets if (c == '$' && state != IN_BRACKET) state = SEEN_DOLLAR; // Open bracket immediatley after dollar else if (c == '{' && state == SEEN_DOLLAR) { buffer.append(string.substring(start, i - 1)); state = IN_BRACKET; start = i - 1; } // No open bracket after dollar else if (state == SEEN_DOLLAR) state = NORMAL; // Closed bracket after open bracket else if (c == '}' && state == IN_BRACKET) { // No content if (start + 2 == i) { buffer.append("${}"); // REVIEW: Correct? } else // Collect the system property { String value = null; String key = string.substring(start + 2, i); // check for alias if (FILE_SEPARATOR_ALIAS.equals(key)) { value = FILE_SEPARATOR; } else if (PATH_SEPARATOR_ALIAS.equals(key)) { value = PATH_SEPARATOR; } else { // check from the properties if (props != null) value = props.getProperty(key); else value = System.getProperty(key); if (value == null) { // Check for a default value ${key:default} int colon = key.indexOf(':'); if (colon > 0) { String realKey = key.substring(0, colon); if (props != null) value = props.getProperty(realKey); else value = System.getProperty(realKey); if (value == null) { // Check for a composite key, "key1,key2" value = resolveCompositeKey(realKey, props); // Not a composite key either, use the specified default if (value == null) value = key.substring(colon+1); } } else { // No default, check for a composite key, "key1,key2" value = resolveCompositeKey(key, props); } } } if (value != null) { properties = true; buffer.append(value); } else { buffer.append("${"); buffer.append(key); buffer.append('}'); } } start = i + 1; state = NORMAL; } } // No properties if 
(!properties) return string; // Collect the trailing characters if (start != chars.length) buffer.append(string.substring(start, chars.length)); // Done return buffer.toString(); } /** * Try to resolve a "key" from the provided properties by * checking if it is actually a "key1,key2", in which case * try first "key1", then "key2". If all fails, return null. * * It also accepts "key1," and ",key2". * * @param key the key to resolve * @param props the properties to use * @return the resolved key or null */ private static String resolveCompositeKey(String key, Properties props) { String value = null; // Look for the comma int comma = key.indexOf(','); if (comma > -1) { // If we have a first part, try resolve it if (comma > 0) { // Check the first part String key1 = key.substring(0, comma); if (props != null) value = props.getProperty(key1); else value = System.getProperty(key1); } // Check the second part, if there is one and first lookup failed if (value == null && comma < key.length() - 1) { String key2 = key.substring(comma + 1); if (props != null) value = props.getProperty(key2); else value = System.getProperty(key2); } } // Return whatever we've found or null return value; } }
/*
 * Copyright 2014 Dan Haywood
 *
 * Licensed under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.isisaddons.module.security.dom.role;

import java.util.List;

import com.danhaywood.java.testsupport.coverage.PojoTester;
import com.danhaywood.java.testsupport.coverage.PrivateConstructorTester;
import com.google.common.collect.Lists;

import org.jmock.Expectations;
import org.jmock.auto.Mock;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;

import org.apache.isis.applib.services.appfeat.ApplicationFeatureRepository;
import org.apache.isis.applib.services.appfeat.ApplicationMemberType;
import org.apache.isis.core.metamodel.services.appfeat.ApplicationFeature;
import org.apache.isis.core.metamodel.services.appfeat.ApplicationFeatureId;
import org.apache.isis.core.metamodel.services.appfeat.ApplicationFeatureType;
import org.apache.isis.core.unittestsupport.comparable.ComparableContractTest_compareTo;
import org.apache.isis.core.unittestsupport.jmocking.JUnitRuleMockery2;

import org.isisaddons.module.security.dom.permission.ApplicationPermission;
import org.isisaddons.module.security.dom.permission.ApplicationPermissionMode;
import org.isisaddons.module.security.dom.permission.ApplicationPermissionRepository;
import org.isisaddons.module.security.dom.permission.ApplicationPermissionRule;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.CoreMatchers.sameInstance;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;

/**
 * Unit tests for {@link ApplicationRole}.
 *
 * <p>The outer class holds the shared fixture (mocked repositories injected
 * directly into the role's package-visible fields); each nested static class
 * groups the tests for one action or supporting method, inheriting the
 * fixture via subclassing of this outer class.</p>
 */
public class ApplicationRoleTest {

    @Rule
    public JUnitRuleMockery2 context = JUnitRuleMockery2.createFor(JUnitRuleMockery2.Mode.INTERFACES_AND_CLASSES);

    ApplicationRole applicationRole;

    @Mock
    ApplicationPermissionRepository mockApplicationPermissionRepository;
    @Mock
    ApplicationFeatureRepository mockApplicationFeatureRepository;

    // Sample features; only their ids are set (two packages, two classes).
    final ApplicationFeature pkg1 = new ApplicationFeature();
    final ApplicationFeature pkg2 = new ApplicationFeature();
    final ApplicationFeature cls1 = new ApplicationFeature();
    final ApplicationFeature cls2 = new ApplicationFeature();

    @Before
    public void setUp() throws Exception {
        applicationRole = new ApplicationRole();
        // field (not setter) injection of the mocked collaborators
        applicationRole.applicationPermissionRepository = mockApplicationPermissionRepository;
        applicationRole.applicationFeatureRepository = mockApplicationFeatureRepository;

        pkg1.setFeatureId(ApplicationFeatureId.newFeature(ApplicationFeatureType.PACKAGE, "com.mycompany"));
        pkg2.setFeatureId(ApplicationFeatureId.newFeature(ApplicationFeatureType.PACKAGE, "com.mycompany.foo"));
        cls1.setFeatureId(ApplicationFeatureId.newFeature(ApplicationFeatureType.CLASS, "com.mycompany.Bar"));
        cls2.setFeatureId(ApplicationFeatureId.newFeature(ApplicationFeatureType.CLASS, "com.mycompany.Baz"));
    }

    /** title() simply echoes the role's name. */
    public static class Title extends ApplicationRoleTest {

        @Test
        public void whenMember() throws Exception {
            applicationRole.setName("Role1");
            assertThat(applicationRole.title(), is("Role1"));
        }
    }

    /** Getter/setter coverage via the PojoTester exerciser. */
    public static class BeanProperties extends ApplicationRoleTest {

        @Test
        public void exercise() throws Exception {
            PojoTester.relaxed()
                    .exercise(new ApplicationRole());
        }
    }

    public static class UpdateName extends ApplicationRoleTest {

        public static class Action extends UpdateName {

            @Test
            public void happyCase() throws Exception {
                // given
                applicationRole.setName("original name");

                // when
                applicationRole.updateName("New name");

                // then
                assertThat(applicationRole.getName(), is("New name"));
            }
        }

        /** default0... supplies the current name as the action's default argument. */
        public static class Default0 extends UpdateName {

            @Test
            public void happyCase() throws Exception {
                applicationRole.setName("Original name");
                assertThat(applicationRole.default0UpdateName(), is("Original name"));
            }
        }
    }

    public static class UpdateDescription extends ApplicationRoleTest {

        public static class Action extends UpdateDescription {

            @Test
            public void happyCase() throws Exception {
                // given
                applicationRole.setDescription("original description");

                // when
                applicationRole.updateDescription("New description");

                // then
                assertThat(applicationRole.getDescription(), is("New description"));
            }

            @Test
            public void setToNull() throws Exception {
                // given
                applicationRole.setDescription("original description");

                // when
                applicationRole.updateDescription(null);

                // then
                assertThat(applicationRole.getDescription(), is(nullValue()));
            }
        }

        public static class Default0 extends ApplicationRoleTest {

            @Test
            public void happyCase() throws Exception {
                applicationRole.setDescription("Original descr");
                assertThat(applicationRole.default0UpdateDescription(), is("Original descr"));
            }
        }
    }

    /** getPermissions() delegates to the permission repository. */
    public static class GetPermissions extends ApplicationRoleTest {

        @Test
        public void happyCase() throws Exception {
            final List<ApplicationPermission> result = Lists.newArrayList();
            context.checking(new Expectations() {{
                oneOf(mockApplicationPermissionRepository).findByRole(applicationRole);
                will(returnValue(result));
            }});

            assertThat(applicationRole.getPermissions(), is(result));
        }
    }

    public static class AddPackage extends ApplicationRoleTest {

        public static class ActionImpl extends AddPackage {

            @Test
            public void happyCase() throws Exception {
                context.checking(new Expectations() {{
                    oneOf(mockApplicationPermissionRepository).newPermission(applicationRole, ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, ApplicationFeatureType.PACKAGE, "com.mycompany");
                }});

                // the action returns its owner so the UI stays on the role
                final ApplicationRole role = applicationRole.addPackage(ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany");
                assertThat(role, sameInstance(applicationRole));
            }
        }

        public static class Default0 extends AddPackage {

            @Test
            public void happyCase() throws Exception {
                assertThat(applicationRole.default0AddPackage(), sameInstance(ApplicationPermissionRule.ALLOW));
            }
        }

        public static class Default1 extends AddPackage {

            @Test
            public void happyCase() throws Exception {
                assertThat(applicationRole.default1AddPackage(), sameInstance(ApplicationPermissionMode.CHANGING));
            }
        }

        /** choices2... lists the known package names from the feature repository. */
        public static class Choices2 extends AddPackage {

            @Test
            public void happyCase() throws Exception {
                context.checking(new Expectations() {{
                    allowing(mockApplicationFeatureRepository).packageNames();
                    will(returnValue(Lists.newArrayList("com.mycompany", "com.mycompany.foo")));
                }});

                final List<String> packageNames = applicationRole.choices2AddPackage();
                assertThat(packageNames, containsInAnyOrder("com.mycompany", "com.mycompany.foo"));
            }
        }
    }

    public static class AddClass extends ApplicationRoleTest {

        public static class ActionImpl extends AddClass {

            @Test
            public void happyCase() throws Exception {
                context.checking(new Expectations() {{
                    oneOf(mockApplicationPermissionRepository).newPermission(applicationRole, ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, ApplicationFeatureType.CLASS, "com.mycompany.Bar");
                }});

                final ApplicationRole role = applicationRole.addClass(ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany", "Bar");
                assertThat(role, sameInstance(applicationRole));
            }
        }

        public static class Default0 extends AddClass {

            @Test
            public void happyCase() throws Exception {
                assertThat(applicationRole.default0AddClass(), sameInstance(ApplicationPermissionRule.ALLOW));
            }
        }

        public static class Default1 extends AddClass {

            @Test
            public void happyCase() throws Exception {
                assertThat(applicationRole.default1AddClass(), sameInstance(ApplicationPermissionMode.CHANGING));
            }
        }

        public static class Choices2 extends AddClass {

            @Test
            public void happyCase() throws Exception {
                context.checking(new Expectations() {{
                    // null member type = packages containing classes of any member type
                    allowing(mockApplicationFeatureRepository).packageNamesContainingClasses(null);
                    will(returnValue(Lists.newArrayList("com.mycompany", "com.mycompany.foo")));
                }});

                final List<String> packageNames = applicationRole.choices2AddClass();
                assertThat(packageNames, containsInAnyOrder("com.mycompany", "com.mycompany.foo"));
            }
        }

        public static class Choices3 extends AddClass {

            @Test
            public void happyCase() throws Exception {
                context.checking(new Expectations() {{
                    allowing(mockApplicationFeatureRepository).classNamesContainedIn("com.mycompany", null);
                    will(returnValue(Lists.newArrayList("Bar", "Baz")));
                }});

                final List<String> classNames = applicationRole.choices3AddClass(
                        ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany");
                assertThat(classNames, containsInAnyOrder("Bar", "Baz"));
            }
        }
    }

    /**
     * addAction/addProperty/addCollection share the same shape, so one
     * fixture (and one newPermission expectation) covers all three.
     */
    public static class AddAction_or_AddProperty_or_AddCollection extends ApplicationRoleTest {

        public static class ActionImpl extends AddAction_or_AddProperty_or_AddCollection {

            @Before
            public void setUp() throws Exception {
                super.setUp();
                applicationRole.applicationPermissionRepository = mockApplicationPermissionRepository;

                context.checking(new Expectations() {{
                    oneOf(mockApplicationPermissionRepository).newPermission(applicationRole, ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany", "Bar", "foo");
                }});
            }

            @Test
            public void forAction() throws Exception {
                final ApplicationRole role = applicationRole.addAction(ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany", "Bar", "foo");
                assertThat(role, sameInstance(applicationRole));
            }

            @Test
            public void forProperty() throws Exception {
                final ApplicationRole role = applicationRole.addProperty(ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany", "Bar", "foo");
                assertThat(role, sameInstance(applicationRole));
            }

            @Test
            public void forCollection() throws Exception {
                final ApplicationRole role = applicationRole.addCollection(ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany", "Bar", "foo");
                assertThat(role, sameInstance(applicationRole));
            }
        }

        public static class Default0 extends AddAction_or_AddProperty_or_AddCollection {

            @Test
            public void happyCase() throws Exception {
                assertThat(applicationRole.default0AddAction(), sameInstance(ApplicationPermissionRule.ALLOW));
                assertThat(applicationRole.default0AddProperty(), sameInstance(ApplicationPermissionRule.ALLOW));
                assertThat(applicationRole.default0AddCollection(), sameInstance(ApplicationPermissionRule.ALLOW));
            }
        }

        public static class Default1 extends AddAction_or_AddProperty_or_AddCollection {

            @Test
            public void happyCase() throws Exception {
                assertThat(applicationRole.default1AddAction(), sameInstance(ApplicationPermissionMode.CHANGING));
                assertThat(applicationRole.default1AddProperty(), sameInstance(ApplicationPermissionMode.CHANGING));
                assertThat(applicationRole.default1AddCollection(), sameInstance(ApplicationPermissionMode.CHANGING));
            }
        }

        public static class Choices2 extends AddAction_or_AddProperty_or_AddCollection {

            @Test
            public void happyCase() throws Exception {
                context.checking(new Expectations() {{
                    allowing(mockApplicationFeatureRepository).packageNamesContainingClasses(ApplicationMemberType.ACTION);
                    will(returnValue(Lists.newArrayList("com.mycompany", "com.mycompany.actions")));
                    allowing(mockApplicationFeatureRepository).packageNamesContainingClasses(ApplicationMemberType.PROPERTY);
                    will(returnValue(Lists.newArrayList("com.mycompany", "com.mycompany.properties")));
                    allowing(mockApplicationFeatureRepository).packageNamesContainingClasses(ApplicationMemberType.COLLECTION);
                    will(returnValue(Lists.newArrayList("com.mycompany", "com.mycompany.collections")));
                }});

                List<String> packageNames;
                packageNames = applicationRole.choices2AddAction();
                assertThat(packageNames, containsInAnyOrder("com.mycompany", "com.mycompany.actions"));
                packageNames = applicationRole.choices2AddProperty();
                assertThat(packageNames, containsInAnyOrder("com.mycompany", "com.mycompany.properties"));
                packageNames = applicationRole.choices2AddCollection();
                assertThat(packageNames, containsInAnyOrder("com.mycompany", "com.mycompany.collections"));
            }
        }

        public static class Choices3 extends AddAction_or_AddProperty_or_AddCollection {

            @Test
            public void forAll() throws Exception {
                context.checking(new Expectations() {{
                    allowing(mockApplicationFeatureRepository).classNamesContainedIn("com.mycompany", ApplicationMemberType.ACTION);
                    will(returnValue(Lists.newArrayList("Bar", "Baz")));
                    allowing(mockApplicationFeatureRepository).classNamesContainedIn("com.mycompany", ApplicationMemberType.PROPERTY);
                    will(returnValue(Lists.newArrayList("Fiz", "Foz")));
                    allowing(mockApplicationFeatureRepository).classNamesContainedIn("com.mycompany", ApplicationMemberType.COLLECTION);
                    will(returnValue(Lists.newArrayList("Qiz", "Qoz")));
                }});

                List<String> classNames;
                classNames = applicationRole.choices3AddAction(
                        ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany");
                assertThat(classNames, containsInAnyOrder("Bar", "Baz"));
                classNames = applicationRole.choices3AddProperty(
                        ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany");
                assertThat(classNames, containsInAnyOrder("Fiz", "Foz"));
                classNames = applicationRole.choices3AddCollection(
                        ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany");
                assertThat(classNames, containsInAnyOrder("Qiz", "Qoz"));
            }
        }

        public static class Choices4 extends AddAction_or_AddProperty_or_AddCollection {

            @Test
            public void forAll() throws Exception {
                context.checking(new Expectations() {{
                    allowing(mockApplicationFeatureRepository).memberNamesOf("com.mycompany", "Bar", ApplicationMemberType.ACTION);
                    will(returnValue(Lists.newArrayList("foo", "far")));
                    allowing(mockApplicationFeatureRepository).memberNamesOf("com.mycompany", "Bar", ApplicationMemberType.PROPERTY);
                    will(returnValue(Lists.newArrayList("boo", "bar")));
                    allowing(mockApplicationFeatureRepository).memberNamesOf("com.mycompany", "Bar", ApplicationMemberType.COLLECTION);
                    will(returnValue(Lists.newArrayList("coo", "car")));
                }});

                List<String> classNames;
                classNames = applicationRole.choices4AddAction(
                        ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany", "Bar");
                assertThat(classNames, containsInAnyOrder("foo", "far"));
                classNames = applicationRole.choices4AddProperty(
                        ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany", "Bar");
                assertThat(classNames, containsInAnyOrder("boo", "bar"));
                classNames = applicationRole.choices4AddCollection(
                        ApplicationPermissionRule.ALLOW, ApplicationPermissionMode.CHANGING, "com.mycompany", "Bar");
                assertThat(classNames, containsInAnyOrder("coo", "car"));
            }
        }
    }

    /** Contract test: ordering is by name (nulls first, duplicates equal). */
    public static class CompareTo extends ComparableContractTest_compareTo<ApplicationRole> {

        @SuppressWarnings("unchecked")
        @Override
        protected List<List<ApplicationRole>> orderedTuples() {
            return listOf(
                    listOf(
                            newApplicationRole(null),
                            newApplicationRole("X"),
                            newApplicationRole("X"),
                            newApplicationRole("Y")
                    )
            );
        }

        private ApplicationRole newApplicationRole(
                String name) {
            final ApplicationRole applicationRole = new ApplicationRole();
            applicationRole.setName(name);
            return applicationRole;
        }
    }

    /** Coverage of the private constructor in the Functions helper class. */
    public static class PrivateConstructors {

        @Test
        public void forFunctions() throws Exception {
            new PrivateConstructorTester(ApplicationRole.Functions.class).exercise();
        }
    }
}
/*
 * JBoss, Home of Professional Open Source
 * Copyright 2015, Red Hat, Inc., and individual contributors as indicated
 * by the @authors tag.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.util;

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Array;
import java.lang.NoSuchMethodException;
import java.net.URL;
import java.security.AccessController;
import java.security.CodeSource;
import java.security.PrivilegedAction;
import java.security.ProtectionDomain;
import java.util.*;

/**
 * A collection of <code>Class</code> utilities.
 *
 * @version <tt>$Revision$</tt>
 * @author <a href="mailto:jason@planet57.com">Jason Dillon</a>
 * @author <a href="mailto:scott.stark@jboss.org">Scott Stark</a>
 * @author <a href="mailto:dimitris@jboss.org">Dimitris Andreadis<a/>
 */
@SuppressWarnings("unchecked")
public final class Classes
{
   /** The string used to separate packages (".") */
   public static final String PACKAGE_SEPARATOR = ".";

   /** The character used to separate packages ('.') */
   public static final char PACKAGE_SEPARATOR_CHAR = '.';

   /** The default package name. */
   public static final String DEFAULT_PACKAGE_NAME = "<default>";

   /** Format a string buffer containing the Class, Interfaces, CodeSource,
       and ClassLoader information for the given object clazz.
@param clazz the Class @param results - the buffer to write the info to */ public static void displayClassInfo(Class clazz, StringBuffer results) { // Print out some codebase info for the clazz ClassLoader cl = clazz.getClassLoader(); results.append("\n"); results.append(clazz.getName()); results.append("("); results.append(Integer.toHexString(clazz.hashCode())); results.append(").ClassLoader="); results.append(cl); ClassLoader parent = cl; while( parent != null ) { results.append("\n.."); results.append(parent); URL[] urls = getClassLoaderURLs(parent); int length = urls != null ? urls.length : 0; for(int u = 0; u < length; u ++) { results.append("\n...."); results.append(urls[u]); } if( parent != null ) parent = parent.getParent(); } CodeSource clazzCS = clazz.getProtectionDomain().getCodeSource(); if( clazzCS != null ) { results.append("\n++++CodeSource: "); results.append(clazzCS); } else results.append("\n++++Null CodeSource"); results.append("\nImplemented Interfaces:"); Class[] ifaces = clazz.getInterfaces(); for(int i = 0; i < ifaces.length; i ++) { Class iface = ifaces[i]; results.append("\n++"); results.append(iface); results.append("("); results.append(Integer.toHexString(iface.hashCode())); results.append(")"); ClassLoader loader = ifaces[i].getClassLoader(); results.append("\n++++ClassLoader: "); results.append(loader); ProtectionDomain pd = ifaces[i].getProtectionDomain(); CodeSource cs = pd.getCodeSource(); if( cs != null ) { results.append("\n++++CodeSource: "); results.append(cs); } else results.append("\n++++Null CodeSource"); } } /** Use reflection to access a URL[] getURLs or URL[] getClasspath method so that non-URLClassLoader class loaders, or class loaders that override getURLs to return null or empty, can provide the true classpath info. 
    * @param cl
    * @return the urls
    */
   public static URL[] getClassLoaderURLs(ClassLoader cl)
   {
      URL[] urls = {};
      try
      {
         Class returnType = urls.getClass();
         Class[] parameterTypes = {};
         Class clClass = cl.getClass();
         // First try a public URL[] getURLs() method (e.g. URLClassLoader)
         Method getURLs = clClass.getMethod("getURLs", parameterTypes);
         if( returnType.isAssignableFrom(getURLs.getReturnType()) )
         {
            Object[] args = {};
            urls = (URL[]) getURLs.invoke(cl, args);
         }
         if( urls == null || urls.length == 0 )
         {
            // Fall back to a URL[] getClasspath() method some loaders expose
            Method getCp = clClass.getMethod("getClasspath", parameterTypes);
            if( returnType.isAssignableFrom(getCp.getReturnType()) )
            {
               Object[] args = {};
               urls = (URL[]) getCp.invoke(cl, args);
            }
         }
      }
      catch(Exception ignore)
      {
         // best-effort by design: loaders without either accessor simply
         // contribute no URLs; do not propagate reflection failures
      }
      return urls;
   }

   /**
    * Describe the class of an object
    *
    * @param object the object
    * @return the description
    */
   public static String getDescription(Object object)
   {
      StringBuffer buffer = new StringBuffer();
      describe(buffer, object);
      return buffer.toString();
   }

   /**
    * Describe the class of an object; appends "**null**" for a null object.
    *
    * @param buffer the string buffer
    * @param object the object
    */
   public static void describe(StringBuffer buffer, Object object)
   {
      if (object == null)
         buffer.append("**null**");
      else
         describe(buffer, object.getClass());
   }

   /**
    * Describe the class: appends "{class=Name intfs=I1, I2}" (the intfs part
    * only when the class directly implements at least one interface).
    *
    * @param buffer the string buffer
    * @param clazz the clazz
    */
   public static void describe(StringBuffer buffer, Class clazz)
   {
      if (clazz == null)
         buffer.append("**null**");
      else
      {
         buffer.append("{class=").append(clazz.getName());
         Class[] intfs = clazz.getInterfaces();
         if (intfs.length > 0)
         {
            buffer.append(" intfs=");
            for (int i = 0; i < intfs.length; ++i)
            {
               buffer.append(intfs[i].getName());
               if (i < intfs.length-1)
                  buffer.append(", ");
            }
         }
         buffer.append("}");
      }
   }

   /**
    * Get the short name of the specified class by striping off the package
    * name.
    *
    * @param classname Class name.
    * @return Short class name.
*/ public static String stripPackageName(final String classname) { int idx = classname.lastIndexOf(PACKAGE_SEPARATOR); if (idx != -1) return classname.substring(idx + 1, classname.length()); return classname; } /** * Get the short name of the specified class by striping off the package * name. * * @param type Class name. * @return Short class name. */ public static String stripPackageName(final Class type) { return stripPackageName(type.getName()); } /** * Get the package name of the specified class. * * @param classname Class name. * @return Package name or "" if the classname is in the * <i>default</i> package. * * @throws EmptyStringException Classname is an empty string. */ public static String getPackageName(final String classname) { if (classname.length() == 0) throw new EmptyStringException(); int index = classname.lastIndexOf(PACKAGE_SEPARATOR); if (index != -1) return classname.substring(0, index); return ""; } /** * Get the package name of the specified class. * * @param type Class. * @return Package name. */ public static String getPackageName(final Class type) { return getPackageName(type.getName()); } /** * Force the given class to be loaded fully. * * <p>This method attempts to locate a static method on the given class * the attempts to invoke it with dummy arguments in the hope that * the virtual machine will prepare the class for the method call and * call all of the static class initializers. * * @param type Class to force load. * * @throws NullArgumentException Type is <i>null</i>. 
    */
   public static void forceLoad(final Class type)
   {
      if (type == null)
         throw new NullArgumentException("type");

      // don't attempt to force primitives to load
      if (type.isPrimitive()) return;

      // don't attempt to force java.* classes to load
      String packageName = Classes.getPackageName(type);
      // System.out.println("package name: " + packageName);

      if (packageName.startsWith("java.") || packageName.startsWith("javax.")) {
         return;
      }

      // System.out.println("forcing class to load: " + type);

      try {
         // Invoking any static method (or, failing that, the default
         // constructor) forces the VM to initialize the class.
         Method methods[] = type.getDeclaredMethods();
         Method method = null;
         for (int i = 0; i < methods.length; i++) {
            int modifiers = methods[i].getModifiers();
            if (Modifier.isStatic(modifiers)) {
               method = methods[i];
               break;
            }
         }

         if (method != null) {
            method.invoke(null, (Object[]) null);
         } else {
            type.newInstance();
         }
      }
      catch (Exception ignore) {
         // best-effort by design: even a failed dummy invocation has
         // triggered class initialization; record rather than propagate
         ThrowableHandler.add(ignore);
      }
   }

   /////////////////////////////////////////////////////////////////////////
   //                               Primitives                             //
   /////////////////////////////////////////////////////////////////////////

   /** Primitive type name -> class map (e.g. "int" -> Integer.TYPE). */
   private static final Map PRIMITIVE_NAME_TYPE_MAP = new HashMap();

   /** Setup the primitives map. */
   static
   {
      PRIMITIVE_NAME_TYPE_MAP.put("boolean", Boolean.TYPE);
      PRIMITIVE_NAME_TYPE_MAP.put("byte", Byte.TYPE);
      PRIMITIVE_NAME_TYPE_MAP.put("char", Character.TYPE);
      PRIMITIVE_NAME_TYPE_MAP.put("short", Short.TYPE);
      PRIMITIVE_NAME_TYPE_MAP.put("int", Integer.TYPE);
      PRIMITIVE_NAME_TYPE_MAP.put("long", Long.TYPE);
      PRIMITIVE_NAME_TYPE_MAP.put("float", Float.TYPE);
      PRIMITIVE_NAME_TYPE_MAP.put("double", Double.TYPE);
   }

   /**
    * Get the primitive type for the given primitive name.
    *
    * <p>
    * For example, "boolean" returns Boolean.TYPE and so on...
    *
    * @param name Primitive type name (boolean, int, byte, ...)
    * @return Primitive type or null.
    *
    * @exception IllegalArgumentException Type is not a primitive class
    */
   public static Class getPrimitiveTypeForName(final String name)
   {
      return (Class) PRIMITIVE_NAME_TYPE_MAP.get(name);
   }

   /** Map of primitive types to their wrapper classes: even index holds the
       primitive TYPE, the following odd index holds its wrapper class. */
   private static final Class[] PRIMITIVE_WRAPPER_MAP = {
      Boolean.TYPE,   Boolean.class,
      Byte.TYPE,      Byte.class,
      Character.TYPE, Character.class,
      Double.TYPE,    Double.class,
      Float.TYPE,     Float.class,
      Integer.TYPE,   Integer.class,
      Long.TYPE,      Long.class,
      Short.TYPE,     Short.class,
   };

   /**
    * Get the wrapper class for the given primitive type.
    *
    * @param type Primitive class.
    * @return Wrapper class for primitive.
    *
    * @exception IllegalArgumentException Type is not a primitive class
    */
   public static Class getPrimitiveWrapper(final Class type)
   {
      if (!type.isPrimitive()) {
         throw new IllegalArgumentException("type is not a primitive class");
      }

      // Scan the even (primitive) slots; the wrapper sits one slot later.
      for (int i = 0; i < PRIMITIVE_WRAPPER_MAP.length; i += 2)
      {
         if (type.equals(PRIMITIVE_WRAPPER_MAP[i]))
            return PRIMITIVE_WRAPPER_MAP[i + 1];
      }

      // should never get here, if we do then PRIMITIVE_WRAPPER_MAP
      // needs to be updated to include the missing mapping
      throw new UnreachableStatementException();
   }

   /**
    * Populates a list with all the interfaces implemented by the argument
    * class c and all its superclasses (duplicates are NOT removed).
    *
    * @param allIfaces - the list to populate with the interfaces
    * @param c - the class to start scanning for interfaces
    */
   public static void getAllInterfaces(List allIfaces, Class c)
   {
      while (c != null)
      {
         Class[] ifaces = c.getInterfaces();
         for (int n = 0; n < ifaces.length; n ++)
         {
            allIfaces.add(ifaces[n]);
         }
         c = c.getSuperclass();
      }
   }

   /**
    * Returns an array containing all the unique interfaces implemented
    * by the argument class c and all its superclasses. Interfaces that
    * appear multiple times through inheritence are only accounted for once.
* * @param c - the class to start scanning for interfaces * @return the interfaces */ public static Class[] getAllUniqueInterfaces(Class c) { Set uniqueIfaces = new HashSet(); while (c != null ) { Class[] ifaces = c.getInterfaces(); for (int n = 0; n < ifaces.length; n ++) { uniqueIfaces.add(ifaces[n]); } c = c.getSuperclass(); } return (Class[])uniqueIfaces.toArray(new Class[uniqueIfaces.size()]); } /** * Check if the given class is a primitive wrapper class. * * @param type Class to check. * @return True if the class is a primitive wrapper. */ public static boolean isPrimitiveWrapper(final Class type) { for (int i = 0; i < PRIMITIVE_WRAPPER_MAP.length; i += 2) { if (type.equals(PRIMITIVE_WRAPPER_MAP[i + 1])) { return true; } } return false; } /** * Check if the given class is a primitive class or a primitive * wrapper class. * * @param type Class to check. * @return True if the class is a primitive or primitive wrapper. */ public static boolean isPrimitive(final Class type) { if (type.isPrimitive() || isPrimitiveWrapper(type)) { return true; } return false; } /** Check type against boolean, byte, char, short, int, long, float, double. * @param type The java type name * @return true if this is a primative type name. 
    */
   public static boolean isPrimitive(final String type)
   {
      return PRIMITIVE_NAME_TYPE_MAP.containsKey(type);
   }

   /**
    * @param wrapper a primitive wrapper type
    * @return primitive type the passed in wrapper type corresponds to
    * @throws IllegalArgumentException if wrapper is not a wrapper class
    */
   public static Class getPrimitive(Class wrapper)
   {
      Class primitive;
      // Identity comparison is safe: wrapper Class objects are singletons.
      if(Integer.class == wrapper)
      {
         primitive = int.class;
      }
      else if(Long.class == wrapper)
      {
         primitive = long.class;
      }
      else if(Double.class == wrapper)
      {
         primitive = double.class;
      }
      else if(Boolean.class == wrapper)
      {
         primitive = boolean.class;
      }
      else if(Short.class == wrapper)
      {
         primitive = short.class;
      }
      else if(Float.class == wrapper)
      {
         primitive = float.class;
      }
      else if(Byte.class == wrapper)
      {
         primitive = byte.class;
      }
      else if(Character.class == wrapper)
      {
         primitive = char.class;
      }
      else
      {
         throw new IllegalArgumentException("The class is not a primitive wrapper type: " + wrapper);
      }
      return primitive;
   }

   /**
    * Instantiate a java class object
    *
    * @param expected the expected class type
    * @param property the system property defining the class
    * @param defaultClassName the default class name
    * @return the instantiated object
    * @throws NestedRuntimeException on load/instantiation failure or a type mismatch
    */
   public static Object instantiate(Class expected, String property, String defaultClassName)
   {
      // NOTE(review): getProperty is a helper defined elsewhere in this class
      // (not visible here) — presumably a privileged System.getProperty
      // lookup with a default; confirm against the full source.
      String className = getProperty(property, defaultClassName);
      Class clazz = null;
      try
      {
         clazz = loadClass(className);
      }
      catch (ClassNotFoundException e)
      {
         throw new NestedRuntimeException("Cannot load class " + className, e);
      }
      Object result = null;
      try
      {
         result = clazz.newInstance();
      }
      catch (InstantiationException e)
      {
         throw new NestedRuntimeException("Error instantiating " + className, e);
      }
      catch (IllegalAccessException e)
      {
         throw new NestedRuntimeException("Error instantiating " + className, e);
      }
      if (expected.isAssignableFrom(clazz) == false)
         throw new NestedRuntimeException("Class " + className + " from classloader " + clazz.getClassLoader() +
            " is not of the expected class " + expected + " loaded from " + expected.getClassLoader());
      return result;
   }
///////////////////////////////////////////////////////////////////////// // Class Loading // ///////////////////////////////////////////////////////////////////////// /** * This method acts equivalently to invoking * <code>Thread.currentThread().getContextClassLoader().loadClass(className);</code> but it also * supports primitive types and array classes of object types or primitive types. * * @param className the qualified name of the class or the name of primitive type or * array in the same format as returned by the * <code>java.lang.Class.getName()</code> method. * @return the Class object for the requested className * * @throws ClassNotFoundException when the <code>classLoader</code> can not find the requested class */ public static Class loadClass(String className) throws ClassNotFoundException { return loadClass(className, Thread.currentThread().getContextClassLoader()); } /** * This method acts equivalently to invoking classLoader.loadClass(className) * but it also supports primitive types and array classes of object types or * primitive types. * * @param className the qualified name of the class or the name of primitive * type or array in the same format as returned by the * java.lang.Class.getName() method. 
* @param classLoader the ClassLoader used to load classes * @return the Class object for the requested className * * @throws ClassNotFoundException when the <code>classLoader</code> can not * find the requested class */ public static Class loadClass(String className, ClassLoader classLoader) throws ClassNotFoundException { // ClassLoader.loadClass() does not handle primitive types: // // B byte // C char // D double // F float // I int // J long // S short // Z boolean // V void // if (className.length() == 1) { char type = className.charAt(0); if (type == 'B') return Byte.TYPE; if (type == 'C') return Character.TYPE; if (type == 'D') return Double.TYPE; if (type == 'F') return Float.TYPE; if (type == 'I') return Integer.TYPE; if (type == 'J') return Long.TYPE; if (type == 'S') return Short.TYPE; if (type == 'Z') return Boolean.TYPE; if (type == 'V') return Void.TYPE; // else throw... throw new ClassNotFoundException(className); } // Check for a primative type if( isPrimitive(className) == true ) return (Class) Classes.PRIMITIVE_NAME_TYPE_MAP.get(className); // Check for the internal vm format: Lclassname; if (className.charAt(0) == 'L' && className.charAt(className.length() - 1) == ';') return classLoader.loadClass(className.substring(1, className.length() - 1)); // first try - be optimistic // this will succeed for all non-array classes and array classes that have already been resolved // try { return classLoader.loadClass(className); } catch (ClassNotFoundException e) { // if it was non-array class then throw it if (className.charAt(0) != '[') throw e; } // we are now resolving array class for the first time // count opening braces int arrayDimension = 0; while (className.charAt(arrayDimension) == '[') arrayDimension++; // resolve component type - use recursion so that we can resolve primitive types also Class componentType = loadClass(className.substring(arrayDimension), classLoader); // construct array class return Array.newInstance(componentType, new 
int[arrayDimension]).getClass(); } /** * Convert a list of Strings from an Interator into an array of * Classes (the Strings are taken as classnames). * * @param it A java.util.Iterator pointing to a Collection of Strings * @param cl The ClassLoader to use * * @return Array of Classes * * @throws ClassNotFoundException When a class could not be loaded from * the specified ClassLoader */ public final static Class<?>[] convertToJavaClasses(Iterator<String> it, ClassLoader cl) throws ClassNotFoundException { ArrayList<Class<?>> classes = new ArrayList<Class<?>>(); while (it.hasNext()) { classes.add(convertToJavaClass(it.next(), cl)); } return classes.toArray(new Class[classes.size()]); } /** * Returns attribute's getter method. If the method not found then NoSuchMethodException will be thrown. * @param cls the class the attribute belongs too * @param attr the attribute's name * @return attribute's getter method * @throws NoSuchMethodException if the getter was not found */ public final static Method getAttributeGetter(Class cls, String attr) throws NoSuchMethodException { StringBuffer buf = new StringBuffer(attr.length() + 3); buf.append("get"); if(Character.isLowerCase(attr.charAt(0))) { buf.append(Character.toUpperCase(attr.charAt(0))) .append(attr.substring(1)); } else { buf.append(attr); } try { return cls.getMethod(buf.toString(), (Class[]) null); } catch (NoSuchMethodException e) { buf.replace(0, 3, "is"); return cls.getMethod(buf.toString(), (Class[]) null); } } /** * Returns attribute's setter method. If the method not found then NoSuchMethodException will be thrown. 
* @param cls the class the attribute belongs to * @param attr the attribute's name * @param type the attribute's type * @return attribute's setter method * @throws NoSuchMethodException if the setter was not found */ public final static Method getAttributeSetter(Class cls, String attr, Class type) throws NoSuchMethodException { StringBuffer buf = new StringBuffer(attr.length() + 3); buf.append("set"); if(Character.isLowerCase(attr.charAt(0))) { buf.append(Character.toUpperCase(attr.charAt(0))) .append(attr.substring(1)); } else { buf.append(attr); } return cls.getMethod(buf.toString(), new Class[]{type}); } /** * Convert a given String into the appropriate Class. * * @param name Name of class * @param cl ClassLoader to use * * @return The class for the given name * * @throws ClassNotFoundException When the class could not be found by * the specified ClassLoader */ private final static Class convertToJavaClass(String name, ClassLoader cl) throws ClassNotFoundException { int arraySize = 0; while (name.endsWith("[]")) { name = name.substring(0, name.length() - 2); arraySize++; } // Check for a primitive type Class c = (Class) PRIMITIVE_NAME_TYPE_MAP.get(name); if (c == null) { // No primitive, try to load it from the given ClassLoader try { c = cl.loadClass(name); } catch (ClassNotFoundException cnfe) { throw new ClassNotFoundException("Parameter class not found: " + name); } } // if we have an array get the array class if (arraySize > 0) { int[] dims = new int[arraySize]; for (int i = 0; i < arraySize; i++) { dims[i] = 1; } c = Array.newInstance(c, dims).getClass(); } return c; } /** * Get a system property * * @param name the property name * @param defaultValue the default value */ private static String getProperty(final String name, final String defaultValue) { return (String) AccessController.doPrivileged( new PrivilegedAction() { public Object run() { return System.getProperty(name, defaultValue); } }); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.heron.packing.roundrobin;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.junit.Assert;
import org.junit.Test;

import org.apache.heron.api.generated.TopologyAPI;
import org.apache.heron.api.utils.TopologyUtils;
import org.apache.heron.common.basics.ByteAmount;
import org.apache.heron.packing.AssertPacking;
import org.apache.heron.packing.CommonPackingTests;
import org.apache.heron.packing.utils.PackingUtils;
import org.apache.heron.spi.packing.IPacking;
import org.apache.heron.spi.packing.IRepacking;
import org.apache.heron.spi.packing.PackingException;
import org.apache.heron.spi.packing.PackingPlan;
import org.apache.heron.spi.packing.Resource;

/**
 * Unit tests for {@code ResourceCompliantRRPacking}, exercising both initial
 * packing ({@code IPacking}) and repacking/scaling ({@code IRepacking}).
 * Shared topology fixtures (spoutParallelism, boltParallelism, topologyConfig,
 * totalInstances, instanceDefaultResources, etc.) come from
 * {@link CommonPackingTests}.
 */
public class ResourceCompliantRRPackingTest extends CommonPackingTests {

  @Override
  protected IPacking getPackingImpl() {
    return new ResourceCompliantRRPacking();
  }

  @Override
  protected IRepacking getRepackingImpl() {
    return new ResourceCompliantRRPacking();
  }

  // Counts how many instances of the named component appear in the given
  // container's instance set.
  private int countComponent(String component, Set<PackingPlan.InstancePlan> instances) {
    int count = 0;
    for (PackingPlan.InstancePlan instancePlan : instances) {
      if (component.equals(instancePlan.getComponentName())) {
        count++;
      }
    }
    return count;
  }

  // Requesting zero container RAM cannot fit any instance, so packing must fail.
  @Test(expected = PackingException.class)
  public void testFailureInsufficientContainerRamRequested() throws Exception {
    topologyConfig.setContainerRamRequested(ByteAmount.ZERO);

    pack(getTopology(spoutParallelism, boltParallelism, topologyConfig));
  }

  /**
   * Test the scenario where the max container size is the default
   */
  @Test
  public void testDefaultResources() throws Exception {
    int numContainers = 2;

    PackingPlan packingPlanNoExplicitResourcesConfig = pack(topology);

    Assert.assertEquals(numContainers, packingPlanNoExplicitResourcesConfig.getContainers().size());
    Assert.assertEquals(totalInstances, packingPlanNoExplicitResourcesConfig.getInstanceCount());
  }

  /**
   * Test the scenario where the max container size is the default and padding is configured
   */
  @Test
  public void testDefaultContainerSizeWithPadding() throws Exception {
    int numContainers = 2;
    int padding = 50;

    topologyConfig.setContainerPaddingPercentage(padding);
    TopologyAPI.Topology newTopology = getTopology(spoutParallelism, boltParallelism,
        topologyConfig);
    PackingPlan packingPlan = pack(newTopology);

    Assert.assertEquals(numContainers, packingPlan.getContainers().size());
    Assert.assertEquals(totalInstances, packingPlan.getInstanceCount());
  }

  /**
   * Test the scenario where container level resource config are set
   */
  @Test
  public void testContainerRequestedResourcesSingleContainer() throws Exception {
    int numContainers = 1;

    // Set up the topology and its config
    topologyConfig.put(org.apache.heron.api.Config.TOPOLOGY_STMGRS, numContainers);

    // Explicit set resources for container
    ByteAmount containerRam = ByteAmount.fromGigabytes(10);
    ByteAmount containerDisk = ByteAmount.fromGigabytes(20);
    double containerCpu = 30;

    topologyConfig.setContainerRamRequested(containerRam);
    topologyConfig.setContainerDiskRequested(containerDisk);
    topologyConfig.setContainerCpuRequested(containerCpu);
    TopologyAPI.Topology topologyExplicitResourcesConfig =
        getTopology(spoutParallelism, boltParallelism, topologyConfig);
    PackingPlan packingPlanExplicitResourcesConfig = pack(topologyExplicitResourcesConfig);

    Assert.assertEquals(numContainers, packingPlanExplicitResourcesConfig.getContainers().size());
    Assert.assertEquals(totalInstances, packingPlanExplicitResourcesConfig.getInstanceCount());

    // Required resources are the instance totals grown by the default padding.
    for (PackingPlan.ContainerPlan containerPlan
        : packingPlanExplicitResourcesConfig.getContainers()) {
      Assert.assertEquals(Math.round(PackingUtils.increaseBy(
          totalInstances * instanceDefaultResources.getCpu(), DEFAULT_CONTAINER_PADDING)),
          (long) containerPlan.getRequiredResource().getCpu());

      Assert.assertEquals(instanceDefaultResources.getRam()
          .multiply(totalInstances)
          .increaseBy(DEFAULT_CONTAINER_PADDING), containerPlan.getRequiredResource().getRam());

      Assert.assertEquals(instanceDefaultResources.getDisk()
          .multiply(totalInstances)
          .increaseBy(DEFAULT_CONTAINER_PADDING), containerPlan.getRequiredResource().getDisk());

      // All instances' resource requirement should be equal
      // So the size of set should be 1
      Set<Resource> resources = new HashSet<>();
      for (PackingPlan.InstancePlan instancePlan : containerPlan.getInstances()) {
        resources.add(instancePlan.getResource());
      }
      Assert.assertEquals(1, resources.size());
      Assert.assertEquals(instanceDefaultResources.getRam(), resources.iterator().next().getRam());
    }
  }

  @Test
  public void testContainersRequestedExceedsInstanceCount() throws Exception {
    doTestContainerCountRequested(8, 7); // each of the 7 instances will get their own container
  }

  /**
   * Test the scenario where the RAM map config is completely set
   */
  @Test
  public void testCompleteRamMapRequested() throws Exception {
    int numContainers = 2;

    // Explicit set resources for container
    // the value should be ignored, since we set the complete component RAM map
    ByteAmount containerRam = ByteAmount.fromGigabytes(Long.MAX_VALUE);

    // Explicit set component RAM map
    ByteAmount boltRam = ByteAmount.fromGigabytes(1);

    topologyConfig.setContainerRamRequested(containerRam);
    topologyConfig.setComponentRam(BOLT_NAME, boltRam);

    TopologyAPI.Topology topologyExplicitRamMap =
        getTopology(spoutParallelism, boltParallelism, topologyConfig);
    PackingPlan packingPlanExplicitRamMap = pack(topologyExplicitRamMap);

    Assert.assertEquals(totalInstances, packingPlanExplicitRamMap.getInstanceCount());
    Assert.assertEquals(numContainers, packingPlanExplicitRamMap.getContainers().size());
    AssertPacking.assertContainers(packingPlanExplicitRamMap.getContainers(),
        BOLT_NAME, SPOUT_NAME, boltRam, instanceDefaultResources.getRam(), containerRam);
  }

  /**
   * Test the scenario where the RAM map config is partially set
   */
  @Test
  public void testPartialRamMap() throws Exception {
    int numContainers = 2;

    // Explicit set resources for container
    ByteAmount containerRam = ByteAmount.fromGigabytes(10);

    // Explicit set component RAM map
    ByteAmount boltRam = ByteAmount.fromGigabytes(1);
    ByteAmount spoutRam = ByteAmount.fromGigabytes(2);

    topologyConfig.setContainerRamRequested(containerRam);
    topologyConfig.setComponentRam(BOLT_NAME, boltRam);
    topologyConfig.setComponentRam(SPOUT_NAME, spoutRam);

    TopologyAPI.Topology topologyExplicitRamMap =
        getTopology(spoutParallelism, boltParallelism, topologyConfig);
    PackingPlan packingPlanExplicitRamMap = pack(topologyExplicitRamMap);

    Assert.assertEquals(totalInstances, packingPlanExplicitRamMap.getInstanceCount());
    Assert.assertEquals(numContainers, packingPlanExplicitRamMap.getContainers().size());
    AssertPacking.assertContainers(packingPlanExplicitRamMap.getContainers(),
        BOLT_NAME, SPOUT_NAME, boltRam, spoutRam, containerRam);
  }

  /**
   * Test the scenario where the user defined number of containers is not sufficient.
   */
  @Test
  public void testInsufficientContainersWithOneAdjustment() throws Exception {
    int numContainers = 1;

    // Set up the topology and its config
    topologyConfig.put(org.apache.heron.api.Config.TOPOLOGY_STMGRS, numContainers);

    // Explicit set resources for container
    ByteAmount containerRam = ByteAmount.fromGigabytes(2);

    topologyConfig.setContainerRamRequested(containerRam);
    TopologyAPI.Topology newTopology = getTopology(spoutParallelism, boltParallelism,
        topologyConfig);
    PackingPlan packingPlan = pack(newTopology);

    // The packer must grow from the requested 1 container to 7.
    Assert.assertEquals(7, packingPlan.getContainers().size());
    Assert.assertEquals(totalInstances, packingPlan.getInstanceCount());
  }

  /**
   * Test the scenario where the user defined number of containers is not sufficient.
   */
  @Test
  public void testInsufficientContainersWithMultipleAdjustments() throws Exception {
    int numContainers = 1;

    // Set up the topology and its config
    topologyConfig.put(org.apache.heron.api.Config.TOPOLOGY_STMGRS, numContainers);

    // Explicit set resources for container
    ByteAmount containerRam = ByteAmount.fromGigabytes(3);

    // Explicit set component RAM map
    ByteAmount boltRam = ByteAmount.fromGigabytes(1);
    ByteAmount spoutRam = ByteAmount.fromGigabytes(2);

    topologyConfig.setContainerRamRequested(containerRam);
    topologyConfig.setComponentRam(BOLT_NAME, boltRam);
    topologyConfig.setComponentRam(SPOUT_NAME, spoutRam);

    TopologyAPI.Topology topologyExplicitRamMap =
        getTopology(spoutParallelism, boltParallelism, topologyConfig);
    PackingPlan packingPlan = pack(topologyExplicitRamMap);

    Assert.assertEquals(7, packingPlan.getContainers().size());
    Assert.assertEquals(totalInstances, packingPlan.getInstanceCount());
  }

  /**
   * test even packing of instances
   */
  @Test
  public void testEvenPacking() throws Exception {
    int numContainers = 2;
    int componentParallelism = 4;

    // Set up the topology and its config
    topologyConfig.put(org.apache.heron.api.Config.TOPOLOGY_STMGRS, numContainers);

    TopologyAPI.Topology newTopology = getTopology(componentParallelism,
        componentParallelism, topologyConfig);
    int numInstance = TopologyUtils.getTotalInstance(newTopology);

    // Two components
    Assert.assertEquals(2 * componentParallelism, numInstance);
    PackingPlan output = pack(newTopology);
    Assert.assertEquals(numContainers, output.getContainers().size());
    Assert.assertEquals((Integer) numInstance, output.getInstanceCount());

    // Round-robin should spread each component evenly: 2 spouts + 2 bolts per container.
    for (PackingPlan.ContainerPlan container : output.getContainers()) {
      Assert.assertEquals(numInstance / numContainers, container.getInstances().size());
      Assert.assertEquals(2, countComponent("spout", container.getInstances()));
      Assert.assertEquals(2, countComponent("bolt", container.getInstances()));
    }
  }

  /**
   * Test the scenario where the max container size is the default
   * and scaling is requested.
   */
  @Test
  public void testDefaultContainerSizeRepack() throws Exception {
    int numScalingInstances = 5;
    Map<String, Integer> componentChanges = new HashMap<>();
    componentChanges.put(BOLT_NAME, numScalingInstances);
    int numContainersBeforeRepack = 2;
    PackingPlan newPackingPlan =
        doDefaultScalingTest(componentChanges, numContainersBeforeRepack);

    Assert.assertEquals(4, newPackingPlan.getContainers().size());
    Assert.assertEquals((Integer) (totalInstances + numScalingInstances),
        newPackingPlan.getInstanceCount());
    AssertPacking.assertContainers(newPackingPlan.getContainers(),
        BOLT_NAME, SPOUT_NAME, instanceDefaultResources.getRam(),
        instanceDefaultResources.getRam(), null);

    for (PackingPlan.ContainerPlan containerPlan : newPackingPlan.getContainers()) {
      Assert.assertEquals(Math.round(PackingUtils.increaseBy(
          containerPlan.getInstances().size() * instanceDefaultResources.getCpu(),
          DEFAULT_CONTAINER_PADDING)),
          (long) containerPlan.getRequiredResource().getCpu());

      Assert.assertEquals(instanceDefaultResources.getRam()
          .multiply(containerPlan.getInstances().size())
          .increaseBy(DEFAULT_CONTAINER_PADDING), containerPlan.getRequiredResource().getRam());

      Assert.assertEquals(instanceDefaultResources.getDisk()
          .multiply(containerPlan.getInstances().size())
          .increaseBy(DEFAULT_CONTAINER_PADDING), containerPlan.getRequiredResource().getDisk());
    }
  }

  /**
   * Test the scenario RAM map config is partially set and scaling is requested
   */
  @Test
  public void testRepackPadding() throws Exception {
    int paddingPercentage = 50;
    topologyConfig.setContainerPaddingPercentage(paddingPercentage);

    // Explicit set component RAM map
    ByteAmount boltRam = ByteAmount.fromGigabytes(4);
    ByteAmount maxContainerRam = ByteAmount.fromGigabytes(10);

    topologyConfig.setComponentRam(BOLT_NAME, boltRam);
    topologyConfig.setContainerRamRequested(maxContainerRam);

    TopologyAPI.Topology topologyExplicitRamMap =
        getTopology(spoutParallelism, boltParallelism, topologyConfig);

    int numScalingInstances = 3;
    Map<String, Integer> componentChanges = new HashMap<>();
    componentChanges.put(BOLT_NAME, numScalingInstances);

    int numContainersBeforeRepack = 3;
    PackingPlan newPackingPlan = doScalingTest(topologyExplicitRamMap, componentChanges,
        boltRam, boltParallelism, instanceDefaultResources.getRam(), spoutParallelism,
        numContainersBeforeRepack, totalInstances);

    Assert.assertEquals(6, newPackingPlan.getContainers().size());
    Assert.assertEquals((Integer) (totalInstances + numScalingInstances),
        newPackingPlan.getInstanceCount());

    AssertPacking.assertContainers(newPackingPlan.getContainers(),
        BOLT_NAME, SPOUT_NAME, boltRam, instanceDefaultResources.getRam(), null);

    for (PackingPlan.ContainerPlan containerPlan : newPackingPlan.getContainers()) {
      //Each container either contains a single bolt or 1 bolt and 2 spouts or 1 bolt and 1 spout
      if (containerPlan.getInstances().size() == 1) {
        Assert.assertEquals(boltRam.increaseBy(paddingPercentage),
            containerPlan.getRequiredResource().getRam());
      }
      if (containerPlan.getInstances().size() == 2) {
        Assert.assertEquals(boltRam.plus(instanceDefaultResources.getRam())
            .increaseBy(paddingPercentage), containerPlan.getRequiredResource().getRam());
      }
      if (containerPlan.getInstances().size() == 3) {
        Assert.assertEquals(boltRam.plus(instanceDefaultResources.getRam().multiply(2))
            .increaseBy(paddingPercentage), containerPlan.getRequiredResource().getRam());
      }
    }
  }

  /**
   * Test the scenario RAM map config is partially set and scaling is requested
   */
  @Test
  public void testPartialRamMapScaling() throws Exception {
    // Explicit set resources for container
    ByteAmount maxContainerRam = ByteAmount.fromGigabytes(10);

    // Explicit set component RAM map
    ByteAmount boltRam = ByteAmount.fromGigabytes(4);

    topologyConfig.setContainerRamRequested(maxContainerRam);
    topologyConfig.setComponentRam(BOLT_NAME, boltRam);

    TopologyAPI.Topology topologyExplicitRamMap =
        getTopology(spoutParallelism, boltParallelism, topologyConfig);

    int numScalingInstances = 3;
    Map<String, Integer> componentChanges = new HashMap<>();
    componentChanges.put(BOLT_NAME, numScalingInstances);

    int numContainersBeforeRepack = 3;
    PackingPlan newPackingPlan = doScalingTest(topologyExplicitRamMap, componentChanges,
        boltRam, boltParallelism, instanceDefaultResources.getRam(), spoutParallelism,
        numContainersBeforeRepack, totalInstances);

    Assert.assertEquals(6, newPackingPlan.getContainers().size());
    Assert.assertEquals((Integer) (totalInstances + numScalingInstances),
        newPackingPlan.getInstanceCount());
    AssertPacking.assertContainers(newPackingPlan.getContainers(),
        BOLT_NAME, SPOUT_NAME, boltRam, instanceDefaultResources.getRam(), null);
  }

  /**
   * Test the scenario where the scaling down is requested
   */
  @Test
  public void testScaleDown() throws Exception {
    int spoutScalingDown = -2;
    int boltScalingDown = -1;

    Map<String, Integer> componentChanges = new HashMap<>();
    componentChanges.put(SPOUT_NAME, spoutScalingDown); //leave 2 spouts
    componentChanges.put(BOLT_NAME, boltScalingDown); //leave 2 bolts
    int numContainersBeforeRepack = 2;
    PackingPlan newPackingPlan =
        doDefaultScalingTest(componentChanges, numContainersBeforeRepack);

    Assert.assertEquals(1, newPackingPlan.getContainers().size());
    Assert.assertEquals((Integer) (totalInstances + spoutScalingDown + boltScalingDown),
        newPackingPlan.getInstanceCount());
    AssertPacking.assertNumInstances(newPackingPlan.getContainers(), BOLT_NAME, 2);
    AssertPacking.assertNumInstances(newPackingPlan.getContainers(), SPOUT_NAME, 2);
  }

  /**
   * Test the scenario where the scaling down is requested and the first container is removed
   */
  @Test
  public void removeFirstContainer() throws Exception {
    int spoutScalingDown = -3;
    int boltScalingDown = -3;

    /* The packing plan consists of two containers. The first one contains 2 spouts and
       2 bolts the second one contains 2 spouts and 1 bolt. During scaling we remove 3
       spouts and 3 bolts and thus the first container is removed. */
    Map<String, Integer> componentChanges = new HashMap<>();
    componentChanges.put(SPOUT_NAME, spoutScalingDown); //leave 1 spout
    componentChanges.put(BOLT_NAME, boltScalingDown); //leave 1 bolt
    int numContainersBeforeRepack = 2;
    PackingPlan newPackingPlan =
        doDefaultScalingTest(componentChanges, numContainersBeforeRepack);

    Assert.assertEquals(1, newPackingPlan.getContainers().size());
    Assert.assertEquals((Integer) (totalInstances + spoutScalingDown + boltScalingDown),
        newPackingPlan.getInstanceCount());
    AssertPacking.assertNumInstances(newPackingPlan.getContainers(), BOLT_NAME, 0);
    AssertPacking.assertNumInstances(newPackingPlan.getContainers(), SPOUT_NAME, 1);
  }

  /**
   * Test the scenario where scaling down and up is simultaneously requested and padding is
   * configured
   */
  @Test
  public void scaleDownAndUpWithExtraPadding() throws Exception {
    int paddingPercentage = 50;
    int numContainers = 1;
    topologyConfig.setContainerPaddingPercentage(paddingPercentage);

    // Explicit set resources for container
    ByteAmount maxContainerRam = ByteAmount.fromGigabytes(12);

    // Explicit set component RAM map
    ByteAmount spoutRam = ByteAmount.fromGigabytes(2);

    topologyConfig.setContainerRamRequested(maxContainerRam);
    topologyConfig.setComponentRam(SPOUT_NAME, spoutRam);
    topologyConfig.setNumStmgrs(numContainers);

    int noBolts = 2;
    int noSpouts = 1;

    TopologyAPI.Topology topologyExplicitRamMap =
        getTopology(noSpouts, noBolts, topologyConfig);

    int spoutScalingUp = 1;
    int boltScalingDown = -2;

    Map<String, Integer> componentChanges = new HashMap<>();
    componentChanges.put(SPOUT_NAME, spoutScalingUp); // 2 spouts
    componentChanges.put(BOLT_NAME, boltScalingDown); // 0 bolts
    int numContainersBeforeRepack = 1;
    PackingPlan newPackingPlan = doScalingTest(topologyExplicitRamMap, componentChanges,
        instanceDefaultResources.getRam(), noBolts, spoutRam, noSpouts,
        numContainersBeforeRepack, noSpouts + noBolts);

    Assert.assertEquals(1, newPackingPlan.getContainers().size());
    Assert.assertEquals((Integer) (noSpouts + noBolts + spoutScalingUp + boltScalingDown),
        newPackingPlan.getInstanceCount());
    AssertPacking.assertNumInstances(newPackingPlan.getContainers(), BOLT_NAME,
        noBolts + boltScalingDown);
    AssertPacking.assertNumInstances(newPackingPlan.getContainers(), SPOUT_NAME,
        noSpouts + spoutScalingUp);
  }

  /**
   * Test the scenario where scaling down and up is simultaneously requested and padding is
   * configured
   */
  @Test
  public void scaleDownAndUpNoPadding() throws Exception {
    int paddingPercentage = 0;
    int numContainers = 1;
    topologyConfig.setContainerPaddingPercentage(paddingPercentage);

    // Explicit set resources for container
    ByteAmount maxContainerRam = ByteAmount.fromGigabytes(12);

    // Explicit set component RAM map
    ByteAmount spoutRam = ByteAmount.fromGigabytes(4);

    topologyConfig.setContainerRamRequested(maxContainerRam);
    topologyConfig.setComponentRam(SPOUT_NAME, spoutRam);
    topologyConfig.setNumStmgrs(numContainers);

    int noBolts = 3;
    int noSpouts = 1;

    TopologyAPI.Topology topologyExplicitRamMap =
        getTopology(noSpouts, noBolts, topologyConfig);

    int spoutScalingUp = 1;
    int boltScalingDown = -1;

    Map<String, Integer> componentChanges = new HashMap<>();
    componentChanges.put(SPOUT_NAME, spoutScalingUp); // 2 spouts
    componentChanges.put(BOLT_NAME, boltScalingDown); // 2 bolts
    int numContainersBeforeRepack = 1;
    PackingPlan newPackingPlan = doScalingTest(topologyExplicitRamMap, componentChanges,
        instanceDefaultResources.getRam(), noBolts, spoutRam, noSpouts,
        numContainersBeforeRepack, noSpouts + noBolts);

    Assert.assertEquals(2, newPackingPlan.getContainers().size());
    Assert.assertEquals((Integer) (noSpouts + noBolts + spoutScalingUp + boltScalingDown),
        newPackingPlan.getInstanceCount());
    AssertPacking.assertNumInstances(newPackingPlan.getContainers(), BOLT_NAME,
        noBolts + boltScalingDown);
    AssertPacking.assertNumInstances(newPackingPlan.getContainers(), SPOUT_NAME,
        noSpouts + spoutScalingUp);
  }

  // Scale both components up at once and verify the container count and the
  // per-component instance counts after repacking.
  @Test
  public void scaleUpMultiple() throws Exception {
    int spoutScalingUp = 4;
    int boltScalingUp = 4;

    Map<String, Integer> componentChanges = new HashMap<>();
    componentChanges.put(SPOUT_NAME, spoutScalingUp); // 8 spouts
    componentChanges.put(BOLT_NAME, boltScalingUp); // 8 bolts
    int numContainersBeforeRepack = 2;
    PackingPlan newPackingPlan =
        doDefaultScalingTest(componentChanges, numContainersBeforeRepack);

    Assert.assertEquals(4, newPackingPlan.getContainers().size());
    Assert.assertEquals((Integer) (totalInstances + spoutScalingUp + boltScalingUp),
        newPackingPlan.getInstanceCount());
    AssertPacking.assertNumInstances(newPackingPlan.getContainers(), BOLT_NAME,
        boltParallelism + boltScalingUp);
    AssertPacking.assertNumInstances(newPackingPlan.getContainers(), SPOUT_NAME,
        spoutParallelism + spoutScalingUp);
  }
}
/* * (c) Samy Chambi, Daniel Lemire. * */ /** * Benchmarks on real data. */ import it.uniroma3.mat.extendedset.intset.ConciseSet; import it.uniroma3.mat.extendedset.intset.ImmutableConciseSet; import it.uniroma3.mat.extendedset.intset.IntSet.IntIterator; import java.io.DataOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; import java.util.ArrayList; import net.sourceforge.sizeof.SizeOf; import org.roaringbitmap.buffer.BufferFastAggregation; import org.roaringbitmap.buffer.ImmutableRoaringBitmap; import org.roaringbitmap.RoaringBitmap; public class Benchmark { private static final int nbRepetitions = 100; private static final long warmup_ms = 100L; private static int careof = 0; private static ImmutableRoaringBitmap[] irbs = null; private static ArrayList<ImmutableConciseSet> icss = null; private static ImmutableRoaringBitmap irb = null; private static ImmutableConciseSet ics = null; public static void main(String[] args) { boolean sizeOf = true; int NumberOfBitmaps = 200; try { SizeOf.setMinSizeToLog(0); SizeOf.skipStaticField(true); SizeOf.deepSizeOf(args); } catch (IllegalStateException e) { sizeOf = false; System.out .println("# disabling sizeOf, run -javaagent:lib/SizeOf.jar or equiv. to enable"); } try { String dataSources[] = { "census1881.csv", "census1881.csv", "census-income.csv", "census-income.csv", "weather_sept_85.csv", "weather_sept_85.csv" }; RealDataRetriever dataRetriever = new RealDataRetriever(args.length == 0 ? 
"real-roaring-datasets" : args[0]); int[][] datum = new int[NumberOfBitmaps][]; for (int i = 0; i < dataSources.length; i++) { String dataSet = dataSources[i]; // ************ Roaring part **************** { File file = File.createTempFile("roarings", "bin"); file.deleteOnExit(); final FileOutputStream fos = new FileOutputStream(file); final DataOutputStream dos = new DataOutputStream(fos); ArrayList<Long> offsets = new ArrayList<Long>(); // Building NumberOfBitmaps RoaringBitmaps long bef = System.currentTimeMillis(); for (int j = 0; j < NumberOfBitmaps; j++) { int[] data = dataRetriever .fetchBitPositions(dataSet, j); datum[j] = data.clone(); RoaringBitmap rb = RoaringBitmap.bitmapOf(data); rb.trim(); offsets.add(fos.getChannel().position()); rb.serialize(dos); dos.flush(); } offsets.add(fos.getChannel().position()); long aft = System.currentTimeMillis(); long serialisationTime = aft - bef; long lastOffset = fos.getChannel().position(); dos.close(); RandomAccessFile memoryMappedFile = new RandomAccessFile( file, "r"); MappedByteBuffer mbb = memoryMappedFile.getChannel().map( FileChannel.MapMode.READ_ONLY, 0, lastOffset); // RAM space used in bytes long sizeRAM = 0; irbs = new ImmutableRoaringBitmap[NumberOfBitmaps]; for (int k = 0; k < offsets.size() - 1; k++) { mbb.position((int) offsets.get(k).longValue()); final ByteBuffer bb = mbb.slice(); bb.limit((int) (offsets.get(k + 1) - offsets.get(k))); ImmutableRoaringBitmap irb = new ImmutableRoaringBitmap( bb); irbs[k] = irb; if (sizeOf) sizeRAM += (SizeOf.deepSizeOf(irb)); } // we redo the work, but just for the timing bef = System.currentTimeMillis(); for (int k = 0; k < offsets.size() - 1; k++) { mbb.position((int) offsets.get(k).longValue()); final ByteBuffer bb = mbb.slice(); bb.limit((int) (offsets.get(k + 1) - offsets.get(k))); ImmutableRoaringBitmap irb = new ImmutableRoaringBitmap( bb); irbs[k] = irb; } aft = System.currentTimeMillis(); long deserializationTime = aft - bef; //irbs = Arrays.copyOfRange(irbs, 
0, offsets.size() - 1); // Disk space used in bytes long sizeDisk = file.length(); // Horizontal unions between NumberOfBitmaps Roaring bitmaps double horizUnionTime = test(new Launcher() { @Override public void launch() { irb = BufferFastAggregation.horizontal_or(irbs); careof += irb.getCardinality(); } }); // Intersections between NumberOfBitmaps Roaring bitmaps double intersectTime = test(new Launcher() { @Override public void launch() { irb = BufferFastAggregation.and(irbs); careof += irb.getCardinality(); } }); // Average time to retrieve set bits double scanTime = testScanRoaring(); System.out.println("***************************"); System.out.println("Roaring bitmap on " + dataSet + " dataset"); System.out.println("***************************"); System.out.println("Deserialization time: "+deserializationTime+" ms"); System.out.println("RAM Size = " + (sizeRAM*1. / 1024) + " Kb" + " (" + Math.round(sizeRAM * 1. / NumberOfBitmaps) + " bytes/bitmap)"); System.out.println("Disk Size = " + (sizeDisk*1. / 1024) + " Kb" + " (" + Math.round(sizeDisk * 1. 
/ NumberOfBitmaps) + " bytes/bitmap)"); System.out.println("Horizontal unions time = " + horizUnionTime + " ms"); System.out.println("Intersections time = " + intersectTime + " ms"); System.out.println("Scans time = " + scanTime + " ms"); System.out.println(".ignore = " + careof); mbb = null; memoryMappedFile.close(); file.delete(); } // ***************** ConciseSet part // ********************************** { File file = File.createTempFile("conciseSets", "bin"); file.deleteOnExit(); final FileOutputStream fos = new FileOutputStream(file); final DataOutputStream dos = new DataOutputStream(fos); ArrayList<Long> offsets = new ArrayList<Long>(); // Building NumberOfBitmaps ConciseSets long bef = System.currentTimeMillis(); for (int j = 0; j < NumberOfBitmaps; j++) { ConciseSet cs = toConcise(datum[j]); offsets.add(fos.getChannel().position()); int[] ints = cs.getWords(); for (int k = 0; k < ints.length; k++) dos.writeInt(ints[k]); dos.flush(); } offsets.add(fos.getChannel().position()); long aft = System.currentTimeMillis(); long serialisationTime = aft - bef; long lastOffset = fos.getChannel().position(); dos.close(); // RAM storage in bytes long sizeRAM = 0; RandomAccessFile memoryMappedFile = new RandomAccessFile( file, "r"); MappedByteBuffer mbb = memoryMappedFile.getChannel().map( FileChannel.MapMode.READ_ONLY, 0, lastOffset); icss = new ArrayList<ImmutableConciseSet>(); for (int k = 0; k < offsets.size() - 1 ; k++) { mbb.position((int) offsets.get(k).longValue()); final ByteBuffer bb = mbb.slice(); bb.limit((int) (offsets.get(k + 1) - offsets.get(k))); ImmutableConciseSet ics = new ImmutableConciseSet(bb); icss.add(ics); } bef = System.currentTimeMillis(); icss = new ArrayList<ImmutableConciseSet>(NumberOfBitmaps); for (int k = 0; k < offsets.size() - 1 ; k++) { mbb.position((int) offsets.get(k).longValue()); final ByteBuffer bb = mbb.slice(); bb.limit((int) (offsets.get(k + 1) - offsets.get(k))); ImmutableConciseSet ics = new ImmutableConciseSet(bb); 
icss.add(ics); } aft = System.currentTimeMillis(); long deserializationTime = aft - bef; // Disk storage in bytes long sizeDisk = file.length(); // Average time to compute unions between NumberOfBitmaps // ConciseSets double unionTime = test(new Launcher() { @Override public void launch() { ics = ImmutableConciseSet.union(icss.iterator()); careof += ics.size(); } }); // Average time to compute intersects between // NumberOfBitmaps ConciseSets double intersectTime = test(new Launcher() { @Override public void launch() { ics = ImmutableConciseSet.intersection(icss .iterator()); careof += ics.size(); } }); // Average time to retrieve set bits double scanTime = testScanConcise(); System.out.println("***************************"); System.out.println("ConciseSet on " + dataSet + " dataset"); System.out.println("***************************"); System.out.println("Deserialization time: "+deserializationTime+" ms"); System.out.println("RAM Size = " + (sizeRAM * 1. / 1024) + " Kb" + " (" + Math.round(sizeRAM * 1. / NumberOfBitmaps) + " bytes/bitmap)"); System.out.println("Disk Size = " + (sizeDisk * 1. / 1024) + " Kb" + " (" + Math.round(sizeDisk * 1. 
/ NumberOfBitmaps) + " bytes/bitmap)"); System.out.println("Unions time = " + unionTime + " ms"); System.out.println("Intersections time = " + intersectTime + " ms"); System.out.println("Scans time = " + scanTime + " ms"); System.out.println(".ignore = " + careof); mbb = null; memoryMappedFile.close(); file.delete(); } } } catch (IOException e) { e.printStackTrace(); } } static ConciseSet toConcise(int[] dat) { ConciseSet ans = new ConciseSet(); for (int i : dat) ans.add(i); return ans; } static double test(Launcher job) { long jobTime, begin, end; int i, repeat = 1; // Warming up the cache do { repeat *= 2;// potentially unsafe for very large integers begin = System.currentTimeMillis(); for (int r = 0; r < repeat; r++) { job.launch(); } end = System.currentTimeMillis(); jobTime = (end - begin); } while ((jobTime < warmup_ms) && (repeat < (1 << 24))); // We can start timings now begin = System.currentTimeMillis(); for (i = 0; i < nbRepetitions; ++i) { job.launch(); } end = System.currentTimeMillis(); jobTime = end - begin; return (double) (jobTime) / (double) (nbRepetitions); } static double testScanRoaring() { long scanTime, begin, end; int i, k, repeat = 1; org.roaringbitmap.IntIterator it; // Warming up the cache do { repeat *= 2; scanTime = 0; for (int r = 0; r < repeat; r++) { begin = System.currentTimeMillis(); for (k = 0; k < irbs.length; k++) { irb = irbs[k]; it = irb.getIntIterator(); while (it.hasNext()) { it.next(); } } end = System.currentTimeMillis(); scanTime += end - begin; } } while ((scanTime < warmup_ms) && (repeat < (1 << 24))); // We can start timings now scanTime = 0; for (i = 0; i < nbRepetitions; i++) { begin = System.currentTimeMillis(); for (k = 0; k < irbs.length; k++) { irb = irbs[k]; it = irb.getIntIterator(); while (it.hasNext()) { it.next(); } } end = System.currentTimeMillis(); scanTime += end - begin; } return scanTime * 1. 
/ nbRepetitions; } static double testScanConcise() { long scanTime, begin, end; int i, k, repeat = 1; IntIterator it; // Warming up the cache do { repeat *= 2; scanTime = 0; for (int r = 0; r < repeat; r++) { begin = System.currentTimeMillis(); for (k = 0; k < icss.size(); k++) { ics = icss.get(k); it = ics.iterator(); while (it.hasNext()) { it.next(); } } end = System.currentTimeMillis(); scanTime += end - begin; } } while ((scanTime < warmup_ms) && (repeat < (1 << 24))); // We can start timings now scanTime = 0; begin = System.currentTimeMillis(); for (i = 0; i < nbRepetitions; i++) { for (k = 0; k < icss.size(); k++) { ics = icss.get(k); it = ics.iterator(); while (it.hasNext()) { it.next(); } } } end = System.currentTimeMillis(); scanTime = end - begin; return scanTime * 1. / nbRepetitions; } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.metron.writer; import com.google.common.collect.Iterables; import org.apache.metron.common.Constants; import org.apache.metron.common.configuration.writer.WriterConfiguration; import org.apache.metron.common.error.MetronError; import org.apache.metron.common.message.MessageGetStrategy; import org.apache.metron.common.system.Clock; import org.apache.metron.common.utils.ErrorUtils; import org.apache.metron.common.writer.BulkMessageWriter; import org.apache.metron.common.writer.BulkWriterResponse; import org.apache.storm.task.OutputCollector; import org.apache.storm.tuple.Tuple; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; /** * This component implements message batching, with both flush on queue size, and flush on queue timeout. * There is a queue for each sensorType. * Ideally each queue would have its own timer, but we only have one global timer, the Tick Tuple * generated at fixed intervals by the system and received by the Bolt. 
Given this constraint, * we use the following strategy: * - The default batchTimeout is, as recommended by Storm, 1/2 the Storm 'topology.message.timeout.secs', * modified by batchTimeoutDivisor, in case multiple batching writers are daisy-chained in one topology. * - If some sensors configure their own batchTimeouts, they are compared with the default. Batch * timeouts greater than the default will be ignored, because they can cause message recycling in Storm. * Batch timeouts configured to <= zero, or undefined, mean use the default. * - The *smallest* configured batchTimeout among all sensor types, greater than zero and less than * the default, will be used to configure the 'topology.tick.tuple.freq.secs' for the Bolt. If there are no * valid configured batchTimeouts, the defaultBatchTimeout will be used. * - The age of the queue is checked every time a sensor message arrives. Thus, if at least one message * per second is received for a given sensor, that queue will flush on timeout or sooner, depending on batchSize. * - On each Tick Tuple received, *all* queues will be checked, and if any are older than their respective * batchTimeout, they will be flushed. Note that this does NOT guarantee timely flushing, depending on the * phase relationship between the queue's batchTimeout and the tick interval. The maximum age of a queue * before it flushes is its batchTimeout + the tick interval, which is guaranteed to be less than 2x the * batchTimeout, and also less than the 'topology.message.timeout.secs'. This guarantees that the messages * will not age out of the Storm topology, but it does not guarantee the flush interval requested, for * sensor types not receiving at least one message every second. 
* * @param <MESSAGE_T> */ public class BulkWriterComponent<MESSAGE_T> { public static final Logger LOG = LoggerFactory .getLogger(BulkWriterComponent.class); private Map<String, Collection<Tuple>> sensorTupleMap = new HashMap<>(); private Map<String, List<MESSAGE_T>> sensorMessageMap = new HashMap<>(); private Map<String, long[]> batchTimeoutMap = new HashMap<>(); private OutputCollector collector; //In test scenarios, defaultBatchTimeout may not be correctly initialized, so do it here. //This is a conservative defaultBatchTimeout for a vanilla bolt with batchTimeoutDivisor=2 public static final int UNINITIALIZED_DEFAULT_BATCH_TIMEOUT = 6; private int defaultBatchTimeout = UNINITIALIZED_DEFAULT_BATCH_TIMEOUT; private boolean handleCommit = true; private boolean handleError = true; private static final int LAST_CREATE_TIME_MS = 0; //index zero'th element of long[] in batchTimeoutMap private static final int TIMEOUT_MS = 1; //index next element of long[] in batchTimeoutMap private Clock clock = new Clock(); public BulkWriterComponent(OutputCollector collector) { this.collector = collector; } public BulkWriterComponent(OutputCollector collector, boolean handleCommit, boolean handleError) { this(collector); this.handleCommit = handleCommit; this.handleError = handleError; } /** * Used only for testing. Overrides the default (actual) wall clock. 
* @return this mutated BulkWriterComponent */ public BulkWriterComponent withClock(Clock clock) { this.clock = clock; return this; } public void commit(Iterable<Tuple> tuples) { tuples.forEach(t -> collector.ack(t)); if(LOG.isDebugEnabled()) { LOG.debug("Acking {} tuples", Iterables.size(tuples)); } } public void commit(BulkWriterResponse response) { commit(response.getSuccesses()); } public void error(String sensorType, Throwable e, Iterable<Tuple> tuples, MessageGetStrategy messageGetStrategy) { tuples.forEach(t -> collector.ack(t)); MetronError error = new MetronError() .withSensorType(sensorType) .withErrorType(Constants.ErrorType.INDEXING_ERROR) .withThrowable(e); if(!Iterables.isEmpty(tuples)) { LOG.error("Failing {} tuples", Iterables.size(tuples), e); } tuples.forEach(t -> error.addRawMessage(messageGetStrategy.get(t))); ErrorUtils.handleError(collector, error); } public void error(String sensorType, BulkWriterResponse errors, MessageGetStrategy messageGetStrategy) { Map<Throwable, Collection<Tuple>> errorMap = errors.getErrors(); for(Map.Entry<Throwable, Collection<Tuple>> entry : errorMap.entrySet()) { error(sensorType, entry.getKey(), entry.getValue(), messageGetStrategy); } } protected Collection<Tuple> createTupleCollection() { return new ArrayList<>(); } public void errorAll(Throwable e, MessageGetStrategy messageGetStrategy) { for(String key : new HashSet<>(sensorTupleMap.keySet())) { errorAll(key, e, messageGetStrategy); } } public void errorAll(String sensorType, Throwable e, MessageGetStrategy messageGetStrategy) { Collection<Tuple> tuples = Optional.ofNullable(sensorTupleMap.get(sensorType)).orElse(new ArrayList<>()); error(sensorType, e, tuples, messageGetStrategy); sensorTupleMap.remove(sensorType); sensorMessageMap.remove(sensorType); } public void write( String sensorType , Tuple tuple , MESSAGE_T message , BulkMessageWriter<MESSAGE_T> bulkMessageWriter , WriterConfiguration configurations , MessageGetStrategy messageGetStrategy ) throws 
Exception { if (!configurations.isEnabled(sensorType)) { collector.ack(tuple); return; } int batchSize = configurations.getBatchSize(sensorType); if (batchSize <= 1) { //simple case - no batching, no timeouts Collection<Tuple> tupleList = sensorTupleMap.get(sensorType); //still read in case batchSize changed if (tupleList == null) { tupleList = createTupleCollection(); } tupleList.add(tuple); List<MESSAGE_T> messageList = sensorMessageMap.get(sensorType); //still read in case batchSize changed if (messageList == null) { messageList = new ArrayList<>(); } messageList.add(message); flush(sensorType, bulkMessageWriter, configurations, messageGetStrategy, tupleList, messageList); return; } //Otherwise do the full batch buffering with timeouts long[] batchTimeoutInfo = batchTimeoutMap.get(sensorType); if (batchTimeoutInfo == null) { //lazily create the batchTimeoutInfo array, once per sensor. batchTimeoutInfo = new long[] {0L, 0L}; batchTimeoutMap.put(sensorType, batchTimeoutInfo); } Collection<Tuple> tupleList = sensorTupleMap.get(sensorType); if (tupleList == null) { //This block executes at the beginning of every batch, per sensor. 
tupleList = createTupleCollection(); sensorTupleMap.put(sensorType, tupleList); batchTimeoutInfo[LAST_CREATE_TIME_MS] = clock.currentTimeMillis(); //configurations can change, so (re)init getBatchTimeout(sensorType) at start of every batch int batchTimeoutSecs = configurations.getBatchTimeout(sensorType); if (batchTimeoutSecs <= 0 || batchTimeoutSecs > defaultBatchTimeout) { batchTimeoutSecs = defaultBatchTimeout; } batchTimeoutInfo[TIMEOUT_MS] = TimeUnit.SECONDS.toMillis(batchTimeoutSecs); } tupleList.add(tuple); List<MESSAGE_T> messageList = sensorMessageMap.get(sensorType); if (messageList == null) { messageList = new ArrayList<>(); sensorMessageMap.put(sensorType, messageList); } messageList.add(message); //Check for batchSize flush if (tupleList.size() >= batchSize) { flush(sensorType, bulkMessageWriter, configurations, messageGetStrategy, tupleList, messageList); return; } //Check for batchTimeout flush (if the tupleList isn't brand new). //Debugging note: If your queue always flushes at length==2 regardless of feed rate, //it may mean defaultBatchTimeout has somehow been set to zero. if (tupleList.size() > 1 && (clock.currentTimeMillis() - batchTimeoutInfo[LAST_CREATE_TIME_MS] >= batchTimeoutInfo[TIMEOUT_MS])) { flush(sensorType, bulkMessageWriter, configurations, messageGetStrategy, tupleList, messageList); return; } } private void flush( String sensorType , BulkMessageWriter<MESSAGE_T> bulkMessageWriter , WriterConfiguration configurations , MessageGetStrategy messageGetStrategy , Collection<Tuple> tupleList , List<MESSAGE_T> messageList ) throws Exception { long startTime = System.currentTimeMillis(); //no need to mock, so use real time try { BulkWriterResponse response = bulkMessageWriter.write(sensorType, configurations, tupleList, messageList); // Commit or error piecemeal. 
if(handleCommit) { commit(response); } if(handleError) { error(sensorType, response, messageGetStrategy); } else if (response.hasErrors()) { throw new IllegalStateException("Unhandled bulk errors in response: " + response.getErrors()); } } catch (Throwable e) { if(handleError) { error(sensorType, e, tupleList, messageGetStrategy); } else { throw e; } } finally { sensorTupleMap.remove(sensorType); sensorMessageMap.remove(sensorType); } long endTime = System.currentTimeMillis(); long elapsed = endTime - startTime; LOG.debug("Bulk batch for sensor {} completed in ~{} ns", sensorType, elapsed); } // Flushes all queues older than their batchTimeouts. public void flushTimeouts( BulkMessageWriter<MESSAGE_T> bulkMessageWriter , WriterConfiguration configurations , MessageGetStrategy messageGetStrategy ) throws Exception { // No need to do "all" sensorTypes here, just the ones that have data batched up. // Note queues with batchSize == 1 don't get batched, so they never persist in the sensorTupleMap. for (String sensorType : sensorTupleMap.keySet()) { long[] batchTimeoutInfo = batchTimeoutMap.get(sensorType); if (batchTimeoutInfo == null //Shouldn't happen, but conservatively flush if so || clock.currentTimeMillis() - batchTimeoutInfo[LAST_CREATE_TIME_MS] >= batchTimeoutInfo[TIMEOUT_MS]) { flush(sensorType, bulkMessageWriter, configurations, messageGetStrategy , sensorTupleMap.get(sensorType), sensorMessageMap.get(sensorType)); return; } } } /** * @param defaultBatchTimeout */ public void setDefaultBatchTimeout(int defaultBatchTimeout) { this.defaultBatchTimeout = defaultBatchTimeout; } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.planner.optimizations; import com.facebook.presto.Session; import com.facebook.presto.index.IndexManager; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.ResolvedIndex; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.TupleDomain; import com.facebook.presto.sql.planner.DomainTranslator; import com.facebook.presto.sql.planner.PlanNodeIdAllocator; import com.facebook.presto.sql.planner.Symbol; import com.facebook.presto.sql.planner.SymbolAllocator; import com.facebook.presto.sql.planner.plan.AggregationNode; import com.facebook.presto.sql.planner.plan.FilterNode; import com.facebook.presto.sql.planner.plan.IndexJoinNode; import com.facebook.presto.sql.planner.plan.IndexSourceNode; import com.facebook.presto.sql.planner.plan.JoinNode; import com.facebook.presto.sql.planner.plan.PlanNode; import com.facebook.presto.sql.planner.plan.PlanRewriter; import com.facebook.presto.sql.planner.plan.PlanVisitor; import com.facebook.presto.sql.planner.plan.ProjectNode; import com.facebook.presto.sql.planner.plan.SortNode; import com.facebook.presto.sql.planner.plan.TableScanNode; import com.facebook.presto.sql.tree.BooleanLiteral; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.QualifiedNameReference; import com.google.common.base.Functions; import com.google.common.collect.FluentIterable; import 
com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.jetbrains.annotations.NotNull; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import static com.facebook.presto.sql.ExpressionUtils.combineConjuncts; import static com.facebook.presto.sql.tree.BooleanLiteral.TRUE_LITERAL; import static com.facebook.presto.util.ImmutableCollectors.toImmutableSet; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Predicates.in; import static java.util.Objects.requireNonNull; public class IndexJoinOptimizer extends PlanOptimizer { private final IndexManager indexManager; private final Metadata metadata; public IndexJoinOptimizer(Metadata metadata, IndexManager indexManager) { this.metadata = requireNonNull(metadata, "metadata is null"); this.indexManager = requireNonNull(indexManager, "indexManager is null"); } @Override public PlanNode optimize(PlanNode plan, Session session, Map<Symbol, com.facebook.presto.spi.type.Type> types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator) { checkNotNull(plan, "plan is null"); checkNotNull(session, "session is null"); checkNotNull(types, "types is null"); checkNotNull(symbolAllocator, "symbolAllocator is null"); checkNotNull(idAllocator, "idAllocator is null"); return PlanRewriter.rewriteWith(new Rewriter(symbolAllocator, idAllocator, indexManager, metadata, session), plan, null); } private static class Rewriter extends PlanRewriter<Void> { private final IndexManager indexManager; private final SymbolAllocator symbolAllocator; private 
final PlanNodeIdAllocator idAllocator; private final Metadata metadata; private final Session session; private Rewriter(SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator, IndexManager indexManager, Metadata metadata, Session session) { this.symbolAllocator = requireNonNull(symbolAllocator, "symbolAllocator is null"); this.idAllocator = requireNonNull(idAllocator, "idAllocator is null"); this.indexManager = requireNonNull(indexManager, "indexManager is null"); this.metadata = requireNonNull(metadata, "metadata is null"); this.session = requireNonNull(session, "session is null"); } @Override public PlanNode visitJoin(JoinNode node, RewriteContext<Void> context) { PlanNode leftRewritten = context.rewrite(node.getLeft()); PlanNode rightRewritten = context.rewrite(node.getRight()); if (!node.getCriteria().isEmpty()) { // Index join only possible with JOIN criteria List<Symbol> leftJoinSymbols = Lists.transform(node.getCriteria(), JoinNode.EquiJoinClause::getLeft); List<Symbol> rightJoinSymbols = Lists.transform(node.getCriteria(), JoinNode.EquiJoinClause::getRight); Optional<PlanNode> leftIndexCandidate = IndexSourceRewriter.rewriteWithIndex( leftRewritten, ImmutableSet.copyOf(leftJoinSymbols), indexManager, symbolAllocator, idAllocator, metadata, session); if (leftIndexCandidate.isPresent()) { // Sanity check that we can trace the path for the index lookup key Map<Symbol, Symbol> trace = IndexKeyTracer.trace(leftIndexCandidate.get(), ImmutableSet.copyOf(leftJoinSymbols)); checkState(!trace.isEmpty() && leftJoinSymbols.containsAll(trace.keySet())); } Optional<PlanNode> rightIndexCandidate = IndexSourceRewriter.rewriteWithIndex( rightRewritten, ImmutableSet.copyOf(rightJoinSymbols), indexManager, symbolAllocator, idAllocator, metadata, session); if (rightIndexCandidate.isPresent()) { // Sanity check that we can trace the path for the index lookup key Map<Symbol, Symbol> trace = IndexKeyTracer.trace(rightIndexCandidate.get(), 
ImmutableSet.copyOf(rightJoinSymbols)); checkState(!trace.isEmpty() && rightJoinSymbols.containsAll(trace.keySet())); } switch (node.getType()) { case INNER: // Prefer the right candidate over the left candidate if (rightIndexCandidate.isPresent()) { return new IndexJoinNode(idAllocator.getNextId(), IndexJoinNode.Type.INNER, leftRewritten, rightIndexCandidate.get(), createEquiJoinClause(leftJoinSymbols, rightJoinSymbols), Optional.empty(), Optional.empty()); } else if (leftIndexCandidate.isPresent()) { return new IndexJoinNode(idAllocator.getNextId(), IndexJoinNode.Type.INNER, rightRewritten, leftIndexCandidate.get(), createEquiJoinClause(rightJoinSymbols, leftJoinSymbols), Optional.empty(), Optional.empty()); } break; case LEFT: if (rightIndexCandidate.isPresent()) { return new IndexJoinNode(idAllocator.getNextId(), IndexJoinNode.Type.SOURCE_OUTER, leftRewritten, rightIndexCandidate.get(), createEquiJoinClause(leftJoinSymbols, rightJoinSymbols), Optional.empty(), Optional.empty()); } break; case RIGHT: if (leftIndexCandidate.isPresent()) { return new IndexJoinNode(idAllocator.getNextId(), IndexJoinNode.Type.SOURCE_OUTER, rightRewritten, leftIndexCandidate.get(), createEquiJoinClause(rightJoinSymbols, leftJoinSymbols), Optional.empty(), Optional.empty()); } break; case FULL: break; default: throw new IllegalArgumentException("Unknown type: " + node.getType()); } } if (leftRewritten != node.getLeft() || rightRewritten != node.getRight()) { return new JoinNode(node.getId(), node.getType(), leftRewritten, rightRewritten, node.getCriteria(), node.getLeftHashSymbol(), node.getRightHashSymbol()); } return node; } private static List<IndexJoinNode.EquiJoinClause> createEquiJoinClause(List<Symbol> probeSymbols, List<Symbol> indexSymbols) { checkArgument(probeSymbols.size() == indexSymbols.size()); ImmutableList.Builder<IndexJoinNode.EquiJoinClause> builder = ImmutableList.builder(); for (int i = 0; i < probeSymbols.size(); i++) { builder.add(new 
IndexJoinNode.EquiJoinClause(probeSymbols.get(i), indexSymbols.get(i))); } return builder.build(); } } private static Symbol referenceToSymbol(Expression expression) { checkArgument(expression instanceof QualifiedNameReference); return Symbol.fromQualifiedName(((QualifiedNameReference) expression).getName()); } /** * Tries to rewrite a PlanNode tree with an IndexSource instead of a TableScan */ private static class IndexSourceRewriter extends PlanRewriter<IndexSourceRewriter.Context> { private final IndexManager indexManager; private final SymbolAllocator symbolAllocator; private final PlanNodeIdAllocator idAllocator; private final Metadata metadata; private final Session session; private IndexSourceRewriter(IndexManager indexManager, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator, Metadata metadata, Session session) { this.metadata = requireNonNull(metadata, "metadata is null"); this.symbolAllocator = checkNotNull(symbolAllocator, "symbolAllocator is null"); this.idAllocator = checkNotNull(idAllocator, "idAllocator is null"); this.indexManager = checkNotNull(indexManager, "indexManager is null"); this.session = requireNonNull(session, "session is null"); } public static Optional<PlanNode> rewriteWithIndex( PlanNode planNode, Set<Symbol> lookupSymbols, IndexManager indexManager, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator, Metadata metadata, Session session) { AtomicBoolean success = new AtomicBoolean(); IndexSourceRewriter indexSourceRewriter = new IndexSourceRewriter(indexManager, symbolAllocator, idAllocator, metadata, session); PlanNode rewritten = PlanRewriter.rewriteWith(indexSourceRewriter, planNode, new Context(lookupSymbols, success)); if (success.get()) { return Optional.of(rewritten); } return Optional.empty(); } @Override public PlanNode visitPlan(PlanNode node, RewriteContext<Context> context) { // We don't know how to process this PlanNode in the context of an IndexJoin, so just give up by returning something 
return node; } @Override public PlanNode visitTableScan(TableScanNode node, RewriteContext<Context> context) { return planTableScan(node, BooleanLiteral.TRUE_LITERAL, context.get()); } @NotNull private PlanNode planTableScan(TableScanNode node, Expression predicate, Context context) { DomainTranslator.ExtractionResult decomposedPredicate = DomainTranslator.fromPredicate( metadata, session, predicate, symbolAllocator.getTypes()); TupleDomain<ColumnHandle> simplifiedConstraint = decomposedPredicate.getTupleDomain() .transform(node.getAssignments()::get) .intersect(node.getCurrentConstraint()); checkState(node.getOutputSymbols().containsAll(context.getLookupSymbols())); Set<ColumnHandle> lookupColumns = context.getLookupSymbols().stream() .map(node.getAssignments()::get) .collect(toImmutableSet()); Set<ColumnHandle> outputColumns = node.getOutputSymbols().stream().map(node.getAssignments()::get).collect(toImmutableSet()); Optional<ResolvedIndex> optionalResolvedIndex = indexManager.resolveIndex(session, node.getTable(), lookupColumns, outputColumns, simplifiedConstraint); if (!optionalResolvedIndex.isPresent()) { // No index available, so give up by returning something return node; } ResolvedIndex resolvedIndex = optionalResolvedIndex.get(); Map<ColumnHandle, Symbol> inverseAssignments = ImmutableBiMap.copyOf(node.getAssignments()).inverse(); PlanNode source = new IndexSourceNode( idAllocator.getNextId(), resolvedIndex.getIndexHandle(), node.getTable(), context.getLookupSymbols(), node.getOutputSymbols(), node.getAssignments(), simplifiedConstraint); Expression resultingPredicate = combineConjuncts( DomainTranslator.toPredicate( resolvedIndex.getUnresolvedTupleDomain().transform(inverseAssignments::get), symbolAllocator.getTypes()), decomposedPredicate.getRemainingExpression()); if (!resultingPredicate.equals(TRUE_LITERAL)) { // todo it is likely we end up with redundant filters here because the predicate push down has already been run... 
the fix is to run predicate push down again source = new FilterNode(idAllocator.getNextId(), source, resultingPredicate); } context.markSuccess(); return source; } @Override public PlanNode visitProject(ProjectNode node, RewriteContext<Context> context) { // Rewrite the lookup symbols in terms of only the pre-projected symbols that have direct translations Set<Symbol> newLookupSymbols = context.get().getLookupSymbols().stream() .map(node.getAssignments()::get) .filter(QualifiedNameReference.class::isInstance) .map(IndexJoinOptimizer::referenceToSymbol) .collect(toImmutableSet()); if (newLookupSymbols.isEmpty()) { return node; } return context.defaultRewrite(node, new Context(newLookupSymbols, context.get().getSuccess())); } @Override public PlanNode visitFilter(FilterNode node, RewriteContext<Context> context) { if (node.getSource() instanceof TableScanNode) { return planTableScan((TableScanNode) node.getSource(), node.getPredicate(), context.get()); } return context.defaultRewrite(node, new Context(context.get().getLookupSymbols(), context.get().getSuccess())); } @Override public PlanNode visitIndexSource(IndexSourceNode node, RewriteContext<Context> context) { throw new IllegalStateException("Should not be trying to generate an Index on something that has already been determined to use an Index"); } @Override public PlanNode visitIndexJoin(IndexJoinNode node, RewriteContext<Context> context) { // Lookup symbols can only be passed through the probe side of an index join Set<Symbol> probeLookupSymbols = context.get().getLookupSymbols().stream() .filter(node.getProbeSource().getOutputSymbols()::contains) .collect(toImmutableSet()); if (probeLookupSymbols.isEmpty()) { return node; } PlanNode rewrittenProbeSource = context.rewrite(node.getProbeSource(), new Context(probeLookupSymbols, context.get().getSuccess())); PlanNode source = node; if (rewrittenProbeSource != node.getProbeSource()) { source = new IndexJoinNode(node.getId(), node.getType(), rewrittenProbeSource, 
node.getIndexSource(), node.getCriteria(), node.getProbeHashSymbol(), node.getIndexHashSymbol()); } return source; } @Override public PlanNode visitAggregation(AggregationNode node, RewriteContext<Context> context) { // Lookup symbols can only be passed through if they are part of the group by columns Set<Symbol> groupByLookupSymbols = context.get().getLookupSymbols().stream() .filter(node.getGroupBy()::contains) .collect(toImmutableSet()); if (groupByLookupSymbols.isEmpty()) { return node; } return context.defaultRewrite(node, new Context(groupByLookupSymbols, context.get().getSuccess())); } @Override public PlanNode visitSort(SortNode node, RewriteContext<Context> context) { // Sort has no bearing when building an index, so just ignore the sort return context.rewrite(node.getSource(), context.get()); } public static class Context { private final Set<Symbol> lookupSymbols; private final AtomicBoolean success; public Context(Set<Symbol> lookupSymbols, AtomicBoolean success) { checkArgument(!lookupSymbols.isEmpty(), "lookupSymbols can not be empty"); this.lookupSymbols = ImmutableSet.copyOf(checkNotNull(lookupSymbols, "lookupSymbols is null")); this.success = checkNotNull(success, "success is null"); } public Set<Symbol> getLookupSymbols() { return lookupSymbols; } public AtomicBoolean getSuccess() { return success; } public void markSuccess() { checkState(success.compareAndSet(false, true), "Can only have one success per context"); } } } /** * Identify the mapping from the lookup symbols used at the top of the index plan to * the actual symbols produced by the IndexSource. Note that multiple top-level lookup symbols may share the same * underlying IndexSource symbol. Also note that lookup symbols that do not correspond to underlying index source symbols * will be omitted from the returned Map. 
 */
public static class IndexKeyTracer
{
    // Entry point: trace each lookup symbol down to its underlying IndexSource symbol.
    public static Map<Symbol, Symbol> trace(PlanNode node, Set<Symbol> lookupSymbols)
    {
        return node.accept(new Visitor(), lookupSymbols);
    }

    /**
     * Walks down the index pipeline, translating lookup symbols through each node type.
     * Any node type not listed here is not expected inside an index pipeline and fails fast.
     */
    private static class Visitor
            extends PlanVisitor<Set<Symbol>, Map<Symbol, Symbol>>
    {
        @Override
        protected Map<Symbol, Symbol> visitPlan(PlanNode node, Set<Symbol> lookupSymbols)
        {
            throw new UnsupportedOperationException("Node not expected to be part of Index pipeline: " + node);
        }

        @Override
        public Map<Symbol, Symbol> visitProject(ProjectNode node, Set<Symbol> lookupSymbols)
        {
            // Map from output Symbols to source Symbols (only direct symbol-reference assignments translate).
            Map<Symbol, Symbol> directSymbolTranslationOutputMap = Maps.transformValues(Maps.filterValues(node.getAssignments(), QualifiedNameReference.class::isInstance), IndexJoinOptimizer::referenceToSymbol);
            Map<Symbol, Symbol> outputToSourceMap = FluentIterable.from(lookupSymbols)
                    .filter(in(directSymbolTranslationOutputMap.keySet()))
                    .toMap(Functions.forMap(directSymbolTranslationOutputMap));

            checkState(!outputToSourceMap.isEmpty(), "No lookup symbols were able to pass through the projection");

            // Map from source Symbols to underlying index source Symbols
            Map<Symbol, Symbol> sourceToIndexMap = node.getSource().accept(this, ImmutableSet.copyOf(outputToSourceMap.values()));

            // Generate the Map the connects lookup symbols to underlying index source symbols
            // (lookup symbols whose source symbol did not reach the index source are dropped).
            Map<Symbol, Symbol> outputToIndexMap = Maps.transformValues(Maps.filterValues(outputToSourceMap, in(sourceToIndexMap.keySet())), Functions.forMap(sourceToIndexMap));
            return ImmutableMap.copyOf(outputToIndexMap);
        }

        @Override
        public Map<Symbol, Symbol> visitFilter(FilterNode node, Set<Symbol> lookupSymbols)
        {
            // Filters do not rename symbols; pass straight through.
            return node.getSource().accept(this, lookupSymbols);
        }

        @Override
        public Map<Symbol, Symbol> visitIndexJoin(IndexJoinNode node, Set<Symbol> lookupSymbols)
        {
            // Lookup symbols may only flow through the probe side of a nested index join.
            Set<Symbol> probeLookupSymbols = lookupSymbols.stream()
                    .filter(node.getProbeSource().getOutputSymbols()::contains)
                    .collect(toImmutableSet());
            checkState(!probeLookupSymbols.isEmpty(), "No lookup symbols were able to pass through the index join probe source");
            return node.getProbeSource().accept(this, probeLookupSymbols);
        }

        @Override
        public Map<Symbol, Symbol> visitAggregation(AggregationNode node, Set<Symbol> lookupSymbols)
        {
            // Only group-by columns survive an aggregation.
            Set<Symbol> groupByLookupSymbols = lookupSymbols.stream()
                    .filter(node.getGroupBy()::contains)
                    .collect(toImmutableSet());
            checkState(!groupByLookupSymbols.isEmpty(), "No lookup symbols were able to pass through the aggregation group by");
            return node.getSource().accept(this, groupByLookupSymbols);
        }

        @Override
        public Map<Symbol, Symbol> visitSort(SortNode node, Set<Symbol> lookupSymbols)
        {
            // Sort neither renames nor drops symbols.
            return node.getSource().accept(this, lookupSymbols);
        }

        @Override
        public Map<Symbol, Symbol> visitIndexSource(IndexSourceNode node, Set<Symbol> lookupSymbols)
        {
            // Base case: at the index source every remaining lookup symbol maps to itself.
            checkState(node.getLookupSymbols().equals(lookupSymbols), "lookupSymbols must be the same as IndexSource lookup symbols");
            return FluentIterable.from(lookupSymbols)
                    .toMap(Functions.<Symbol>identity());
        }
    }
}
}
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.eas.designer.application.query.completion; import com.eas.client.ClientConstants; import com.eas.client.DatabaseMdCache; import com.eas.client.DatabasesClient; import com.eas.client.SqlQuery; import com.eas.client.metadata.Field; import com.eas.client.metadata.Fields; import com.eas.client.metadata.Parameter; import com.eas.client.metadata.Parameters; import com.eas.designer.application.indexer.AppElementInfo; import com.eas.designer.application.indexer.IndexerQuery; import com.eas.designer.application.query.PlatypusQueryDataObject; import com.eas.designer.application.query.lexer.LexSqlTokenId; import com.eas.designer.application.query.lexer.SqlLanguageHierarchy; import com.eas.designer.application.query.lexer.SqlTokenId; import java.util.Collection; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import javax.swing.text.BadLocationException; import javax.swing.text.Document; import javax.swing.text.Element; import javax.swing.text.JTextComponent; import javax.swing.text.StyledDocument; import net.sf.jsqlparser.TablesFinder; import net.sf.jsqlparser.schema.Table; import net.sf.jsqlparser.statement.insert.Insert; import net.sf.jsqlparser.statement.select.Select; import net.sf.jsqlparser.statement.update.Update; import org.netbeans.api.lexer.Token; import org.netbeans.api.lexer.TokenHierarchy; import org.netbeans.api.lexer.TokenSequence; import org.netbeans.modules.editor.NbEditorDocument; import org.netbeans.spi.editor.completion.CompletionProvider; import org.netbeans.spi.editor.completion.CompletionResultSet; import org.netbeans.spi.editor.completion.CompletionTask; import org.netbeans.spi.editor.completion.support.AsyncCompletionQuery; import org.netbeans.spi.editor.completion.support.AsyncCompletionTask; import org.openide.ErrorManager; import org.openide.filesystems.FileObject; import org.openide.loaders.DataObject; import 
org.openide.loaders.DataObjectNotFoundException;
import org.openide.util.NbBundle;

/**
 * NetBeans code-completion provider for Platypus SQL query editors.
 * Classifies the caret position into a "zone" of the SQL statement and fills the
 * completion result set with schemas, tables, fields, parameters, stored queries
 * or keywords appropriate for that zone.
 *
 * @author mg
 */
public class SqlCompletionProvider implements CompletionProvider {

    // Caret-position zones; values are not contiguous (some intermediate ids are unused). //
    public static final int UNKNOWN_ZONE = 0; //
    public static final int SELECT_ZONE = 1; // aliases, table names without aliases and column with dot
    public static final int FROM_ZONE = 3; // tables names
    public static final int WHERE_ZONE = 5; // aliases, tables names
    public static final int HAVING_ZONE = 7; // aliases, table names without aliases
    public static final int GROUP_ZONE = 8;
    public static final int INSERT_INTO_ZONE = 9;
    public static final int INSERT_FIELDS_ZONE = 10;
    public static final int INSERT_VALUES_ZONE = 11;
    public static final int INSERT_VALUES_LIST_ZONE = 12;
    public static final int UPDATE_ZONE = 13;
    public static final int SET_ZONE = 14;
    public static final int KEYWORD_ZONE = 15;// keywords names
    // Number of items added during the current query; used to decide on the keyword fallback.
    protected int addedCompletionItems = 0;

    /**
     * Adds {@code item} to {@code resultSet} if it matches the point's filter prefix
     * (case-insensitive) or no filter is present, and counts the addition.
     */
    public void addCompletionItem(CompletionPoint point, SqlCompletionItem item, CompletionResultSet resultSet) {
        if (point.filter == null || item.getText().toLowerCase().startsWith(point.filter.toLowerCase())) {
            resultSet.addItem(item);
            addedCompletionItems++;
        }
    }

    /**
     * Offers every other stored query of the project as a completion item.
     * The current data object itself is excluded (note: identity comparison against {@code fdo}).
     */
    public void fillCompletionByStoredQueries(PlatypusQueryDataObject dataObject, CompletionPoint point, CompletionResultSet resultSet) throws DataObjectNotFoundException {
        Collection<AppElementInfo> appElements = IndexerQuery.appElementsByPrefix(dataObject.getProject(), "");
        if (appElements != null) {
            for (AppElementInfo appInfo : appElements) {
                if (appInfo != null && appInfo.primaryFileObject != null) {
                    DataObject fdo = DataObject.find(appInfo.primaryFileObject);
                    // NOTE(review): reference comparison intentionally skips the query being edited;
                    // DataObject instances are cached per file, so identity works here — confirm.
                    if (fdo instanceof PlatypusQueryDataObject && fdo != dataObject) {
                        SqlCompletionItem item = new StoredQuerySqlCompletionItem(appInfo.appElementId, dataObject, (PlatypusQueryDataObject) fdo, point.startOffset, point.endOffset);
                        addCompletionItem(point, item, resultSet);
                    }
                }
            }
        }
    }

    /**
     * Offers the tables of the given schema (null means the default schema — see achieveTables).
     */
    public void fillCompletionByTablesBySchema(String aSchema, PlatypusQueryDataObject dataObject, CompletionPoint point, CompletionResultSet resultSet) throws Exception {
        Map<String, Fields> tables = dataObject.achieveTables(aSchema);
        fillCompletionByTables(tables, dataObject, point, resultSet);
    }

    /** Offers one completion item per table entry (table name -> its fields). */
    public void fillCompletionByTables(Map<String, Fields> tables, PlatypusQueryDataObject dataObject, CompletionPoint point, CompletionResultSet resultSet) throws Exception {
        if (tables != null) {
            for (Entry<String, Fields> aTableEntry : tables.entrySet()) {
                SqlCompletionItem item = new TableSqlCompletionItem(dataObject, aTableEntry, point.startOffset, point.endOffset);
                addCompletionItem(point, item, resultSet);
            }
        }
    }

    /**
     * Offers one completion item per field; shows a localized "NoFields" placeholder
     * when the fields collection is present but empty.
     */
    public void fillCompletionByFields(Fields fields, PlatypusQueryDataObject dataObject, CompletionPoint point, CompletionResultSet resultSet) throws Exception {
        if (fields != null) {
            if (!fields.isEmpty()) {
                for (Field field : fields.toCollection()) {
                    SqlCompletionItem item = new FieldSqlCompletionItem(field, point.startOffset, point.endOffset);
                    addCompletionItem(point, item, resultSet);
                }
            } else {
                SqlCompletionItem item = new EmptySqlCompletionItem(NbBundle.getMessage(SqlCompletionProvider.class, "NoFields"));
                addCompletionItem(point, item, resultSet);
            }
        }
    }

    /**
     * Offers one completion item per query parameter; shows a localized "NoParameters"
     * placeholder when the parameters collection is present but empty.
     */
    public void fillCompletionByParameters(Parameters aParams, PlatypusQueryDataObject dataObject, CompletionPoint point, CompletionResultSet resultSet) throws Exception {
        if (aParams != null) {
            if (!aParams.isEmpty()) {
                for (Field field : aParams.toCollection()) {
                    SqlCompletionItem item = new ParameterSqlCompletionItem((Parameter) field, point.startOffset, point.endOffset);
                    addCompletionItem(point, item, resultSet);
                }
            } else {
                SqlCompletionItem item = new EmptySqlCompletionItem(NbBundle.getMessage(SqlCompletionProvider.class, "NoParameters"));
                addCompletionItem(point, item, resultSet);
            }
        }
    }

    /**
     * Inside the column list of an INSERT: offers the fields of the target table.
     * Uses the unqualified table name when the table's schema is the connection's default schema.
     */
    public void fillCompletionInsertFieldsZone(CompletionPoint point, PlatypusQueryDataObject dataObject, CompletionResultSet resultSet) throws Exception {
        if (dataObject.getStatement() instanceof Insert) {
            DatabasesClient basesProxy = dataObject.getBasesProxy();
            if (basesProxy != null) {
                Insert iStatement = (Insert) dataObject.getStatement();
                DatabaseMdCache mdCache = basesProxy.getDbMetadataCache(dataObject.getDatasourceName());
                String schema = iStatement.getTable().getSchemaName();
                String defaultSchema = mdCache.getConnectionSchema();
                // NOTE(review): NPE if getConnectionSchema() can return null — confirm upstream guarantee.
                Fields fields = mdCache.getTableMetadata(defaultSchema.equalsIgnoreCase(schema) ? iStatement.getTable().getName() : iStatement.getTable().getWholeTableName());
                fillCompletionByFields(fields, dataObject, point, resultSet);
            }
        }
    }

    /**
     * Inside the SET clause of an UPDATE: offers the fields of the target table.
     * Mirrors fillCompletionInsertFieldsZone for Update statements.
     */
    public void fillCompletionUpdateFieldsZone(CompletionPoint point, PlatypusQueryDataObject dataObject, CompletionResultSet resultSet) throws Exception {
        if (dataObject.getStatement() instanceof Update) {
            DatabasesClient basesProxy = dataObject.getBasesProxy();
            if (basesProxy != null) {
                Update uStatement = (Update) dataObject.getStatement();
                DatabaseMdCache mdCache = basesProxy.getDbMetadataCache(dataObject.getDatasourceName());
                String schema = uStatement.getTable().getSchemaName();
                String defaultSchema = mdCache.getConnectionSchema();
                // NOTE(review): same potential NPE on defaultSchema as in the Insert variant — confirm.
                Fields fields = mdCache.getTableMetadata(defaultSchema.equalsIgnoreCase(schema) ? uStatement.getTable().getName() : uStatement.getTable().getWholeTableName());
                fillCompletionByFields(fields, dataObject, point, resultSet);
            }
        }
    }

    /** Offers the query's own parameters (":param" style). */
    public void fillCompletionParametersZone(CompletionPoint point, PlatypusQueryDataObject dataObject, CompletionResultSet resultSet) throws Exception {
        fillCompletionByParameters(dataObject.getModel().getParameters(), dataObject, point, resultSet);
    }

    /**
     * FROM clause: after "schema." offers that schema's tables; otherwise offers
     * stored queries (when the filter starts with the stored-query prefix) or the
     * default schema's tables.
     */
    public void fillCompletionFromZone(CompletionPoint point, PlatypusQueryDataObject dataObject, CompletionResultSet resultSet) throws Exception {
        if (point.atDot) {
            if (point.prevContext != null) {
                // Only a single qualifier ("schema.") makes sense in FROM.
                if (point.prevPrevContext == null) {
                    Set<String> schemas = dataObject.achieveSchemas();
                    String schema = findName(point.prevContext, schemas);
                    if (schema != null) {
                        fillCompletionByTablesBySchema(schema, dataObject, point, resultSet);
                    }
                }
            }
        } else {
            if (point.filter != null && point.filter.startsWith(ClientConstants.STORED_QUERY_REF_PREFIX)) {
                fillCompletionByStoredQueries(dataObject, point, resultSet);
            } else {
                fillCompletionByTablesBySchema(null, dataObject, point, resultSet);
            }
        }
    }

    /**
     * SELECT clause. Resolution order at a dot:
     *  - "schema.table." -> fields of that table;
     *  - "schemaOrAliasOrTable." -> schema's tables, alias/table's fields, or
     *    default-schema table's fields as a fallback.
     * Without a dot: offers every table/stored query referenced by the statement.
     * NOTE(review): near-duplicate of fillCompletionWhereZone except for the
     * instanceof Select guard — keep the two in sync when editing.
     */
    public void fillCompletionSelectZone(PlatypusQueryDataObject dataObject, CompletionPoint point, CompletionResultSet resultSet) throws Exception {
        if (dataObject.getStatement() instanceof Select) {
            DatabasesClient basesProxy = dataObject.getBasesProxy();
            if (basesProxy != null) {
                DatabaseMdCache mdCache = basesProxy.getDbMetadataCache(dataObject.getDatasourceName());
                if (point.atDot) {
                    if (point.prevContext != null) {
                        Set<String> schemas = dataObject.achieveSchemas();
                        Map<String, Table> tables = TablesFinder.getTablesMap(null, dataObject.getStatement(), true);
                        if (point.prevPrevContext != null) {
                            // Two times qualified
                            String schema = findName(point.prevPrevContext, schemas);
                            if (schema != null) {
                                String defaultSchema = mdCache.getConnectionSchema();
                                Fields fields = mdCache.getTableMetadata(defaultSchema.equalsIgnoreCase(schema) ? point.prevContext : schema + "." + point.prevContext);
                                fillCompletionByFields(fields, dataObject, point, resultSet);
                            }
                        } else {
                            // Once qualified
                            String schema = findName(point.prevContext, schemas);
                            if (schema == null) {
                                String aliasOrTable = findName(point.prevContext, tables.keySet());
                                if (aliasOrTable != null) {
                                    // [Alias's] tables's fields
                                    Table table = tables.get(aliasOrTable);
                                    String parserTableName = table.getWholeTableName();
                                    Fields fields = null;
                                    if (parserTableName.startsWith(ClientConstants.STORED_QUERY_REF_PREFIX)) {
                                        // Stored-query reference: take fields from the compiled query.
                                        SqlQuery q = basesProxy.getQueries().getQuery(parserTableName.substring(1), null, null, null);
                                        if (q != null) {
                                            fields = q.getFields();
                                        }
                                    } else {
                                        fields = mdCache.getTableMetadata(table.getWholeTableName());
                                    }
                                    if (fields != null) {
                                        fillCompletionByFields(fields, dataObject, point, resultSet);
                                    }
                                } else {
                                    // fallback to default schema's tables
                                    Fields fields = mdCache.getTableMetadata(point.prevContext);
                                    fillCompletionByFields(fields, dataObject, point, resultSet);
                                }
                            } else {
                                // Tables list (not aliased)
                                fillCompletionByTablesBySchema(schema, dataObject, point, resultSet);
                            }
                        }
                    }
                } else {
                    // No dot: list every table/alias referenced by the statement.
                    Map<String, Table> tables = TablesFinder.getTablesMap(null, dataObject.getStatement(), true);
                    for (String alias : tables.keySet()) {
                        String parserTableName = tables.get(alias).getWholeTableName();
                        if (parserTableName.startsWith(ClientConstants.STORED_QUERY_REF_PREFIX)) {
                            FileObject subjectFO = IndexerQuery.appElementId2File(dataObject.getProject(), parserTableName.substring(1));
                            if (subjectFO != null) {
                                DataObject subjectDO = DataObject.find(subjectFO);
                                if (subjectDO instanceof PlatypusQueryDataObject) {
                                    SqlCompletionItem item = new StoredQuerySqlCompletionItem(dataObject, (PlatypusQueryDataObject) subjectDO, alias, point.startOffset, point.endOffset);
                                    addCompletionItem(point, item, resultSet);
                                }
                            }
                        } else {
                            Fields fields = mdCache.getTableMetadata(parserTableName);
                            SqlCompletionItem item = new TableSqlCompletionItem(dataObject, alias, fields, point.startOffset, point.endOffset);
                            addCompletionItem(point, item, resultSet);
                        }
                    }
                }
            }
        }
    }

    /**
     * WHERE clause (also reused for GROUP BY). Same resolution strategy as
     * fillCompletionSelectZone but without the instanceof Select guard.
     * NOTE(review): copy-paste twin of fillCompletionSelectZone — keep in sync.
     */
    public void fillCompletionWhereZone(PlatypusQueryDataObject dataObject, CompletionPoint point, CompletionResultSet resultSet) throws Exception {
        DatabasesClient basesProxy = dataObject.getBasesProxy();
        if (basesProxy != null) {
            DatabaseMdCache mdCache = basesProxy.getDbMetadataCache(dataObject.getDatasourceName());
            if (point.atDot) {
                if (point.prevContext != null) {
                    Set<String> schemas = dataObject.achieveSchemas();
                    Map<String, Table> tables = TablesFinder.getTablesMap(null, dataObject.getStatement(), true);
                    if (point.prevPrevContext != null) {
                        // Two times qualified
                        String schema = findName(point.prevPrevContext, schemas);
                        if (schema != null) {
                            String defaultSchema = mdCache.getConnectionSchema();
                            Fields fields = mdCache.getTableMetadata(defaultSchema.equalsIgnoreCase(schema) ? point.prevContext : schema + "." + point.prevContext);
                            fillCompletionByFields(fields, dataObject, point, resultSet);
                        }
                    } else {
                        // Once qualified
                        String schema = findName(point.prevContext, schemas);
                        if (schema == null) {
                            String aliasOrTable = findName(point.prevContext, tables.keySet());
                            if (aliasOrTable != null) {
                                // [Alias's] tables's fields
                                Table table = tables.get(aliasOrTable);
                                String parserTableName = table.getWholeTableName();
                                Fields fields = null;
                                if (parserTableName.startsWith(ClientConstants.STORED_QUERY_REF_PREFIX)) {
                                    SqlQuery q = basesProxy.getQueries().getQuery(parserTableName.substring(1), null, null, null);
                                    if (q != null) {
                                        fields = q.getFields();
                                    }
                                } else {
                                    fields = mdCache.getTableMetadata(table.getWholeTableName());
                                }
                                if (fields != null) {
                                    fillCompletionByFields(fields, dataObject, point, resultSet);
                                }
                            } else {
                                // fallback to default schema's tables
                                Fields fields = mdCache.getTableMetadata(point.prevContext);
                                fillCompletionByFields(fields, dataObject, point, resultSet);
                            }
                        } else {
                            // Tables list (not aliased)
                            fillCompletionByTablesBySchema(schema, dataObject, point, resultSet);
                        }
                    }
                }
            } else {
                Map<String, Table> tables = TablesFinder.getTablesMap(null, dataObject.getStatement(), true);
                for (String alias : tables.keySet()) {
                    String parserTableName = tables.get(alias).getWholeTableName();
                    if (parserTableName.startsWith(ClientConstants.STORED_QUERY_REF_PREFIX)) {
                        FileObject subjectFO = IndexerQuery.appElementId2File(dataObject.getProject(), parserTableName.substring(1));
                        if (subjectFO != null) {
                            DataObject subjectDO = DataObject.find(subjectFO);
                            if (subjectDO instanceof PlatypusQueryDataObject) {
                                SqlCompletionItem item = new StoredQuerySqlCompletionItem(dataObject, (PlatypusQueryDataObject) subjectDO, alias, point.startOffset, point.endOffset);
                                addCompletionItem(point, item, resultSet);
                            }
                        }
                    } else {
                        Fields fields = mdCache.getTableMetadata(parserTableName);
                        SqlCompletionItem item = new TableSqlCompletionItem(dataObject, alias, fields, point.startOffset, point.endOffset);
                        addCompletionItem(point, item, resultSet);
                    }
                }
            }
        }
    }

    /** Offers every keyword token ("K_*" in the keyword category) of the SQL lexer. */
    public void fillCompletionWithKeywords(CompletionPoint point, CompletionResultSet resultSet) {
        Collection<SqlTokenId> tokens = SqlLanguageHierarchy.checkTokens();
        for (SqlTokenId token : tokens) {
            if (SqlLanguageHierarchy.KEYWORD_CATEGORY_NAME.equals(token.primaryCategory())) {
                if (token.name().startsWith("K_")) {
                    SqlCompletionItem item = new KeywordSqlCompletionItem(token, point.startOffset, point.endOffset);
                    addCompletionItem(point, item, resultSet);
                }
            }
        }
    }

    /** Offers only the statement-starting keywords: SELECT, INSERT, UPDATE, DELETE. */
    public void fillCompletionWithBaseKeywords(CompletionPoint point, CompletionResultSet resultSet) {
        Collection<SqlTokenId> tokens = SqlLanguageHierarchy.checkTokens();
        for (SqlTokenId token : tokens) {
            if (SqlLanguageHierarchy.KEYWORD_CATEGORY_NAME.equals(token.primaryCategory())) {
                if (token.name().equalsIgnoreCase("K_SELECT")
                        || token.name().equalsIgnoreCase("K_INSERT")
                        || token.name().equalsIgnoreCase("K_UPDATE")
                        || token.name().equalsIgnoreCase("K_DELETE")) {
                    SqlCompletionItem item = new KeywordSqlCompletionItem(token, point.startOffset, point.endOffset);
                    addCompletionItem(point, item, resultSet);
                }
            }
        }
    }
    /** Surfaces an exception to the user as a completion item instead of failing silently. */
    private void fillCompletionException(Exception ex, CompletionResultSet resultSet) {
        resultSet.addItem(new ExceptionSqlCompletionItem(ex));
    }

    /**
     * Snapshot of the caret position: the detected zone, the word fragment being
     * typed (filter), the identifiers before one or two dots, and the replacement span.
     */
    protected class CompletionPoint {

        public int zone = UNKNOWN_ZONE;
        // Identifier two dots back, e.g. "schema" in "schema.table.|".
        public String prevPrevContext = null;
        // Identifier one dot back, e.g. "table" in "table.|".
        public String prevContext = null;
        // Word fragment already typed at the caret; null when the caret is at a word start.
        public String filter = null;
        // True when the caret immediately follows a '.'.
        public boolean atDot;
        public int startOffset = -1;
        public int endOffset = -1;
    }

    public SqlCompletionProvider() {
        super();
    }

    /**
     * Returns an async completion task for completion/tooltip/documentation queries,
     * null for any other query type.
     */
    @Override
    public CompletionTask createTask(int queryType, JTextComponent component) {
        if (queryType == CompletionProvider.COMPLETION_QUERY_TYPE || queryType == CompletionProvider.TOOLTIP_QUERY_TYPE || queryType == CompletionProvider.DOCUMENTATION_QUERY_TYPE) {
            return createCompletionTask(component);
        } else {
            return null;
        }
    }

    /**
     * Builds the async task that computes the completion point and fills the result set.
     * A dummy item is shown when the document is not bound to a query data object
     * (or is the full-text variant of it).
     */
    public CompletionTask createCompletionTask(JTextComponent component) {
        return new AsyncCompletionTask(new AsyncCompletionQuery() {
            @Override
            protected void query(CompletionResultSet resultSet, Document doc, int caretOffset) {
                try {
                    PlatypusQueryDataObject dataObject = (PlatypusQueryDataObject) doc.getProperty(PlatypusQueryDataObject.DATAOBJECT_DOC_PROPERTY);
                    if (dataObject == null || dataObject.getSqlFullTextDocument() == doc) {
                        resultSet.addItem(new DummySqlCompletionItem());
                    } else if (doc instanceof NbEditorDocument) {
                        CompletionPoint completionPoint = calcCompletionPoint((NbEditorDocument) doc, caretOffset);
                        fillCompletionPoint(dataObject, completionPoint, resultSet, doc, caretOffset);
                    }
                    // finish() must always be called, or the completion UI hangs.
                    resultSet.finish();
                } catch (Exception ex) {
                    ErrorManager.getDefault().notify(ex);
                }
            }
        }, component);
    }

    @Override
    public int getAutoQueryTypes(JTextComponent component, String typedText) {
        return CompletionProvider.COMPLETION_QUERY_TYPE;
    }

    /**
     * Computes the CompletionPoint for the caret: word span, typed filter, up to two
     * dot-qualified contexts, and the SQL zone found by scanning lexer tokens from the
     * start of the document up to the caret.
     */
    public CompletionPoint calcCompletionPoint(NbEditorDocument doc, int caretOffset) throws Exception {
        CompletionPoint point = new CompletionPoint();
        point.zone = UNKNOWN_ZONE;
        point.filter = null;
        point.atDot = false;
        if (caretOffset > 0) {
            final StyledDocument styledDoc = (StyledDocument) doc;
            // Calc start and end offset
            point.startOffset = getStartWordOffset(doc, caretOffset);
            point.endOffset = getEndWordOffset(doc, caretOffset);
            // Calc filter fragment
            if (caretOffset - point.startOffset > 0) {
                point.filter = styledDoc.getText(point.startOffset, caretOffset - point.startOffset);
            }
            // calc dots contexts
            if (point.startOffset > 0) {
                point.atDot = ".".equals(doc.getText(point.startOffset - 1, 1));
                if (point.atDot) {
                    // Word immediately before the dot -> prevContext.
                    int startOffset = getStartWordOffset(doc, point.startOffset - 2);
                    if (startOffset > -1 && point.startOffset - 1 - startOffset > 0) {
                        point.prevContext = doc.getText(startOffset, point.startOffset - 1 - startOffset);
                        if (startOffset > 0) {
                            boolean atDot = ".".equals(doc.getText(startOffset - 1, 1));
                            if (atDot) {
                                // Word before the second dot -> prevPrevContext.
                                int preStartOffset = getStartWordOffset(doc, startOffset - 2);
                                if (preStartOffset > -1 && startOffset - 1 - preStartOffset > 0) {
                                    point.prevPrevContext = doc.getText(preStartOffset, startOffset - 1 - preStartOffset);
                                }
                            }
                        }
                    }
                }
            }
            // Token scan must run under the document read lock.
            doc.readLock();
            try {
                TokenHierarchy<?> hierarchy = TokenHierarchy.get(doc);
                TokenSequence<LexSqlTokenId> ts = hierarchy.tokenSequence(LexSqlTokenId.language());
                while (ts.moveNext()) {
                    Token<LexSqlTokenId> t = ts.token();
                    int tokenOffset = ts.offset();
                    int tokenLength = t.length();
                    int counter = 0;
                    // Trim leading/trailing spaces embedded in the token's text span.
                    if (tokenLength != t.text().length()) {
                        while (counter < t.text().length() && t.text().charAt(counter++) == ' ') {
                            tokenOffset++;
                            tokenLength--;
                        }
                        counter = t.text().length() - 1;
                        while (counter >= 0 && t.text().charAt(counter--) == ' ') {
                            tokenLength--;
                        }
                    }
                    // Stop once tokens begin past the caret; the last zone seen wins.
                    if (caretOffset <= tokenOffset) {
                        break;
                    }
                    if (caretOffset > tokenOffset && caretOffset <= tokenOffset + tokenLength) {
                        // Caret is inside this very token; a keyword under the caret is completed as a keyword.
                        if (SqlLanguageHierarchy.KEYWORD_CATEGORY_NAME.equals(t.id().primaryCategory())) {
                            point.zone = KEYWORD_ZONE;
                        }
                    }
                    if ("select".equalsIgnoreCase(t.text().toString())) {
                        point.zone = SELECT_ZONE;
                    } else if ("from".equalsIgnoreCase(t.text().toString())) {
                        point.zone = FROM_ZONE;
                    } else if ("where".equalsIgnoreCase(t.text().toString())) {
                        point.zone = WHERE_ZONE;
                    } else if ("having".equalsIgnoreCase(t.text().toString())) {
                        point.zone = HAVING_ZONE;
                    } else if ("group".equalsIgnoreCase(t.text().toString())) {
                        point.zone = GROUP_ZONE;
                        ts.moveNext(); // BY
                    } else if ("insert".equalsIgnoreCase(t.text().toString())) {
                        point.zone = INSERT_INTO_ZONE;
                        ts.moveNext(); // INTO
                    } else if ("values".equalsIgnoreCase(t.text().toString())) {
                        point.zone = INSERT_VALUES_ZONE;
                    } else if (point.zone == INSERT_INTO_ZONE && "(".equalsIgnoreCase(t.text().toString())) {
                        point.zone = INSERT_FIELDS_ZONE;
                    } else if (point.zone == INSERT_VALUES_ZONE && "(".equalsIgnoreCase(t.text().toString())) {
                        point.zone = INSERT_VALUES_LIST_ZONE;
                    } else if ("update".equalsIgnoreCase(t.text().toString())) {
                        point.zone = UPDATE_ZONE;
                    } else if ("set".equalsIgnoreCase(t.text().toString())) {
                        point.zone = SET_ZONE;
                    }
                }
            } finally {
                doc.readUnlock();
            }
        } else {
            point.zone = UNKNOWN_ZONE;
            point.startOffset = 0;
            point.endOffset = 0;
        }
        return point;
    }

    /**
     * Dispatches the computed point to the zone-specific fill method; falls back to
     * keyword completion when nothing else produced items. Exceptions are shown as
     * a completion item rather than propagated.
     */
    private synchronized void fillCompletionPoint(PlatypusQueryDataObject dataObject, CompletionPoint point, CompletionResultSet resultSet, Document doc, int caretOffset) throws Exception {
        try {
            addedCompletionItems = 0;
            if (point.zone == SELECT_ZONE) {
                fillCompletionSelectZone(dataObject, point, resultSet);
            } else if (point.zone == FROM_ZONE) {
                fillCompletionFromZone(point, dataObject, resultSet);
            } else if (point.zone == WHERE_ZONE) {
                fillCompletionWhereZone(dataObject, point, resultSet);
                if (!point.atDot) {
                    // Parameters are only offered at a word start, not after a dot.
                    fillCompletionParametersZone(point, dataObject, resultSet);
                }
            } else if (point.zone == GROUP_ZONE) {
                fillCompletionWhereZone(dataObject, point, resultSet);
            } else if (point.zone == HAVING_ZONE) {
                fillCompletionSelectZone(dataObject, point, resultSet);
            } else if (point.zone == INSERT_INTO_ZONE) {
                fillCompletionFromZone(point, dataObject, resultSet);
            } else if (point.zone == INSERT_FIELDS_ZONE) {
                fillCompletionInsertFieldsZone(point, dataObject, resultSet);
            } else if (point.zone == INSERT_VALUES_LIST_ZONE) {
                fillCompletionParametersZone(point, dataObject, resultSet);
            } else if (point.zone == UPDATE_ZONE) {
                fillCompletionFromZone(point, dataObject, resultSet);
            } else if (point.zone == SET_ZONE) {
                fillCompletionUpdateFieldsZone(point, dataObject, resultSet);
                fillCompletionParametersZone(point, dataObject, resultSet);
            } else if (point.zone == KEYWORD_ZONE) {
                fillCompletionWithKeywords(point, resultSet);
            } else {
                if (addedCompletionItems == 0) {
                    if (point.filter != null && !point.filter.isEmpty()) {
                        fillCompletionWithKeywords(point, resultSet);
                    } else {
                        fillCompletionWithBaseKeywords(point, resultSet);
                    }
                }
            }
        } catch (Exception ex) {
            fillCompletionException(ex, resultSet);
        }
    }

    /**
     * Returns the offset of the first non-space character on the line containing
     * {@code offset}; rethrows BadLocationException with extra context.
     */
    static int getRowFirstNonWhite(StyledDocument doc, int offset) throws BadLocationException {
        Element lineElement = doc.getParagraphElement(offset);
        int start = lineElement.getStartOffset();
        while (start + 1 < lineElement.getEndOffset()) {
            try {
                if (doc.getText(start, 1).charAt(0) != ' ') {
                    break;
                }
            } catch (BadLocationException ex) {
                throw (BadLocationException) new BadLocationException(
                        "calling getText(" + start + ", " + (start + 1) + ") on doc of length: " + doc.getLength(), start).initCause(ex);
            }
            start++;
        }
        return start;
    }

    /**
     * Scans left from {@code caretOffset} to the start of the current word.
     * ':' (parameter prefix) and the stored-query reference prefix count as word characters.
     */
    protected int getStartWordOffset(NbEditorDocument aDoc, int caretOffset) throws Exception {
        while (caretOffset > 0 && aDoc.getLength() > 0
                && (Character.isJavaIdentifierPart(aDoc.getText(caretOffset - 1, 1).toCharArray()[0])
                || aDoc.getText(caretOffset - 1, 1).startsWith(":")/*Parameters case*/
                || aDoc.getText(caretOffset - 1, 1).startsWith(ClientConstants.STORED_QUERY_REF_PREFIX)/*Sub-queries strong reference case*/)) {
            caretOffset--;
        }
        return caretOffset;
    }

    /** Scans right from {@code caretOffset} to the end of the current word. */
    public int getEndWordOffset(NbEditorDocument aDoc, int caretOffset) throws BadLocationException {
        while (caretOffset < aDoc.getLength() && aDoc.getLength() > 0
                && Character.isJavaIdentifierPart(aDoc.getText(caretOffset, 1).toCharArray()[0])) {
            caretOffset++;
        }
        return caretOffset;
    }

    /**
     * Case-insensitive lookup: returns the canonical name from {@code aNames}
     * matching {@code aPattern}, or null when absent.
     */
    protected String findName(String aPattern, Collection<String> aNames) {
        for (String name : aNames) {
            if (name.equalsIgnoreCase(aPattern)) {
                return name;
            }
        }
        return null;
    }
}
/**
 * Copyright 2016 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */

package io.reactivex.internal.operators.observable;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;

import java.util.*;
import java.util.concurrent.*;

import org.junit.*;
import org.mockito.Mockito;

import io.reactivex.*;
import io.reactivex.Observable;
import io.reactivex.Observer;
import io.reactivex.exceptions.TestException;

/**
 * Tests for {@code Observable.toList()} in both its {@code Single} form and its
 * {@code toObservable()} form: happy path, multiple subscribers, capacity hints,
 * error propagation and misbehaving collection suppliers.
 */
public class ObservableToListTest {

    // NOTE(review): duplicate of testListViaObservableObservable below; kept for
    // backward compatibility of the test surface.
    @Test
    public void testListObservable() {
        Observable<String> w = Observable.fromIterable(Arrays.asList("one", "two", "three"));
        Observable<List<String>> observable = w.toList().toObservable();

        Observer<List<String>> observer = TestHelper.mockObserver();
        observable.subscribe(observer);
        verify(observer, times(1)).onNext(Arrays.asList("one", "two", "three"));
        verify(observer, Mockito.never()).onError(any(Throwable.class));
        verify(observer, times(1)).onComplete();
    }

    @Test
    public void testListViaObservableObservable() {
        Observable<String> w = Observable.fromIterable(Arrays.asList("one", "two", "three"));
        Observable<List<String>> observable = w.toList().toObservable();

        Observer<List<String>> observer = TestHelper.mockObserver();
        observable.subscribe(observer);
        verify(observer, times(1)).onNext(Arrays.asList("one", "two", "three"));
        verify(observer, Mockito.never()).onError(any(Throwable.class));
        verify(observer, times(1)).onComplete();
    }

    // Both subscribers must independently receive the full collected list.
    @Test
    public void testListMultipleSubscribersObservable() {
        Observable<String> w = Observable.fromIterable(Arrays.asList("one", "two", "three"));
        Observable<List<String>> observable = w.toList().toObservable();

        Observer<List<String>> o1 = TestHelper.mockObserver();
        observable.subscribe(o1);

        Observer<List<String>> o2 = TestHelper.mockObserver();
        observable.subscribe(o2);

        List<String> expected = Arrays.asList("one", "two", "three");

        verify(o1, times(1)).onNext(expected);
        verify(o1, Mockito.never()).onError(any(Throwable.class));
        verify(o1, times(1)).onComplete();

        verify(o2, times(1)).onNext(expected);
        verify(o2, Mockito.never()).onError(any(Throwable.class));
        verify(o2, times(1)).onComplete();
    }

    @Test
    @Ignore("Null values are not allowed")
    public void testListWithNullValueObservable() {
        Observable<String> w = Observable.fromIterable(Arrays.asList("one", null, "three"));
        Observable<List<String>> observable = w.toList().toObservable();

        Observer<List<String>> observer = TestHelper.mockObserver();
        observable.subscribe(observer);
        verify(observer, times(1)).onNext(Arrays.asList("one", null, "three"));
        verify(observer, Mockito.never()).onError(any(Throwable.class));
        verify(observer, times(1)).onComplete();
    }

    @Test
    public void testListWithBlockingFirstObservable() {
        Observable<String> o = Observable.fromIterable(Arrays.asList("one", "two", "three"));
        List<String> actual = o.toList().toObservable().blockingFirst();
        Assert.assertEquals(Arrays.asList("one", "two", "three"), actual);
    }

    // The capacity hint (4) only presizes the buffer; all 10 items must still arrive.
    @SuppressWarnings("unchecked")
    @Test
    public void capacityHintObservable() {
        Observable.range(1, 10)
        .toList(4)
        .toObservable()
        .test()
        .assertResult(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10));
    }

    // NOTE(review): duplicate of testListViaObservable below; kept for
    // backward compatibility of the test surface.
    @Test
    public void testList() {
        Observable<String> w = Observable.fromIterable(Arrays.asList("one", "two", "three"));
        Single<List<String>> observable = w.toList();

        SingleObserver<List<String>> observer = TestHelper.mockSingleObserver();
        observable.subscribe(observer);
        verify(observer, times(1)).onSuccess(Arrays.asList("one", "two", "three"));
        verify(observer, Mockito.never()).onError(any(Throwable.class));
    }

    @Test
    public void testListViaObservable() {
        Observable<String> w = Observable.fromIterable(Arrays.asList("one", "two", "three"));
        Single<List<String>> observable = w.toList();

        SingleObserver<List<String>> observer = TestHelper.mockSingleObserver();
        observable.subscribe(observer);
        verify(observer, times(1)).onSuccess(Arrays.asList("one", "two", "three"));
        verify(observer, Mockito.never()).onError(any(Throwable.class));
    }

    @Test
    public void testListMultipleSubscribers() {
        Observable<String> w = Observable.fromIterable(Arrays.asList("one", "two", "three"));
        Single<List<String>> observable = w.toList();

        SingleObserver<List<String>> o1 = TestHelper.mockSingleObserver();
        observable.subscribe(o1);

        SingleObserver<List<String>> o2 = TestHelper.mockSingleObserver();
        observable.subscribe(o2);

        List<String> expected = Arrays.asList("one", "two", "three");

        verify(o1, times(1)).onSuccess(expected);
        verify(o1, Mockito.never()).onError(any(Throwable.class));

        verify(o2, times(1)).onSuccess(expected);
        verify(o2, Mockito.never()).onError(any(Throwable.class));
    }

    @Test
    @Ignore("Null values are not allowed")
    public void testListWithNullValue() {
        Observable<String> w = Observable.fromIterable(Arrays.asList("one", null, "three"));
        Single<List<String>> observable = w.toList();

        SingleObserver<List<String>> observer = TestHelper.mockSingleObserver();
        observable.subscribe(observer);
        verify(observer, times(1)).onSuccess(Arrays.asList("one", null, "three"));
        verify(observer, Mockito.never()).onError(any(Throwable.class));
    }

    @Test
    public void testListWithBlockingFirst() {
        Observable<String> o = Observable.fromIterable(Arrays.asList("one", "two", "three"));
        List<String> actual = o.toList().blockingGet();
        Assert.assertEquals(Arrays.asList("one", "two", "three"), actual);
    }

    /**
     * Best-effort barrier await for concurrency tests: logs rather than fails on
     * barrier problems. FIX: on InterruptedException the thread's interrupt status
     * is now restored instead of being silently swallowed, so callers (and the
     * test runner) can still observe the interruption.
     */
    static void await(CyclicBarrier cb) {
        try {
            cb.await();
        } catch (InterruptedException ex) {
            // Restore the interrupt flag — swallowing it hides the interruption
            // from the rest of the test.
            Thread.currentThread().interrupt();
            ex.printStackTrace();
        } catch (BrokenBarrierException ex) {
            ex.printStackTrace();
        }
    }

    @SuppressWarnings("unchecked")
    @Test
    public void capacityHint() {
        Observable.range(1, 10)
        .toList(4)
        .test()
        .assertResult(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10));
    }

    @Test
    public void dispose() {
        TestHelper.checkDisposed(Observable.just(1).toList().toObservable());

        TestHelper.checkDisposed(Observable.just(1).toList());
    }

    @SuppressWarnings("unchecked")
    @Test
    public void error() {
        Observable.error(new TestException())
        .toList()
        .toObservable()
        .test()
        .assertFailure(TestException.class);
    }

    @SuppressWarnings("unchecked")
    @Test
    public void errorSingle() {
        Observable.error(new TestException())
        .toList()
        .test()
        .assertFailure(TestException.class);
    }

    @SuppressWarnings("unchecked")
    @Test
    public void collectionSupplierThrows() {
        Observable.just(1)
        .toList(new Callable<Collection<Integer>>() {
            @Override
            public Collection<Integer> call() throws Exception {
                throw new TestException();
            }
        })
        .toObservable()
        .test()
        .assertFailure(TestException.class);
    }

    @SuppressWarnings("unchecked")
    @Test
    public void collectionSupplierReturnsNull() {
        Observable.just(1)
        .toList(new Callable<Collection<Integer>>() {
            @Override
            public Collection<Integer> call() throws Exception {
                return null;
            }
        })
        .toObservable()
        .test()
        .assertFailure(NullPointerException.class)
        .assertErrorMessage("The collectionSupplier returned a null collection. Null values are generally not allowed in 2.x operators and sources.");
    }

    @SuppressWarnings("unchecked")
    @Test
    public void singleCollectionSupplierThrows() {
        Observable.just(1)
        .toList(new Callable<Collection<Integer>>() {
            @Override
            public Collection<Integer> call() throws Exception {
                throw new TestException();
            }
        })
        .test()
        .assertFailure(TestException.class);
    }

    @SuppressWarnings("unchecked")
    @Test
    public void singleCollectionSupplierReturnsNull() {
        Observable.just(1)
        .toList(new Callable<Collection<Integer>>() {
            @Override
            public Collection<Integer> call() throws Exception {
                return null;
            }
        })
        .test()
        .assertFailure(NullPointerException.class)
        .assertErrorMessage("The collectionSupplier returned a null collection. Null values are generally not allowed in 2.x operators and sources.");
    }
}
/*
 * Copyright 2017 Albert Tregnaghi
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 */
package de.jcup.egradle.eclipse.ui;

import java.io.ByteArrayInputStream;
import java.io.InputStream;

import org.eclipse.core.resources.IFile;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.viewers.ITreeContentProvider;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IFileEditorInput;

import de.jcup.egradle.core.model.BuildContext;
import de.jcup.egradle.core.model.Error;
import de.jcup.egradle.core.model.Item;
import de.jcup.egradle.core.model.Model;
import de.jcup.egradle.core.model.ModelBuilder;
import de.jcup.egradle.core.model.ModelBuilder.ModelBuilderException;
import de.jcup.egradle.core.model.groovyantlr.GroovyASTModelBuilder;
import de.jcup.egradle.core.util.Filter;
import de.jcup.egradle.core.util.ILogSupport;
import de.jcup.egradle.eclipse.api.GroovyBasedModelType;

/**
 * Outline content provider for Groovy-based editors. Builds an item {@link Model}
 * from the editor document (or a raw string) and serves the model's root children
 * as tree elements. Model access is guarded by an internal monitor object; the last
 * successfully built model is cached until {@link #clearModelCache()} is called.
 */
public abstract class AbstractGroovyBasedEditorOutlineContentProvider implements ITreeContentProvider {

    private static final Object[] NO_OBJECTS = new Object[] {};
    protected PersistedMarkerHelper outlineErrorMarkerHelper;
    // NOTE(review): alias of NO_OBJECTS; could be final — TODO confirm no subclass reassigns it
    private static Object[] EMPTY = NO_OBJECTS;
    private GroovyBasedModelType groovyBasedModelType;
    protected IExtendedEditor editor;
    // Guarded by "monitor" — read/written only inside synchronized blocks below
    private Model model;
    private Object monitor = new Object();
    protected Filter filter;
    protected ILogSupport logSupport;
    // When true, getElements() returns cached model children instead of re-parsing
    private boolean useCachedModel;

    /**
     * @return model type to use; lazily falls back to {@link #createDefaultModelType()}
     *         when none was set explicitly
     */
    public GroovyBasedModelType getModelType() {
        if (groovyBasedModelType == null) {
            groovyBasedModelType = createDefaultModelType();
        }
        return groovyBasedModelType;
    }

    /** @return the model type used when none has been set via {@link #setModelType} */
    protected abstract GroovyBasedModelType createDefaultModelType();

    public void setModelType(GroovyBasedModelType groovyBasedModelType) {
        this.groovyBasedModelType = groovyBasedModelType;
    }

    /**
     * Clears model cache, so a model rebuild is tried
     */
    public void clearModelCache() {
        useCachedModel = false;
        if (editor != null) {
            // Trigger an immediate rebuild from the current editor document
            getElements(editor.getDocument());
        }
    }

    public AbstractGroovyBasedEditorOutlineContentProvider() {
        super();
    }

    /**
     * Resolves outline elements for the given input. Accepts an {@link IDocument}
     * (normal editor case, with caching and charset lookup) or a raw {@link String};
     * any other input falls back to the currently cached model, or a "no content"
     * placeholder when no model exists.
     */
    @Override
    public Object[] getElements(Object inputElement) {
        String dataAsString = null;
        String charset = null;
        if (inputElement instanceof IDocument) {
            if (editor == null) {
                return NO_OBJECTS;
            }
            if (useCachedModel && model != null) {
                return getRootChildren();
            }
            // Re-arm the cache so the next call reuses the model built below
            useCachedModel = true;
            IDocument document = (IDocument) inputElement;
            dataAsString = document.get();
            IEditorInput input = editor.getEditorInput();
            if (input instanceof IFileEditorInput) {
                IFileEditorInput fie = (IFileEditorInput) input;
                IFile file = fie.getFile();
                try {
                    charset = file.getCharset();
                } catch (CoreException e) {
                    logSupport.logError("Was not able to get charset of file:" + file, e);
                }
            }
        } else if (inputElement instanceof String) {
            dataAsString = (String) inputElement;
        } else {
            /* do not set dataAsString - so FALL BACK must do the job */
        }
        synchronized (monitor) {
            if (dataAsString != null) {
                // NOTE(review): return value is intentionally ignored here; the load
                // updates "model" as a side effect (via createModelAndGetRootElements)
                // and the result is re-read through getRootChildren() below.
                tryTolLoad(dataAsString, charset);
            }
            if (model != null) {
                return getRootChildren();
            }
            return new Object[] { "no content" };
        }
    }

    // NOTE(review): method name has a typo ("tryTolLoad"); private, so it could be
    // renamed to tryToLoad without affecting callers outside this class.
    private Object[] tryTolLoad(String dataAsString, String charset) {
        /* try to load */
        // NOTE(review): getBytes() uses the platform default charset even though a
        // charset parameter is available — TODO confirm whether this is intended.
        try (InputStream is = new ByteArrayInputStream(dataAsString.getBytes())) {
            Object[] elements = null;
            GroovyBasedModelType groovyBasedModelType = getModelType();
            elements = createElements(charset, is, groovyBasedModelType);
            if (elements == null) {
                elements = new Object[] { groovyBasedModelType + " not supported as modeltype!" };
            }
            return elements;
        } catch (Exception e) {
            logSupport.logError("Problems on outline building", e);
            return null;
        }
    }

    /**
     * Builds the outline elements for the given stream using the selected model type.
     *
     * @return root elements, or {@code null} when the model type is not supported
     */
    protected abstract Object[] createElements(String charset, InputStream is,
            GroovyBasedModelType groovyBasedModelType) throws Exception;

    @Override
    public Object[] getChildren(Object parentElement) {
        if (parentElement instanceof Item) {
            Item item = (Item) parentElement;
            return item.getChildren();
        }
        return EMPTY;
    }

    @Override
    public Object getParent(Object element) {
        if (element instanceof Item) {
            Item item = (Item) element;
            return item.getParent();
        }
        return null;
    }

    @Override
    public boolean hasChildren(Object element) {
        if (element instanceof Item) {
            Item item = (Item) element;
            return item.hasChildren();
        }
        return false;
    }

    /**
     * @return the model item covering the given document offset, or {@code null}
     *         when no model is available
     */
    public Item tryToFindByOffset(int offset) {
        synchronized (monitor) {
            if (model == null) {
                return null;
            }
            return model.getItemAt(offset);
        }
    }

    /**
     * Builds a model via the Groovy ANTLR AST builder and returns its root elements;
     * any build errors are turned into editor error markers.
     */
    protected Object[] buildGroovyASTModel(String charset, InputStream is) throws Exception {
        GroovyASTModelBuilder builder = new GroovyASTModelBuilder(is);
        BuildContext context = new BuildContext();
        Object[] elements = createModelAndGetRootElements(context, builder);
        appendError(context);
        return elements;
    }

    /**
     * Clears error markers for THIS editor
     */
    private void clearErrorMarkers() {
        IFile file = resolveEditorFile();
        outlineErrorMarkerHelper.removeAllMarkers(file);
    }

    /** Creates one error marker per build error recorded in the given context. */
    protected void appendError(BuildContext context) {
        if (!context.hasErrors()) {
            return;
        }
        IFile file = resolveEditorFile();
        if (file == null) {
            return;
        }
        try {
            for (Error error : context.getErrors()) {
                outlineErrorMarkerHelper.createErrorMarker(file, error.getMessage(), error.getLineNumber(),
                        error.getCharStart(), error.getCharEnd());
            }
        } catch (CoreException e) {
            logSupport.logError("Was not able to create error marker at file:" + file, e);
        }
    }

    /** @return the file behind the editor input, or {@code null} when unavailable */
    private IFile resolveEditorFile() {
        if (editor == null) {
            return null;
        }
        IEditorInput input = editor.getEditorInput();
        if (input == null) {
            return null;
        }
        IFile file = null;
        if (input instanceof IFileEditorInput) {
            IFileEditorInput fei = (IFileEditorInput) input;
            file = fei.getFile();
        }
        return file;
    }

    /**
     * Builds a new model and, only when the build produced no errors, swaps it in
     * as the current model (keeping the last good model on failure).
     */
    protected Object[] createModelAndGetRootElements(BuildContext context, ModelBuilder builder)
            throws ModelBuilderException {
        synchronized (monitor) {
            Model newModel = builder.build(context);
            if (context == null || !context.hasErrors()) {
                switchToNewModel(newModel);
            }
        }
        return getRootChildren();
    }

    private void switchToNewModel(Model newModel) {
        // Old markers belong to the replaced model — drop them before switching
        clearErrorMarkers();
        model = newModel;
    }

    public Model getModel() {
        return model;
    }

    /** @return children of the model root, or an empty array when no model exists */
    private Item[] getRootChildren() {
        if (model == null) {
            return new Item[] {};
        }
        return model.getRoot().getChildren();
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.java.codeInsight.completion;

import com.intellij.JavaTestUtil;
import com.intellij.codeInsight.completion.CompletionType;
import com.intellij.codeInsight.completion.LightFixtureCompletionTestCase;
import com.intellij.codeInsight.lookup.LookupManager;
import com.intellij.codeInsight.template.impl.TemplateManagerImpl;
import com.intellij.codeInsight.template.impl.TemplateState;
import com.intellij.lang.java.JavaDocumentationProvider;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiClass;
import com.intellij.testFramework.TestDataPath;

import java.io.IOException;

/**
 * Fixture-based tests for class-name completion: second-basic-invocation completion
 * of class names in Java (and plain-text/properties) files, verified against
 * before/after files under {@code codeInsight/completion/className/}.
 */
@TestDataPath("$CONTENT_ROOT/testData")
public class ClassNameCompletionTest extends LightFixtureCompletionTestCase {
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Tests assume Java 7 language level
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.JDK_1_7);
  }

  @Override
  protected String getBasePath() {
    return JavaTestUtil.getRelativeJavaTestDataPath() + "/codeInsight/completion/className/";
  }

  public void testImportAfterNew() {
    createClass("package pack; public class AAClass {}");
    createClass("package pack; public class WithInnerAClass{\n" +
                "  public static class Inner{}\n" +
                "}");

    String path = "/importAfterNew";

    configureByFile(path + "/before1.java");
    checkResultByFile(path + "/after1.java");

    configureByFile(path + "/before2.java");
    selectItem(myItems[0]);
    checkResultByFile(path + "/after2.java");
  }

  public void testDocAfterNew() {
    createClass("public class Time { Time() {} Time(long time) {} }");
    String path = "/docAfterNew";
    configureByFile(path + "/before1.java");
    assertTrue(myItems != null && myItems.length >= 1);

    // Quick-doc for a constructor-call completion lists all constructor candidates
    String doc = new JavaDocumentationProvider().generateDoc(
      (PsiClass)myItems[0].getObject(),
      myFixture.getFile().findElementAt(myFixture.getEditor().getCaretModel().getOffset())
    );
    assertEquals(doc,
                 "<html>Candidates for new <b>Time</b>() are:<br>&nbsp;&nbsp;<a href=\"psi_element://Time#Time()\">Time()</a><br>&nbsp;" +
                 "&nbsp;<a href=\"psi_element://Time#Time(long)\">Time(long time)</a><br></html>");
  }

  public void testTypeParametersTemplate() {
    // NOTE(review): the class text below is deliberately malformed test input
    createClass("package pack; public interface Foo<T> {void foo(T t};");

    String path = "/template";

    TemplateManagerImpl.setTemplateTesting(getProject(), myFixture.getTestRootDisposable());
    configureByFile(path + "/before1.java");
    selectItem(myItems[0]);

    // Completing a generic type starts a live template for the type argument
    TemplateState state = TemplateManagerImpl.getTemplateState(myFixture.getEditor());

    type("String");
    assert state != null;
    state.gotoEnd(false);

    checkResultByFile(path + "/after1.java");

    configureByFile(path + "/before2.java");
    selectItem(myItems[0]);
    assert TemplateManagerImpl.getTemplateState(myFixture.getEditor()) == null;
    checkResultByFile(path +"/after2.java");

    configureByFile(path + "/before3.java");
    selectItem(myItems[0]);
    assert TemplateManagerImpl.getTemplateState(myFixture.getEditor()) == null;
    checkResultByFile(path +"/after3.java");
  }

  private void createClass(String text) {
    myFixture.addClass(text);
  }

  public void testAfterNewThrowable1() {
    addClassesForAfterNewThrowable();
    String path = "/afterNewThrowable";

    configureByFile(path + "/before1.java");
    myFixture.type('\n');
    checkResultByFile(path + "/after1.java");
  }

  private void addClassesForAfterNewThrowable() {
    createClass("public class OurException extends Throwable{}");
    createClass("public class OurNotException {\n" +
                "  public static class InnerException extends Throwable{}\n" +
                "  public static class InnerNonException{}\n" +
                "}");
  }

  public void testAfterNewThrowable2() {
    addClassesForAfterNewThrowable();
    String path = "/afterNewThrowable";

    configureByFile(path + "/before2.java");
    myFixture.type('\n');
    checkResultByFile(path + "/after2.java");
  }

  public void testExcessParensAfterNew() { doTest(); }

  public void testReuseParensAfterNew() { doTest(); }

  public void testBracesAfterNew() { doTest(); }

  public void testInPlainTextFile() {
    configureByFile(getTestName(false) + ".txt");
    checkResultByFile(getTestName(false) + "_after.txt");
  }

  public void testInPropertiesFile() {
    myFixture.configureByText("a.properties", "abc = StrinBui<caret>");
    complete();
    myFixture.checkResult("abc = java.lang.StringBuilder<caret>");
  }

  public void testInsideForwardReferencingTypeBound() {
    myFixture.configureByText("a.java", "class F<T extends Zo<caret>o, Zoo> {}");
    complete();
    myFixture.assertPreferredCompletionItems(0, "Zoo");
  }

  public void testDoubleStringBuffer() {
    createClass("package java.lang; public class StringBuffer {}");
    doTest();
    // A duplicate of a JDK class must not produce a second lookup item
    assertNull(myItems);
  }

  public void testReplaceReferenceExpressionWithTypeElement() {
    createClass("package foo.bar; public class ABCDEF {}");
    doTest();
  }

  public void testCamelHumpPrefix() {
    String path = "/java/";
    configureByFile(path + getTestName(false) + ".java");
    complete();
    assertEquals(2, myItems.length);
  }

  private void doTest() {
    String path = "/java/";
    configureByFile(path + getTestName(false) + ".java");
    checkResultByFile(path + getTestName(false) + "_after.java");
  }

  public void testNameCompletionJava() {
    String path = "/nameCompletion/java";
    configureByFile(path + "/test1-source.java");
    performAction();
    checkResultByFile(path + "/test1-result.java");
    configureByFile(path + "/test2-source.java");
    performAction();
    checkResultByFile(path + "/test2-result.java");
  }

  public void testImplementsFiltering1() {
    final String path = "/nameCompletion/java";
    configureByFile(path + "/test4-source.java");
    performAction();
    checkResultByFile(path + "/test4-result.java");
  }

  public void testImplementsFiltering2() {
    final String path = "/nameCompletion/java";
    configureByFile(path + "/test3-source.java");
    performAction();
    checkResultByFile(path + "/test3-result.java");

    configureByFile(path + "/implements2-source.java");
    performAction();
    checkResultByFile(path + "/implements2-result.java");

    configureByFile(path + "/implements3-source.java");
    performAction();
    checkResultByFile(path + "/implements3-result.java");
  }

  public void testAnnotationFiltering() {
    createClass("@interface MyObjectType {}");

    final String path = "/nameCompletion/java";
    // Each sub-case gets a clean VFS so earlier result files don't leak into later runs
    configureByFile(path + "/test8-source.java");
    performAction();
    checkResultByFile(path + "/test8-result.java");
    cleanupVfs();

    configureByFile(path + "/test9-source.java");
    performAction();
    checkResultByFile(path + "/test9-result.java");
    cleanupVfs();

    configureByFile(path + "/test9_2-source.java");
    performAction();
    checkResultByFile(path + "/test9_2-result.java");
    cleanupVfs();

    configureByFile(path + "/test9_3-source.java");
    performAction();
    checkResultByFile(path + "/test9_3-result.java");
    cleanupVfs();

    configureByFile(path + "/test11-source.java");
    performAction();
    checkResultByFile(path + "/test11-result.java");
    cleanupVfs();

    configureByFile(path + "/test10-source.java");
    performAction();
    checkResultByFile(path + "/test10-result.java");
    cleanupVfs();

    configureByFile(path + "/test12-source.java");
    performAction();
    checkResultByFile(path + "/test12-result.java");
    cleanupVfs();

    configureByFile(path + "/test13-source.java");
    performAction();
    checkResultByFile(path + "/test13-result.java");
  }

  // Saves all documents then deletes every file in the temp-dir fixture root
  private void cleanupVfs() {
    WriteCommandAction.runWriteCommandAction(null, new Runnable() {
      public void run() {
        FileDocumentManager.getInstance().saveAllDocuments();
        for (VirtualFile file : myFixture.getTempDirFixture().getFile("").getChildren()) {
          try {
            file.delete(this);
          }
          catch (IOException e) {
            throw new RuntimeException(e);
          }
        }
      }
    });
  }

  public void testInMethodCall() {
    final String path = "/nameCompletion/java";
    configureByFile(path + "/methodCall-source.java");
    performAction();
    checkResultByFile(path + "/methodCall-result.java");
  }

  public void testInMethodCallQualifier() {
    final String path = "/nameCompletion/java";
    configureByFile(path + "/methodCall1-source.java");
    performAction();
    checkResultByFile(path + "/methodCall1-result.java");
  }

  public void testInVariableDeclarationType() {
    final String path = "/nameCompletion/java";
    configureByFile(path + "/varType-source.java");
    performAction();
    checkResultByFile(path + "/varType-result.java");
  }

  public void testExtraSpace() { doJavaTest('\n'); }

  public void testAnnotation() { doJavaTest('\n'); }

  public void testInStaticImport() { doJavaTest('\n'); }

  public void testInCommentWithPackagePrefix() { doJavaTest('\n'); }

  public void testNestedAnonymousTab() { doJavaTest('\t');}

  private void doJavaTest(char toType) {
    final String path = "/nameCompletion/java";
    myFixture.configureByFile(path + "/" + getTestName(false) + "-source.java");
    performAction(toType);
    checkResultByFile(path + "/" + getTestName(false) + "-result.java");
  }

  // Class-name completion = second invocation of basic completion
  @Override
  protected void complete() {
    myItems = myFixture.complete(CompletionType.BASIC, 2);
  }

  private void performAction() {
    performAction('\n');
  }

  private void performAction(char toType) {
    complete();
    if (LookupManager.getActiveLookup(myFixture.getEditor()) != null) {
      myFixture.type(toType);
    }
  }
}
/**
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import java.io.UnsupportedEncodingException;
import java.util.List;
import java.util.ArrayList;

import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser;
import org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser.BadTsvLineException;
import org.apache.hadoop.hbase.mapreduce.ImportTsv.TsvParser.ParsedLine;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.client.Result;

import org.junit.Test;

import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;

import static org.junit.Assert.*;

/**
 * Tests for {@link ImportTsv}: column-spec parsing, TSV line parsing (including
 * error cases), and end-to-end MapReduce import runs against a mini HBase cluster.
 */
public class TestImportTsv {

  /** Column-spec parsing: HBASE_ROW_KEY carries no family/qualifier; others split on ':'. */
  @Test
  public void testTsvParserSpecParsing() {
    TsvParser parser;

    parser = new TsvParser("HBASE_ROW_KEY", "\t");
    assertNull(parser.getFamily(0));
    assertNull(parser.getQualifier(0));
    assertEquals(0, parser.getRowKeyColumnIndex());

    parser = new TsvParser("HBASE_ROW_KEY,col1:scol1", "\t");
    assertNull(parser.getFamily(0));
    assertNull(parser.getQualifier(0));
    assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(1));
    assertBytesEquals(Bytes.toBytes("scol1"), parser.getQualifier(1));
    assertEquals(0, parser.getRowKeyColumnIndex());

    parser = new TsvParser("HBASE_ROW_KEY,col1:scol1,col1:scol2", "\t");
    assertNull(parser.getFamily(0));
    assertNull(parser.getQualifier(0));
    assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(1));
    assertBytesEquals(Bytes.toBytes("scol1"), parser.getQualifier(1));
    assertBytesEquals(Bytes.toBytes("col1"), parser.getFamily(2));
    assertBytesEquals(Bytes.toBytes("scol2"), parser.getQualifier(2));
    assertEquals(0, parser.getRowKeyColumnIndex());
  }

  /** Parses a valid 4-column line and checks column boundaries against a reference split. */
  @Test
  public void testTsvParser() throws BadTsvLineException {
    TsvParser parser = new TsvParser("col_a,col_b:qual,HBASE_ROW_KEY,col_d", "\t");
    assertBytesEquals(Bytes.toBytes("col_a"), parser.getFamily(0));
    // A family without ':' gets an empty qualifier, not null
    assertBytesEquals(HConstants.EMPTY_BYTE_ARRAY, parser.getQualifier(0));
    assertBytesEquals(Bytes.toBytes("col_b"), parser.getFamily(1));
    assertBytesEquals(Bytes.toBytes("qual"), parser.getQualifier(1));
    assertNull(parser.getFamily(2));
    assertNull(parser.getQualifier(2));
    assertEquals(2, parser.getRowKeyColumnIndex());

    byte[] line = Bytes.toBytes("val_a\tval_b\tval_c\tval_d");
    ParsedLine parsed = parser.parse(line, line.length);
    checkParsing(parsed, Splitter.on("\t").split(Bytes.toString(line)));
  }

  // Re-extracts each column from the parsed offsets/lengths and compares against expected
  private void checkParsing(ParsedLine parsed, Iterable<String> expected) {
    ArrayList<String> parsedCols = new ArrayList<String>();
    for (int i = 0; i < parsed.getColumnCount(); i++) {
      parsedCols.add(Bytes.toString(
          parsed.getLineBytes(),
          parsed.getColumnOffset(i),
          parsed.getColumnLength(i)));
    }
    if (!Iterables.elementsEqual(parsedCols, expected)) {
      fail("Expected: " + Joiner.on(",").join(expected) + "\n" +
          "Got:" + Joiner.on(",").join(parsedCols));
    }
  }

  // Compares byte arrays via their printable form for readable failure messages
  private void assertBytesEquals(byte[] a, byte[] b) {
    assertEquals(Bytes.toStringBinary(a), Bytes.toStringBinary(b));
  }

  /**
   * Test cases that throw BadTsvLineException
   */
  @Test(expected=BadTsvLineException.class)
  public void testTsvParserBadTsvLineExcessiveColumns() throws BadTsvLineException {
    TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a", "\t");
    byte[] line = Bytes.toBytes("val_a\tval_b\tval_c");
    ParsedLine parsed = parser.parse(line, line.length);
  }

  @Test(expected=BadTsvLineException.class)
  public void testTsvParserBadTsvLineZeroColumn() throws BadTsvLineException {
    TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a", "\t");
    byte[] line = Bytes.toBytes("");
    ParsedLine parsed = parser.parse(line, line.length);
  }

  @Test(expected=BadTsvLineException.class)
  public void testTsvParserBadTsvLineOnlyKey() throws BadTsvLineException {
    TsvParser parser = new TsvParser("HBASE_ROW_KEY,col_a", "\t");
    byte[] line = Bytes.toBytes("key_only");
    ParsedLine parsed = parser.parse(line, line.length);
  }

  @Test(expected=BadTsvLineException.class)
  public void testTsvParserBadTsvLineNoRowKey() throws BadTsvLineException {
    TsvParser parser = new TsvParser("col_a,HBASE_ROW_KEY", "\t");
    byte[] line = Bytes.toBytes("only_cola_data_and_no_row_key");
    ParsedLine parsed = parser.parse(line, line.length);
  }

  /** End-to-end import with the default mapper; values written once (multiplier 1). */
  @Test
  public void testMROnTable() throws Exception {
    String TABLE_NAME = "TestTable";
    String FAMILY = "FAM";
    String INPUT_FILE = "InputFile.esv";

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B",
        "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
        TABLE_NAME,
        INPUT_FILE
    };

    doMROnTableTest(INPUT_FILE, FAMILY, TABLE_NAME, args, 1);
  }

  /** End-to-end import with a custom mapper that multiplies values by 3. */
  @Test
  public void testMROnTableWithCustomMapper() throws Exception {
    String TABLE_NAME = "TestTable";
    String FAMILY = "FAM";
    String INPUT_FILE = "InputFile2.esv";

    // Prepare the arguments required for the test.
    String[] args = new String[] {
        "-D" + ImportTsv.MAPPER_CONF_KEY + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapper",
        TABLE_NAME,
        INPUT_FILE
    };

    doMROnTableTest(INPUT_FILE, FAMILY, TABLE_NAME, args, 3);
  }

  /**
   * Spins up a mini cluster, writes one TSV line, creates the table, runs the
   * ImportTsv job, and scans the result back, retrying the scan until cells are
   * visible. {@code valueMultiplier} matches the mapper's value transformation.
   * NOTE(review): HTable and HBaseAdmin are never closed — acceptable for a
   * short-lived test, but worth tightening.
   */
  private void doMROnTableTest(String inputFile, String family, String tableName,
                               String[] args, int valueMultiplier) throws Exception {
    // Cluster
    HBaseTestingUtility htu1 = new HBaseTestingUtility();

    MiniHBaseCluster cluster = htu1.startMiniCluster();

    GenericOptionsParser opts = new GenericOptionsParser(cluster.getConfiguration(), args);
    Configuration conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    try {
      FileSystem fs = FileSystem.get(conf);
      FSDataOutputStream op = fs.create(new Path(inputFile), true);
      String line = "KEY\u001bVALUE1\u001bVALUE2\n";
      op.write(line.getBytes(HConstants.UTF8_ENCODING));
      op.close();

      final byte[] FAM = Bytes.toBytes(family);
      final byte[] TAB = Bytes.toBytes(tableName);
      final byte[] QA = Bytes.toBytes("A");
      final byte[] QB = Bytes.toBytes("B");

      HTableDescriptor desc = new HTableDescriptor(TAB);
      desc.addFamily(new HColumnDescriptor(FAM));
      new HBaseAdmin(conf).createTable(desc);

      Job job = ImportTsv.createSubmittableJob(conf, args);
      job.waitForCompletion(false);
      assertTrue(job.isSuccessful());

      HTable table = new HTable(new Configuration(conf), TAB);
      boolean verified = false;
      long pause = conf.getLong("hbase.client.pause", 5 * 1000);
      int numRetries = conf.getInt("hbase.client.retries.number", 5);
      for (int i = 0; i < numRetries; i++) {
        try {
          Scan scan = new Scan();
          // Scan entire family.
          scan.addFamily(FAM);
          ResultScanner resScanner = table.getScanner(scan);
          for (Result res : resScanner) {
            assertTrue(res.size() == 2);
            List<KeyValue> kvs = res.list();
            assertEquals(toU8Str(kvs.get(0).getRow()),
                toU8Str(Bytes.toBytes("KEY")));
            assertEquals(toU8Str(kvs.get(1).getRow()),
                toU8Str(Bytes.toBytes("KEY")));
            assertEquals(toU8Str(kvs.get(0).getValue()),
                toU8Str(Bytes.toBytes("VALUE" + valueMultiplier)));
            assertEquals(toU8Str(kvs.get(1).getValue()),
                toU8Str(Bytes.toBytes("VALUE" + 2*valueMultiplier)));
            // Only one result set is expected, so let it loop.
          }
          verified = true;
          break;
        } catch (NullPointerException e) {
          // If here, a cell was empty.  Presume its because updates came in
          // after the scanner had been opened.  Wait a while and retry.
        }
        try {
          Thread.sleep(pause);
        } catch (InterruptedException e) {
          // continue
        }
      }
      assertTrue(verified);
    } finally {
      cluster.shutdown();
    }
  }

  /** Decodes bytes as UTF-8 for readable assertion messages. */
  public static String toU8Str(byte[] bytes) throws UnsupportedEncodingException {
    return new String(bytes, HConstants.UTF8_ENCODING);
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search;

import org.apache.lucene.search.Explanation;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchHit.NestedIdentity;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightFieldTests;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.RandomObjects;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

/**
 * Tests for {@link SearchHit}: XContent round-tripping, serialization of the
 * shard target (including nested inner hits), and source/hasSource behavior.
 */
public class SearchHitTests extends ESTestCase {

    // Meta field names used to exercise the meta-field branch of field parsing.
    // NOTE(review): effectively constant; could be final.
    private static Set<String> META_FIELDS = Sets.newHashSet("_uid", "_all", "_parent", "_routing", "_size", "_timestamp", "_ttl");

    /**
     * Builds a randomized SearchHit. {@code withOptionalInnerHits} gates inner-hit
     * generation so recursive callers can bound the nesting depth.
     */
    public static SearchHit createTestItem(boolean withOptionalInnerHits) {
        int internalId = randomInt();
        String uid = randomAsciiOfLength(10);
        Text type = new Text(randomAsciiOfLengthBetween(5, 10));
        NestedIdentity nestedIdentity = null;
        if (randomBoolean()) {
            nestedIdentity = NestedIdentityTests.createTestItem(randomIntBetween(0, 2));
        }
        Map<String, SearchHitField> fields = new HashMap<>();
        if (randomBoolean()) {
            int size = randomIntBetween(0, 10);
            for (int i = 0; i < size; i++) {
                Tuple<List<Object>, List<Object>> values = RandomObjects.randomStoredFieldValues(random(),
                        XContentType.JSON);
                if (randomBoolean()) {
                    // Sometimes use a meta field name to cover that parsing path
                    String metaField = randomFrom(META_FIELDS);
                    fields.put(metaField, new SearchHitField(metaField, values.v1()));
                } else {
                    String fieldName = randomAsciiOfLengthBetween(5, 10);
                    fields.put(fieldName, new SearchHitField(fieldName, values.v1()));
                }
            }
        }
        SearchHit hit = new SearchHit(internalId, uid, type, nestedIdentity, fields);
        if (frequently()) {
            if (rarely()) {
                hit.score(Float.NaN);
            } else {
                hit.score(randomFloat());
            }
        }
        if (frequently()) {
            hit.sourceRef(RandomObjects.randomSource(random()));
        }
        if (randomBoolean()) {
            hit.version(randomLong());
        }
        if (randomBoolean()) {
            hit.sortValues(SearchSortValuesTests.createTestItem());
        }
        if (randomBoolean()) {
            int size = randomIntBetween(0, 5);
            Map<String, HighlightField> highlightFields = new HashMap<>(size);
            for (int i = 0; i < size; i++) {
                highlightFields.put(randomAsciiOfLength(5), HighlightFieldTests.createTestItem());
            }
            hit.highlightFields(highlightFields);
        }
        if (randomBoolean()) {
            int size = randomIntBetween(0, 5);
            String[] matchedQueries = new String[size];
            for (int i = 0; i < size; i++) {
                matchedQueries[i] = randomAsciiOfLength(5);
            }
            hit.matchedQueries(matchedQueries);
        }
        if (randomBoolean()) {
            hit.explanation(createExplanation(randomIntBetween(0, 5)));
        }
        if (withOptionalInnerHits) {
            int innerHitsSize = randomIntBetween(0, 3);
            Map<String, SearchHits> innerHits = new HashMap<>(innerHitsSize);
            for (int i = 0; i < innerHitsSize; i++) {
                innerHits.put(randomAsciiOfLength(5), SearchHitsTests.createTestItem());
            }
            hit.setInnerHits(innerHits);
        }
        if (randomBoolean()) {
            hit.shard(new SearchShardTarget(randomAsciiOfLengthBetween(5, 10),
                    new ShardId(new Index(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10)), randomInt())));
        }
        return hit;
    }

    /** Round-trips a random hit through XContent and checks equivalence. */
    public void testFromXContent() throws IOException {
        SearchHit searchHit = createTestItem(true);
        boolean humanReadable = randomBoolean();
        XContentType xContentType = randomFrom(XContentType.values());
        BytesReference originalBytes = toXContent(searchHit, xContentType, humanReadable);
        SearchHit parsed;
        try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
            parser.nextToken(); // jump to first START_OBJECT
            parsed = SearchHit.fromXContent(parser);
            // Parser must be fully consumed: on END_OBJECT with nothing after
            assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
            assertNull(parser.nextToken());
        }
        assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType);
    }

    /** Pins the exact JSON produced for a minimal hit with a score. */
    public void testToXContent() throws IOException {
        SearchHit searchHit = new SearchHit(1, "id1", new Text("type"), Collections.emptyMap());
        searchHit.score(1.5f);
        XContentBuilder builder = JsonXContent.contentBuilder();
        searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS);
        assertEquals("{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":1.5}", builder.string());
    }

    /**
     * Serializes hits whose inner hits (two levels deep) all share one shard target,
     * then verifies the target survives the stream round trip at every level.
     */
    public void testSerializeShardTarget() throws Exception {
        SearchShardTarget target = new SearchShardTarget("_node_id", new Index("_index", "_na_"), 0);

        Map<String, SearchHits> innerHits = new HashMap<>();
        SearchHit innerHit1 = new SearchHit(0, "_id", new Text("_type"), null);
        innerHit1.shard(target);
        SearchHit innerInnerHit2 = new SearchHit(0, "_id", new Text("_type"), null);
        innerInnerHit2.shard(target);
        innerHits.put("1", new SearchHits(new SearchHit[]{innerInnerHit2}, 1, 1f));
        innerHit1.setInnerHits(innerHits);
        SearchHit innerHit2 = new SearchHit(0, "_id", new Text("_type"), null);
        innerHit2.shard(target);
        SearchHit innerHit3 = new SearchHit(0, "_id", new Text("_type"), null);
        innerHit3.shard(target);

        innerHits = new HashMap<>();
        SearchHit hit1 = new SearchHit(0, "_id", new Text("_type"), null);
        innerHits.put("1", new SearchHits(new SearchHit[]{innerHit1, innerHit2}, 1, 1f));
        innerHits.put("2", new SearchHits(new SearchHit[]{innerHit3}, 1, 1f));
        hit1.shard(target);
        hit1.setInnerHits(innerHits);

        SearchHit hit2 = new SearchHit(0, "_id", new Text("_type"), null);
        hit2.shard(target);

        SearchHits hits = new SearchHits(new SearchHit[]{hit1, hit2}, 2, 1f);

        BytesStreamOutput output = new BytesStreamOutput();
        hits.writeTo(output);
        InputStream input = output.bytes().streamInput();
        SearchHits results = SearchHits.readSearchHits(new InputStreamStreamInput(input));
        assertThat(results.getAt(0).getShard(), equalTo(target));
        assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue());
        assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue());
        assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).getShard(), notNullValue());
        assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).getShard(), notNullValue());
        assertThat(results.getAt(1).getShard(), equalTo(target));
    }

    /** All source accessors return null when no source ref was set; repeated calls stay null. */
    public void testNullSource() throws Exception {
        SearchHit searchHit = new SearchHit(0, "_id", new Text("_type"), null);

        assertThat(searchHit.getSourceAsMap(), nullValue());
        assertThat(searchHit.getSourceRef(), nullValue());
        assertThat(searchHit.getSourceAsMap(), nullValue());
        assertThat(searchHit.getSourceAsString(), nullValue());
        assertThat(searchHit.getSourceAsMap(), nullValue());
        assertThat(searchHit.getSourceRef(), nullValue());
        assertThat(searchHit.getSourceAsString(), nullValue());
    }

    public void testHasSource() {
        SearchHit searchHit = new SearchHit(randomInt());
        assertFalse(searchHit.hasSource());
        searchHit.sourceRef(new BytesArray("{}"));
        assertTrue(searchHit.hasSource());
    }

    /** Builds a random Explanation tree of the given depth (1-3 children per node). */
    private static Explanation createExplanation(int depth) {
        String description = randomAsciiOfLengthBetween(5, 20);
        float value = randomFloat();
        List<Explanation> details = new ArrayList<>();
        if (depth > 0) {
            int numberOfDetails = randomIntBetween(1, 3);
            for (int i = 0; i < numberOfDetails; i++) {
                details.add(createExplanation(depth - 1));
            }
        }
        return Explanation.match(value, description, details);
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.s3control.model.transform;

import static com.amazonaws.util.StringUtils.UTF8;

import java.io.StringWriter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.s3control.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringInputStream;
import com.amazonaws.util.StringUtils;
import com.amazonaws.util.IdempotentUtils;
import com.amazonaws.util.XMLWriter;

/**
 * CreateJobRequest Marshaller
 *
 * Marshals a {@link CreateJobRequest} into an HTTP POST to /v20180820/jobs
 * with an XML body in the 2018-08-20 S3 Control namespace.
 *
 * NOTE: this file is produced by the AWS code generator (@Generated); the
 * order in which elements are written is the wire contract — do not hand-edit
 * or reorder. Optional fields are skipped when null; empty container elements
 * (e.g. S3DeleteObjectTagging) are written as bare tags.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateJobRequestMarshaller implements Marshaller<Request<CreateJobRequest>, CreateJobRequest> {

    public Request<CreateJobRequest> marshall(CreateJobRequest createJobRequest) {

        if (createJobRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        Request<CreateJobRequest> request = new DefaultRequest<CreateJobRequest>(createJobRequest, "AWSS3Control");

        request.setHttpMethod(HttpMethodName.POST);

        // Account id travels as a header, not in the XML body.
        if (createJobRequest.getAccountId() != null) {
            request.addHeader("x-amz-account-id", StringUtils.fromString(createJobRequest.getAccountId()));
        }

        String uriResourcePath = "/v20180820/jobs";

        request.setResourcePath(uriResourcePath);

        try {
            StringWriter stringWriter = new StringWriter();
            XMLWriter xmlWriter = new XMLWriter(stringWriter, "http://awss3control.amazonaws.com/doc/2018-08-20/");

            xmlWriter.startElement("CreateJobRequest");
            if (createJobRequest != null) {

                if (createJobRequest.getConfirmationRequired() != null) {
                    xmlWriter.startElement("ConfirmationRequired").value(createJobRequest.getConfirmationRequired()).endElement();
                }

                // ---- Operation: exactly one of the operation variants is expected to be set ----
                {
                    JobOperation operation = createJobRequest.getOperation();
                    if (operation != null) {
                        xmlWriter.startElement("Operation");
                        {
                            LambdaInvokeOperation lambdaInvoke = operation.getLambdaInvoke();
                            if (lambdaInvoke != null) {
                                xmlWriter.startElement("LambdaInvoke");
                                if (lambdaInvoke.getFunctionArn() != null) {
                                    xmlWriter.startElement("FunctionArn").value(lambdaInvoke.getFunctionArn()).endElement();
                                }
                                xmlWriter.endElement();
                            }
                        }
                        {
                            S3CopyObjectOperation s3PutObjectCopy = operation.getS3PutObjectCopy();
                            if (s3PutObjectCopy != null) {
                                xmlWriter.startElement("S3PutObjectCopy");
                                if (s3PutObjectCopy.getTargetResource() != null) {
                                    xmlWriter.startElement("TargetResource").value(s3PutObjectCopy.getTargetResource()).endElement();
                                }
                                if (s3PutObjectCopy.getCannedAccessControlList() != null) {
                                    xmlWriter.startElement("CannedAccessControlList").value(s3PutObjectCopy.getCannedAccessControlList()).endElement();
                                }
                                java.util.List<S3Grant> s3CopyObjectOperationAccessControlGrantsList = s3PutObjectCopy.getAccessControlGrants();
                                if (s3CopyObjectOperationAccessControlGrantsList != null) {
                                    xmlWriter.startElement("AccessControlGrants");
                                    for (S3Grant s3CopyObjectOperationAccessControlGrantsListValue : s3CopyObjectOperationAccessControlGrantsList) {
                                        xmlWriter.startElement("member");
                                        {
                                            S3Grantee grantee = s3CopyObjectOperationAccessControlGrantsListValue.getGrantee();
                                            if (grantee != null) {
                                                xmlWriter.startElement("Grantee");
                                                if (grantee.getTypeIdentifier() != null) {
                                                    xmlWriter.startElement("TypeIdentifier").value(grantee.getTypeIdentifier()).endElement();
                                                }
                                                if (grantee.getIdentifier() != null) {
                                                    xmlWriter.startElement("Identifier").value(grantee.getIdentifier()).endElement();
                                                }
                                                if (grantee.getDisplayName() != null) {
                                                    xmlWriter.startElement("DisplayName").value(grantee.getDisplayName()).endElement();
                                                }
                                                xmlWriter.endElement();
                                            }
                                        }
                                        if (s3CopyObjectOperationAccessControlGrantsListValue.getPermission() != null) {
                                            xmlWriter.startElement("Permission").value(s3CopyObjectOperationAccessControlGrantsListValue.getPermission())
                                                    .endElement();
                                        }
                                        xmlWriter.endElement();
                                    }
                                    xmlWriter.endElement();
                                }
                                if (s3PutObjectCopy.getMetadataDirective() != null) {
                                    xmlWriter.startElement("MetadataDirective").value(s3PutObjectCopy.getMetadataDirective()).endElement();
                                }
                                if (s3PutObjectCopy.getModifiedSinceConstraint() != null) {
                                    xmlWriter.startElement("ModifiedSinceConstraint").value(s3PutObjectCopy.getModifiedSinceConstraint()).endElement();
                                }
                                {
                                    S3ObjectMetadata newObjectMetadata = s3PutObjectCopy.getNewObjectMetadata();
                                    if (newObjectMetadata != null) {
                                        xmlWriter.startElement("NewObjectMetadata");
                                        if (newObjectMetadata.getCacheControl() != null) {
                                            xmlWriter.startElement("CacheControl").value(newObjectMetadata.getCacheControl()).endElement();
                                        }
                                        if (newObjectMetadata.getContentDisposition() != null) {
                                            xmlWriter.startElement("ContentDisposition").value(newObjectMetadata.getContentDisposition()).endElement();
                                        }
                                        if (newObjectMetadata.getContentEncoding() != null) {
                                            xmlWriter.startElement("ContentEncoding").value(newObjectMetadata.getContentEncoding()).endElement();
                                        }
                                        if (newObjectMetadata.getContentLanguage() != null) {
                                            xmlWriter.startElement("ContentLanguage").value(newObjectMetadata.getContentLanguage()).endElement();
                                        }
                                        // User metadata is serialized as repeated <entry><key/><value/></entry>.
                                        java.util.HashMap<String, String> s3ObjectMetadataUserMetadataMap = (java.util.HashMap<String, String>) newObjectMetadata
                                                .getUserMetadata();
                                        if (s3ObjectMetadataUserMetadataMap != null) {
                                            xmlWriter.startElement("UserMetadata");
                                            for (Map.Entry<String, String> s3ObjectMetadataUserMetadataMapValue : s3ObjectMetadataUserMetadataMap.entrySet()) {
                                                if (s3ObjectMetadataUserMetadataMapValue == null) {
                                                    continue;
                                                }
                                                xmlWriter.startElement("entry");
                                                xmlWriter.startElement("key");
                                                xmlWriter.value(s3ObjectMetadataUserMetadataMapValue.getKey());
                                                xmlWriter.endElement();
                                                xmlWriter.startElement("value");
                                                xmlWriter.value(s3ObjectMetadataUserMetadataMapValue.getValue());
                                                xmlWriter.endElement();
                                                xmlWriter.endElement();
                                            }
                                            xmlWriter.endElement();
                                        }
                                        if (newObjectMetadata.getContentLength() != null) {
                                            xmlWriter.startElement("ContentLength").value(newObjectMetadata.getContentLength()).endElement();
                                        }
                                        if (newObjectMetadata.getContentMD5() != null) {
                                            xmlWriter.startElement("ContentMD5").value(newObjectMetadata.getContentMD5()).endElement();
                                        }
                                        if (newObjectMetadata.getContentType() != null) {
                                            xmlWriter.startElement("ContentType").value(newObjectMetadata.getContentType()).endElement();
                                        }
                                        if (newObjectMetadata.getHttpExpiresDate() != null) {
                                            xmlWriter.startElement("HttpExpiresDate").value(newObjectMetadata.getHttpExpiresDate()).endElement();
                                        }
                                        if (newObjectMetadata.getRequesterCharged() != null) {
                                            xmlWriter.startElement("RequesterCharged").value(newObjectMetadata.getRequesterCharged()).endElement();
                                        }
                                        if (newObjectMetadata.getSSEAlgorithm() != null) {
                                            xmlWriter.startElement("SSEAlgorithm").value(newObjectMetadata.getSSEAlgorithm()).endElement();
                                        }
                                        xmlWriter.endElement();
                                    }
                                }
                                java.util.List<S3Tag> s3CopyObjectOperationNewObjectTaggingList = s3PutObjectCopy.getNewObjectTagging();
                                if (s3CopyObjectOperationNewObjectTaggingList != null) {
                                    xmlWriter.startElement("NewObjectTagging");
                                    for (S3Tag s3CopyObjectOperationNewObjectTaggingListValue : s3CopyObjectOperationNewObjectTaggingList) {
                                        xmlWriter.startElement("member");
                                        if (s3CopyObjectOperationNewObjectTaggingListValue.getKey() != null) {
                                            xmlWriter.startElement("Key").value(s3CopyObjectOperationNewObjectTaggingListValue.getKey()).endElement();
                                        }
                                        if (s3CopyObjectOperationNewObjectTaggingListValue.getValue() != null) {
                                            xmlWriter.startElement("Value").value(s3CopyObjectOperationNewObjectTaggingListValue.getValue()).endElement();
                                        }
                                        xmlWriter.endElement();
                                    }
                                    xmlWriter.endElement();
                                }
                                if (s3PutObjectCopy.getRedirectLocation() != null) {
                                    xmlWriter.startElement("RedirectLocation").value(s3PutObjectCopy.getRedirectLocation()).endElement();
                                }
                                if (s3PutObjectCopy.getRequesterPays() != null) {
                                    xmlWriter.startElement("RequesterPays").value(s3PutObjectCopy.getRequesterPays()).endElement();
                                }
                                if (s3PutObjectCopy.getStorageClass() != null) {
                                    xmlWriter.startElement("StorageClass").value(s3PutObjectCopy.getStorageClass()).endElement();
                                }
                                if (s3PutObjectCopy.getUnModifiedSinceConstraint() != null) {
                                    xmlWriter.startElement("UnModifiedSinceConstraint").value(s3PutObjectCopy.getUnModifiedSinceConstraint()).endElement();
                                }
                                if (s3PutObjectCopy.getSSEAwsKmsKeyId() != null) {
                                    xmlWriter.startElement("SSEAwsKmsKeyId").value(s3PutObjectCopy.getSSEAwsKmsKeyId()).endElement();
                                }
                                if (s3PutObjectCopy.getTargetKeyPrefix() != null) {
                                    xmlWriter.startElement("TargetKeyPrefix").value(s3PutObjectCopy.getTargetKeyPrefix()).endElement();
                                }
                                if (s3PutObjectCopy.getObjectLockLegalHoldStatus() != null) {
                                    xmlWriter.startElement("ObjectLockLegalHoldStatus").value(s3PutObjectCopy.getObjectLockLegalHoldStatus()).endElement();
                                }
                                if (s3PutObjectCopy.getObjectLockMode() != null) {
                                    xmlWriter.startElement("ObjectLockMode").value(s3PutObjectCopy.getObjectLockMode()).endElement();
                                }
                                if (s3PutObjectCopy.getObjectLockRetainUntilDate() != null) {
                                    xmlWriter.startElement("ObjectLockRetainUntilDate").value(s3PutObjectCopy.getObjectLockRetainUntilDate()).endElement();
                                }
                                if (s3PutObjectCopy.getBucketKeyEnabled() != null) {
                                    xmlWriter.startElement("BucketKeyEnabled").value(s3PutObjectCopy.getBucketKeyEnabled()).endElement();
                                }
                                if (s3PutObjectCopy.getChecksumAlgorithm() != null) {
                                    xmlWriter.startElement("ChecksumAlgorithm").value(s3PutObjectCopy.getChecksumAlgorithm()).endElement();
                                }
                                xmlWriter.endElement();
                            }
                        }
                        {
                            S3SetObjectAclOperation s3PutObjectAcl = operation.getS3PutObjectAcl();
                            if (s3PutObjectAcl != null) {
                                xmlWriter.startElement("S3PutObjectAcl");
                                {
                                    S3AccessControlPolicy accessControlPolicy = s3PutObjectAcl.getAccessControlPolicy();
                                    if (accessControlPolicy != null) {
                                        xmlWriter.startElement("AccessControlPolicy");
                                        {
                                            S3AccessControlList accessControlList = accessControlPolicy.getAccessControlList();
                                            if (accessControlList != null) {
                                                xmlWriter.startElement("AccessControlList");
                                                {
                                                    S3ObjectOwner owner = accessControlList.getOwner();
                                                    if (owner != null) {
                                                        xmlWriter.startElement("Owner");
                                                        if (owner.getID() != null) {
                                                            xmlWriter.startElement("ID").value(owner.getID()).endElement();
                                                        }
                                                        if (owner.getDisplayName() != null) {
                                                            xmlWriter.startElement("DisplayName").value(owner.getDisplayName()).endElement();
                                                        }
                                                        xmlWriter.endElement();
                                                    }
                                                }
                                                java.util.List<S3Grant> s3AccessControlListGrantsList = accessControlList.getGrants();
                                                if (s3AccessControlListGrantsList != null) {
                                                    xmlWriter.startElement("Grants");
                                                    for (S3Grant s3AccessControlListGrantsListValue : s3AccessControlListGrantsList) {
                                                        xmlWriter.startElement("member");
                                                        {
                                                            S3Grantee grantee = s3AccessControlListGrantsListValue.getGrantee();
                                                            if (grantee != null) {
                                                                xmlWriter.startElement("Grantee");
                                                                if (grantee.getTypeIdentifier() != null) {
                                                                    xmlWriter.startElement("TypeIdentifier").value(grantee.getTypeIdentifier()).endElement();
                                                                }
                                                                if (grantee.getIdentifier() != null) {
                                                                    xmlWriter.startElement("Identifier").value(grantee.getIdentifier()).endElement();
                                                                }
                                                                if (grantee.getDisplayName() != null) {
                                                                    xmlWriter.startElement("DisplayName").value(grantee.getDisplayName()).endElement();
                                                                }
                                                                xmlWriter.endElement();
                                                            }
                                                        }
                                                        if (s3AccessControlListGrantsListValue.getPermission() != null) {
                                                            xmlWriter.startElement("Permission").value(s3AccessControlListGrantsListValue.getPermission())
                                                                    .endElement();
                                                        }
                                                        xmlWriter.endElement();
                                                    }
                                                    xmlWriter.endElement();
                                                }
                                                xmlWriter.endElement();
                                            }
                                        }
                                        if (accessControlPolicy.getCannedAccessControlList() != null) {
                                            xmlWriter.startElement("CannedAccessControlList").value(accessControlPolicy.getCannedAccessControlList())
                                                    .endElement();
                                        }
                                        xmlWriter.endElement();
                                    }
                                }
                                xmlWriter.endElement();
                            }
                        }
                        {
                            S3SetObjectTaggingOperation s3PutObjectTagging = operation.getS3PutObjectTagging();
                            if (s3PutObjectTagging != null) {
                                xmlWriter.startElement("S3PutObjectTagging");
                                java.util.List<S3Tag> s3SetObjectTaggingOperationTagSetList = s3PutObjectTagging.getTagSet();
                                if (s3SetObjectTaggingOperationTagSetList != null) {
                                    xmlWriter.startElement("TagSet");
                                    for (S3Tag s3SetObjectTaggingOperationTagSetListValue : s3SetObjectTaggingOperationTagSetList) {
                                        xmlWriter.startElement("member");
                                        if (s3SetObjectTaggingOperationTagSetListValue.getKey() != null) {
                                            xmlWriter.startElement("Key").value(s3SetObjectTaggingOperationTagSetListValue.getKey()).endElement();
                                        }
                                        if (s3SetObjectTaggingOperationTagSetListValue.getValue() != null) {
                                            xmlWriter.startElement("Value").value(s3SetObjectTaggingOperationTagSetListValue.getValue()).endElement();
                                        }
                                        xmlWriter.endElement();
                                    }
                                    xmlWriter.endElement();
                                }
                                xmlWriter.endElement();
                            }
                        }
                        {
                            // Marker operation: carries no fields, presence of the tag is the payload.
                            S3DeleteObjectTaggingOperation s3DeleteObjectTagging = operation.getS3DeleteObjectTagging();
                            if (s3DeleteObjectTagging != null) {
                                xmlWriter.startElement("S3DeleteObjectTagging");
                                xmlWriter.endElement();
                            }
                        }
                        {
                            S3InitiateRestoreObjectOperation s3InitiateRestoreObject = operation.getS3InitiateRestoreObject();
                            if (s3InitiateRestoreObject != null) {
                                xmlWriter.startElement("S3InitiateRestoreObject");
                                if (s3InitiateRestoreObject.getExpirationInDays() != null) {
                                    xmlWriter.startElement("ExpirationInDays").value(s3InitiateRestoreObject.getExpirationInDays()).endElement();
                                }
                                if (s3InitiateRestoreObject.getGlacierJobTier() != null) {
                                    xmlWriter.startElement("GlacierJobTier").value(s3InitiateRestoreObject.getGlacierJobTier()).endElement();
                                }
                                xmlWriter.endElement();
                            }
                        }
                        {
                            S3SetObjectLegalHoldOperation s3PutObjectLegalHold = operation.getS3PutObjectLegalHold();
                            if (s3PutObjectLegalHold != null) {
                                xmlWriter.startElement("S3PutObjectLegalHold");
                                {
                                    S3ObjectLockLegalHold legalHold = s3PutObjectLegalHold.getLegalHold();
                                    if (legalHold != null) {
                                        xmlWriter.startElement("LegalHold");
                                        if (legalHold.getStatus() != null) {
                                            xmlWriter.startElement("Status").value(legalHold.getStatus()).endElement();
                                        }
                                        xmlWriter.endElement();
                                    }
                                }
                                xmlWriter.endElement();
                            }
                        }
                        {
                            S3SetObjectRetentionOperation s3PutObjectRetention = operation.getS3PutObjectRetention();
                            if (s3PutObjectRetention != null) {
                                xmlWriter.startElement("S3PutObjectRetention");
                                if (s3PutObjectRetention.getBypassGovernanceRetention() != null) {
                                    xmlWriter.startElement("BypassGovernanceRetention").value(s3PutObjectRetention.getBypassGovernanceRetention()).endElement();
                                }
                                {
                                    S3Retention retention = s3PutObjectRetention.getRetention();
                                    if (retention != null) {
                                        xmlWriter.startElement("Retention");
                                        if (retention.getRetainUntilDate() != null) {
                                            xmlWriter.startElement("RetainUntilDate").value(retention.getRetainUntilDate()).endElement();
                                        }
                                        if (retention.getMode() != null) {
                                            xmlWriter.startElement("Mode").value(retention.getMode()).endElement();
                                        }
                                        xmlWriter.endElement();
                                    }
                                }
                                xmlWriter.endElement();
                            }
                        }
                        {
                            // Marker operation, same pattern as S3DeleteObjectTagging.
                            S3ReplicateObjectOperation s3ReplicateObject = operation.getS3ReplicateObject();
                            if (s3ReplicateObject != null) {
                                xmlWriter.startElement("S3ReplicateObject");
                                xmlWriter.endElement();
                            }
                        }
                        xmlWriter.endElement();
                    }
                }

                // ---- Completion report configuration ----
                {
                    JobReport report = createJobRequest.getReport();
                    if (report != null) {
                        xmlWriter.startElement("Report");
                        if (report.getBucket() != null) {
                            xmlWriter.startElement("Bucket").value(report.getBucket()).endElement();
                        }
                        if (report.getFormat() != null) {
                            xmlWriter.startElement("Format").value(report.getFormat()).endElement();
                        }
                        if (report.getEnabled() != null) {
                            xmlWriter.startElement("Enabled").value(report.getEnabled()).endElement();
                        }
                        if (report.getPrefix() != null) {
                            xmlWriter.startElement("Prefix").value(report.getPrefix()).endElement();
                        }
                        if (report.getReportScope() != null) {
                            xmlWriter.startElement("ReportScope").value(report.getReportScope()).endElement();
                        }
                        xmlWriter.endElement();
                    }
                }

                // Always written: resolveString supplies an auto-generated token when unset.
                xmlWriter.startElement("ClientRequestToken").value(IdempotentUtils.resolveString(createJobRequest.getClientRequestToken())).endElement();

                // ---- Explicit manifest (spec + location) ----
                {
                    JobManifest manifest = createJobRequest.getManifest();
                    if (manifest != null) {
                        xmlWriter.startElement("Manifest");
                        {
                            JobManifestSpec spec = manifest.getSpec();
                            if (spec != null) {
                                xmlWriter.startElement("Spec");
                                if (spec.getFormat() != null) {
                                    xmlWriter.startElement("Format").value(spec.getFormat()).endElement();
                                }
                                java.util.List<String> jobManifestSpecFieldsList = spec.getFields();
                                if (jobManifestSpecFieldsList != null) {
                                    xmlWriter.startElement("Fields");
                                    for (String jobManifestSpecFieldsListValue : jobManifestSpecFieldsList) {
                                        xmlWriter.startElement("member");
                                        xmlWriter.value(jobManifestSpecFieldsListValue);
                                        xmlWriter.endElement();
                                    }
                                    xmlWriter.endElement();
                                }
                                xmlWriter.endElement();
                            }
                        }
                        {
                            JobManifestLocation location = manifest.getLocation();
                            if (location != null) {
                                xmlWriter.startElement("Location");
                                if (location.getObjectArn() != null) {
                                    xmlWriter.startElement("ObjectArn").value(location.getObjectArn()).endElement();
                                }
                                if (location.getObjectVersionId() != null) {
                                    xmlWriter.startElement("ObjectVersionId").value(location.getObjectVersionId()).endElement();
                                }
                                if (location.getETag() != null) {
                                    xmlWriter.startElement("ETag").value(location.getETag()).endElement();
                                }
                                xmlWriter.endElement();
                            }
                        }
                        xmlWriter.endElement();
                    }
                }
                if (createJobRequest.getDescription() != null) {
                    xmlWriter.startElement("Description").value(createJobRequest.getDescription()).endElement();
                }
                if (createJobRequest.getPriority() != null) {
                    xmlWriter.startElement("Priority").value(createJobRequest.getPriority()).endElement();
                }
                if (createJobRequest.getRoleArn() != null) {
                    xmlWriter.startElement("RoleArn").value(createJobRequest.getRoleArn()).endElement();
                }
                java.util.List<S3Tag> createJobRequestTagsList = createJobRequest.getTags();
                if (createJobRequestTagsList != null) {
                    xmlWriter.startElement("Tags");
                    for (S3Tag createJobRequestTagsListValue : createJobRequestTagsList) {
                        xmlWriter.startElement("member");
                        if (createJobRequestTagsListValue.getKey() != null) {
                            xmlWriter.startElement("Key").value(createJobRequestTagsListValue.getKey()).endElement();
                        }
                        if (createJobRequestTagsListValue.getValue() != null) {
                            xmlWriter.startElement("Value").value(createJobRequestTagsListValue.getValue()).endElement();
                        }
                        xmlWriter.endElement();
                    }
                    xmlWriter.endElement();
                }

                // ---- Generated manifest (alternative to the explicit Manifest above) ----
                {
                    JobManifestGenerator manifestGenerator = createJobRequest.getManifestGenerator();
                    if (manifestGenerator != null) {
                        xmlWriter.startElement("ManifestGenerator");
                        {
                            S3JobManifestGenerator s3JobManifestGenerator = manifestGenerator.getS3JobManifestGenerator();
                            if (s3JobManifestGenerator != null) {
                                xmlWriter.startElement("S3JobManifestGenerator");
                                if (s3JobManifestGenerator.getExpectedBucketOwner() != null) {
                                    xmlWriter.startElement("ExpectedBucketOwner").value(s3JobManifestGenerator.getExpectedBucketOwner()).endElement();
                                }
                                if (s3JobManifestGenerator.getSourceBucket() != null) {
                                    xmlWriter.startElement("SourceBucket").value(s3JobManifestGenerator.getSourceBucket()).endElement();
                                }
                                {
                                    S3ManifestOutputLocation manifestOutputLocation = s3JobManifestGenerator.getManifestOutputLocation();
                                    if (manifestOutputLocation != null) {
                                        xmlWriter.startElement("ManifestOutputLocation");
                                        if (manifestOutputLocation.getExpectedManifestBucketOwner() != null) {
                                            xmlWriter.startElement("ExpectedManifestBucketOwner")
                                                    .value(manifestOutputLocation.getExpectedManifestBucketOwner()).endElement();
                                        }
                                        if (manifestOutputLocation.getBucket() != null) {
                                            xmlWriter.startElement("Bucket").value(manifestOutputLocation.getBucket()).endElement();
                                        }
                                        if (manifestOutputLocation.getManifestPrefix() != null) {
                                            xmlWriter.startElement("ManifestPrefix").value(manifestOutputLocation.getManifestPrefix()).endElement();
                                        }
                                        {
                                            GeneratedManifestEncryption manifestEncryption = manifestOutputLocation.getManifestEncryption();
                                            if (manifestEncryption != null) {
                                                xmlWriter.startElement("ManifestEncryption");
                                                {
                                                    SSES3Encryption sSES3 = manifestEncryption.getSSES3();
                                                    if (sSES3 != null) {
                                                        xmlWriter.startElement("SSE-S3");
                                                        xmlWriter.endElement();
                                                    }
                                                }
                                                {
                                                    SSEKMSEncryption sSEKMS = manifestEncryption.getSSEKMS();
                                                    if (sSEKMS != null) {
                                                        xmlWriter.startElement("SSE-KMS");
                                                        if (sSEKMS.getKeyId() != null) {
                                                            xmlWriter.startElement("KeyId").value(sSEKMS.getKeyId()).endElement();
                                                        }
                                                        xmlWriter.endElement();
                                                    }
                                                }
                                                xmlWriter.endElement();
                                            }
                                        }
                                        if (manifestOutputLocation.getManifestFormat() != null) {
                                            xmlWriter.startElement("ManifestFormat").value(manifestOutputLocation.getManifestFormat()).endElement();
                                        }
                                        xmlWriter.endElement();
                                    }
                                }
                                {
                                    JobManifestGeneratorFilter filter = s3JobManifestGenerator.getFilter();
                                    if (filter != null) {
                                        xmlWriter.startElement("Filter");
                                        if (filter.getEligibleForReplication() != null) {
                                            xmlWriter.startElement("EligibleForReplication").value(filter.getEligibleForReplication()).endElement();
                                        }
                                        if (filter.getCreatedAfter() != null) {
                                            xmlWriter.startElement("CreatedAfter").value(filter.getCreatedAfter()).endElement();
                                        }
                                        if (filter.getCreatedBefore() != null) {
                                            xmlWriter.startElement("CreatedBefore").value(filter.getCreatedBefore()).endElement();
                                        }
                                        java.util.List<String> jobManifestGeneratorFilterObjectReplicationStatusesList = filter.getObjectReplicationStatuses();
                                        if (jobManifestGeneratorFilterObjectReplicationStatusesList != null) {
                                            xmlWriter.startElement("ObjectReplicationStatuses");
                                            for (String jobManifestGeneratorFilterObjectReplicationStatusesListValue : jobManifestGeneratorFilterObjectReplicationStatusesList) {
                                                xmlWriter.startElement("member");
                                                xmlWriter.value(jobManifestGeneratorFilterObjectReplicationStatusesListValue);
                                                xmlWriter.endElement();
                                            }
                                            xmlWriter.endElement();
                                        }
                                        xmlWriter.endElement();
                                    }
                                }
                                if (s3JobManifestGenerator.getEnableManifestOutput() != null) {
                                    xmlWriter.startElement("EnableManifestOutput").value(s3JobManifestGenerator.getEnableManifestOutput()).endElement();
                                }
                                xmlWriter.endElement();
                            }
                        }
                        xmlWriter.endElement();
                    }
                }
            }
            xmlWriter.endElement();

            // Body + headers: Content-Length is byte length of the UTF-8 payload.
            request.setContent(new StringInputStream(stringWriter.getBuffer().toString()));
            request.addHeader("Content-Length", Integer.toString(stringWriter.getBuffer().toString().getBytes(UTF8).length));
            if (!request.getHeaders().containsKey("Content-Type")) {
                request.addHeader("Content-Type", "application/xml");
            }
        } catch (Throwable t) {
            throw new SdkClientException("Unable to marshall request to XML: " + t.getMessage(), t);
        }

        return request;
    }

}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.common.options; import static com.google.common.truth.Truth.assertThat; import static java.util.Arrays.asList; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import com.google.common.testing.EqualsTester; import java.net.MalformedURLException; import java.net.URL; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Test for {@link Options}. 
*/ @RunWith(JUnit4.class) public class OptionsTest { private static final String[] NO_ARGS = {}; public static class HttpOptions extends OptionsBase { @Option(name = "host", defaultValue = "www.google.com", help = "The URL at which the server will be running.") public String host; @Option(name = "port", abbrev = 'p', defaultValue = "80", help = "The port at which the server will be running.") public int port; @Option(name = "debug", abbrev = 'd', defaultValue = "false", help = "debug") public boolean isDebugging; @Option(name = "tristate", abbrev = 't', defaultValue = "auto", help = "tri-state option returning auto by default") public TriState triState; @Option(name = "special", defaultValue = "null", expansion = { "--host=special.google.com", "--port=8080"}) public Void special; } @Test public void paragraphFill() throws Exception { // TODO(bazel-team): don't include trailing space after last word in line. String input = "The quick brown fox jumps over the lazy dog."; assertEquals(" The quick \n brown fox \n jumps over \n the lazy \n" + " dog.", OptionsUsage.paragraphFill(input, 2, 13)); assertEquals(" The quick brown \n fox jumps over \n the lazy dog.", OptionsUsage.paragraphFill(input, 3, 19)); String input2 = "The quick brown fox jumps\nAnother paragraph."; assertEquals(" The quick brown fox \n jumps\n Another paragraph.", OptionsUsage.paragraphFill(input2, 2, 23)); } @Test public void getsDefaults() throws OptionsParsingException { Options<HttpOptions> options = Options.parse(HttpOptions.class, NO_ARGS); String[] remainingArgs = options.getRemainingArgs(); HttpOptions webFlags = options.getOptions(); assertEquals("www.google.com", webFlags.host); assertEquals(80, webFlags.port); assertEquals(false, webFlags.isDebugging); assertEquals(TriState.AUTO, webFlags.triState); assertEquals(0, remainingArgs.length); } @Test public void objectMethods() throws OptionsParsingException { String[] args = { "--host", "foo", "--port", "80" }; HttpOptions left = 
Options.parse(HttpOptions.class, args).getOptions(); HttpOptions likeLeft = Options.parse(HttpOptions.class, args).getOptions(); String [] rightArgs = {"--host", "other", "--port", "90" }; HttpOptions right = Options.parse(HttpOptions.class, rightArgs).getOptions(); String toString = left.toString(); // Don't rely on Set.toString iteration order: assertTrue(toString.startsWith( "com.google.devtools.common.options.OptionsTest" + "$HttpOptions{")); assertTrue(toString.contains("host=foo")); assertTrue(toString.contains("port=80")); assertTrue(toString.endsWith("}")); new EqualsTester().addEqualityGroup(left).testEquals(); assertTrue(left.toString().equals(likeLeft.toString())); assertTrue(left.equals(likeLeft)); assertTrue(likeLeft.equals(left)); assertFalse(left.equals(right)); assertFalse(right.equals(left)); assertFalse(left.equals(null)); assertFalse(likeLeft.equals(null)); assertEquals(likeLeft.hashCode(), likeLeft.hashCode()); assertEquals(left.hashCode(), likeLeft.hashCode()); // Strictly speaking this is not required for hashCode to be correct, // but a good hashCode should be different at least for some values. So, // we're making sure that at least this particular pair of inputs yields // different values. 
// ---- tail of the Options-parser test class (class header precedes this chunk) ----

  // NOTE(review): closing lines of a hashCode test whose opening precedes this
  // chunk — asserts the two (presumably unequal) option sets hash differently.
  assertFalse(left.hashCode() == right.hashCode());
  }

  // equals() must be insensitive to argument order, abbreviations, and
  // explicitly-passed default values — but sensitive to actual value changes.
  @Test
  public void equals() throws OptionsParsingException {
    String[] args = { "--host", "foo", "--port", "80" };
    HttpOptions options1 = Options.parse(HttpOptions.class, args).getOptions();
    String[] args2 = { "-p", "80", "--host", "foo" };
    HttpOptions options2 = Options.parse(HttpOptions.class, args2).getOptions();
    assertEquals("order/abbreviations shouldn't matter", options1, options2);
    assertEquals("explicitly setting a default shouldn't matter",
        Options.parse(HttpOptions.class, "--port", "80").getOptions(),
        Options.parse(HttpOptions.class).getOptions());
    assertThat(Options.parse(HttpOptions.class, "--port", "3").getOptions())
        .isNotEqualTo(Options.parse(HttpOptions.class).getOptions());
  }

  // Mixed long ("--host") and abbreviated ("-p") forms parse into fields;
  // nothing is left over in the remaining args.
  @Test
  public void getsFlagsProvidedInArguments() throws OptionsParsingException {
    String[] args = {"--host", "google.com",
                     "-p", "8080", // short form
                     "--debug"};
    Options<HttpOptions> options = Options.parse(HttpOptions.class, args);
    String[] remainingArgs = options.getRemainingArgs();
    HttpOptions webFlags = options.getOptions();
    assertEquals("google.com", webFlags.host);
    assertEquals(8080, webFlags.port);
    assertEquals(true, webFlags.isDebugging);
    assertEquals(0, remainingArgs.length);
  }

  // "--name=value" syntax is equivalent to the two-token "--name value" form.
  @Test
  public void getsFlagsProvidedWithEquals() throws OptionsParsingException {
    String[] args = {"--host=google.com", "--port=8080", "--debug"};
    Options<HttpOptions> options = Options.parse(HttpOptions.class, args);
    String[] remainingArgs = options.getRemainingArgs();
    HttpOptions webFlags = options.getOptions();
    assertEquals("google.com", webFlags.host);
    assertEquals(8080, webFlags.port);
    assertEquals(true, webFlags.isDebugging);
    assertEquals(0, remainingArgs.length);
  }

  // "--noNAME" negates a boolean/tri-state option.
  @Test
  public void booleanNo() throws OptionsParsingException {
    Options<HttpOptions> options = Options.parse(HttpOptions.class,
        new String[]{"--nodebug", "--notristate"});
    HttpOptions webFlags = options.getOptions();
    assertEquals(false, webFlags.isDebugging);
    assertEquals(TriState.NO, webFlags.triState);
  }

  // "--no_NAME" (underscore variant) also negates.
  @Test
  public void booleanNoUnderscore() throws OptionsParsingException {
    Options<HttpOptions> options = Options.parse(HttpOptions.class,
        new String[]{"--no_debug", "--no_tristate"});
    HttpOptions webFlags = options.getOptions();
    assertEquals(false, webFlags.isDebugging);
    assertEquals(TriState.NO, webFlags.triState);
  }

  // A trailing '-' on an abbreviation ("-d-") negates it.
  @Test
  public void booleanAbbrevMinus() throws OptionsParsingException {
    Options<HttpOptions> options = Options.parse(HttpOptions.class,
        new String[]{"-d-", "-t-"});
    HttpOptions webFlags = options.getOptions();
    assertEquals(false, webFlags.isDebugging);
    assertEquals(TriState.NO, webFlags.triState);
  }

  // Numeric boolean literals: "0" means false / NO ...
  @Test
  public void boolean0() throws OptionsParsingException {
    Options<HttpOptions> options = Options.parse(HttpOptions.class,
        new String[]{"--debug=0", "--tristate=0"});
    HttpOptions webFlags = options.getOptions();
    assertEquals(false, webFlags.isDebugging);
    assertEquals(TriState.NO, webFlags.triState);
  }

  // ... and "1" means true / YES.
  @Test
  public void boolean1() throws OptionsParsingException {
    Options<HttpOptions> options = Options.parse(HttpOptions.class,
        new String[]{"--debug=1", "--tristate=1"});
    HttpOptions webFlags = options.getOptions();
    assertEquals(true, webFlags.isDebugging);
    assertEquals(TriState.YES, webFlags.triState);
  }

  // Non-option tokens pass through untouched into getRemainingArgs().
  @Test
  public void retainsStuffThatsNotOptions() throws OptionsParsingException {
    String[] args = {"these", "aint", "options"};
    Options<HttpOptions> options = Options.parse(HttpOptions.class, args);
    String[] remainingArgs = options.getRemainingArgs();
    assertEquals(asList(args), asList(remainingArgs));
  }

  // Non-option tokens survive even when interleaved with real options.
  @Test
  public void retainsStuffThatsNotComplexOptions() throws OptionsParsingException {
    String[] args = {"--host", "google.com",
                     "notta",
                     "--port=8080",
                     "option",
                     "--debug=true"};
    String[] notoptions = {"notta", "option" };
    Options<HttpOptions> options = Options.parse(HttpOptions.class, args);
    String[] remainingArgs = options.getRemainingArgs();
    assertEquals(asList(notoptions), asList(remainingArgs));
  }

  // Parsing fails fast on the FIRST unrecognized option.
  @Test
  public void wontParseUnknownOptions() {
    String[] args = { "--unknown", "--other=23", "--options" };
    try {
      Options.parse(HttpOptions.class, args);
      fail();
    } catch (OptionsParsingException e) {
      assertEquals("Unrecognized option: --unknown", e.getMessage());
    }
  }

  // A value-taking option at the end of the args list is an error.
  @Test
  public void requiresOptionValue() {
    String[] args = {"--port"};
    try {
      Options.parse(HttpOptions.class, args);
      fail();
    } catch (OptionsParsingException e) {
      assertEquals("Expected value after --port", e.getMessage());
    }
  }

  // When an option is repeated with different values, the LAST one wins (full form).
  @Test
  public void handlesDuplicateOptions_full() throws Exception {
    String[] args = {"--port=80", "--port", "81"};
    Options<HttpOptions> options = Options.parse(HttpOptions.class, args);
    HttpOptions webFlags = options.getOptions();
    assertEquals(81, webFlags.port);
  }

  // Last-one-wins also holds across long and abbreviated forms.
  @Test
  public void handlesDuplicateOptions_abbrev() throws Exception {
    String[] args = {"--port=80", "-p", "81"};
    Options<HttpOptions> options = Options.parse(HttpOptions.class, args);
    HttpOptions webFlags = options.getOptions();
    assertEquals(81, webFlags.port);
  }

  @Test
  public void duplicateOptionsOkWithSameValues() throws Exception {
    // These would throw OptionsParsingException if they failed.
    Options.parse(HttpOptions.class,"--port=80", "--port", "80");
    Options.parse(HttpOptions.class, "--port=80", "-p", "80");
  }

  // A non-boolean literal for a boolean option is a parse error with a precise message.
  @Test
  public void isPickyAboutBooleanValues() {
    try {
      Options.parse(HttpOptions.class, new String[]{"--debug=not_a_boolean"});
      fail();
    } catch (OptionsParsingException e) {
      assertEquals("While parsing option --debug=not_a_boolean: "
                   + "\'not_a_boolean\' is not a boolean", e.getMessage());
    }
  }

  // The negated "--noNAME" form must not carry a value.
  @Test
  public void isPickyAboutBooleanNos() {
    try {
      Options.parse(HttpOptions.class, new String[]{"--nodebug=1"});
      fail();
    } catch (OptionsParsingException e) {
      assertEquals("Unexpected value after boolean option: --nodebug=1", e.getMessage());
    }
  }

  // Usage text renders type, abbreviation, default, help, and expansion info.
  @Test
  public void usageForBuiltinTypes() {
    String usage = Options.getUsage(HttpOptions.class);
    // We can't rely on the option ordering.
    assertTrue(usage.contains(
        " --[no]debug [-d] (a boolean; default: \"false\")\n" +
        " debug"));
    assertTrue(usage.contains(
        " --host (a string; default: \"www.google.com\")\n" +
        " The URL at which the server will be running."));
    assertTrue(usage.contains(
        " --port [-p] (an integer; default: \"80\")\n" +
        " The port at which the server will be running."));
    assertTrue(usage.contains(
        " --special\n" +
        " Expands to: --host=special.google.com --port=8080"));
    assertTrue(usage.contains(
        " --[no]tristate [-t] (a tri-state (auto, yes, no); default: \"auto\")\n" +
        " tri-state option returning auto by default"));
  }

  // Fixture: options whose defaultValue is the literal string "null".
  public static class NullTestOptions extends OptionsBase {
    @Option(name = "host",
            defaultValue = "null",
            help = "The URL at which the server will be running.")
    public String host;

    @Option(name = "none",
            defaultValue = "null",
            expansion = {"--host=www.google.com"},
            help = "An expanded option.")
    public Void none;
  }

  // A "null" default is rendered as "see description" in the usage text.
  @Test
  public void usageForNullDefault() {
    String usage = Options.getUsage(NullTestOptions.class);
    assertTrue(usage.contains(
        " --host (a string; default: see description)\n" +
        " The URL at which the server will be running."));
    assertTrue(usage.contains(
        " --none\n" +
        " An expanded option.\n" +
        " Expands to: --host=www.google.com"));
  }

  // Fixture: a user-supplied Converter that parses URLs and reports failures
  // as OptionsParsingException.
  public static class MyURLConverter implements Converter<URL> {
    @Override
    public URL convert(String input) throws OptionsParsingException {
      try {
        return new URL(input);
      } catch (MalformedURLException e) {
        throw new OptionsParsingException("Could not convert '" + input + "': "
                                          + e.getMessage());
      }
    }
    @Override
    public String getTypeDescription() {
      return "a url";
    }
  }

  public static class UsesCustomConverter extends OptionsBase {
    @Option(name = "url",
            defaultValue = "http://www.google.com/",
            converter = MyURLConverter.class)
    public URL url;
  }

  // The custom converter is applied to the default value too.
  @Test
  public void customConverter() throws Exception {
    Options<UsesCustomConverter> options =
        Options.parse(UsesCustomConverter.class, new String[0]);
    URL expected = new URL("http://www.google.com/");
    assertEquals(expected, options.getOptions().url);
  }

  // Converter failures are wrapped with the offending option in the message.
  @Test
  public void customConverterThrowsException() throws Exception {
    String[] args = {"--url=a_malformed:url"};
    try {
      Options.parse(UsesCustomConverter.class, args);
      fail();
    } catch (OptionsParsingException e) {
      assertEquals("While parsing option --url=a_malformed:url: "
                   + "Could not convert 'a_malformed:url': "
                   + "no protocol: a_malformed:url", e.getMessage());
    }
  }

  // The converter's getTypeDescription() appears in the usage text.
  @Test
  public void usageWithCustomConverter() {
    assertEquals(
        " --url (a url; default: \"http://www.google.com/\")\n",
        Options.getUsage(UsesCustomConverter.class));
  }

  // "--no-debug" (hyphenated) is NOT a recognized negation form.
  @Test
  public void unknownBooleanOption() {
    try {
      Options.parse(HttpOptions.class, new String[]{"--no-debug"});
      fail();
    } catch (OptionsParsingException e) {
      assertEquals("Unrecognized option: --no-debug", e.getMessage());
    }
  }

  public static class J extends OptionsBase {
    @Option(name = "j", defaultValue = "null")
    public String string;
  }

  // For reference-typed options, defaultValue = "null" yields an actual null.
  @Test
  public void nullDefaultForReferenceTypeOption() throws Exception {
    J options = Options.parse(J.class, NO_ARGS).getOptions();
    assertNull(options.string);
  }

  public static class K extends OptionsBase {
    @Option(name = "1", defaultValue = "null")
    public int int1;
  }

  @Test
  public void nullDefaultForPrimitiveTypeOption() throws Exception {
    // defaultValue() = "null" is not treated specially for primitive types, so
    // we get an NumberFormatException from the converter (not a
    // ClassCastException from casting null to int), just as we would for any
    // other non-integer-literal string default.
    try {
      Options.parse(K.class, NO_ARGS).getOptions();
      fail();
    } catch (IllegalStateException e) {
      assertEquals("OptionsParsingException while retrieving default for "
                   + "int1: 'null' is not an int", e.getMessage());
    }
  }

  // The string "null" supplied on the command line is just a string.
  @Test
  public void nullIsntInterpretedSpeciallyExceptAsADefaultValue()
      throws Exception {
    HttpOptions options =
        Options.parse(HttpOptions.class,
                      new String[] { "--host", "null" }).getOptions();
    assertEquals("null", options.host);
  }

  // Integer options accept hex literals (0x51 == 81).
  @Test
  public void nonDecimalRadicesForIntegerOptions() throws Exception {
    Options<HttpOptions> options =
        Options.parse(HttpOptions.class, new String[] { "--port", "0x51"});
    assertEquals(81, options.getOptions().port);
  }

  // An expansion option sets all the options it expands to.
  @Test
  public void expansionOptionSimple() throws Exception {
    Options<HttpOptions> options =
        Options.parse(HttpOptions.class, new String[] {"--special"});
    assertEquals("special.google.com", options.getOptions().host);
    assertEquals(8080, options.getOptions().port);
  }

  // Expanded values override earlier explicit values and are overridden by later ones.
  @Test
  public void expansionOptionOverride() throws Exception {
    Options<HttpOptions> options =
        Options.parse(HttpOptions.class,
                      new String[] {"--port=90", "--special", "--host=foo"});
    assertEquals("foo", options.getOptions().host);
    assertEquals(8080, options.getOptions().port);
  }

  // Using the expansion is equivalent to spelling out its constituent options.
  @Test
  public void expansionOptionEquals() throws Exception {
    Options<HttpOptions> options1 =
        Options.parse(HttpOptions.class,
                      new String[] { "--host=special.google.com", "--port=8080"});
    Options<HttpOptions> options2 =
        Options.parse(HttpOptions.class, new String[] { "--special" });
    assertEquals(options1.getOptions(), options2.getOptions());
  }
}
/* * The MIT License * * Copyright 2013-2015 "Osric Wilkinson" <osric@fluffypeople.com>. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.fluffypeople.managesieve.xml; import com.fluffypeople.managesieve.ParseException; import java.io.IOException; import java.io.Reader; import java.io.StreamTokenizer; import java.io.StringReader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Convert a Sieve script to its XML representation. 
* * @author "Osric Wilkinson" &lt;osric@fluffypeople.com&gt; */ public class SieveToXML { private static final Logger log = LoggerFactory.getLogger(SieveToXML.class); private StreamTokenizer in; private static final String[] CONTROL_NAMES = {"if", "elsif", "else", "stop", "require"}; public XML convert(final String script) throws IOException, ParseException { return convert(new StringReader(script)); } public XML convert(final Reader script) throws ParseException, IOException { XML xml = new XML(); in = new StreamTokenizer(script); setupTokenizer(); xml.start("sieve", "xmlns", "urn:ietf:params:xml:ns:sieve"); commands(xml); xml.end(); return xml; } private void commands(final XML xml) throws ParseException, IOException { log.debug("commands start"); while (command(xml)) { } log.debug("commands end"); } private boolean command(final XML xml) throws IOException, ParseException { log.debug("command"); int token = in.nextToken(); // First token should be an identifer if (token == StreamTokenizer.TT_WORD) { String name = in.sval; // Stricly, identifiers shouldn't start with a number // TODO: Check that String tag = nameIsControl(name) ? 
"control" : "action"; xml.start(tag, "name", name); arguments(xml); token = in.nextToken(); if (token == '{') { in.pushBack(); block(xml); } else if (token == ';') { // end of command } else { raiseError("{ or ;", token, in.lineno()); } xml.end(); return true; } else { in.pushBack(); return false; } } private void arguments(XML xml) throws ParseException, IOException { log.debug("arguments"); while (argument(xml)) { } int token = in.nextToken(); in.pushBack(); if (token == '(') { test_list(xml); } else if (token == StreamTokenizer.TT_WORD) { test(xml); } } private boolean argument(XML xml) throws IOException, ParseException { log.debug("argument"); int token = in.nextToken(); in.pushBack(); if (token == '[') { string_list(xml); return true; } else if (token == '"') { string(xml); return true; } else if (token == StreamTokenizer.TT_WORD && in.sval.equals("text")) { in.pushBack(); string(xml); return true; } else if (token == StreamTokenizer.TT_NUMBER) { number(xml); return true; } else if (token == ':') { tag(xml); return true; } else { return false; } } private void string(XML xml) throws ParseException, IOException { log.debug("string"); int token = in.nextToken(); if (token == '"') { xml.add("str", in.sval); } else if (token == StreamTokenizer.TT_WORD) { if (in.sval.equals("text")) { token = in.nextToken(); if (token == ':') { // multi line string. Set tokenizer to ignore everything // but line endings and # comments. in.resetSyntax(); in.ordinaryChars(0, 255); in.commentChar('#'); in.whitespaceChars('\r', '\r'); in.whitespaceChars('\n', '\n'); in.eolIsSignificant(true); // Read to end of line we're on token = in.nextToken(); if (token != StreamTokenizer.TT_EOL) { raiseError("EOL", token, in.lineno()); } // OK< start of multiline string. 
Comments are no longer // significant in.ordinaryChar('#'); StringBuilder rawString = new StringBuilder(); while (true) { StringBuilder line = new StringBuilder(); do { token = in.nextToken(); if (token == StreamTokenizer.TT_WORD) { // Unicode character line.append(in.sval); } else if (token == '\r' || token == '\n') { // skip it } else { try { line.append(Character.toChars(token)); } catch (java.lang.IllegalArgumentException ex) { log.error("{} is not a valid char ",token); throw ex; } } } while (token != StreamTokenizer.TT_EOL); System.out.println("line: " + line.toString()); if (line.length() == 1 && line.codePointAt(0) == '.') { // Found last line break; } else if (line.length() > 1 && line.codePointAt(0) == '.' && line.codePointAt(1) == '.') { // Dot has been doubled, so delete the extra line.deleteCharAt(0); } rawString.append(line).append("\r\n"); } xml.add("str", rawString.toString()); setupTokenizer(); } else { raiseError(":", token, in.lineno()); } } else { raiseError("'text'", token, in.lineno()); } } else { raiseError("\"", token, in.lineno()); } } private void string_list(XML xml) throws ParseException, IOException { log.debug("string_list"); int token = in.nextToken(); if (token == '[') { xml.start("list"); do { string(xml); token = in.nextToken(); } while (token == ','); if (token != ']') { raiseError("]", token, in.lineno()); } xml.end(); } else { raiseError("[", token, in.lineno()); } } private void tag(XML xml) throws ParseException, IOException { log.debug("tag"); int token = in.nextToken(); if (token == ':') { token = in.nextToken(); if (token == StreamTokenizer.TT_WORD) { xml.add("tag", in.sval); } else { raiseError("WORD", token, in.lineno()); } } else { raiseError(":", token, in.lineno()); } } private void number(XML xml) throws ParseException, IOException { log.debug("number"); int token = in.nextToken(); if (token == StreamTokenizer.TT_NUMBER) { Long raw = (long) in.nval; token = in.nextToken(); if (token == StreamTokenizer.TT_WORD) { 
String mult = in.sval; if (mult.equalsIgnoreCase("K")) { raw *= 1024; } else if (mult.equalsIgnoreCase("M")) { raw *= 1024 * 1024; } else if (mult.equalsIgnoreCase("G")) { raw *= 1024 * 1024 * 1024; } } else { in.pushBack(); } xml.add("num", Long.toString(raw, 10)); } else { raiseError("NUM", token, in.lineno()); } } private void block(XML xml) throws IOException, ParseException { log.debug("block"); int token = in.nextToken(); if (token == '{') { commands(xml); token = in.nextToken(); if (token != '}') { raiseError("}", token, in.lineno()); } } else { raiseError("{", token, in.lineno()); } log.debug("block end"); } private void test_list(XML xml) throws IOException, ParseException { log.debug("test_list"); int token = in.nextToken(); if (token == '(') { do { test(xml); token = in.nextToken(); } while (token == ','); if (token != ')') { raiseError(")", token, in.lineno()); } } else { raiseError("(", token, in.lineno()); } } private void test(XML xml) throws ParseException, IOException { log.debug("test"); int token = in.nextToken(); if (token == StreamTokenizer.TT_WORD) { xml.start("test", "name", in.sval); arguments(xml); xml.end(); } } private void raiseError(final String expecting, final int token, final int line) throws ParseException { StringBuilder message = new StringBuilder(); message.append("Expecting "); message.append(expecting); message.append(" but got "); message.append(tokenToString(token)); message.append(" at line "); message.append(Integer.toString(line, 10)); throw new ParseException(message.toString()); } private void setupTokenizer() { in.resetSyntax(); in.whitespaceChars(0x0D, 0x0D); // CR in.whitespaceChars(0x0A, 0x0A); // LF in.whitespaceChars(0x09, 0x09); // HTAB in.whitespaceChars(0x20, 0x20); // SP in.wordChars(0x41, 0x5A); // A-Z in.wordChars(0x61, 0x7A); // a-z in.wordChars(0x30, 0x39); // 0-9 in.wordChars(0x5F, 0x5F); // _ in.quoteChar(0x22); // " in.commentChar('#'); in.slashStarComments(true); in.eolIsSignificant(false); 
in.parseNumbers(); } private boolean nameIsControl(final String name) { for (String c : CONTROL_NAMES) { if (c.equals(name)) { return true; } } return false; } private static String tokenToString(final int c) { if (c > 0) { return new String(Character.toChars(c)); } else { switch (c) { case StreamTokenizer.TT_EOF: return "EOF"; case StreamTokenizer.TT_NUMBER: return "NUMBER"; case StreamTokenizer.TT_EOL: return "EOL"; case StreamTokenizer.TT_WORD: return "WORD"; default: return "UNKNOWN"; } } } }
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.firefox; import com.google.common.annotations.VisibleForTesting; import com.google.common.io.Resources; import org.openqa.selenium.Beta; import org.openqa.selenium.WebDriverException; import org.openqa.selenium.firefox.internal.ClasspathExtension; import org.openqa.selenium.firefox.internal.Extension; import org.openqa.selenium.firefox.internal.FileExtension; import org.openqa.selenium.io.FileHandler; import org.openqa.selenium.io.TemporaryFilesystem; import org.openqa.selenium.io.Zip; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; import java.io.StringReader; import java.net.URL; import java.util.HashMap; import java.util.Map; public class FirefoxProfile { public static final String PORT_PREFERENCE = "webdriver_firefox_port"; public static final String ALLOWED_HOSTS_PREFERENCE = "webdriver_firefox_allowed_hosts"; private static final String defaultPrefs = "/org/openqa/selenium/firefox/webdriver_prefs.json"; private Preferences additionalPrefs; private Map<String, Extension> extensions = new HashMap<>(); private boolean loadNoFocusLib; private boolean 
acceptUntrustedCerts; private boolean untrustedCertIssuer; private File model; private static final String ACCEPT_UNTRUSTED_CERTS_PREF = "webdriver_accept_untrusted_certs"; private static final String ASSUME_UNTRUSTED_ISSUER_PREF = "webdriver_assume_untrusted_issuer"; public FirefoxProfile() { this(null); } /** * Constructs a firefox profile from an existing profile directory. * <p> * Users who need this functionality should consider using a named profile. * * @param profileDir The profile directory to use as a model. */ public FirefoxProfile(File profileDir) { this(null, profileDir); } @VisibleForTesting @Beta protected FirefoxProfile(Reader defaultsReader, File profileDir) { if (defaultsReader == null) { defaultsReader = onlyOverrideThisIfYouKnowWhatYouAreDoing(); } additionalPrefs = new Preferences(defaultsReader); model = profileDir; verifyModel(model); File prefsInModel = new File(model, "user.js"); if (prefsInModel.exists()) { StringReader reader = new StringReader("{\"frozen\": {}, \"mutable\": {}}"); Preferences existingPrefs = new Preferences(reader, prefsInModel); acceptUntrustedCerts = getBooleanPreference(existingPrefs, ACCEPT_UNTRUSTED_CERTS_PREF, true); untrustedCertIssuer = getBooleanPreference(existingPrefs, ASSUME_UNTRUSTED_ISSUER_PREF, true); existingPrefs.addTo(additionalPrefs); } else { acceptUntrustedCerts = true; untrustedCertIssuer = true; } // This is not entirely correct but this is not stored in the profile // so for now will always be set to false. loadNoFocusLib = false; try { defaultsReader.close(); } catch (IOException e) { throw new WebDriverException(e); } } /** * <strong>Internal method. 
This is liable to change at a moment's notice.</strong> * * @return InputStreamReader of the default firefox profile preferences */ @Beta protected Reader onlyOverrideThisIfYouKnowWhatYouAreDoing() { URL resource = Resources.getResource(FirefoxProfile.class, defaultPrefs); try { return new InputStreamReader(resource.openStream()); } catch (IOException e) { throw new WebDriverException(e); } } private boolean getBooleanPreference(Preferences prefs, String key, boolean defaultValue) { Object value = prefs.getPreference(key); if (value == null) { return defaultValue; } if (value instanceof Boolean) { return (Boolean) value; } throw new WebDriverException("Expected boolean value is not a boolean. It is: " + value); } public String getStringPreference(String key, String defaultValue) { Object preference = additionalPrefs.getPreference(key); if(preference != null && preference instanceof String) { return (String) preference; } return defaultValue; } public int getIntegerPreference(String key, int defaultValue) { Object preference = additionalPrefs.getPreference(key); if(preference != null && preference instanceof Integer) { return (Integer) preference; } return defaultValue; } public boolean getBooleanPreference(String key, boolean defaultValue) { Object preference = additionalPrefs.getPreference(key); if(preference != null && preference instanceof Boolean) { return (Boolean) preference; } return defaultValue; } private void verifyModel(File model) { if (model == null) { return; } if (!model.exists()) { throw new UnableToCreateProfileException( "Given model profile directory does not exist: " + model.getPath()); } if (!model.isDirectory()) { throw new UnableToCreateProfileException( "Given model profile directory is not a directory: " + model.getAbsolutePath()); } } public boolean containsWebDriverExtension() { return extensions.containsKey("webdriver"); } public void addExtension(Class<?> loadResourcesUsing, String loadFrom) { // Is loadFrom a file? 
File file = new File(loadFrom); if (file.exists()) { addExtension(file); return; } addExtension(loadFrom, new ClasspathExtension(loadResourcesUsing, loadFrom)); } /** * Attempt to add an extension to install into this instance. * * @param extensionToInstall File pointing to the extension */ public void addExtension(File extensionToInstall) { addExtension(extensionToInstall.getName(), new FileExtension(extensionToInstall)); } public void addExtension(String key, Extension extension) { String name = deriveExtensionName(key); extensions.put(name, extension); } private String deriveExtensionName(String originalName) { String[] pieces = originalName.replace('\\', '/').split("/"); String name = pieces[pieces.length - 1]; name = name.replaceAll("\\..*?$", ""); return name; } /** * Set a preference for this particular profile. The value will be properly quoted before use. * Note that if a value looks as if it is a quoted string (that is, starts with a quote character * and ends with one too) an IllegalArgumentException is thrown: Firefox fails to start properly * when some values are set to this. * * @param key The key * @param value The new value. */ public void setPreference(String key, String value) { additionalPrefs.setPreference(key, value); } /** * Set a preference for this particular profile. * * @param key The key * @param value The new value. */ public void setPreference(String key, boolean value) { additionalPrefs.setPreference(key, value); } /** * Set a preference for this particular profile. * * @param key The key * @param value The new value. 
*/ public void setPreference(String key, int value) { additionalPrefs.setPreference(key, value); } protected Preferences getAdditionalPreferences() { return additionalPrefs; } public void updateUserPrefs(File userPrefs) { Preferences prefs = new Preferences(onlyOverrideThisIfYouKnowWhatYouAreDoing()); // Allow users to override these settings prefs.setPreference("browser.startup.homepage", "about:blank"); // The user must be able to override this setting (to 1) in order to // to change homepage on Firefox 3.0 prefs.setPreference("browser.startup.page", 0); if (userPrefs.exists()) { prefs = new Preferences(onlyOverrideThisIfYouKnowWhatYouAreDoing(), userPrefs); if (!userPrefs.delete()) { throw new WebDriverException("Cannot delete existing user preferences"); } } additionalPrefs.addTo(prefs); // Should we accept untrusted certificates or not? prefs.setPreference(ACCEPT_UNTRUSTED_CERTS_PREF, acceptUntrustedCerts); prefs.setPreference(ASSUME_UNTRUSTED_ISSUER_PREF, untrustedCertIssuer); // If the user sets the home page, we should also start up there Object homePage = prefs.getPreference("browser.startup.homepage"); if (homePage != null && homePage instanceof String) { prefs.setPreference("startup.homepage_welcome_url", ""); } if (!"about:blank".equals(prefs.getPreference("browser.startup.homepage"))) { prefs.setPreference("browser.startup.page", 1); } try (FileWriter writer = new FileWriter(userPrefs)) { prefs.writeTo(writer); } catch (IOException e) { throw new WebDriverException(e); } } protected void deleteLockFiles(File profileDir) { File macAndLinuxLockFile = new File(profileDir, ".parentlock"); File windowsLockFile = new File(profileDir, "parent.lock"); macAndLinuxLockFile.delete(); windowsLockFile.delete(); } public void deleteExtensionsCacheIfItExists(File profileDir) { File cacheFile = new File(profileDir, "extensions.cache"); if (cacheFile.exists()) { cacheFile.delete(); } } /** * Returns whether the no focus library should be loaded for Firefox profiles 
launched on Linux, * even if native events are disabled. * * @return Whether the no focus library should always be loaded for Firefox on Linux. */ public boolean shouldLoadNoFocusLib() { return loadNoFocusLib; } /** * Sets whether the no focus library should always be loaded on Linux. * * @param loadNoFocusLib Whether to always load the no focus library. */ public void setAlwaysLoadNoFocusLib(boolean loadNoFocusLib) { this.loadNoFocusLib = loadNoFocusLib; } /** * Sets whether Firefox should accept SSL certificates which have expired, signed by an unknown * authority or are generally untrusted. This is set to true by default. * * @param acceptUntrustedSsl Whether untrusted SSL certificates should be accepted. */ public void setAcceptUntrustedCertificates(boolean acceptUntrustedSsl) { this.acceptUntrustedCerts = acceptUntrustedSsl; } /** * By default, when accepting untrusted SSL certificates, assume that these certificates will come * from an untrusted issuer or will be self signed. Due to limitation within Firefox, it is easy * to find out if the certificate has expired or does not match the host it was served for, but * hard to find out if the issuer of the certificate is untrusted. * <p> * By default, it is assumed that the certificates were not be issued from a trusted CA. * <p> * If you are receive an "untrusted site" prompt on Firefox when using a certificate that was * issued by valid issuer, but has expired or is being served served for a different host (e.g. * production certificate served in a testing environment) set this to false. * * @param untrustedIssuer whether to assume untrusted issuer or not. 
*/ public void setAssumeUntrustedCertificateIssuer(boolean untrustedIssuer) { this.untrustedCertIssuer = untrustedIssuer; } public void clean(File profileDir) { TemporaryFilesystem.getDefaultTmpFS().deleteTempDir(profileDir); } public String toJson() throws IOException { File file = layoutOnDisk(); try { return Zip.zip(file); } finally { clean(file); } } public static FirefoxProfile fromJson(String json) throws IOException { return new FirefoxProfile(Zip.unzipToTempDir(json, "webdriver", "duplicated")); } protected void cleanTemporaryModel() { clean(model); } /** * Call this to cause the current profile to be written to disk. The profile directory is * returned. Note that this profile directory is a temporary one and will be deleted when the JVM * exists (at the latest) * * This method should be called immediately before starting to use the profile and should only be * called once per instance of the {@link org.openqa.selenium.firefox.FirefoxDriver}. * * @return The directory containing the profile. */ public File layoutOnDisk() { try { File profileDir = TemporaryFilesystem.getDefaultTmpFS() .createTempDir("anonymous", "webdriver-profile"); File userPrefs = new File(profileDir, "user.js"); copyModel(model, profileDir); installExtensions(profileDir); deleteLockFiles(profileDir); deleteExtensionsCacheIfItExists(profileDir); updateUserPrefs(userPrefs); return profileDir; } catch (IOException e) { throw new UnableToCreateProfileException(e); } } protected void copyModel(File sourceDir, File profileDir) throws IOException { if (sourceDir == null || !sourceDir.exists()) { return; } FileHandler.copy(sourceDir, profileDir); } protected void installExtensions(File parentDir) throws IOException { File extensionsDir = new File(parentDir, "extensions"); for (Extension extension : extensions.values()) { extension.writeTo(extensionsDir); } } }
/* * Copyright (c) 1997, 2008, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
 */
package javax.swing.plaf.basic;

import java.awt.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.border.*;
import javax.swing.plaf.*;
import javax.swing.text.View;
import sun.swing.SwingUtilities2;
import sun.awt.AppContext;

/**
 * RadioButtonUI implementation for BasicRadioButtonUI
 *
 * @author Jeff Dinkins
 */
public class BasicRadioButtonUI extends BasicToggleButtonUI {

    // Per-AppContext key under which the shared UI instance is cached.
    private static final Object BASIC_RADIO_BUTTON_UI_KEY = new Object();

    // The radio-button icon looked up from the UIManager in installDefaults().
    protected Icon icon;

    // Guards the one-time UIManager lookup; reset on uninstallDefaults().
    private boolean defaults_initialized = false;

    private final static String propertyPrefix = "RadioButton" + ".";

    // ********************************
    //        Create PLAF
    // ********************************

    /**
     * Returns the shared per-AppContext UI delegate, creating and caching it
     * on first use. One stateless instance serves all radio buttons in the
     * same AppContext.
     */
    public static ComponentUI createUI(JComponent b) {
        AppContext appContext = AppContext.getAppContext();
        BasicRadioButtonUI radioButtonUI =
                (BasicRadioButtonUI) appContext.get(BASIC_RADIO_BUTTON_UI_KEY);
        if (radioButtonUI == null) {
            radioButtonUI = new BasicRadioButtonUI();
            appContext.put(BASIC_RADIO_BUTTON_UI_KEY, radioButtonUI);
        }
        return radioButtonUI;
    }

    /** Prefix ("RadioButton.") used to look up UI defaults for this delegate. */
    protected String getPropertyPrefix() {
        return propertyPrefix;
    }

    // ********************************
    //        Install PLAF
    // ********************************

    /** Installs defaults; the icon is fetched from the UIManager only once. */
    protected void installDefaults(AbstractButton b){
        super.installDefaults(b);
        if(!defaults_initialized) {
            icon = UIManager.getIcon(getPropertyPrefix() + "icon");
            defaults_initialized = true;
        }
    }

    // ********************************
    //        Uninstall PLAF
    // ********************************

    /** Uninstalls defaults and forces a fresh icon lookup on the next install. */
    protected void uninstallDefaults(AbstractButton b){
        super.uninstallDefaults(b);
        defaults_initialized = false;
    }

    /** Returns the default radio-button icon installed from the UI defaults. */
    public Icon getDefaultIcon() {
        return icon;
    }

    /* These Dimensions/Rectangles are allocated once for all
     * RadioButtonUI.paint() calls.  Re-using rectangles
     * rather than allocating them in each paint call substantially
     * reduced the time it took paint to run.  Obviously, this
     * method can't be re-entered.
     */
    private static Dimension size = new Dimension();
    private static Rectangle viewRect = new Rectangle();
    private static Rectangle iconRect = new Rectangle();
    private static Rectangle textRect = new Rectangle();

    /**
     * paint the radio button
     */
    // synchronized because the scratch rectangles above are static and shared
    // across all instances (see comment on the fields).
    public synchronized void paint(Graphics g, JComponent c) {
        AbstractButton b = (AbstractButton) c;
        ButtonModel model = b.getModel();

        Font f = c.getFont();
        g.setFont(f);
        FontMetrics fm = SwingUtilities2.getFontMetrics(c, g, f);

        Insets i = c.getInsets();
        size = b.getSize(size);
        // View rect = component bounds minus insets; icon/text rects start empty
        // and are filled in by layoutCompoundLabel below.
        viewRect.x = i.left;
        viewRect.y = i.top;
        viewRect.width = size.width - (i.right + viewRect.x);
        viewRect.height = size.height - (i.bottom + viewRect.y);
        iconRect.x = iconRect.y = iconRect.width = iconRect.height = 0;
        textRect.x = textRect.y = textRect.width = textRect.height = 0;

        Icon altIcon = b.getIcon();
        // NOTE(review): selectedIcon and disabledIcon are never read below.
        Icon selectedIcon = null;
        Icon disabledIcon = null;

        String text = SwingUtilities.layoutCompoundLabel(
            c, fm, b.getText(), altIcon != null ? altIcon : getDefaultIcon(),
            b.getVerticalAlignment(), b.getHorizontalAlignment(),
            b.getVerticalTextPosition(), b.getHorizontalTextPosition(),
            viewRect, iconRect, textRect,
            b.getText() == null ? 0 : b.getIconTextGap());

        // fill background
        if(c.isOpaque()) {
            g.setColor(b.getBackground());
            g.fillRect(0,0, size.width, size.height);
        }

        // Paint the radio button
        if(altIcon != null) {
            // Pick the state-specific icon; each branch falls back toward the
            // plain selected/default icon when the specific one is unset.
            if(!model.isEnabled()) {
                if(model.isSelected()) {
                   altIcon = b.getDisabledSelectedIcon();
                } else {
                   altIcon = b.getDisabledIcon();
                }
            } else if(model.isPressed() && model.isArmed()) {
                altIcon = b.getPressedIcon();
                if(altIcon == null) {
                    // Use selected icon
                    altIcon = b.getSelectedIcon();
                }
            } else if(model.isSelected()) {
                if(b.isRolloverEnabled() && model.isRollover()) {
                        altIcon = b.getRolloverSelectedIcon();
                        if (altIcon == null) {
                                altIcon = b.getSelectedIcon();
                        }
                } else {
                        altIcon = b.getSelectedIcon();
                }
            } else if(b.isRolloverEnabled() && model.isRollover()) {
                altIcon = b.getRolloverIcon();
            }

            if(altIcon == null) {
                altIcon = b.getIcon();
            }

            altIcon.paintIcon(c, g, iconRect.x, iconRect.y);

        } else {
            getDefaultIcon().paintIcon(c, g, iconRect.x, iconRect.y);
        }

        // Draw the Text
        if(text != null) {
            // Delegate to the HTML view when the button text is HTML.
            View v = (View) c.getClientProperty(BasicHTML.propertyKey);
            if (v != null) {
                v.paint(g, textRect);
            } else {
                paintText(g, b, textRect, text);
            }
            if(b.hasFocus() && b.isFocusPainted() &&
               textRect.width > 0 && textRect.height > 0 ) {
                paintFocus(g, textRect, size);
            }
        }
    }

    // Intentionally empty here; subclasses (e.g. Metal) paint the focus ring.
    protected void paintFocus(Graphics g, Rectangle textRect, Dimension size){
    }

    /* These Insets/Rectangles are allocated once for all
     * RadioButtonUI.getPreferredSize() calls.  Re-using rectangles
     * rather than allocating them in each call substantially
     * reduced the time it took getPreferredSize() to run.  Obviously,
     * this method can't be re-entered.
     */
    private static Rectangle prefViewRect = new Rectangle();
    private static Rectangle prefIconRect = new Rectangle();
    private static Rectangle prefTextRect = new Rectangle();
    private static Insets prefInsets = new Insets(0, 0, 0, 0);

    /**
     * The preferred size of the radio button
     */
    // NOTE(review): unlike paint(), this method is NOT synchronized even though
    // it mutates the shared static scratch rects above — confirm this is the
    // intended (EDT-only) usage.
    public Dimension getPreferredSize(JComponent c) {
        // A non-empty container lays itself out; no preferred size from us.
        if(c.getComponentCount() > 0) {
            return null;
        }

        AbstractButton b = (AbstractButton) c;

        String text = b.getText();

        Icon buttonIcon = b.getIcon();
        if(buttonIcon == null) {
            buttonIcon = getDefaultIcon();
        }

        Font font = b.getFont();
        FontMetrics fm = b.getFontMetrics(font);

        // Unbounded view rect so layoutCompoundLabel reports natural sizes.
        prefViewRect.x = prefViewRect.y = 0;
        prefViewRect.width = Short.MAX_VALUE;
        prefViewRect.height = Short.MAX_VALUE;
        prefIconRect.x = prefIconRect.y = prefIconRect.width = prefIconRect.height = 0;
        prefTextRect.x = prefTextRect.y = prefTextRect.width = prefTextRect.height = 0;

        SwingUtilities.layoutCompoundLabel(
            c, fm, text, buttonIcon,
            b.getVerticalAlignment(), b.getHorizontalAlignment(),
            b.getVerticalTextPosition(), b.getHorizontalTextPosition(),
            prefViewRect, prefIconRect, prefTextRect,
            text == null ? 0 : b.getIconTextGap());

        // find the union of the icon and text rects (from Rectangle.java)
        int x1 = Math.min(prefIconRect.x, prefTextRect.x);
        int x2 = Math.max(prefIconRect.x + prefIconRect.width,
                          prefTextRect.x + prefTextRect.width);
        int y1 = Math.min(prefIconRect.y, prefTextRect.y);
        int y2 = Math.max(prefIconRect.y + prefIconRect.height,
                          prefTextRect.y + prefTextRect.height);
        int width = x2 - x1;
        int height = y2 - y1;

        prefInsets = b.getInsets(prefInsets);
        width += prefInsets.left + prefInsets.right;
        height += prefInsets.top + prefInsets.bottom;
        return new Dimension(width, height);
    }
}
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.NavigableSet;
import java.util.SortedSet;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.ByteRange;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.CollectionBackedScanner;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.htrace.Trace;

/**
 * The MemStore holds in-memory modifications to the Store.  Modifications
 * are {@link Cell}s.  When asked to flush, current memstore is moved
 * to snapshot and is cleared.  We continue to serve edits out of new memstore
 * and backing snapshot until flusher reports in that the flush succeeded. At
 * this point we let the snapshot go.
 * <p>
 * The MemStore functions should not be called in parallel. Callers should hold
 * write and read locks. This is done in {@link HStore}.
 * </p>
 *
 * TODO: Adjust size of the memstore when we remove items because they have
 * been deleted.
 * TODO: With new KVSLS, need to make sure we update HeapSize with difference
 * in KV size.
 */
@InterfaceAudience.Private
public class DefaultMemStore implements MemStore {
  private static final Log LOG = LogFactory.getLog(DefaultMemStore.class);
  // Config key enabling the MemStore-Local Allocation Buffer (reduces heap
  // fragmentation by copying cell bytes into chunked buffers).
  static final String USEMSLAB_KEY = "hbase.hregion.memstore.mslab.enabled";
  private static final boolean USEMSLAB_DEFAULT = true;
  // Config key naming the MemStoreLAB implementation class to instantiate.
  static final String MSLAB_CLASS_NAME = "hbase.regionserver.mslab.class";

  private Configuration conf;

  // MemStore.  Use a CellSkipListSet rather than SkipListSet because of the
  // better semantics.  The Map will overwrite if passed a key it already had
  // whereas the Set will not add new Cell if key is same though value might be
  // different.  Value is not important -- just make sure always same
  // reference passed.
  volatile CellSkipListSet cellSet;

  // Snapshot of memstore.  Made for flusher.
  volatile CellSkipListSet snapshot;

  final KeyValue.KVComparator comparator;

  // Used to track own heapSize
  final AtomicLong size;
  private volatile long snapshotSize;

  // Used to track when to flush
  volatile long timeOfOldestEdit = Long.MAX_VALUE;

  // Min/max timestamps of the live set and of the snapshot, used by
  // shouldSeek() to skip scans outside the stored time range.
  TimeRangeTracker timeRangeTracker;
  TimeRangeTracker snapshotTimeRangeTracker;

  // Allocator for the live cell set, and the one handed off with the
  // snapshot (kept alive until clearSnapshot so snapshot cells stay valid).
  volatile MemStoreLAB allocator;
  volatile MemStoreLAB snapshotAllocator;
  volatile long snapshotId;

  /**
   * Default constructor. Used for tests.
   */
  public DefaultMemStore() {
    this(HBaseConfiguration.create(), KeyValue.COMPARATOR);
  }

  /**
   * Constructor.
   * @param conf configuration; consulted for MSLAB settings
   * @param c Comparator
   */
  public DefaultMemStore(final Configuration conf,
                         final KeyValue.KVComparator c) {
    this.conf = conf;
    this.comparator = c;
    this.cellSet = new CellSkipListSet(c);
    this.snapshot = new CellSkipListSet(c);
    timeRangeTracker = new TimeRangeTracker();
    snapshotTimeRangeTracker = new TimeRangeTracker();
    // Start accounting from the fixed object overhead, not zero.
    this.size = new AtomicLong(DEEP_OVERHEAD);
    this.snapshotSize = 0;
    if (conf.getBoolean(USEMSLAB_KEY, USEMSLAB_DEFAULT)) {
      String className = conf.get(MSLAB_CLASS_NAME, HeapMemStoreLAB.class.getName());
      this.allocator = ReflectionUtils.instantiateWithCustomCtor(className,
          new Class[] { Configuration.class }, new Object[] { conf });
    } else {
      this.allocator = null;
    }
  }

  // Debug helper: logs every cell in the live set and in the snapshot.
  void dump() {
    for (Cell cell: this.cellSet) {
      LOG.info(cell);
    }
    for (Cell cell: this.snapshot) {
      LOG.info(cell);
    }
  }

  /**
   * Creates a snapshot of the current memstore.
   * Snapshot must be cleared by call to {@link #clearSnapshot(long)}
   */
  @Override
  public MemStoreSnapshot snapshot() {
    // If snapshot currently has entries, then flusher failed or didn't call
    // cleanup.  Log a warning.
    if (!this.snapshot.isEmpty()) {
      LOG.warn("Snapshot called again without clearing previous. " +
          "Doing nothing. Another ongoing flush or did we fail last attempt?");
    } else {
      // NOTE(review): snapshotId is a wall-clock timestamp, presumably unique
      // enough per store between flushes -- confirm against callers.
      this.snapshotId = EnvironmentEdgeManager.currentTime();
      this.snapshotSize = keySize();
      if (!this.cellSet.isEmpty()) {
        // Swap the live set into the snapshot slot; readers see either the
        // old or the new reference because both fields are volatile.
        this.snapshot = this.cellSet;
        this.cellSet = new CellSkipListSet(this.comparator);
        this.snapshotTimeRangeTracker = this.timeRangeTracker;
        this.timeRangeTracker = new TimeRangeTracker();
        // Reset heap to not include any keys
        this.size.set(DEEP_OVERHEAD);
        this.snapshotAllocator = this.allocator;
        // Reset allocator so we get a fresh buffer for the new memstore
        if (allocator != null) {
          String className = conf.get(MSLAB_CLASS_NAME, HeapMemStoreLAB.class.getName());
          this.allocator = ReflectionUtils.instantiateWithCustomCtor(className,
              new Class[] { Configuration.class }, new Object[] { conf });
        }
        timeOfOldestEdit = Long.MAX_VALUE;
      }
    }
    return new MemStoreSnapshot(this.snapshotId, snapshot.size(), this.snapshotSize,
        this.snapshotTimeRangeTracker,
        new CollectionBackedScanner(snapshot, this.comparator));
  }

  /**
   * The passed snapshot was successfully persisted; it can be let go.
   * @param id Id of the snapshot to clean out.
   * @throws UnexpectedStateException if {@code id} does not match the
   *         currently outstanding snapshot's id
   * @see #snapshot()
   */
  @Override
  public void clearSnapshot(long id) throws UnexpectedStateException {
    MemStoreLAB tmpAllocator = null;
    if (this.snapshotId != id) {
      throw new UnexpectedStateException("Current snapshot id is "
          + this.snapshotId + ",passed " + id);
    }
    // OK. Passed in snapshot is same as current snapshot. If not-empty,
    // create a new snapshot and let the old one go.
    if (!this.snapshot.isEmpty()) {
      this.snapshot = new CellSkipListSet(this.comparator);
      this.snapshotTimeRangeTracker = new TimeRangeTracker();
    }
    this.snapshotSize = 0;
    this.snapshotId = -1;
    if (this.snapshotAllocator != null) {
      tmpAllocator = this.snapshotAllocator;
      this.snapshotAllocator = null;
    }
    // Close outside the field swap so the reference is already cleared.
    if (tmpAllocator != null) {
      tmpAllocator.close();
    }
  }

  @Override
  public long getFlushableSize() {
    // If a snapshot is outstanding, that's what would flush; otherwise the
    // live key bytes.
    return this.snapshotSize > 0 ? this.snapshotSize : keySize();
  }

  @Override
  public long getSnapshotSize() {
    return this.snapshotSize;
  }

  /**
   * Write an update
   * @param cell
   * @return approximate size of the passed KV &amp; newly added KV which maybe different than the
   *         passed-in KV (the cell may have been copied into the MSLAB)
   */
  @Override
  public Pair<Long, Cell> add(Cell cell) {
    Cell toAdd = maybeCloneWithAllocator(cell);
    return new Pair<Long, Cell>(internalAdd(toAdd), toAdd);
  }

  @Override
  public long timeOfOldestEdit() {
    return timeOfOldestEdit;
  }

  // Adds to the live set and stamps timeOfOldestEdit if this is the first
  // edit since the last snapshot.
  private boolean addToCellSet(Cell e) {
    boolean b = this.cellSet.add(e);
    setOldestEditTimeToNow();
    return b;
  }

  // Removes from the live set; also refreshes timeOfOldestEdit.
  private boolean removeFromCellSet(Cell e) {
    boolean b = this.cellSet.remove(e);
    setOldestEditTimeToNow();
    return b;
  }

  void setOldestEditTimeToNow() {
    if (timeOfOldestEdit == Long.MAX_VALUE) {
      timeOfOldestEdit = EnvironmentEdgeManager.currentTime();
    }
  }

  /**
   * Internal version of add() that doesn't clone Cells with the
   * allocator, and doesn't take the lock.
   *
   * Callers should ensure they already have the read lock taken
   */
  private long internalAdd(final Cell toAdd) {
    long s = heapSizeChange(toAdd, addToCellSet(toAdd));
    timeRangeTracker.includeTimestamp(toAdd);
    this.size.addAndGet(s);
    return s;
  }

  // Copies the cell's bytes into the MSLAB when one is configured and the
  // cell is not too large; otherwise returns the cell unchanged.
  private Cell maybeCloneWithAllocator(Cell cell) {
    if (allocator == null) {
      return cell;
    }

    int len = KeyValueUtil.length(cell);
    ByteRange alloc = allocator.allocateBytes(len);
    if (alloc == null) {
      // The allocation was too large, allocator decided
      // not to do anything with it.
      return cell;
    }
    assert alloc.getBytes() != null;
    KeyValueUtil.appendToByteArray(cell, alloc.getBytes(), alloc.getOffset());
    KeyValue newKv = new KeyValue(alloc.getBytes(), alloc.getOffset(), len);
    // Preserve MVCC visibility of the original cell.
    newKv.setSequenceId(cell.getSequenceId());
    return newKv;
  }

  /**
   * Remove n key from the memstore. Only cells that have the same key and the
   * same memstoreTS are removed.  It is ok to not update timeRangeTracker
   * in this call. It is possible that we can optimize this method by using
   * tailMap/iterator, but since this method is called rarely (only for
   * error recovery), we can leave those optimization for the future.
   * @param cell
   */
  @Override
  public void rollback(Cell cell) {
    // If the key is in the snapshot, delete it. We should not update
    // this.size, because that tracks the size of only the memstore and
    // not the snapshot. The flush of this snapshot to disk has not
    // yet started because Store.flush() waits for all rwcc transactions to
    // commit before starting the flush to disk.
    Cell found = this.snapshot.get(cell);
    if (found != null && found.getSequenceId() == cell.getSequenceId()) {
      this.snapshot.remove(cell);
      long sz = heapSizeChange(cell, true);
      this.snapshotSize -= sz;
    }
    // If the key is in the memstore, delete it. Update this.size.
    found = this.cellSet.get(cell);
    if (found != null && found.getSequenceId() == cell.getSequenceId()) {
      removeFromCellSet(cell);
      long s = heapSizeChange(cell, true);
      this.size.addAndGet(-s);
    }
  }

  /**
   * Write a delete
   * @param deleteCell
   * @return approximate size of the passed key and value.
   */
  @Override
  public long delete(Cell deleteCell) {
    // Deletes are stored like any other cell; the type byte distinguishes
    // them at read time.
    long s = 0;
    Cell toAdd = maybeCloneWithAllocator(deleteCell);
    s += heapSizeChange(toAdd, addToCellSet(toAdd));
    timeRangeTracker.includeTimestamp(toAdd);
    this.size.addAndGet(s);
    return s;
  }

  /**
   * @param cell Find the row that comes after this one.  If null, we return the
   *             first.
   * @return Next row or null if none found.
   */
  Cell getNextRow(final Cell cell) {
    return getLowest(getNextRow(cell, this.cellSet),
        getNextRow(cell, this.snapshot));
  }

  /*
   * @param a
   * @param b
   * @return Return lowest of a or b or null if both a and b are null
   */
  private Cell getLowest(final Cell a, final Cell b) {
    if (a == null) {
      return b;
    }
    if (b == null) {
      return a;
    }
    return comparator.compareRows(a, b) <= 0? a: b;
  }

  /*
   * @param key Find row that follows this one.  If null, return first.
   * @param set Set to look in for a row beyond <code>row</code>.
   * @return Next row or null if none found.  If one found, will be a new
   * KeyValue -- can be destroyed by subsequent calls to this method.
   */
  private Cell getNextRow(final Cell key,
      final NavigableSet<Cell> set) {
    Cell result = null;
    SortedSet<Cell> tail = key == null? set: set.tailSet(key);
    // Iterate until we fall into the next row; i.e. move off current row
    for (Cell cell: tail) {
      if (comparator.compareRows(cell, key) <= 0)
        continue;
      // Note: Not suppressing deletes or expired cells. Needs to be handled
      // by higher up functions.
      result = cell;
      break;
    }
    return result;
  }

  /**
   * @param state column/delete tracking state
   */
  @Override
  public void getRowKeyAtOrBefore(final GetClosestRowBeforeTracker state) {
    // Consult both the live set and the snapshot; state accumulates the best
    // candidate seen so far.
    getRowKeyAtOrBefore(cellSet, state);
    getRowKeyAtOrBefore(snapshot, state);
  }

  /*
   * @param set
   * @param state Accumulates deletes and candidates.
   */
  private void getRowKeyAtOrBefore(final NavigableSet<Cell> set,
      final GetClosestRowBeforeTracker state) {
    if (set.isEmpty()) {
      return;
    }
    if (!walkForwardInSingleRow(set, state.getTargetKey(), state)) {
      // Found nothing in row.  Try backing up.
      getRowKeyBefore(set, state);
    }
  }

  /*
   * Walk forward in a row from <code>firstOnRow</code>.  Presumption is that
   * we have been passed the first possible key on a row.  As we walk forward
   * we accumulate deletes until we hit a candidate on the row at which point
   * we return.
   * @param set
   * @param firstOnRow First possible key on this row.
   * @param state
   * @return True if we found a candidate walking this row.
   */
  private boolean walkForwardInSingleRow(final SortedSet<Cell> set,
      final Cell firstOnRow, final GetClosestRowBeforeTracker state) {
    boolean foundCandidate = false;
    SortedSet<Cell> tail = set.tailSet(firstOnRow);
    if (tail.isEmpty()) return foundCandidate;
    for (Iterator<Cell> i = tail.iterator(); i.hasNext();) {
      Cell kv = i.next();
      // Did we go beyond the target row? If so break.
      if (state.isTooFar(kv, firstOnRow)) break;
      if (state.isExpired(kv)) {
        // Expired cells are pruned from the set as a side effect.
        i.remove();
        continue;
      }
      // If we added something, this row is a contender. break.
      if (state.handle(kv)) {
        foundCandidate = true;
        break;
      }
    }
    return foundCandidate;
  }

  /*
   * Walk backwards through the passed set a row at a time until we run out of
   * set or until we get a candidate.
   * @param set
   * @param state
   */
  private void getRowKeyBefore(NavigableSet<Cell> set,
      final GetClosestRowBeforeTracker state) {
    Cell firstOnRow = state.getTargetKey();
    for (Member p = memberOfPreviousRow(set, state, firstOnRow);
        p != null; p = memberOfPreviousRow(p.set, state, firstOnRow)) {
      // Make sure we don't fall out of our table.
      if (!state.isTargetTable(p.cell)) break;
      // Stop looking if we've exited the better candidate range.
      if (!state.isBetterCandidate(p.cell)) break;
      // Make into firstOnRow
      firstOnRow = new KeyValue(p.cell.getRowArray(), p.cell.getRowOffset(),
          p.cell.getRowLength(), HConstants.LATEST_TIMESTAMP);
      // If we find something, break;
      if (walkForwardInSingleRow(p.set, firstOnRow, state)) break;
    }
  }

  /**
   * Only used by tests. TODO: Remove
   *
   * Given the specs of a column, update it, first by inserting a new record,
   * then removing the old one.  Since there is only 1 KeyValue involved, the memstoreTS
   * will be set to 0, thus ensuring that they instantly appear to anyone. The underlying
   * store will ensure that the insert/delete each are atomic. A scanner/reader will either
   * get the new value, or the old value and all readers will eventually only see the new
   * value after the old was removed.
   *
   * @param row
   * @param family
   * @param qualifier
   * @param newValue
   * @param now
   * @return  Timestamp
   */
  @Override
  public long updateColumnValue(byte[] row,
                                byte[] family,
                                byte[] qualifier,
                                long newValue,
                                long now) {
    Cell firstCell = KeyValueUtil.createFirstOnRow(row, family, qualifier);
    // Is there a Cell in 'snapshot' with the same TS? If so, upgrade the timestamp a bit.
    SortedSet<Cell> snSs = snapshot.tailSet(firstCell);
    if (!snSs.isEmpty()) {
      Cell snc = snSs.first();
      // is there a matching Cell in the snapshot?
      if (CellUtil.matchingRow(snc, firstCell) && CellUtil.matchingQualifier(snc, firstCell)) {
        if (snc.getTimestamp() == now) {
          // poop,
          now += 1;
        }
      }
    }

    // logic here: the new ts MUST be at least 'now'. But it could be larger if necessary.
    // But the timestamp should also be max(now, mostRecentTsInMemstore)

    // so we cant add the new Cell w/o knowing what's there already, but we also
    // want to take this chance to delete some cells. So two loops (sad)

    SortedSet<Cell> ss = cellSet.tailSet(firstCell);
    for (Cell cell : ss) {
      // if this isnt the row we are interested in, then bail:
      if (!CellUtil.matchingColumn(cell, family, qualifier)
          || !CellUtil.matchingRow(cell, firstCell)) {
        break; // rows dont match, bail.
      }

      // if the qualifier matches and it's a put, just RM it out of the cellSet.
      if (cell.getTypeByte() == KeyValue.Type.Put.getCode() &&
          cell.getTimestamp() > now && CellUtil.matchingQualifier(firstCell, cell)) {
        now = cell.getTimestamp();
      }
    }

    // create or update (upsert) a new Cell with
    // 'now' and a 0 memstoreTS == immediately visible
    List<Cell> cells = new ArrayList<Cell>(1);
    cells.add(new KeyValue(row, family, qualifier, now, Bytes.toBytes(newValue)));
    return upsert(cells, 1L);
  }

  /**
   * Update or insert the specified KeyValues.
   * <p>
   * For each KeyValue, insert into MemStore.  This will atomically upsert the
   * value for that row/family/qualifier.  If a KeyValue did already exist,
   * it will then be removed.
   * <p>
   * Currently the memstoreTS is kept at 0 so as each insert happens, it will
   * be immediately visible.  May want to change this so it is atomic across
   * all KeyValues.
   * <p>
   * This is called under row lock, so Get operations will still see updates
   * atomically.  Scans will only see each KeyValue update as atomic.
   *
   * @param cells
   * @param readpoint readpoint below which we can safely remove duplicate KVs
   * @return change in memstore size
   */
  @Override
  public long upsert(Iterable<Cell> cells, long readpoint) {
    long size = 0;
    for (Cell cell : cells) {
      size += upsert(cell, readpoint);
    }
    return size;
  }

  /**
   * Inserts the specified KeyValue into MemStore and deletes any existing
   * versions of the same row/family/qualifier as the specified KeyValue.
   * <p>
   * First, the specified KeyValue is inserted into the Memstore.
   * <p>
   * If there are any existing KeyValues in this MemStore with the same row,
   * family, and qualifier, they are removed.
   * <p>
   * Callers must hold the read lock.
   *
   * @param cell
   * @return change in size of MemStore
   */
  private long upsert(Cell cell, long readpoint) {
    // Add the Cell to the MemStore
    // Use the internalAdd method here since we (a) already have a lock
    // and (b) cannot safely use the MSLAB here without potentially
    // hitting OOME - see TestMemStore.testUpsertMSLAB for a
    // test that triggers the pathological case if we don't avoid MSLAB
    // here.
    long addedSize = internalAdd(cell);

    // Get the Cells for the row/family/qualifier regardless of timestamp.
    // For this case we want to clean up any other puts
    Cell firstCell = KeyValueUtil.createFirstOnRow(
        cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
        cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(),
        cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
    SortedSet<Cell> ss = cellSet.tailSet(firstCell);
    Iterator<Cell> it = ss.iterator();
    // versions visible to oldest scanner
    int versionsVisible = 0;
    while ( it.hasNext() ) {
      Cell cur = it.next();

      if (cell == cur) {
        // ignore the one just put in
        continue;
      }
      // check that this is the row and column we are interested in, otherwise bail
      if (CellUtil.matchingRow(cell, cur) && CellUtil.matchingQualifier(cell, cur)) {
        // only remove Puts that concurrent scanners cannot possibly see
        if (cur.getTypeByte() == KeyValue.Type.Put.getCode() &&
            cur.getSequenceId() <= readpoint) {
          if (versionsVisible >= 1) {
            // if we get here we have seen at least one version visible to the oldest scanner,
            // which means we can prove that no scanner will see this version

            // false means there was a change, so give us the size.
            long delta = heapSizeChange(cur, true);
            addedSize -= delta;
            this.size.addAndGet(-delta);
            it.remove();
            setOldestEditTimeToNow();
          } else {
            versionsVisible++;
          }
        }
      } else {
        // past the row or column, done
        break;
      }
    }
    return addedSize;
  }

  /*
   * Immutable data structure to hold member found in set and the set it was
   * found in.  Include set because it is carrying context.
   */
  private static class Member {
    final Cell cell;
    final NavigableSet<Cell> set;
    Member(final NavigableSet<Cell> s, final Cell kv) {
      this.cell = kv;
      this.set = s;
    }
  }

  /*
   * @param set Set to walk back in.  Pass a first in row or we'll return
   * same row (loop).
   * @param state Utility and context.
   * @param firstOnRow First item on the row after the one we want to find a
   * member in.
   * @return Null or member of row previous to <code>firstOnRow</code>
   */
  private Member memberOfPreviousRow(NavigableSet<Cell> set,
      final GetClosestRowBeforeTracker state, final Cell firstOnRow) {
    NavigableSet<Cell> head = set.headSet(firstOnRow, false);
    if (head.isEmpty()) return null;
    for (Iterator<Cell> i = head.descendingIterator(); i.hasNext();) {
      Cell found = i.next();
      if (state.isExpired(found)) {
        // Prune expired cells as we walk backwards.
        i.remove();
        continue;
      }
      return new Member(head, found);
    }
    return null;
  }

  /**
   * @return scanner on memstore and snapshot in this order.
   */
  @Override
  public List<KeyValueScanner> getScanners(long readPt) {
    return Collections.<KeyValueScanner> singletonList(new MemStoreScanner(readPt));
  }

  /**
   * Check if this memstore may contain the required keys
   * @param scan
   * @param oldestUnexpiredTS cells older than this are considered expired
   * @return False if the key definitely does not exist in this Memstore
   */
  public boolean shouldSeek(Scan scan, long oldestUnexpiredTS) {
    return (timeRangeTracker.includesTimeRange(scan.getTimeRange()) ||
        snapshotTimeRangeTracker.includesTimeRange(scan.getTimeRange()))
        && (Math.max(timeRangeTracker.getMaximumTimestamp(),
                     snapshotTimeRangeTracker.getMaximumTimestamp()) >=
            oldestUnexpiredTS);
  }

  /*
   * MemStoreScanner implements the KeyValueScanner.
   * It lets the caller scan the contents of a memstore -- both current
   * map and snapshot.
   * This behaves as if it were a real scanner but does not maintain position.
   */
  protected class MemStoreScanner extends NonLazyKeyValueScanner {
    // Next row information for either cellSet or snapshot
    private Cell cellSetNextRow = null;
    private Cell snapshotNextRow = null;

    // last iterated Cells for cellSet and snapshot (to restore iterator state after reseek)
    private Cell cellSetItRow = null;
    private Cell snapshotItRow = null;

    // iterator based scanning.
    private Iterator<Cell> cellSetIt;
    private Iterator<Cell> snapshotIt;

    // The cellSet and snapshot at the time of creating this scanner
    private CellSkipListSet cellSetAtCreation;
    private CellSkipListSet snapshotAtCreation;

    // the pre-calculated Cell to be returned by peek() or next()
    private Cell theNext;

    // The allocator and snapshot allocator at the time of creating this scanner
    volatile MemStoreLAB allocatorAtCreation;
    volatile MemStoreLAB snapshotAllocatorAtCreation;

    // A flag represents whether could stop skipping Cells for MVCC
    // if have encountered the next row. Only used for reversed scan
    private boolean stopSkippingCellsIfNextRow = false;

    private long readPoint;

    /*
    Some notes...

     So memstorescanner is fixed at creation time. this includes pointers/iterators into
    existing kvset/snapshot.  during a snapshot creation, the kvset is null, and the
    snapshot is moved.  since kvset is null there is no point on reseeking on both,
      we can save us the trouble. During the snapshot->hfile transition, the memstore
      scanner is re-created by StoreScanner#updateReaders().  StoreScanner should
      potentially do something smarter by adjusting the existing memstore scanner.

      But there is a greater problem here, that being once a scanner has progressed
      during a snapshot scenario, we currently iterate past the kvset then 'finish' up.
      if a scan lasts a little while, there is a chance for new entries in kvset to
      become available but we will never see them.  This needs to be handled at the
      StoreScanner level with coordination with MemStoreScanner.

      Currently, this problem is only partly managed: during the small amount of time
      when the StoreScanner has not yet created a new MemStoreScanner, we will miss
      the adds to kvset in the MemStoreScanner.
    */

    MemStoreScanner(long readPoint) {
      super();

      this.readPoint = readPoint;
      cellSetAtCreation = cellSet;
      snapshotAtCreation = snapshot;
      if (allocator != null) {
        // Pin the allocators so their chunks are not recycled while this
        // scanner may still hand out cells backed by them.
        this.allocatorAtCreation = allocator;
        this.allocatorAtCreation.incScannerCount();
      }
      if (snapshotAllocator != null) {
        this.snapshotAllocatorAtCreation = snapshotAllocator;
        this.snapshotAllocatorAtCreation.incScannerCount();
      }
      if (Trace.isTracing() && Trace.currentSpan() != null) {
        Trace.currentSpan().addTimelineAnnotation("Creating MemStoreScanner");
      }
    }

    /**
     * Lock on 'this' must be held by caller.
     * @param it
     * @return Next Cell visible at this scanner's read point, or null.
     */
    private Cell getNext(Iterator<Cell> it) {
      Cell startCell = theNext;
      Cell v = null;
      try {
        while (it.hasNext()) {
          v = it.next();
          // Skip cells newer than our MVCC read point.
          if (v.getSequenceId() <= this.readPoint) {
            return v;
          }
          // Reversed scan: once we leave the starting row, stop skipping.
          if (stopSkippingCellsIfNextRow && startCell != null
              && comparator.compareRows(v, startCell) > 0) {
            return null;
          }
        }

        return null;
      } finally {
        if (v != null) {
          // in all cases, remember the last Cell iterated to
          if (it == snapshotIt) {
            snapshotItRow = v;
          } else {
            cellSetItRow = v;
          }
        }
      }
    }

    /**
     *  Set the scanner at the seek key.
     *  Must be called only once: there is no thread safety between the scanner
     *   and the memStore.
     * @param key seek value
     * @return false if the key is null or if there is no data
     */
    @Override
    public synchronized boolean seek(Cell key) {
      if (key == null) {
        close();
        return false;
      }
      // kvset and snapshot will never be null.
      // if tailSet can't find anything, SortedSet is empty (not null).
      cellSetIt = cellSetAtCreation.tailSet(key).iterator();
      snapshotIt = snapshotAtCreation.tailSet(key).iterator();
      cellSetItRow = null;
      snapshotItRow = null;

      return seekInSubLists(key);
    }

    /**
     * (Re)initialize the iterators after a seek or a reseek.
     */
    private synchronized boolean seekInSubLists(Cell key){
      cellSetNextRow = getNext(cellSetIt);
      snapshotNextRow = getNext(snapshotIt);

      // Calculate the next value
      theNext = getLowest(cellSetNextRow, snapshotNextRow);

      // has data
      return (theNext != null);
    }

    /**
     * Move forward on the sub-lists set previously by seek.
     * @param key seek value (should be non-null)
     * @return true if there is at least one KV to read, false otherwise
     */
    @Override
    public synchronized boolean reseek(Cell key) {
      /*
      See HBASE-4195 & HBASE-3855 & HBASE-6591 for the background on this implementation.
      This code is executed concurrently with flush and puts, without locks.
      Two points must be known when working on this code:
      1) It's not possible to use the 'kvTail' and 'snapshot'
       variables, as they are modified during a flush.
      2) The ideal implementation for performance would use the sub skip list
       implicitly pointed by the iterators 'kvsetIt' and
       'snapshotIt'. Unfortunately the Java API does not offer a method to
       get it. So we remember the last keys we iterated to and restore
       the reseeked set to at least that point.
       */
      cellSetIt = cellSetAtCreation.tailSet(getHighest(key, cellSetItRow)).iterator();
      snapshotIt = snapshotAtCreation.tailSet(getHighest(key, snapshotItRow)).iterator();

      return seekInSubLists(key);
    }

    @Override
    public synchronized Cell peek() {
      //DebugPrint.println(" MS@" + hashCode() + " peek = " + getLowest());
      return theNext;
    }

    @Override
    public synchronized Cell next() {
      if (theNext == null) {
          return null;
      }

      final Cell ret = theNext;

      // Advance one of the iterators
      if (theNext == cellSetNextRow) {
        cellSetNextRow = getNext(cellSetIt);
      } else {
        snapshotNextRow = getNext(snapshotIt);
      }

      // Calculate the next value
      theNext = getLowest(cellSetNextRow, snapshotNextRow);

      //long readpoint = ReadWriteConsistencyControl.getThreadReadPoint();
      //DebugPrint.println(" MS@" + hashCode() + " next: " + theNext + " next_next: " +
      //    getLowest() + " threadpoint=" + readpoint);
      return ret;
    }

    /*
     * Returns the lower of the two key values, or null if they are both null.
     * This uses comparator.compare() to compare the KeyValue using the memstore
     * comparator.
     */
    private Cell getLowest(Cell first, Cell second) {
      if (first == null && second == null) {
        return null;
      }
      if (first != null && second != null) {
        int compare = comparator.compare(first, second);
        return (compare <= 0 ? first : second);
      }
      return (first != null ? first : second);
    }

    /*
     * Returns the higher of the two cells, or null if they are both null.
     * This uses comparator.compare() to compare the Cell using the memstore
     * comparator.
     */
    private Cell getHighest(Cell first, Cell second) {
      if (first == null && second == null) {
        return null;
      }
      if (first != null && second != null) {
        int compare = comparator.compare(first, second);
        return (compare > 0 ? first : second);
      }
      return (first != null ? first : second);
    }

    public synchronized void close() {
      this.cellSetNextRow = null;
      this.snapshotNextRow = null;

      this.cellSetIt = null;
      this.snapshotIt = null;

      // Unpin the allocators so their chunks may be recycled.
      if (allocatorAtCreation != null) {
        this.allocatorAtCreation.decScannerCount();
        this.allocatorAtCreation = null;
      }
      if (snapshotAllocatorAtCreation != null) {
        this.snapshotAllocatorAtCreation.decScannerCount();
        this.snapshotAllocatorAtCreation = null;
      }

      this.cellSetItRow = null;
      this.snapshotItRow = null;
    }

    /**
     * MemStoreScanner returns max value as sequence id because it will
     * always have the latest data among all files.
     */
    @Override
    public long getSequenceID() {
      return Long.MAX_VALUE;
    }

    @Override
    public boolean shouldUseScanner(Scan scan, SortedSet<byte[]> columns,
        long oldestUnexpiredTS) {
      return shouldSeek(scan, oldestUnexpiredTS);
    }

    /**
     * Seek scanner to the given key first. If it returns false(means
     * peek()==null) or scanner's peek row is bigger than row of given key, seek
     * the scanner to the previous row of given key
     */
    @Override
    public synchronized boolean backwardSeek(Cell key) {
      seek(key);
      if (peek() == null || comparator.compareRows(peek(), key) > 0) {
        return seekToPreviousRow(key);
      }
      return true;
    }

    /**
     * Separately get the KeyValue before the specified key from kvset and
     * snapshotset, and use the row of higher one as the previous row of
     * specified key, then seek to the first KeyValue of previous row
     */
    @Override
    public synchronized boolean seekToPreviousRow(Cell key) {
      Cell firstKeyOnRow = KeyValueUtil.createFirstOnRow(key.getRowArray(), key.getRowOffset(),
          key.getRowLength());
      SortedSet<Cell> cellHead = cellSetAtCreation.headSet(firstKeyOnRow);
      Cell cellSetBeforeRow = cellHead.isEmpty() ? null : cellHead.last();
      SortedSet<Cell> snapshotHead = snapshotAtCreation
          .headSet(firstKeyOnRow);
      Cell snapshotBeforeRow = snapshotHead.isEmpty() ? null : snapshotHead
          .last();
      Cell lastCellBeforeRow = getHighest(cellSetBeforeRow, snapshotBeforeRow);
      if (lastCellBeforeRow == null) {
        theNext = null;
        return false;
      }
      Cell firstKeyOnPreviousRow = KeyValueUtil.createFirstOnRow(lastCellBeforeRow.getRowArray(),
          lastCellBeforeRow.getRowOffset(), lastCellBeforeRow.getRowLength());
      this.stopSkippingCellsIfNextRow = true;
      seek(firstKeyOnPreviousRow);
      this.stopSkippingCellsIfNextRow = false;
      // Everything on that row was filtered by MVCC; recurse one row further back.
      if (peek() == null
          || comparator.compareRows(peek(), firstKeyOnPreviousRow) > 0) {
        return seekToPreviousRow(lastCellBeforeRow);
      }
      return true;
    }

    @Override
    public synchronized boolean seekToLastRow() {
      Cell first = cellSetAtCreation.isEmpty() ? null : cellSetAtCreation
          .last();
      Cell second = snapshotAtCreation.isEmpty() ? null
          : snapshotAtCreation.last();
      Cell higherCell = getHighest(first, second);
      if (higherCell == null) {
        return false;
      }
      Cell firstCellOnLastRow = KeyValueUtil.createFirstOnRow(higherCell.getRowArray(),
          higherCell.getRowOffset(), higherCell.getRowLength());
      if (seek(firstCellOnLastRow)) {
        return true;
      } else {
        return seekToPreviousRow(higherCell);
      }
    }
  }

  // Shallow object overhead: object header + 9 references + 3 longs.
  public final static long FIXED_OVERHEAD = ClassSize.align(
      ClassSize.OBJECT + (9 * ClassSize.REFERENCE) + (3 * Bytes.SIZEOF_LONG));

  // Deep overhead: fixed plus the always-present owned structures
  // (AtomicLong, two trackers, two cell sets and their backing skip lists).
  public final static long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD +
      ClassSize.ATOMIC_LONG + (2 * ClassSize.TIMERANGE_TRACKER) +
      (2 * ClassSize.CELL_SKIPLIST_SET) + (2 * ClassSize.CONCURRENT_SKIPLISTMAP));

  /*
   * Calculate how the MemStore size has changed.  Includes overhead of the
   * backing Map.
   * @param cell
   * @param notpresent True if the cell was NOT present in the set.
   * @return Size
   */
  static long heapSizeChange(final Cell cell, final boolean notpresent) {
    return notpresent ? ClassSize.align(ClassSize.CONCURRENT_SKIPLISTMAP_ENTRY
        + CellUtil.estimatedHeapSizeOf(cell)) : 0;
  }

  // Heap used by keys alone, i.e. total heap minus the fixed structure cost.
  private long keySize() {
    return heapSize() - DEEP_OVERHEAD;
  }

  /**
   * Get the entire heap usage for this MemStore not including keys in the
   * snapshot.
   */
  @Override
  public long heapSize() {
    return size.get();
  }

  @Override
  public long size() {
    return heapSize();
  }

  /**
   * Code to help figure if our approximation of object heap sizes is close
   * enough.  See hbase-900.  Fills memstores then waits so user can heap
   * dump and bring up resultant hprof in something like jprofiler which
   * allows you get 'deep size' on objects.
   * @param args main args
   */
  public static void main(String [] args) {
    RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean();
    LOG.info("vmName=" + runtime.getVmName() + ", vmVendor=" +
      runtime.getVmVendor() + ", vmVersion=" + runtime.getVmVersion());
    LOG.info("vmInputArguments=" + runtime.getInputArguments());
    DefaultMemStore memstore1 = new DefaultMemStore();
    // TODO: x32 vs x64
    long size = 0;
    final int count = 10000;
    byte [] fam = Bytes.toBytes("col");
    byte [] qf = Bytes.toBytes("umn");
    byte [] empty = new byte[0];
    for (int i = 0; i < count; i++) {
      // Give each its own ts
      Pair<Long, Cell> ret = memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty));
      size += ret.getFirst();
    }
    LOG.info("memstore1 estimated size=" + size);
    for (int i = 0; i < count; i++) {
      Pair<Long, Cell> ret = memstore1.add(new KeyValue(Bytes.toBytes(i), fam, qf, i, empty));
      size += ret.getFirst();
    }
    LOG.info("memstore1 estimated size (2nd loading of same data)=" + size);
    // Make a variably sized memstore.
    DefaultMemStore memstore2 = new DefaultMemStore();
    for (int i = 0; i < count; i++) {
      Pair<Long, Cell> ret = memstore2.add(new KeyValue(Bytes.toBytes(i), fam, qf, i,
          new byte[i]));
      size += ret.getFirst();
    }
    LOG.info("memstore2 estimated size=" + size);
    final int seconds = 30;
    LOG.info("Waiting " + seconds + " seconds while heap dump is taken");
    for (int i = 0; i < seconds; i++) {
      // Thread.sleep(1000);
    }
    LOG.info("Exiting.");
  }
}
/* * Copyright 2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package griffon.pivot.support.adapters; import groovy.lang.Closure; /** * @author Andres Almiray */ public class TablePaneAdapter implements GriffonPivotAdapter, org.apache.pivot.wtk.TablePaneListener { private Closure rowsRemoved; private Closure rowInserted; private Closure cellInserted; private Closure cellsRemoved; private Closure cellUpdated; private Closure rowHeightChanged; private Closure rowHighlightedChanged; private Closure columnInserted; private Closure columnsRemoved; private Closure columnWidthChanged; private Closure columnHighlightedChanged; public Closure getRowsRemoved() { return this.rowsRemoved; } public Closure getRowInserted() { return this.rowInserted; } public Closure getCellInserted() { return this.cellInserted; } public Closure getCellsRemoved() { return this.cellsRemoved; } public Closure getCellUpdated() { return this.cellUpdated; } public Closure getRowHeightChanged() { return this.rowHeightChanged; } public Closure getRowHighlightedChanged() { return this.rowHighlightedChanged; } public Closure getColumnInserted() { return this.columnInserted; } public Closure getColumnsRemoved() { return this.columnsRemoved; } public Closure getColumnWidthChanged() { return this.columnWidthChanged; } public Closure getColumnHighlightedChanged() { return this.columnHighlightedChanged; } public void setRowsRemoved(Closure rowsRemoved) { this.rowsRemoved = 
rowsRemoved; if (this.rowsRemoved != null) { this.rowsRemoved.setDelegate(this); this.rowsRemoved.setResolveStrategy(Closure.DELEGATE_FIRST); } } public void setRowInserted(Closure rowInserted) { this.rowInserted = rowInserted; if (this.rowInserted != null) { this.rowInserted.setDelegate(this); this.rowInserted.setResolveStrategy(Closure.DELEGATE_FIRST); } } public void setCellInserted(Closure cellInserted) { this.cellInserted = cellInserted; if (this.cellInserted != null) { this.cellInserted.setDelegate(this); this.cellInserted.setResolveStrategy(Closure.DELEGATE_FIRST); } } public void setCellsRemoved(Closure cellsRemoved) { this.cellsRemoved = cellsRemoved; if (this.cellsRemoved != null) { this.cellsRemoved.setDelegate(this); this.cellsRemoved.setResolveStrategy(Closure.DELEGATE_FIRST); } } public void setCellUpdated(Closure cellUpdated) { this.cellUpdated = cellUpdated; if (this.cellUpdated != null) { this.cellUpdated.setDelegate(this); this.cellUpdated.setResolveStrategy(Closure.DELEGATE_FIRST); } } public void setRowHeightChanged(Closure rowHeightChanged) { this.rowHeightChanged = rowHeightChanged; if (this.rowHeightChanged != null) { this.rowHeightChanged.setDelegate(this); this.rowHeightChanged.setResolveStrategy(Closure.DELEGATE_FIRST); } } public void setRowHighlightedChanged(Closure rowHighlightedChanged) { this.rowHighlightedChanged = rowHighlightedChanged; if (this.rowHighlightedChanged != null) { this.rowHighlightedChanged.setDelegate(this); this.rowHighlightedChanged.setResolveStrategy(Closure.DELEGATE_FIRST); } } public void setColumnInserted(Closure columnInserted) { this.columnInserted = columnInserted; if (this.columnInserted != null) { this.columnInserted.setDelegate(this); this.columnInserted.setResolveStrategy(Closure.DELEGATE_FIRST); } } public void setColumnsRemoved(Closure columnsRemoved) { this.columnsRemoved = columnsRemoved; if (this.columnsRemoved != null) { this.columnsRemoved.setDelegate(this); 
this.columnsRemoved.setResolveStrategy(Closure.DELEGATE_FIRST); } } public void setColumnWidthChanged(Closure columnWidthChanged) { this.columnWidthChanged = columnWidthChanged; if (this.columnWidthChanged != null) { this.columnWidthChanged.setDelegate(this); this.columnWidthChanged.setResolveStrategy(Closure.DELEGATE_FIRST); } } public void setColumnHighlightedChanged(Closure columnHighlightedChanged) { this.columnHighlightedChanged = columnHighlightedChanged; if (this.columnHighlightedChanged != null) { this.columnHighlightedChanged.setDelegate(this); this.columnHighlightedChanged.setResolveStrategy(Closure.DELEGATE_FIRST); } } public void rowsRemoved(org.apache.pivot.wtk.TablePane arg0, int arg1, org.apache.pivot.collections.Sequence arg2) { if (rowsRemoved != null) { rowsRemoved.call(arg0, arg1, arg2); } } public void rowInserted(org.apache.pivot.wtk.TablePane arg0, int arg1) { if (rowInserted != null) { rowInserted.call(arg0, arg1); } } public void cellInserted(org.apache.pivot.wtk.TablePane.Row arg0, int arg1) { if (cellInserted != null) { cellInserted.call(arg0, arg1); } } public void cellsRemoved(org.apache.pivot.wtk.TablePane.Row arg0, int arg1, org.apache.pivot.collections.Sequence arg2) { if (cellsRemoved != null) { cellsRemoved.call(arg0, arg1, arg2); } } public void cellUpdated(org.apache.pivot.wtk.TablePane.Row arg0, int arg1, org.apache.pivot.wtk.Component arg2) { if (cellUpdated != null) { cellUpdated.call(arg0, arg1, arg2); } } public void rowHeightChanged(org.apache.pivot.wtk.TablePane.Row arg0, int arg1, boolean arg2) { if (rowHeightChanged != null) { rowHeightChanged.call(arg0, arg1, arg2); } } public void rowHighlightedChanged(org.apache.pivot.wtk.TablePane.Row arg0) { if (rowHighlightedChanged != null) { rowHighlightedChanged.call(arg0); } } public void columnInserted(org.apache.pivot.wtk.TablePane arg0, int arg1) { if (columnInserted != null) { columnInserted.call(arg0, arg1); } } public void columnsRemoved(org.apache.pivot.wtk.TablePane 
arg0, int arg1, org.apache.pivot.collections.Sequence arg2) { if (columnsRemoved != null) { columnsRemoved.call(arg0, arg1, arg2); } } public void columnWidthChanged(org.apache.pivot.wtk.TablePane.Column arg0, int arg1, boolean arg2) { if (columnWidthChanged != null) { columnWidthChanged.call(arg0, arg1, arg2); } } public void columnHighlightedChanged(org.apache.pivot.wtk.TablePane.Column arg0) { if (columnHighlightedChanged != null) { columnHighlightedChanged.call(arg0); } } }
package com.arellomobile.mvp;

import java.util.ArrayList;
import java.util.List;

import android.os.Bundle;

import com.arellomobile.mvp.presenter.PresenterType;

/**
 * Date: 18-Dec-15
 * Time: 13:51
 * <p>
 * This class represents a delegate which you can use to extend Mvp's support to any class.
 * <p>
 * When using an {@link MvpDelegate}, lifecycle methods which should be proxied to the delegate:
 * <ul>
 * <li>{@link #onCreate(Bundle)}</li>
 * <li>{@link #onAttach()}: inside onStart() of Activity or Fragment</li>
 * <li>{@link #onSaveInstanceState(android.os.Bundle)}</li>
 * <li>{@link #onDetach()}: inside onDestroy() for Activity or onDestroyView() for Fragment</li>
 * <li>{@link #onDestroy()}</li>
 * </ul>
 * <p>
 * Every {@link Object} can only be linked with one {@link MvpDelegate} instance,
 * so the instance returned from {@link #MvpDelegate(Object)}} should be kept
 * until the Object is destroyed.
 *
 * @author Yuri Shmakov
 * @author Alexander Blinov
 * @author Konstantin Tckhovrebov
 */
public class MvpDelegate<Delegated> {
	private static final String KEY_TAG = "com.arellomobile.mvp.MvpDelegate.KEY_TAG";

	// Bundle key under which this delegate's tag is persisted; child delegates
	// derive their own key from the parent's in setParentDelegate().
	private String mKeyTag = KEY_TAG;
	private String mDelegateTag;
	private final Delegated mDelegated;
	private boolean mIsAttached;
	private MvpDelegate mParentDelegate;
	private List<MvpPresenter<? super Delegated>> mPresenters;
	private List<MvpDelegate> mChildDelegates;
	private Bundle mBundle;
	private Bundle mChildKeyTagsBundle;

	public MvpDelegate(Delegated delegated) {
		mDelegated = delegated;
		mChildDelegates = new ArrayList<>();
		mChildKeyTagsBundle = new Bundle();
	}

	/**
	 * Links this delegate to a parent delegate so its saved state is nested under
	 * the parent's. Must be called before the first {@link #onCreate(Bundle)} and
	 * before any child delegates are registered.
	 *
	 * @param delegate parent delegate
	 * @param childId  stable id distinguishing this child within the parent
	 * @throws IllegalStateException if called after onCreate() or after children were added
	 */
	public void setParentDelegate(MvpDelegate delegate, String childId) {
		if (mBundle != null) {
			throw new IllegalStateException("You should call setParentDelegate() before first onCreate()");
		}
		if (mChildDelegates != null && mChildDelegates.size() > 0) {
			throw new IllegalStateException("You could not set parent delegate when it already has child presenters");
		}

		mParentDelegate = delegate;
		mKeyTag = mParentDelegate.mKeyTag + "$" + childId;

		delegate.addChildDelegate(this);
	}

	private void addChildDelegate(MvpDelegate delegate) {
		mChildDelegates.add(delegate);
	}

	/**
	 * <p>Similar like {@link #onCreate(Bundle)}. But this method try to get saved
	 * state from parent presenter before get presenters</p>
	 */
	public void onCreate() {
		Bundle bundle = new Bundle();
		if (mParentDelegate != null) {
			bundle = mParentDelegate.mBundle;
		}

		onCreate(bundle);
	}

	/**
	 * <p>Get(or create if not exists) presenters for delegated object and bind
	 * them to this object fields</p>
	 *
	 * @param bundle with saved state
	 */
	public void onCreate(Bundle bundle) {
		mIsAttached = false;
		mBundle = bundle != null ? bundle : new Bundle();

		// Reuse the persisted tag when one was saved; otherwise generate a fresh one
		// so presenters created for this delegate can be found again after recreation.
		if (bundle == null || !mBundle.containsKey(mKeyTag)) {
			mDelegateTag = generateTag();
		} else {
			mDelegateTag = bundle.getString(mKeyTag);
		}

		// Bind presenters to view.
		mPresenters = MvpFacade.getInstance().getMvpProcessor().getMvpPresenters(mDelegated, mDelegateTag);

		// NOTE(review): children receive the raw (possibly null) bundle, not mBundle —
		// presumably intentional so each child falls back to generateTag(); verify.
		for (MvpDelegate childDelegate : mChildDelegates) {
			childDelegate.onCreate(bundle);
		}
	}

	/**
	 * <p>Attach delegated object as view to presenter fields of this object.
	 * If delegate did not enter at {@link #onCreate(Bundle)}(or
	 * {@link #onCreate()}) before this method, then view will not be attached to
	 * presenters</p>
	 */
	public void onAttach() {
		for (MvpPresenter<? super Delegated> presenter : mPresenters) {
			// Skip presenters that already hold this view from a previous attach.
			if (mIsAttached && presenter.getAttachedViews().contains(mDelegated)) {
				continue;
			}

			presenter.attachView(mDelegated);
		}

		for (MvpDelegate<?> childDelegate : mChildDelegates) {
			childDelegate.onAttach();
		}

		mIsAttached = true;
	}

	/**
	 * <p>Detach delegated object from their presenters.</p>
	 */
	public void onDetach() {
		for (MvpPresenter<? super Delegated> presenter : mPresenters) {
			// Skip presenters that never had this view attached.
			if (!mIsAttached && !presenter.getAttachedViews().contains(mDelegated)) {
				continue;
			}

			presenter.detachView(mDelegated);
		}

		mIsAttached = false;

		for (MvpDelegate<?> childDelegate : mChildDelegates) {
			childDelegate.onDetach();
		}
	}

	/**
	 * <p>View was being destroyed, but logical unit still alive</p>
	 */
	public void onDestroyView() {
		for (MvpPresenter<? super Delegated> presenter : mPresenters) {
			presenter.destroyView(mDelegated);
		}

		for (MvpDelegate<?> childDelegate : mChildDelegates) {
			childDelegate.onDestroyView();
		}
	}

	/**
	 * <p>Destroy presenters.</p>
	 */
	public void onDestroy() {
		PresentersCounter presentersCounter = MvpFacade.getInstance().getPresentersCounter();
		PresenterStore presenterStore = MvpFacade.getInstance().getPresenterStore();

		for (MvpPresenter<?> presenter : mPresenters) {
			boolean isRejected = presentersCounter.rejectPresenter(presenter, mDelegateTag);
			// Global presenters outlive any single delegate; never destroy them here.
			if (isRejected && presenter.getPresenterType() != PresenterType.GLOBAL) {
				presenterStore.remove(presenter.getPresenterType(), presenter.getTag(), presenter.getPresenterClass());
				presenter.onDestroy();
			}
		}

		for (MvpDelegate<?> childDelegate : mChildDelegates) {
			childDelegate.onDestroy();
		}
	}

	/**
	 * <p>Similar like {@link #onSaveInstanceState(Bundle)}. But this method try to save
	 * state to parent presenter Bundle</p>
	 */
	public void onSaveInstanceState() {
		Bundle bundle = new Bundle();
		if (mParentDelegate != null) {
			bundle = mParentDelegate.mChildKeyTagsBundle;
		}

		onSaveInstanceState(bundle);

		// BUGFIX: the parent was dereferenced unconditionally, throwing a
		// NullPointerException for any delegate without a parent. Only merge the
		// saved state upward when a parent actually exists.
		if (mParentDelegate != null) {
			mParentDelegate.mBundle.putAll(bundle);
		}
	}

	/**
	 * Save presenters tag prefix to save state for restore presenters at future after delegate recreate
	 *
	 * @param outState out state from Android component
	 */
	public void onSaveInstanceState(Bundle outState) {
		outState.putAll(mChildKeyTagsBundle);
		outState.putString(mKeyTag, mDelegateTag);

		for (MvpDelegate childDelegate : mChildDelegates) {
			childDelegate.onSaveInstanceState(outState);
		}
	}

	public Bundle getChildrenSaveState() {
		return mBundle;
	}

	/**
	 * @return generated tag in format: Delegated_class_full_name$MvpDelegate@hashCode
	 * <p>
	 * example: com.arellomobile.com.arellomobile.mvp.sample.SampleFragment$MvpDelegate@32649b0
	 */
	private String generateTag() {
		return mDelegated.getClass().getName() + "$" + getClass().getSimpleName() + toString().replace(getClass().getName(), "");
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.wan.misc;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.GemFireConfigException;
import org.apache.geode.IncompatibleSystemException;
import org.apache.geode.internal.AvailablePortHelper;
import org.apache.geode.internal.cache.wan.WANTestBase;
import org.apache.geode.logging.internal.OSProcess;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.AsyncInvocation;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.junit.categories.WanTest;

/**
 * Distributed tests that start WAN locators in separate DUnit VMs and verify
 * that every site discovers the locators of every other site.
 */
@Category({WanTest.class})
public class WanAutoDiscoveryDUnitTest extends WANTestBase {

  public WanAutoDiscoveryDUnitTest() {
    super();
  }

  @Override
  protected void postSetUpWANTestBase() throws Exception {
    // NOTE(review): 'host' is unused; presumably kept to force Host lookup during setup — verify.
    final Host host = Host.getHost(0);
  }

  /**
   * Test to validate that sender can not be started without locator started. else
   * GemFireConfigException will be thrown.
   */
  @Test
  public void test_GatewaySender_Started_Before_Locator() {
    try {
      int port = AvailablePortHelper.getRandomAvailableTCPPort();
      vm0.invoke(() -> WANTestBase.createCache(port));
      vm0.invoke(
          () -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, false));
      fail("Expected GemFireConfigException but not thrown");
    } catch (Exception e) {
      if (!(e.getCause() instanceof GemFireConfigException)) {
        Assert.fail("Expected GemFireConfigException but received :", e);
      }
    }
  }

  /**
   * Test to validate that all locators in one DS should have same name. Though this test passes, it
   * causes other below tests to fail. In this test, VM1 is throwing IncompatibleSystemException
   * after startInitLocator. I think, after throwing this exception, locator is not stopped properly
   * and hence other tests are failing.
   */
  @Ignore
  @Test
  public void test_AllLocatorsInDSShouldHaveDistributedSystemId() throws Exception {
    try {
      Integer lnLocPort1 = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));

      Integer lnLocPort2 = (Integer) vm1.invoke(() -> WANTestBase.createSecondLocator(2, lnLocPort1));

      fail("Expected IncompatibleSystemException but not thrown");
    } catch (Exception e) {
      if (!(e.getCause() instanceof IncompatibleSystemException)) {
        Assert.fail("Expected IncompatibleSystemException but received :", e);
      }
    }
  }

  /**
   * Test to validate that multiple locators added on LN site and multiple locators on Ny site
   * recognizes each other
   */
  @Test
  public void test_NY_Recognises_ALL_LN_Locators() throws Exception {
    Set<InetSocketAddress> locatorPorts = new HashSet<>();
    Map<Integer, Set<InetSocketAddress>> dsVsPort = new HashMap<>();
    dsVsPort.put(1, locatorPorts);

    Integer lnLocPort1 = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
    locatorPorts.add(new InetSocketAddress("localhost", lnLocPort1));

    Integer lnLocPort2 = (Integer) vm1.invoke(() -> WANTestBase.createSecondLocator(1, lnLocPort1));
    locatorPorts.add(new InetSocketAddress("localhost", lnLocPort2));

    // Fresh set for site 2 (NY); the previous reference is already stored in the map.
    locatorPorts = new HashSet<>();
    dsVsPort.put(2, locatorPorts);
    Integer nyLocPort1 =
        (Integer) vm2.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnLocPort1));
    locatorPorts.add(new InetSocketAddress("localhost", nyLocPort1));

    Integer nyLocPort2 = (Integer) vm3
        .invoke(() -> WANTestBase.createSecondRemoteLocator(2, nyLocPort1, lnLocPort1));
    locatorPorts.add(new InetSocketAddress("localhost", nyLocPort2));

    final int siteSizeToCheck = dsVsPort.size();
    vm0.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm1.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm2.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm3.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
  }

  /**
   * Test to validate that multiple locators added two sets receive each other's hostname for client
   * setting even when the locator is started through the API.
   */
  @Test
  public void locatorsReceiveHostnameForClientsFromRemoteSite() throws Exception {
    Set<InetSocketAddress> locatorPorts = new HashSet<>();
    Map<Integer, Set<InetSocketAddress>> dsVsPort = new HashMap<>();
    dsVsPort.put(1, locatorPorts);

    Integer lnLocPort1 = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
    locatorPorts.add(new InetSocketAddress("localhost", lnLocPort1));

    Integer lnLocPort2 = (Integer) vm1.invoke(() -> WANTestBase.createSecondLocator(1, lnLocPort1));
    locatorPorts.add(new InetSocketAddress("localhost", lnLocPort2));

    locatorPorts = new HashSet<>();
    dsVsPort.put(2, locatorPorts);
    Integer nyLocPort1 =
        (Integer) vm2.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnLocPort1));
    locatorPorts.add(new InetSocketAddress("localhost", nyLocPort1));

    // Second NY locator is started via the API variant with an explicit hostname-for-clients.
    Integer nyLocPort2 = (Integer) vm3.invoke(
        () -> WANTestBase.createSecondRemoteLocatorWithAPI(2, nyLocPort1, lnLocPort1, "localhost"));
    locatorPorts.add(new InetSocketAddress("localhost", nyLocPort2));

    final int siteSizeToCheck = dsVsPort.size();
    vm0.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm1.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm2.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm3.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
  }

  /**
   * Test to validate that TK site's locator is recognized by LN and NY. Test to validate that HK
   * site's locator is recognized by LN , NY, TK.
   */
  @Test
  public void test_NY_Recognises_TK_AND_HK_Through_LN_Locator() {
    Map<Integer, Set<InetSocketAddress>> dsVsPort = new HashMap<>();

    Set<InetSocketAddress> locatorPorts = new HashSet<>();
    dsVsPort.put(1, locatorPorts);
    Integer lnLocPort1 = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
    locatorPorts.add(new InetSocketAddress("localhost", lnLocPort1));

    locatorPorts = new HashSet<>();
    dsVsPort.put(2, locatorPorts);
    Integer nyLocPort1 =
        (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnLocPort1));
    locatorPorts.add(new InetSocketAddress("localhost", nyLocPort1));

    // TK and HK both bootstrap through the LN locator only.
    locatorPorts = new HashSet<>();
    dsVsPort.put(3, locatorPorts);
    Integer tkLocPort =
        (Integer) vm2.invoke(() -> WANTestBase.createFirstRemoteLocator(3, lnLocPort1));
    locatorPorts.add(new InetSocketAddress("localhost", tkLocPort));

    locatorPorts = new HashSet<>();
    dsVsPort.put(4, locatorPorts);
    Integer hkLocPort =
        (Integer) vm3.invoke(() -> WANTestBase.createFirstRemoteLocator(4, lnLocPort1));
    locatorPorts.add(new InetSocketAddress("localhost", hkLocPort));

    final int siteSizeToCheck = dsVsPort.size();
    vm0.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm1.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm2.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm3.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
  }

  /** TK joins through NY (which joined through LN); all three sites must still see each other. */
  @Test
  public void test_TK_Recognises_LN_AND_NY() {
    Map<Integer, Set<InetSocketAddress>> dsVsPort = new HashMap<>();

    Set<InetSocketAddress> locatorPorts = new HashSet<>();
    dsVsPort.put(1, locatorPorts);
    Integer lnLocPort1 = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
    locatorPorts.add(new InetSocketAddress("localhost", lnLocPort1));

    locatorPorts = new HashSet<>();
    dsVsPort.put(2, locatorPorts);
    Integer nyLocPort1 =
        (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnLocPort1));
    locatorPorts.add(new InetSocketAddress("localhost", nyLocPort1));

    locatorPorts = new HashSet<>();
    dsVsPort.put(3, locatorPorts);
    Integer tkLocPort =
        (Integer) vm2.invoke(() -> WANTestBase.createFirstRemoteLocator(3, nyLocPort1));
    locatorPorts.add(new InetSocketAddress("localhost", tkLocPort));

    final int siteSizeToCheck = dsVsPort.size();
    vm0.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm1.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm2.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
  }

  /** Starts TK, HK and the second LN/NY locators concurrently and verifies discovery converges. */
  @Category({WanTest.class})
  @Test
  public void test_NY_Recognises_TK_AND_HK_Simultaneously() {
    Map<Integer, Set<InetSocketAddress>> dsVsPort = new HashMap<>();

    Set<InetSocketAddress> locatorPortsln = new HashSet<>();
    dsVsPort.put(1, locatorPortsln);
    Integer lnLocPort1 = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));
    locatorPortsln.add(new InetSocketAddress("localhost", lnLocPort1));

    Set<InetSocketAddress> locatorPortsny = new HashSet<>();
    dsVsPort.put(2, locatorPortsny);
    Integer nyLocPort1 =
        (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnLocPort1));
    locatorPortsny.add(new InetSocketAddress("localhost", nyLocPort1));

    int AsyncInvocationArrSize = 4;
    AsyncInvocation[] async = new AsyncInvocation[AsyncInvocationArrSize];

    Set<InetSocketAddress> locatorPortstk = new HashSet<>();
    dsVsPort.put(3, locatorPortstk);
    async[0] = vm2.invokeAsync(() -> WANTestBase.createFirstRemoteLocator(3, lnLocPort1));

    Set<InetSocketAddress> locatorPortshk = new HashSet<>();
    dsVsPort.put(4, locatorPortshk);
    async[1] = vm3.invokeAsync(() -> WANTestBase.createFirstRemoteLocator(4, nyLocPort1));

    // NOTE(review): these two lists are never read — presumably leftovers; verify before removing.
    ArrayList<Integer> locatorPortsln2 = new ArrayList<Integer>();
    async[2] = vm4.invokeAsync(() -> WANTestBase.createSecondLocator(1, lnLocPort1));

    ArrayList<Integer> locatorPortsny2 = new ArrayList<Integer>();
    async[3] = vm5.invokeAsync(() -> WANTestBase.createSecondLocator(2, nyLocPort1));

    try {
      async[0].join();
      async[1].join();
      async[2].join();
      async[3].join();
    } catch (InterruptedException e) {
      e.printStackTrace();
      fail();
    }

    // Collect the ports the async invocations produced, mirroring the synchronous tests above.
    locatorPortstk.add(new InetSocketAddress("localhost", (Integer) async[0].getReturnValue()));
    locatorPortshk.add(new InetSocketAddress("localhost", (Integer) async[1].getReturnValue()));
    locatorPortsln.add(new InetSocketAddress("localhost", (Integer) async[2].getReturnValue()));
    locatorPortsny.add(new InetSocketAddress("localhost", (Integer) async[3].getReturnValue()));

    final int siteSizeToCheck = dsVsPort.size();
    vm0.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm1.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm2.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
    vm3.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck));
  }

  /** A running gateway sender must pick up NY locators that start after the sender did. */
  @Test
  public void test_LN_Sender_recognises_ALL_NY_Locators() {
    IgnoredException ie = IgnoredException
        .addIgnoredException("could not get remote locator information for remote site");
    try {
      Integer lnLocPort1 = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1));

      Integer lnLocPort2 =
          (Integer) vm5.invoke(() -> WANTestBase.createSecondLocator(1, lnLocPort1));

      vm2.invoke(() -> WANTestBase.createCache(lnLocPort1, lnLocPort2));

      vm2.invoke(() -> WANTestBase.createSender("ln", 2, false, 100, 10, false, false, null, true));

      Integer nyLocPort1 =
          (Integer) vm1.invoke(() -> WANTestBase.createFirstRemoteLocator(2, lnLocPort1));

      vm2.invoke(() -> WANTestBase.startSender("ln"));

      // Since to fix Bug#46289, we have moved call to initProxy in getConnection which will be
      // called
      // only when batch is getting dispatched.
      // So for locator discovery callback to work, its now expected that atleast try to send a
      // batch
      // so that proxy will be initialized
      vm2.invoke(
          () -> WANTestBase.createReplicatedRegion(getTestMethodName() + "_RR", "ln", isOffHeap()));

      vm2.invoke(() -> WANTestBase.doPuts(getTestMethodName() + "_RR", 10));

      Integer nyLocPort2 = (Integer) vm3
          .invoke(() -> WANTestBase.createSecondRemoteLocator(2, nyLocPort1, lnLocPort1));

      InetSocketAddress locatorToWaitFor = new InetSocketAddress("localhost", nyLocPort2);

      vm2.invoke(() -> WANTestBase.checkLocatorsinSender("ln", locatorToWaitFor));

      Integer nyLocPort3 = (Integer) vm4
          .invoke(() -> WANTestBase.createSecondRemoteLocator(2, nyLocPort1, lnLocPort1));

      InetSocketAddress locatorToWaitFor2 = new InetSocketAddress("localhost", nyLocPort3);

      vm2.invoke(() -> WANTestBase.checkLocatorsinSender("ln", locatorToWaitFor2));
    } finally {
      ie.remove();
    }
  }

  /** Four sites chained in a ring (1->2->3->4->1) must all discover each other. */
  @Test
  public void test_RingTopology() {
    int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(4);

    final Set<String> site1LocatorsPort = new HashSet<String>();
    final Set<String> site11LocatorsPort = new HashSet<String>();
    site1LocatorsPort.add("localhost[" + ports[0] + "]");
    site11LocatorsPort
        .add("localhost[" + ports[0] + "],server=true,peer=true,hostname-for-clients=localhost");

    final Set<String> site2LocatorsPort = new HashSet<String>();
    final Set<String> site22LocatorsPort = new HashSet<String>();
    site2LocatorsPort.add("localhost[" + ports[1] + "]");
    site22LocatorsPort
        .add("localhost[" + ports[1] + "],server=true,peer=true,hostname-for-clients=localhost");

    final Set<String> site3LocatorsPort = new HashSet<String>();
    final Set<String> site33LocatorsPort = new HashSet<String>();
    site3LocatorsPort.add("localhost[" + ports[2] + "]");
    site33LocatorsPort
        .add("localhost[" + ports[2] + "],server=true,peer=true,hostname-for-clients=localhost");

    final Set<String> site4LocatorsPort = new HashSet<String>();
    final Set<String> site44LocatorsPort = new HashSet<String>();
    site4LocatorsPort.add("localhost[" + ports[3] + "]");
    site44LocatorsPort
        .add("localhost[" + ports[3] + "],server=true,peer=true,hostname-for-clients=localhost");

    // The "NN" variants carry the fully decorated locator strings the metadata check expects.
    Map<Integer, Set<String>> dsVsPort = new HashMap<Integer, Set<String>>();
    dsVsPort.put(1, site11LocatorsPort);
    dsVsPort.put(2, site22LocatorsPort);
    dsVsPort.put(3, site33LocatorsPort);
    dsVsPort.put(4, site44LocatorsPort);

    int AsyncInvocationArrSize = 9;
    AsyncInvocation[] async = new AsyncInvocation[AsyncInvocationArrSize];

    // Each site points its remote-locator list at the next site in the ring.
    async[0] = vm0.invokeAsync(
        () -> WANTestBase.createLocator(1, ports[0], site1LocatorsPort, site2LocatorsPort));

    async[1] = vm1.invokeAsync(
        () -> WANTestBase.createLocator(2, ports[1], site2LocatorsPort, site3LocatorsPort));

    async[2] = vm2.invokeAsync(
        () -> WANTestBase.createLocator(3, ports[2], site3LocatorsPort, site4LocatorsPort));

    async[3] = vm3.invokeAsync(
        () -> WANTestBase.createLocator(4, ports[3], site4LocatorsPort, site1LocatorsPort));

    // pause(5000);
    try {
      async[0].join();
      async[1].join();
      async[2].join();
      async[3].join();
    } catch (InterruptedException e) {
      e.printStackTrace();
      fail("Could not join async operations");
    }

    vm0.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort));
    vm1.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort));
    vm2.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort));
    vm3.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort));
  }

  @Ignore
  @Test
  public void test_3Sites3Locators() {
    final Set<String> site1LocatorsPort = new HashSet<String>();
    int site1Port1 = AvailablePortHelper.getRandomAvailableTCPPort();
    site1LocatorsPort.add("localhost[" + site1Port1 + "]");
    int site1Port2 = AvailablePortHelper.getRandomAvailableTCPPort();
    site1LocatorsPort.add("localhost[" + site1Port2 + "]");
    int site1Port3 = AvailablePortHelper.getRandomAvailableTCPPort();
    site1LocatorsPort.add("localhost[" + site1Port3 + "]");

    final Set<String> site2LocatorsPort = new HashSet<String>();
    int site2Port1 =
AvailablePortHelper.getRandomAvailableTCPPort(); site2LocatorsPort.add("localhost[" + site2Port1 + "]"); int site2Port2 = AvailablePortHelper.getRandomAvailableTCPPort(); site2LocatorsPort.add("localhost[" + site2Port2 + "]"); int site2Port3 = AvailablePortHelper.getRandomAvailableTCPPort(); site2LocatorsPort.add("localhost[" + site2Port3 + "]"); final Set<String> site3LocatorsPort = new HashSet<String>(); int site3Port1 = AvailablePortHelper.getRandomAvailableTCPPort(); site3LocatorsPort.add("localhost[" + site3Port1 + "]"); final int site3Port2 = AvailablePortHelper.getRandomAvailableTCPPort(); site3LocatorsPort.add("localhost[" + site3Port2 + "]"); int site3Port3 = AvailablePortHelper.getRandomAvailableTCPPort(); site3LocatorsPort.add("localhost[" + site3Port3 + "]"); Map<Integer, Set<String>> dsVsPort = new HashMap<Integer, Set<String>>(); dsVsPort.put(1, site1LocatorsPort); dsVsPort.put(2, site2LocatorsPort); dsVsPort.put(3, site3LocatorsPort); int AsyncInvocationArrSize = 9; AsyncInvocation[] async = new AsyncInvocation[AsyncInvocationArrSize]; async[0] = vm0.invokeAsync( () -> WANTestBase.createLocator(1, site1Port1, site1LocatorsPort, site2LocatorsPort)); async[8] = vm0.invokeAsync(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort)); async[1] = vm1.invokeAsync( () -> WANTestBase.createLocator(1, site1Port2, site1LocatorsPort, site2LocatorsPort)); async[2] = vm2.invokeAsync( () -> WANTestBase.createLocator(1, site1Port3, site1LocatorsPort, site2LocatorsPort)); async[3] = vm3.invokeAsync( () -> WANTestBase.createLocator(2, site2Port1, site2LocatorsPort, site3LocatorsPort)); async[4] = vm4.invokeAsync( () -> WANTestBase.createLocator(2, site2Port2, site2LocatorsPort, site3LocatorsPort)); async[5] = vm5.invokeAsync( () -> WANTestBase.createLocator(2, site2Port3, site2LocatorsPort, site3LocatorsPort)); async[6] = vm6.invokeAsync( () -> WANTestBase.createLocator(3, site3Port1, site3LocatorsPort, site1LocatorsPort)); async[7] = vm7.invokeAsync( () -> 
WANTestBase.createLocator(3, site3Port2, site3LocatorsPort, site1LocatorsPort)); WANTestBase.createLocator(3, site3Port3, site3LocatorsPort, site1LocatorsPort); long startTime = System.currentTimeMillis(); try { async[0].join(); async[1].join(); async[2].join(); async[3].join(); async[4].join(); async[5].join(); async[6].join(); async[7].join(); async[8].join(); } catch (InterruptedException e) { e.printStackTrace(); fail("Could not join async operations"); } Long endTime = null; try { endTime = (Long) async[8].getResult(); } catch (Throwable e) { e.printStackTrace(); Assert.fail("Could not get end time", e); } LogWriterUtils.getLogWriter().info( "Time taken for all 9 locators discovery in 3 sites: " + (endTime.longValue() - startTime)); vm0.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort)); vm1.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort)); vm2.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort)); vm3.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort)); vm4.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort)); vm5.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort)); vm6.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort)); vm7.invoke(() -> WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort)); WANTestBase.checkAllSiteMetaDataFor3Sites(dsVsPort); } @Test public void test_LN_Peer_Locators_Exchange_Information() { Set<InetSocketAddress> locatorPorts = new HashSet<>(); Map<Integer, Set<InetSocketAddress>> dsVsPort = new HashMap<>(); dsVsPort.put(1, locatorPorts); Integer lnLocPort1 = (Integer) vm0.invoke(() -> WANTestBase.createFirstPeerLocator(1)); locatorPorts.add(new InetSocketAddress("localhost", lnLocPort1)); Integer lnLocPort2 = (Integer) vm1.invoke(() -> WANTestBase.createSecondPeerLocator(1, lnLocPort1)); locatorPorts.add(new InetSocketAddress("localhost", lnLocPort2)); final int siteSizeToCheck = dsVsPort.size(); vm0.invoke(() -> 
WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck)); vm1.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck)); } @Test public void test_LN_NY_TK_5_PeerLocators_1_ServerLocator() { Map<Integer, Set<InetSocketAddress>> dsVsPort = new HashMap<>(); Set<InetSocketAddress> locatorPorts = new HashSet<>(); dsVsPort.put(1, locatorPorts); Integer lnLocPort1 = (Integer) vm0.invoke(() -> WANTestBase.createFirstPeerLocator(1)); locatorPorts.add(new InetSocketAddress("localhost", lnLocPort1)); Integer lnLocPort2 = (Integer) vm1.invoke(() -> WANTestBase.createSecondPeerLocator(1, lnLocPort1)); locatorPorts.add(new InetSocketAddress("localhost", lnLocPort2)); locatorPorts = new HashSet<>(); dsVsPort.put(2, locatorPorts); Integer nyLocPort1 = (Integer) vm2.invoke(() -> WANTestBase.createFirstRemotePeerLocator(2, lnLocPort1)); locatorPorts.add(new InetSocketAddress("localhost", nyLocPort1)); Integer nyLocPort2 = (Integer) vm3 .invoke(() -> WANTestBase.createSecondRemotePeerLocator(2, nyLocPort1, lnLocPort2)); locatorPorts.add(new InetSocketAddress("localhost", nyLocPort2)); locatorPorts = new HashSet<>(); dsVsPort.put(3, locatorPorts); Integer tkLocPort1 = (Integer) vm4.invoke(() -> WANTestBase.createFirstRemotePeerLocator(3, nyLocPort1)); locatorPorts.add(new InetSocketAddress("localhost", tkLocPort1)); Integer tkLocPort2 = (Integer) vm5 .invoke(() -> WANTestBase.createSecondRemotePeerLocator(3, tkLocPort1, nyLocPort1)); locatorPorts.add(new InetSocketAddress("localhost", tkLocPort2)); Integer tkLocPort3 = (Integer) vm6 .invoke(() -> WANTestBase.createSecondRemoteLocator(3, tkLocPort1, nyLocPort2)); locatorPorts.add(new InetSocketAddress("localhost", tkLocPort3)); // pause(5000); final int siteSizeToCheck = dsVsPort.size(); vm0.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck)); vm1.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck)); vm2.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, 
siteSizeToCheck)); vm3.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck)); vm4.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck)); vm5.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck)); vm6.invoke(() -> WANTestBase.checkAllSiteMetaData(dsVsPort, siteSizeToCheck)); } @Test public void testNoThreadLeftBehind() { // Get active thread count before test int activeThreadCountBefore = Thread.activeCount(); // Start / stop locator int port = AvailablePortHelper.getRandomAvailableTCPPort(); WANTestBase.createFirstRemoteLocator(2, port); disconnectFromDS(); // Validate active thread count after test // Wait up to 60 seconds for all threads started during the test // (including the 'WAN Locator Discovery Thread') to stop // Note: Awaitility is not being used since it adds threads for (int i = 0; i < 60; i++) { if (Thread.activeCount() > activeThreadCountBefore) { try { Thread.sleep(1000); } catch (InterruptedException e) { fail("Caught the following exception waiting for threads to stop: " + e); } } else { break; } } // Fail if the active thread count after the test is greater than the active thread count before // the test if (Thread.activeCount() > activeThreadCountBefore) { OSProcess.printStacks(0); StringBuilder builder = new StringBuilder(); builder.append("Expected ").append(activeThreadCountBefore).append(" threads but found ") .append(Thread.activeCount()).append(". 
Check log file for a thread dump."); fail(builder.toString()); } } @Test public void testNoRemoteLocators() { IgnoredException ie = IgnoredException .addIgnoredException("could not get remote locator information for remote site"); try { testRemoteLocators(null, false, 0); } finally { ie.remove(); } } @Test public void testValidHostRemoteLocators() { Set<String> remoteLocators = new HashSet(); remoteLocators.add("localhost[12345]"); testRemoteLocators(remoteLocators, true, 1); } // pool has been created even though locator address is not valid @Test public void testInvalidHostRemoteLocators() { IgnoredException ie = IgnoredException .addIgnoredException("could not get remote locator information for remote site"); try { Set<String> remoteLocators = new HashSet(); addUnknownHost(remoteLocators); // now we don't validata address upfront testRemoteLocators(remoteLocators, true, 1); } finally { ie.remove(); } } @Test public void testValidAndInvalidHostRemoteLocators() { Set<String> remoteLocators = new HashSet(); remoteLocators.add("localhost[12345]"); addUnknownHost(remoteLocators); // now we add the locator to pool, because we don't validate locator address testRemoteLocators(remoteLocators, true, 2); } private void addUnknownHost(Set<String> remoteLocators) { String unknownHostName = "unknownGeodeHostWanAutoDiscoveryDUnitTest"; boolean unknownHostFound = false; int numTries = 10; for (int i = 0; i < numTries; i++) { try { InetAddress.getByName(unknownHostName); } catch (UnknownHostException e) { unknownHostFound = true; break; } unknownHostName = "_" + unknownHostName + "_"; } assertTrue("An unknown host name could not be found in " + numTries + " tries", unknownHostFound); remoteLocators.add(unknownHostName + "[12345]"); } private void testRemoteLocators(Set<String> remoteLocators, boolean poolShouldExist, int expectedPoolLocatorsSize) { // Start locator Integer lnLocPort = (Integer) vm0.invoke(() -> WANTestBase.createFirstLocatorWithDSId(1)); // Add remote locators 
int remoteDsId = 2; vm0.invoke(() -> WANTestBase.putRemoteSiteLocators(remoteDsId, remoteLocators)); // Create cache vm2.invoke(() -> WANTestBase.createCache(lnLocPort)); // Create sender vm2.invoke(() -> WANTestBase.createSender("ln", remoteDsId, false, 100, 10, false, false, null, false)); // Verify sender is running vm2.invoke(() -> WANTestBase.verifySenderRunningState("ln")); // Verify pool exists or not vm2.invoke(() -> WANTestBase.verifyPool("ln", poolShouldExist, expectedPoolLocatorsSize)); } }
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hyperaware.conference.android.fragment; import android.app.Activity; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.v4.app.Fragment; import android.text.format.DateUtils; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.TextView; import com.bumptech.glide.Glide; import com.google.firebase.auth.FirebaseAuth; import com.google.firebase.auth.FirebaseUser; import com.google.firebase.database.FirebaseDatabase; import com.hyperaware.conference.android.R; import com.hyperaware.conference.android.Singletons; import com.hyperaware.conference.android.activity.ContentHost; import com.hyperaware.conference.android.logging.Logging; import com.hyperaware.conference.android.ui.favsession.FavSessionButtonManager; import com.hyperaware.conference.android.util.AgendaItems; import com.hyperaware.conference.android.util.Strings; import com.hyperaware.conference.model.AgendaItem; import com.hyperaware.conference.model.AgendaSection; import com.hyperaware.conference.model.Event; import com.hyperaware.conference.model.SpeakerItem; import com.hyperaware.conference.model.SpeakersSection; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; 
import java.util.Formatter;
import java.util.TimeZone;
import java.util.logging.Logger;

import de.halfbit.tinybus.Bus;
import de.halfbit.tinybus.Subscribe;

/**
 * Detail screen for a single conference speaker. Receives Event, AgendaSection, and
 * SpeakersSection objects via TinyBus subscriptions and renders the speaker's info,
 * social links, and sessions once all three have arrived.
 */
public class SpeakerDetailFragment extends Fragment implements Titled {

    private static final Logger LOGGER = Logging.getLogger(SpeakerDetailFragment.class);

    // Fragment argument key holding the speaker id to display.
    private static final String ARG_SPEAKER_ID = "speaker_id";

    private String speakerId;
    private FavSessionButtonManager favSessionButtonManager;
    private Bus bus;
    private ContentHost host;

    // View references bound in onActivityCreated.
    private TextView tvName;
    private TextView tvCompany;
    private TextView tvTitle;
    private ImageView ivPic;
    private ViewGroup vgDetailLinks;
    private ViewGroup vgSessions;
    private TextView tvWebsite, tvTwitter, tvFacebook, tvLinkedin;
    private TextView tvAbout;

    // Data delivered via bus subscriptions; UI is rendered once all are non-null.
    private Event event;
    private TimeZone tz;
    private SpeakerItem speakerItem;
    private ArrayList<AgendaItem> agendaItems;

    /**
     * Factory method: builds the fragment with the given speaker id as an argument.
     *
     * @throws IllegalArgumentException if speaker_id is null or empty
     */
    @NonNull
    public static SpeakerDetailFragment instantiate(@NonNull String speaker_id) {
        if (Strings.isNullOrEmpty(speaker_id)) {
            throw new IllegalArgumentException(ARG_SPEAKER_ID + " can't be null or empty");
        }
        final Bundle args = new Bundle();
        args.putString(ARG_SPEAKER_ID, speaker_id);
        final SpeakerDetailFragment fragment = new SpeakerDetailFragment();
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        LOGGER.fine("onCreate");

        // Validate the required speaker id argument early.
        final Bundle args = getArguments();
        speakerId = args.getString(ARG_SPEAKER_ID);
        if (Strings.isNullOrEmpty(speakerId)) {
            throw new IllegalArgumentException(ARG_SPEAKER_ID + " can't be null or empty");
        }

        favSessionButtonManager = new FavSessionButtonManager(
            FirebaseDatabase.getInstance(), FirebaseAuth.getInstance(), new MyAuthRequiredListener());
        bus = Singletons.deps.getBus();
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_speaker_detail, container, false);
    }

    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        final Activity activity = getActivity();
        if (activity instanceof ContentHost) {
            host = (ContentHost) activity;
            // Title is set later, once the speaker item arrives (see updateSpeaker).
            host.setTitle(null);
        }

        final View root = getView();
        if (root == null) {
            throw new IllegalStateException();
        }

        // Bind views; link and session containers start hidden until data arrives.
        tvName = (TextView) root.findViewById(R.id.tv_name);
        tvCompany = (TextView) root.findViewById(R.id.tv_company);
        tvTitle = (TextView) root.findViewById(R.id.tv_title);
        ivPic = (ImageView) root.findViewById(R.id.iv_pic);
        vgDetailLinks = (ViewGroup) root.findViewById(R.id.vg_detail_links);
        vgDetailLinks.setVisibility(View.GONE);
        tvWebsite = (TextView) vgDetailLinks.findViewById(R.id.tv_website);
        tvWebsite.setVisibility(View.GONE);
        tvTwitter = (TextView) vgDetailLinks.findViewById(R.id.tv_twitter);
        tvTwitter.setVisibility(View.GONE);
        tvFacebook = (TextView) vgDetailLinks.findViewById(R.id.tv_facebook);
        tvFacebook.setVisibility(View.GONE);
        tvLinkedin = (TextView) vgDetailLinks.findViewById(R.id.tv_linkedin);
        tvLinkedin.setVisibility(View.GONE);
        vgSessions = (ViewGroup) root.findViewById(R.id.vg_sessions);
        vgSessions.setVisibility(View.GONE);
        vgSessions.removeAllViews();
        tvAbout = (TextView) root.findViewById(R.id.tv_about);

        updateUi();
    }

    @Override
    public void onStart() {
        super.onStart();
        // Registering triggers delivery of bus events (onEvent/onAgenda/onSpeakers).
        bus.register(this);
        favSessionButtonManager.start();
    }

    @Override
    public void onStop() {
        favSessionButtonManager.stop();
        bus.unregister(this);
        super.onStop();
    }

    /** Titled implementation: speaker name once known, otherwise null. */
    @Nullable
    @Override
    public String getTitle() {
        if (speakerItem != null) {
            return speakerItem.getName();
        } else {
            return null;
        }
    }

    // Bus callback: capture the conference event (for its time zone) exactly once.
    @Subscribe
    public void onEvent(final Event event) {
        if (this.event == null && event != null) {
            this.event = event;
            tz = TimeZone.getTimeZone(event.getTimezoneName());
            updateUi();
        }
    }

    // Bus callback: collect this speaker's agenda items, sorted by start time, once.
    @Subscribe
    public void onAgenda(final AgendaSection agenda) {
        if (this.agendaItems == null && agenda != null) {
            agendaItems = new ArrayList<>();
            final Collection<AgendaItem> values = agenda.getItems().values();
            for (final AgendaItem item : values) {
                if (item.getSpeakerIds().contains(speakerId)) {
                    agendaItems.add(item);
                }
            }
            Collections.sort(agendaItems, AgendaItems.START_TIME_COMPARATOR);
            updateUi();
        }
    }

    // Bus callback: look up this fragment's speaker by id, once.
    @Subscribe
    public void onSpeakers(final SpeakersSection speakers) {
        if (this.speakerItem == null && speakers != null) {
            speakerItem = speakers.getItems().get(speakerId);
            updateUi();
        }
    }

    // Renders only after event, agenda, and speaker data have all arrived.
    private void updateUi() {
        if (event != null && agendaItems != null && speakerItem != null) {
            updateSpeaker();
        }
        else {
        }
    }

    /** Populates all views from speakerItem and its agenda items. */
    private void updateSpeaker() {
        tvName.setText(speakerItem.getName());
        host.setTitle(speakerItem.getName());

        // Company and title rows are hidden when empty.
        final String company = speakerItem.getCompanyName();
        tvCompany.setVisibility(Strings.isNullOrEmpty(company) ? View.GONE : View.VISIBLE);
        tvCompany.setText(company);
        final String title = speakerItem.getTitle();
        tvTitle.setVisibility(Strings.isNullOrEmpty(title) ? View.GONE : View.VISIBLE);
        tvTitle.setText(title);

        Glide
            .with(SpeakerDetailFragment.this)
            .load(speakerItem.getImage100())
            .fitCenter()
            .placeholder(R.drawable.nopic)
            .into(ivPic);

        // Show each social link only if present; show the container if any link is.
        boolean links_visible = false;
        final String website = speakerItem.getWebsite();
        if (!Strings.isNullOrEmpty(website)) {
            links_visible = true;
            tvWebsite.setVisibility(View.VISIBLE);
            tvWebsite.setText(website);
        }
        final String twitter = speakerItem.getTwitter();
        if (!Strings.isNullOrEmpty(twitter)) {
            links_visible = true;
            tvTwitter.setVisibility(View.VISIBLE);
            tvTwitter.setText(twitter);
        }
        final String facebook = speakerItem.getFacebook();
        if (!Strings.isNullOrEmpty(facebook)) {
            links_visible = true;
            tvFacebook.setVisibility(View.VISIBLE);
            tvFacebook.setText(facebook);
        }
        final String linkedin = speakerItem.getLinkedin();
        if (!Strings.isNullOrEmpty(linkedin)) {
            links_visible = true;
            tvLinkedin.setVisibility(View.VISIBLE);
            tvLinkedin.setText(linkedin);
        }
        vgDetailLinks.setVisibility(links_visible ? View.VISIBLE : View.GONE);

        tvAbout.setText(speakerItem.getAbout());

        // Build one row per session this speaker presents, with date, time, and a
        // favorite button; tapping a row navigates to the session detail.
        if (agendaItems.size() > 0) {
            vgSessions.setVisibility(View.VISIBLE);
            vgSessions.removeAllViews();
            final StringBuilder sb = new StringBuilder();
            final Formatter formatter = new Formatter(sb);
            final LayoutInflater inflater = getActivity().getLayoutInflater();
            for (final AgendaItem item : agendaItems) {
                final View view = inflater.inflate(R.layout.item_speaker_session, vgSessions, false);
                ((TextView) view.findViewById(R.id.tv_topic)).setText(item.getTopic());
                // Epoch times are in seconds; DateUtils expects milliseconds.
                sb.setLength(0);
                DateUtils.formatDateRange(
                    getActivity(),
                    formatter,
                    item.getEpochStartTime() * 1000,
                    item.getEpochEndTime() * 1000,
                    DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_SHOW_WEEKDAY,
                    tz.getID()
                );
                ((TextView) view.findViewById(R.id.tv_date)).setText(formatter.toString());
                sb.setLength(0);
                DateUtils.formatDateRange(
                    getActivity(),
                    formatter,
                    item.getEpochStartTime() * 1000,
                    item.getEpochEndTime() * 1000,
                    DateUtils.FORMAT_SHOW_TIME,
                    tz.getID()
                );
                ((TextView) view.findViewById(R.id.tv_time)).setText(formatter.toString());

                final String session_id = item.getId();
                final ImageButton ib_favorite =
                    (ImageButton) view.findViewById(R.id.button_favorite_session);
                favSessionButtonManager.attach(ib_favorite, session_id);

                if (host != null) {
                    view.setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View v) {
                            Fragment next = SessionDetailFragment.instantiate(item.getId());
                            host.pushFragment(next, "session_detail");
                        }
                    });
                }

                vgSessions.addView(view);
            }
        }
    }

    // Prompts for sign-in when favoriting a session requires authentication.
    private class MyAuthRequiredListener implements FavSessionButtonManager.AuthRequiredListener {
        @Override
        public void onAuthRequired(ImageButton view, String sessionId) {
            new SigninRequiredDialogFragment().show(getFragmentManager(), null);
        }
    }

}
/*
 * Copyright (c) 2008-2017 Haulmont.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.haulmont.cuba.web.widgets;

import com.haulmont.cuba.web.widgets.client.groupbox.CubaGroupBoxServerRpc;
import com.haulmont.cuba.web.widgets.client.groupbox.CubaGroupBoxState;
import com.vaadin.shared.ui.MarginInfo;
import com.vaadin.ui.Component;
import com.vaadin.ui.ComponentContainer;
import com.vaadin.ui.Layout;
import com.vaadin.ui.Panel;
import org.slf4j.LoggerFactory;

import java.util.Iterator;

/**
 * A collapsible group-box panel: a Vaadin Panel that can optionally be collapsed/expanded
 * by the user, delegating all container operations to its inner content layout.
 */
public class CubaGroupBox extends Panel implements ComponentContainer {
    // Optional observer notified whenever the expanded state changes.
    protected ExpandChangeHandler expandChangeHandler = null;

    public CubaGroupBox() {
        // Client-side RPC: toggle expansion only when the box is collapsable.
        registerRpc((CubaGroupBoxServerRpc) expanded -> {
            if (getState().collapsable) {
                setExpanded(expanded, true);
            }
        });

        Layout content = new CubaVerticalActionsLayout();
        setContent(content);
        setWidth(100, Unit.PERCENTAGE);
    }

    @Override
    protected CubaGroupBoxState getState() {
        return (CubaGroupBoxState) super.getState();
    }

    @Override
    protected CubaGroupBoxState getState(boolean markAsDirty) {
        return (CubaGroupBoxState) super.getState(markAsDirty);
    }

    @Override
    public void setWidth(float width, Unit unit) {
        super.setWidth(width, unit);
        // Keep content sizing in sync: undefined panel width -> undefined content width,
        // otherwise content fills the panel.
        if (getContent() != null) {
            if (width < 0) {
                getContent().setWidth(-1, Unit.PIXELS);
            } else {
                getContent().setWidth(100, Unit.PERCENTAGE);
            }
        }
    }

    @Override
    public void setHeight(float height, Unit unit) {
        super.setHeight(height, unit);
        // Same sync rule as setWidth, for the vertical dimension.
        if (getContent() != null) {
            if (height < 0) {
                getContent().setHeight(-1, Unit.PIXELS);
            } else {
                getContent().setHeight(100, Unit.PERCENTAGE);
            }
        }
    }

    @Override
    public void setContent(Component content) {
        super.setContent(content);

        // Apply the current panel sizing rules to the newly set content.
        if (content != null) {
            if (getHeight() < 0) {
                getContent().setHeight(-1, Unit.PIXELS);
            } else {
                getContent().setHeight(100, Unit.PERCENTAGE);
            }
            if (getWidth() < 0) {
                getContent().setWidth(-1, Unit.PIXELS);
            } else {
                getContent().setWidth(100, Unit.PERCENTAGE);
            }
        }
    }

    // A non-collapsable box is always considered expanded.
    public boolean isExpanded() {
        return !getState(false).collapsable || getState(false).expanded;
    }

    public void setExpanded(boolean expanded) {
        setExpanded(expanded, false);
    }

    /**
     * Sets the expanded state, hiding/showing the content, and notifies the
     * ExpandChangeHandler. invokedByUser distinguishes user clicks (via RPC) from
     * programmatic changes.
     */
    public void setExpanded(boolean expanded, boolean invokedByUser) {
        if (expanded != getState(false).expanded) {
            getContent().setVisible(expanded);
            markAsDirtyRecursive();
        }
        getState().expanded = expanded;

        if (expandChangeHandler != null)
            expandChangeHandler.expandStateChanged(expanded, invokedByUser);
    }

    public boolean isCollapsable() {
        return getState(false).collapsable;
    }

    public void setCollapsable(boolean collapsable) {
        getState().collapsable = collapsable;
        // Making the box collapsable resets it to the expanded state.
        if (collapsable)
            setExpanded(true);
    }

    public ExpandChangeHandler getExpandChangeHandler() {
        return expandChangeHandler;
    }

    public void setExpandChangeHandler(ExpandChangeHandler expandChangeHandler) {
        this.expandChangeHandler = expandChangeHandler;
    }

    @Override
    public ComponentContainer getContent() {
        return (ComponentContainer) super.getContent();
    }

    // ComponentContainer methods delegate to the inner content layout.
    @Override
    public void addComponent(Component c) {
        getContent().addComponent(c);
    }

    @Override
    public void addComponents(Component...
components) { getContent().addComponents(components); } @Override public void removeComponent(Component c) { getContent().addComponent(c); } @Override public void removeAllComponents() { getContent().removeAllComponents(); } @Override public void replaceComponent(Component oldComponent, Component newComponent) { getContent().replaceComponent(oldComponent, newComponent); } @Override public Iterator<Component> getComponentIterator() { return getContent().iterator(); } @Override public void moveComponentsFrom(ComponentContainer source) { getContent().moveComponentsFrom(source); } @Override public void beforeClientResponse(boolean initial) { super.beforeClientResponse(initial); if (getState(false).showAsPanel && getState(false).outerMarginsBitmask != 0) { LoggerFactory.getLogger(CubaGroupBox.class) .warn("GroupBox's 'showAsPanel' and 'outerMargin' properties are set simultaneously"); } } public interface ExpandChangeHandler { void expandStateChanged(boolean expanded, boolean invokedByUser); } public void setShowAsPanel(boolean showAsPanel) { if (getState(false).showAsPanel != showAsPanel) { if (showAsPanel) { setPrimaryStyleName("v-panel"); } else { setPrimaryStyleName("c-groupbox"); } getState().showAsPanel = showAsPanel; } } public boolean isShowAsPanel() { return getState(false).showAsPanel; } public MarginInfo getOuterMargin() { return new MarginInfo(getState(false).outerMarginsBitmask); } public void setOuterMargin(MarginInfo marginInfo) { getState().outerMarginsBitmask = marginInfo.getBitMask(); } }