gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.management.internal.configuration.utils; import static javax.xml.XMLConstants.NULL_NS_URI; import static javax.xml.XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI; import static org.apache.geode.management.internal.configuration.utils.XmlConstants.W3C_XML_SCHEMA_INSTANCE_ATTRIBUTE_SCHEMA_LOCATION; import static org.apache.geode.management.internal.configuration.utils.XmlConstants.W3C_XML_SCHEMA_INSTANCE_PREFIX; import org.apache.commons.lang.StringUtils; import org.apache.geode.internal.cache.xmlcache.CacheXml; import org.apache.geode.internal.cache.xmlcache.CacheXmlParser; import org.apache.geode.management.internal.configuration.domain.CacheElement; import org.apache.geode.management.internal.configuration.domain.XmlEntity; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import java.io.IOException; import java.io.Reader; import java.io.StringReader; import java.io.StringWriter; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; 
import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.StringTokenizer; import javax.xml.namespace.NamespaceContext; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.TransformerFactoryConfigurationError; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathConstants; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; public class XmlUtils { /** * Create an XML {@link Document} from the given {@link Reader}. * * @param reader to create document from. * @return {@link Document} if successful, otherwise false. * @throws ParserConfigurationException * @throws SAXException * @throws IOException * @since GemFire 8.1 */ public static Document createDocumentFromReader(final Reader reader) throws SAXException, ParserConfigurationException, IOException { Document doc = null; InputSource inputSource = new InputSource(reader); doc = getDocumentBuilder().parse(inputSource); return doc; } public static NodeList query(Node node, String searchString) throws XPathExpressionException { XPath xpath = XPathFactory.newInstance().newXPath(); return (NodeList) xpath.evaluate(searchString, node, XPathConstants.NODESET); } public static NodeList query(Node node, String searchString, XPathContext xpathcontext) throws XPathExpressionException { XPath xpath = XPathFactory.newInstance().newXPath(); xpath.setNamespaceContext(xpathcontext); return (NodeList) xpath.evaluate(searchString, node, XPathConstants.NODESET); } public static Element querySingleElement(Node node, String searchString, final XPathContext xPathContext) throws 
XPathExpressionException { XPath xpath = XPathFactory.newInstance().newXPath(); xpath.setNamespaceContext(xPathContext); Object result = xpath.evaluate(searchString, node, XPathConstants.NODE); try { return (Element) result; } catch (ClassCastException e) { throw new XPathExpressionException("Not an org.w3c.dom.Element: " + result); } } public static DocumentBuilder getDocumentBuilder() throws ParserConfigurationException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); // the actual builder or parser DocumentBuilder builder = factory.newDocumentBuilder(); builder.setEntityResolver(new CacheXmlParser()); return builder; } /***** * Adds a new node or replaces an existing node in the Document * * @param doc Target document where the node will added * @param xmlEntity contains definition of the xml entity * @throws IOException * @throws ParserConfigurationException * @throws SAXException * @throws XPathExpressionException */ public static void addNewNode(final Document doc, final XmlEntity xmlEntity) throws IOException, XPathExpressionException, SAXException, ParserConfigurationException { // Build up map per call to avoid issues with caching wrong version of the map. 
final LinkedHashMap<String, CacheElement> elementOrderMap = CacheElement.buildElementMap(doc); final Node newNode = createNode(doc, xmlEntity.getXmlDefinition()); final Node root = doc.getDocumentElement(); final int incomingElementOrder = getElementOrder(elementOrderMap, xmlEntity.getNamespace(), xmlEntity.getType()); boolean nodeAdded = false; NodeList nodes = root.getChildNodes(); final int length = nodes.getLength(); for (int i = 0; i < length; i++) { final Node node = nodes.item(i); if (node instanceof Element) { final Element childElement = (Element) node; final String type = childElement.getLocalName(); final String namespace = childElement.getNamespaceURI(); if (namespace.equals(xmlEntity.getNamespace()) && type.equals(xmlEntity.getType())) { // TODO this should really be checking all attributes in xmlEntity.getAttributes // First check if the element has a name String nameOrId = getAttribute(childElement, "name"); // If not then check if the element has an Id if (nameOrId == null) { nameOrId = getAttribute(childElement, "id"); } if (nameOrId != null) { // If there is a match , then replace the existing node with the incoming node if (nameOrId.equals(xmlEntity.getNameOrId())) { root.replaceChild(newNode, node); nodeAdded = true; break; } } else { // This element does not have any name or id identifier for e.g PDX and gateway-receiver // If there is only one element of that type then replace it with the incoming node if (!isMultiple(elementOrderMap, namespace, type)) { root.replaceChild(newNode, node); nodeAdded = true; break; } } } else { if (incomingElementOrder < getElementOrder(elementOrderMap, namespace, type)) { root.insertBefore(newNode, node); nodeAdded = true; break; } } } } if (!nodeAdded) { root.appendChild(newNode); } } /** * @param elementOrderMap * @param namespace * @param type * @return <code>true</code> if element allows multiple, otherwise <code>false</code>. 
* @since GemFire 8.1 */ private static boolean isMultiple(final LinkedHashMap<String, CacheElement> elementOrderMap, final String namespace, final String type) { if (CacheXml.GEODE_NAMESPACE.equals(namespace)) { // We only keep the cache elements in elementOrderMap final CacheElement cacheElement = elementOrderMap.get(type); if (null != cacheElement) { return cacheElement.isMultiple(); } } // Assume all extensions are not multiples. // To support multiple on extensions our map needs to included other // namespaces return false; } /** * @param elementOrderMap * @param namespace * @param type * @return position of the element if in map, otherwise {@link Integer#MAX_VALUE}. * @since GemFire 8.1 */ private static int getElementOrder(final LinkedHashMap<String, CacheElement> elementOrderMap, final String namespace, final String type) { if (CacheXml.GEODE_NAMESPACE.equals(namespace)) { // We only keep the cache elements in elementOrderMap final CacheElement cacheElement = elementOrderMap.get(type); if (null != cacheElement) { return cacheElement.getOrder(); } } // Assume all extensions are order independent. 
return Integer.MAX_VALUE; } /**** * Creates a node from the String xml definition * * @param owner * @param xmlDefintion * @return Node representing the xml definition * @throws ParserConfigurationException * @throws IOException * @throws SAXException */ private static Node createNode(Document owner, String xmlDefintion) throws SAXException, IOException, ParserConfigurationException { InputSource inputSource = new InputSource(new StringReader(xmlDefintion)); Document document = getDocumentBuilder().parse(inputSource); Node newNode = document.getDocumentElement(); return owner.importNode(newNode, true); } public static String getAttribute(Node node, String name) { NamedNodeMap attributes = node.getAttributes(); if (attributes == null) { return null; } Node attributeNode = node.getAttributes().getNamedItem(name); if (attributeNode == null) { return null; } return attributeNode.getTextContent(); } public static String getAttribute(Node node, String localName, String namespaceURI) { Node attributeNode = node.getAttributes().getNamedItemNS(namespaceURI, localName); if (attributeNode == null) { return null; } return attributeNode.getTextContent(); } /** * Build schema location map of schemas used in given <code>schemaLocationAttribute</code>. * * @see <a href="http://www.w3.org/TR/xmlschema-0/#schemaLocation">XML Schema Part 0: Primer * Second Edition | 5.6 schemaLocation</a> * * @param schemaLocation attribute value to build schema location map from. * @return {@link Map} of schema namespace URIs to location URLs. * @since GemFire 8.1 */ public static Map<String, List<String>> buildSchemaLocationMap(final String schemaLocation) { return buildSchemaLocationMap(new HashMap<String, List<String>>(), schemaLocation); } /** * Build schema location map of schemas used in given <code>schemaLocationAttribute</code> and * adds them to the given <code>schemaLocationMap</code>. 
* * @see <a href="http://www.w3.org/TR/xmlschema-0/#schemaLocation">XML Schema Part 0: Primer * Second Edition | 5.6 schemaLocation</a> * * @param schemaLocationMap {@link Map} to add schema locations to. * @param schemaLocation attribute value to build schema location map from. * @return {@link Map} of schema namespace URIs to location URLs. * @since GemFire 8.1 */ static Map<String, List<String>> buildSchemaLocationMap( Map<String, List<String>> schemaLocationMap, final String schemaLocation) { if (null == schemaLocation) { return schemaLocationMap; } if (null == schemaLocation || schemaLocation.isEmpty()) { // should really ever be null but being safe. return schemaLocationMap; } final StringTokenizer st = new StringTokenizer(schemaLocation, " \n\t\r"); while (st.hasMoreElements()) { final String ns = st.nextToken(); final String loc = st.nextToken(); List<String> locs = schemaLocationMap.get(ns); if (null == locs) { locs = new ArrayList<>(); schemaLocationMap.put(ns, locs); } if (!locs.contains(loc)) { locs.add(loc); } } return schemaLocationMap; } /***** * Deletes all the node from the document which match the definition provided by xmlentity * * @param doc * @param xmlEntity * @throws Exception */ public static void deleteNode(Document doc, XmlEntity xmlEntity) throws Exception { NodeList nodes = getNodes(doc, xmlEntity); if (nodes != null) { int length = nodes.getLength(); for (int i = 0; i < length; i++) { Node node = nodes.item(i); node.getParentNode().removeChild(node); } } } /**** * Gets all the nodes matching the definition given by the xml entity * * @param doc * @param xmlEntity * @return Nodes * @throws XPathExpressionException */ public static NodeList getNodes(Document doc, XmlEntity xmlEntity) throws XPathExpressionException { XPathContext context = new XPathContext(); context.addNamespace(xmlEntity.getPrefix(), xmlEntity.getNamespace()); if (xmlEntity.getChildPrefix() != null) { context.addNamespace(xmlEntity.getChildPrefix(), 
xmlEntity.getChildNamespace()); } return query(doc, xmlEntity.getSearchString(), context); } /** * An object used by an XPath query that maps namespaces to uris. * * @see NamespaceContext * */ public static class XPathContext implements NamespaceContext { private HashMap<String, String> prefixToUri = new HashMap<String, String>(); private HashMap<String, String> uriToPrefix = new HashMap<String, String>(); public XPathContext() {} public XPathContext(String prefix, String uri) { addNamespace(prefix, uri); } public void addNamespace(String prefix, String uri) { this.prefixToUri.put(prefix, uri); this.uriToPrefix.put(uri, prefix); } @Override public String getNamespaceURI(String prefix) { return prefixToUri.get(prefix); } @Override public String getPrefix(String namespaceURI) { return uriToPrefix.get(namespaceURI); } @Override public Iterator<String> getPrefixes(String namespaceURI) { return Collections.singleton(getPrefix(namespaceURI)).iterator(); } } /**** * Converts the document to a well formatted Xml string * * @param doc * @return pretty xml string * @throws IOException * @throws TransformerException * @throws TransformerFactoryConfigurationError */ public static String prettyXml(Node doc) throws IOException, TransformerFactoryConfigurationError, TransformerException { Transformer transformer = TransformerFactory.newInstance().newTransformer(); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); return transform(transformer, doc); } public static String elementToString(Node element) throws TransformerFactoryConfigurationError, TransformerException { Transformer transformer = TransformerFactory.newInstance().newTransformer(); return transform(transformer, element); } private static String transform(Transformer transformer, Node element) throws TransformerException { StreamResult result = new StreamResult(new StringWriter()); DOMSource source = new DOMSource(element); transformer.transform(source, result); String xmlString = result.getWriter().toString(); 
return xmlString; } /**** * Convert the xmlString to pretty well formatted xmlString * * @param xmlContent * @return pretty xml string * @throws IOException * @throws TransformerException * @throws TransformerFactoryConfigurationError * @throws ParserConfigurationException * @throws SAXException */ public static String prettyXml(String xmlContent) throws IOException, TransformerFactoryConfigurationError, TransformerException, SAXException, ParserConfigurationException { Document doc = createDocumentFromXml(xmlContent); return prettyXml(doc); } /*** * Create a document from the xml * * @param xmlContent * @return Document * @throws IOException * @throws ParserConfigurationException * @throws SAXException */ public static Document createDocumentFromXml(String xmlContent) throws SAXException, ParserConfigurationException, IOException { return createDocumentFromReader(new StringReader(xmlContent)); } /** * Create a {@link Document} using {@link XmlUtils#createDocumentFromXml(String)} and if the * version attribute is not equal to the current version then update the XML to the current schema * and return the document. * * @param xmlContent XML content to load and upgrade. * @return {@link Document} from xmlContent. * @since GemFire 8.1 */ public static Document createAndUpgradeDocumentFromXml(String xmlContent) throws SAXException, ParserConfigurationException, IOException, XPathExpressionException { Document doc = XmlUtils.createDocumentFromXml(xmlContent); if (!CacheXml.VERSION_LATEST.equals(XmlUtils.getAttribute(doc.getDocumentElement(), CacheXml.VERSION, CacheXml.GEODE_NAMESPACE))) { doc = upgradeSchema(doc, CacheXml.GEODE_NAMESPACE, CacheXml.LATEST_SCHEMA_LOCATION, CacheXml.VERSION_LATEST); } return doc; } /** * Upgrade the schema of a given Config XMl <code>document</code> to the given * <code>namespace</code>, <code>schemaLocation</code> and <code>version</code>. * * @param document Config XML {@link Document} to upgrade. 
* @param namespaceUri Namespace URI to upgrade to. * @param schemaLocation Schema location to upgrade to. * @throws XPathExpressionException * @throws ParserConfigurationException * @since GemFire 8.1 */ // UnitTest SharedConfigurationTest.testCreateAndUpgradeDocumentFromXml() public static Document upgradeSchema(Document document, final String namespaceUri, final String schemaLocation, String schemaVersion) throws XPathExpressionException, ParserConfigurationException { if (StringUtils.isBlank(namespaceUri)) { throw new IllegalArgumentException("namespaceUri"); } if (StringUtils.isBlank(schemaLocation)) { throw new IllegalArgumentException("schemaLocation"); } if (StringUtils.isBlank(schemaVersion)) { throw new IllegalArgumentException("schemaVersion"); } if (null != document.getDoctype()) { // doc.setDocType(null); Node root = document.getDocumentElement(); Document copiedDocument = getDocumentBuilder().newDocument(); Node copiedRoot = copiedDocument.importNode(root, true); copiedDocument.appendChild(copiedRoot); document = copiedDocument; } final Element root = document.getDocumentElement(); final Map<String, String> namespacePrefixMap = buildNamespacePrefixMap(root); // Add CacheXml namespace if missing. String cachePrefix = namespacePrefixMap.get(namespaceUri); if (null == cachePrefix) { // Default to null prefix. cachePrefix = NULL_NS_URI; // Move all into new namespace changeNamespace(root, NULL_NS_URI, namespaceUri); namespacePrefixMap.put(namespaceUri, cachePrefix); } // Add schema instance namespace if missing. String xsiPrefix = namespacePrefixMap.get(W3C_XML_SCHEMA_INSTANCE_NS_URI); if (null == xsiPrefix) { xsiPrefix = W3C_XML_SCHEMA_INSTANCE_PREFIX; root.setAttribute("xmlns:" + xsiPrefix, W3C_XML_SCHEMA_INSTANCE_NS_URI); namespacePrefixMap.put(W3C_XML_SCHEMA_INSTANCE_NS_URI, xsiPrefix); } // Create schemaLocation attribute if missing. 
final String schemaLocationAttribute = getAttribute(root, W3C_XML_SCHEMA_INSTANCE_ATTRIBUTE_SCHEMA_LOCATION, W3C_XML_SCHEMA_INSTANCE_NS_URI); // Update schemaLocation for namespace. final Map<String, List<String>> schemaLocationMap = buildSchemaLocationMap(schemaLocationAttribute); List<String> schemaLocations = schemaLocationMap.get(namespaceUri); if (null == schemaLocations) { schemaLocations = new ArrayList<String>(); schemaLocationMap.put(namespaceUri, schemaLocations); } schemaLocations.clear(); schemaLocations.add(schemaLocation); String schemaLocationValue = getSchemaLocationValue(schemaLocationMap); root.setAttributeNS(W3C_XML_SCHEMA_INSTANCE_NS_URI, xsiPrefix + ":" + W3C_XML_SCHEMA_INSTANCE_ATTRIBUTE_SCHEMA_LOCATION, schemaLocationValue); // Set schema version if (cachePrefix == null || cachePrefix.isEmpty()) { root.setAttribute("version", schemaVersion); } else { root.setAttributeNS(namespaceUri, cachePrefix + ":version", schemaVersion); } return document; } /** * Set the <code>schemaLocationAttribute</code> to the values of the * <code>schemaLocationMap</code>. * * @see <a href="http://www.w3.org/TR/xmlschema-0/#schemaLocation">XML Schema Part 0: Primer * Second Edition | 5.6 schemaLocation</a> * * @param schemaLocationMap {@link Map} to get schema locations from. * @since GemFire 8.1 */ private static String getSchemaLocationValue(final Map<String, List<String>> schemaLocationMap) { final StringBuilder sb = new StringBuilder(); for (final Map.Entry<String, List<String>> entry : schemaLocationMap.entrySet()) { for (final String schemaLocation : entry.getValue()) { if (sb.length() > 0) { sb.append(' '); } sb.append(entry.getKey()).append(' ').append(schemaLocation); } } return sb.toString(); } /** * Build {@link Map} of namespace URIs to prefixes. * * @param root {@link Element} to get namespaces and prefixes from. * @return {@link Map} of namespace URIs to prefixes. 
* @since GemFire 8.1 */ private static Map<String, String> buildNamespacePrefixMap(final Element root) { final HashMap<String, String> namespacePrefixMap = new HashMap<>(); // Look for all of the attributes of cache that start with // xmlns NamedNodeMap attributes = root.getAttributes(); for (int i = 0; i < attributes.getLength(); i++) { Node item = attributes.item(i); if (item.getNodeName().startsWith("xmlns")) { // Anything after the colon is the prefix // eg xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" // has a prefix of xsi String[] splitName = item.getNodeName().split(":"); String prefix; if (splitName.length > 1) { prefix = splitName[1]; } else { prefix = ""; } String uri = item.getTextContent(); namespacePrefixMap.put(uri, prefix); } } return namespacePrefixMap; } /** * Change the namespace URI of a <code>node</code> and its children to * <code>newNamespaceUri</code> if that node is in the given <code>oldNamespaceUri</code> * namespace URI. * * * @param node {@link Node} to change namespace URI on. * @param oldNamespaceUri old namespace URI to change from. * @param newNamespaceUri new Namespace URI to change to. * @throws XPathExpressionException * @return the modified version of the passed in node. 
* @since GemFire 8.1 */ static Node changeNamespace(final Node node, final String oldNamespaceUri, final String newNamespaceUri) throws XPathExpressionException { Node result = null; final NodeList nodes = query(node, "//*"); for (int i = 0; i < nodes.getLength(); i++) { final Node element = nodes.item(i); if (element.getNamespaceURI() == null || element.getNamespaceURI().equals(oldNamespaceUri)) { Node renamed = node.getOwnerDocument().renameNode(element, newNamespaceUri, element.getNodeName()); if (element == node) { result = renamed; } } } return result; } /**** * Method to modify the root attribute (cache) of the XML * * @param doc Target document whose root attributes must be modified * @param xmlEntity xml entity for the root , it also contains the attributes * @throws IOException */ public static void modifyRootAttributes(Document doc, XmlEntity xmlEntity) throws IOException { if (xmlEntity == null || xmlEntity.getAttributes() == null) { return; } String type = xmlEntity.getType(); Map<String, String> attributes = xmlEntity.getAttributes(); Element root = doc.getDocumentElement(); if (root.getLocalName().equals(type)) { for (Entry<String, String> entry : attributes.entrySet()) { String attributeName = entry.getKey(); String attributeValue = entry.getValue(); // Remove the existing attribute String rootAttribute = getAttribute(root, attributeName); if (null != rootAttribute) { root.removeAttribute(rootAttribute); } // Add the new attribute with new value root.setAttribute(attributeName, attributeValue); } } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.deltaspike.jsf.impl.util;

import org.apache.deltaspike.core.api.config.view.metadata.ViewConfigResolver;
import org.apache.deltaspike.core.api.provider.BeanProvider;
import org.apache.deltaspike.core.api.config.view.navigation.NavigationParameterContext;
import org.apache.deltaspike.jsf.api.config.JsfModuleConfig;
import org.apache.deltaspike.jsf.impl.listener.phase.WindowMetaData;
import org.apache.deltaspike.jsf.impl.message.FacesMessageEntry;

import javax.enterprise.context.ContextNotActiveException;
import javax.faces.context.ExternalContext;
import javax.faces.context.FacesContext;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;

/**
 * Static JSF helpers: EL evaluation, URL parameter handling (view-config page
 * parameters and raw request parameters), and saving/restoring FacesMessages
 * across requests when {@link JsfModuleConfig#isAlwaysKeepMessages()} is enabled.
 */
public abstract class JsfUtils
{
    /**
     * Evaluates the given EL expression in the current FacesContext.
     *
     * @param expression EL expression to evaluate
     * @param targetType expected result type
     * @return the evaluated value
     */
    public static <T> T getValueOfExpression(String expression, Class<T> targetType)
    {
        FacesContext facesContext = FacesContext.getCurrentInstance();
        return facesContext.getApplication().evaluateExpressionGet(facesContext, expression, targetType);
    }

    /**
     * Evaluates the given EL expression and returns its string form.
     * NOTE: returns the literal string "null" (not null) when the expression
     * evaluates to null — callers rely on this.
     */
    public static String getValueOfExpressionAsString(String expression)
    {
        Object result = getValueOfExpression(expression, Object.class);

        return result != null ? result.toString() : "null";
    }

    /**
     * Collects the page parameters registered in the current
     * {@link NavigationParameterContext} as request-parameter objects.
     * Returns an empty set during early (startup) calls where the
     * external context is not fully initialized yet.
     */
    public static Set<RequestParameter> getViewConfigPageParameters()
    {
        ExternalContext externalContext = FacesContext.getCurrentInstance().getExternalContext();

        Set<RequestParameter> result = new HashSet<RequestParameter>();

        if (externalContext == null ||
                //detection of early config for different mojarra versions
                externalContext.getRequestParameterValuesMap() == null || externalContext.getRequest() == null)
        {
            return result;
        }

        NavigationParameterContext navigationParameterContext =
                BeanProvider.getContextualReference(NavigationParameterContext.class);

        for (Map.Entry<String, String> entry : navigationParameterContext.getPageParameters().entrySet())
        {
            //TODO add multi-value support
            result.add(new RequestParameter(entry.getKey(), new String[]{entry.getValue()}));
        }

        return result;
    }

    /**
     * Adds the current page-parameters to the given url
     *
     * @param externalContext current external-context
     * @param url current url
     * @param encodeValues flag which indicates if parameter values should be encoded or not
     * @return url with request-parameters
     */
    public static String addPageParameters(ExternalContext externalContext, String url, boolean encodeValues)
    {
        StringBuilder finalUrl = new StringBuilder(url);
        boolean existingParameters = url.contains("?");

        for (RequestParameter requestParam : getViewConfigPageParameters())
        {
            String key = requestParam.getKey();

            for (String parameterValue : requestParam.getValues())
            {
                // skip parameters already present in either raw or encoded form
                if (!url.contains(key + "=" + parameterValue) &&
                        !url.contains(key + "=" + encodeURLParameterValue(parameterValue, externalContext)))
                {
                    if (!existingParameters)
                    {
                        finalUrl.append("?");
                        existingParameters = true;
                    }
                    else
                    {
                        finalUrl.append("&");
                    }
                    finalUrl.append(key);
                    finalUrl.append("=");

                    if (encodeValues)
                    {
                        finalUrl.append(JsfUtils.encodeURLParameterValue(parameterValue, externalContext));
                    }
                    else
                    {
                        finalUrl.append(parameterValue);
                    }
                }
            }
        }
        return finalUrl.toString();
    }

    /**
     * Adds a parameter to the given url.
     *
     * @param externalContext current external-context
     * @param url current url
     * @param encodeValues flag which indicates if parameter values should be encoded or not
     * @param name the parameter name
     * @param value the parameter value
     * @return url with appended parameter
     */
    public static String addParameter(ExternalContext externalContext, String url,
            boolean encodeValues, String name, String value)
    {
        // don't append if already available
        if (url.contains(name + "=" + value) ||
                url.contains(name + "=" + encodeURLParameterValue(value, externalContext)))
        {
            return url;
        }

        StringBuilder finalUrl = new StringBuilder(url);
        if (url.contains("?"))
        {
            finalUrl.append("&");
        }
        else
        {
            finalUrl.append("?");
        }
        finalUrl.append(name);
        finalUrl.append("=");

        if (encodeValues)
        {
            finalUrl.append(JsfUtils.encodeURLParameterValue(value, externalContext));
        }
        else
        {
            finalUrl.append(value);
        }
        return finalUrl.toString();
    }

    /**
     * Adds the current request-parameters to the given url
     *
     * @param externalContext current external-context
     * @param url current url
     * @param encodeValues flag which indicates if parameter values should be encoded or not
     * @return url with request-parameters
     */
    public static String addRequestParameters(ExternalContext externalContext, String url, boolean encodeValues)
    {
        if (externalContext.getRequestParameterValuesMap().isEmpty())
        {
            return url;
        }

        StringBuilder finalUrl = new StringBuilder(url);
        boolean existingParameters = url.contains("?");

        for (Map.Entry<String, String[]> entry : externalContext.getRequestParameterValuesMap().entrySet())
        {
            for (String value : entry.getValue())
            {
                // skip parameters already present in either raw or encoded form
                if (!url.contains(entry.getKey() + "=" + value) &&
                        !url.contains(entry.getKey() + "=" + encodeURLParameterValue(value, externalContext)))
                {
                    if (!existingParameters)
                    {
                        finalUrl.append("?");
                        existingParameters = true;
                    }
                    else
                    {
                        finalUrl.append("&");
                    }
                    finalUrl.append(entry.getKey());
                    finalUrl.append("=");

                    if (encodeValues)
                    {
                        finalUrl.append(JsfUtils.encodeURLParameterValue(value, externalContext));
                    }
                    else
                    {
                        finalUrl.append(value);
                    }
                }
            }
        }
        return finalUrl.toString();
    }

    /**
     * Encodes the given value using URLEncoder.encode() with the charset returned
     * from ExternalContext.getResponseCharacterEncoding().
     * This is exactly how the ExternalContext impl encodes URL parameter values.
     *
     * @param value value which should be encoded
     * @param externalContext current external-context
     * @return encoded value
     */
    public static String encodeURLParameterValue(String value, ExternalContext externalContext)
    {
        // copied from MyFaces ServletExternalContextImpl.encodeURL()
        try
        {
            return URLEncoder.encode(value, externalContext.getResponseCharacterEncoding());
        }
        catch (UnsupportedEncodingException e)
        {
            throw new UnsupportedOperationException("Encoding type=" +
                    externalContext.getResponseCharacterEncoding() + " not supported", e);
        }
    }

    /** @return the current {@link ViewConfigResolver} CDI bean. */
    public static ViewConfigResolver getViewConfigResolver()
    {
        return BeanProvider.getContextualReference(ViewConfigResolver.class);
    }

    /**
     * Stores the request's FacesMessage list into the window metadata so messages
     * survive the current request. No-op unless "always keep messages" is configured.
     * Falls back to the JSF flash scope when the window context is not active.
     */
    public static void saveFacesMessages(ExternalContext externalContext)
    {
        JsfModuleConfig jsfModuleConfig = BeanProvider.getContextualReference(JsfModuleConfig.class);
        if (!jsfModuleConfig.isAlwaysKeepMessages())
        {
            return;
        }

        try
        {
            WindowMetaData windowMetaData = BeanProvider.getContextualReference(WindowMetaData.class);

            Map<String, Object> requestMap = externalContext.getRequestMap();
            @SuppressWarnings({ "unchecked" })
            List<FacesMessageEntry> facesMessageEntryList =
                    (List<FacesMessageEntry>)requestMap.get(FacesMessageEntry.class.getName());

            if (facesMessageEntryList == null)
            {
                facesMessageEntryList = new CopyOnWriteArrayList<FacesMessageEntry>();
            }
            windowMetaData.setFacesMessageEntryList(facesMessageEntryList);
        }
        catch (ContextNotActiveException e)
        {
            //TODO log it in case of project-stage development
            //we can't handle it correctly -> delegate to the jsf-api (which has some restrictions esp. before v2.2)
            FacesContext.getCurrentInstance().getExternalContext().getFlash().setKeepMessages(true);
        }
    }

    /**
     * Re-adds the FacesMessages saved by {@link #saveFacesMessages(ExternalContext)}
     * to the given context and clears the stored list. No-op unless
     * "always keep messages" is configured or the window context is inactive.
     */
    public static void tryToRestoreMessages(FacesContext facesContext)
    {
        JsfModuleConfig jsfModuleConfig = BeanProvider.getContextualReference(JsfModuleConfig.class);
        if (!jsfModuleConfig.isAlwaysKeepMessages())
        {
            return;
        }

        try
        {
            WindowMetaData windowMetaData = BeanProvider.getContextualReference(WindowMetaData.class);
            @SuppressWarnings({ "unchecked" })
            List<FacesMessageEntry> facesMessageEntryList = windowMetaData.getFacesMessageEntryList();

            if (facesMessageEntryList != null)
            {
                for (FacesMessageEntry facesMessageEntry : facesMessageEntryList)
                {
                    facesContext.addMessage(facesMessageEntry.getComponentId(),
                            facesMessageEntry.getFacesMessage());
                }
                facesMessageEntryList.clear();
            }
        }
        catch (ContextNotActiveException e)
        {
            //TODO discuss how we handle it
        }
    }
}
package com.ternsip.placemod;

import net.minecraft.block.Block;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.EntityList;
import net.minecraft.entity.monster.EntityGolem;
import net.minecraft.entity.passive.EntityVillager;
import net.minecraft.init.Blocks;
import net.minecraft.nbt.CompressedStreamTools;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.tileentity.TileEntityChest;
import net.minecraft.tileentity.TileEntityMobSpawner;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.WeightedRandomChestContent;
import net.minecraft.util.WeightedSpawnerEntity;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.world.World;
import net.minecraft.world.chunk.Chunk;
import net.minecraft.world.chunk.storage.ExtendedBlockStorage;
import net.minecraft.world.storage.loot.LootTableList;
import net.minecraftforge.common.util.Constants;
import net.minecraftforge.fml.common.registry.EntityRegistry;
import net.minecraftforge.fml.common.registry.VillagerRegistry;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.DecimalFormat;
import java.util.*;

/*
 * Holds schematic-extended information and can load/spawn/calibrate it.
 *
 * A Structure is backed by three files:
 *  - schematicFile: the source MCEdit ".schematic" NBT (Materials must be "Alpha");
 *  - flagFile:      a small cached NBT with derived metadata (Method, Biome, size, Lift);
 *  - structureFile: the schematic re-saved with a precomputed "Skin" bitset appended.
 * If flagFile and structureFile both exist, the constructor only loads flags; otherwise
 * it parses the schematic, derives the metadata, and writes both cache files.
 */
public class Structure {

    // Derived metadata: "Method" (String), "Biome" (Integer — see NOTE(review) in paste()),
    // "Width"/"Height"/"Length" (Short), "Lift" (Integer ground level).
    public NBTTagCompound flags = new NBTTagCompound();
    public File schematicFile = null;
    public File flagFile = null;
    public File structureFile = null;

    /*
     * Loads cached flags if both cache files exist; otherwise parses the schematic,
     * computes flags and the spawn-skin, and writes the two cache files.
     * Throws IOException on read/write failure, non-Alpha materials, zero size,
     * or a block-array length that disagrees with Width*Height*Length.
     *
     * NOTE(review): the FileInputStream/FileOutputStream pairs below are closed
     * manually, not via try-with-resources — a thrown exception leaks the stream.
     */
    public Structure(File schematicFile, File flagFile, File structureFile) throws IOException {
        /* Load structure if it exists */
        this.schematicFile = schematicFile;
        this.flagFile = flagFile;
        this.structureFile = structureFile;
        if (flagFile.exists() && structureFile.exists()) {
            // Cache hit: only the flags are needed up front; structureFile is read lazily in paste().
            FileInputStream fis = new FileInputStream(flagFile);
            flags = CompressedStreamTools.readCompressed(fis);
            fis.close();
            return;
        }
        /* Load schematic */
        FileInputStream fis = new FileInputStream(schematicFile);
        NBTTagCompound schematic = CompressedStreamTools.readCompressed(fis);
        fis.close();
        String materials = schematic.getString("Materials");
        if (!materials.equals("Alpha")) {
            throw new IOException("Material of schematic is not an alpha");
        }
        int width = schematic.getShort("Width");
        int height = schematic.getShort("Height");
        int length = schematic.getShort("Length");
        if (width == 0 || height == 0 || length == 0) {
            throw new IOException("Zero size schematic");
        }
        // "AddBlocks" carries the high 4 bits of each block ID (MCEdit extension); may be shorter
        // than Blocks/2 or absent, which compose() tolerates.
        byte[] addBlocks = schematic.getByteArray("AddBlocks");
        byte[] blocksID = schematic.getByteArray("Blocks");
        if (width * height * length != blocksID.length) {
            throw new IOException("Wrong schematic size: " + width * height * length + "/" + blocksID.length);
        }
        short[] blocks = compose(blocksID, addBlocks);
        /* Set flags — the spawn Method is inferred from the schematic's directory path */
        String path = schematicFile.getPath().toLowerCase().replace("\\", "/").replace("//", "/");
        flags.setString("Method", "Common");
        if (path.contains("/underground/")) flags.setString("Method", "Underground");
        if (
                path.contains("/village/") ||
                path.contains("/town/") ||
                path.contains("/villa/")) flags.setString("Method", "Village");
        if (path.contains("/floating/")) flags.setString("Method", "Floating");
        if (path.contains("/water/")) flags.setString("Method", "Water");
        if (path.contains("/underwater/")) flags.setString("Method", "Underwater");
        // Biome is stored as an Integer enum ordinal-style value.
        flags.setInteger("Biome", Biome.detect(blocks).value);
        if (
                flags.getString("Method").equalsIgnoreCase("Water") ||
                flags.getString("Method").equalsIgnoreCase("Underwater")) {
            flags.setInteger("Biome", Biome.Style.WATER.value);
        }
        flags.setShort("Width", (short) width);
        flags.setShort("Height", (short) height);
        flags.setShort("Length", (short) length);
        flags.setInteger("Lift", getLift(blocks));
        /* Generate structure over schematic */
        schematic.setByteArray("Skin", getSkin(blocks).toByteArray());
        /* Save flags */
        flagFile.getParentFile().mkdirs();
        FileOutputStream fosFlag = new FileOutputStream(flagFile);
        CompressedStreamTools.writeCompressed(flags, fosFlag);
        fosFlag.close();
        /* Save structure */
        structureFile.getParentFile().mkdirs();
        FileOutputStream fosStruct = new FileOutputStream(structureFile);
        CompressedStreamTools.writeCompressed(schematic, fosStruct);
        fosStruct.close();
    }

    /*
     * Loads the cached structure file and pastes its blocks into the world at the
     * given Posture (position + rotation/mirror), seeding loot chests, retargeting
     * mob spawners, and (for Village structures) spawning villagers.
     */
    void paste(World world, Posture posture, long seed) throws IOException {
        /* Load and paste structure */
        NBTTagCompound structure;
        FileInputStream fis = new FileInputStream(structureFile);
        structure = CompressedStreamTools.readCompressed(fis);
        fis.close();
        // NOTE(review): 'entities' is read but never used below — dead local, or
        // tile-entity pasting was planned and not finished; confirm intent.
        NBTTagList entities = structure.getTagList("TileEntities", Constants.NBT.TAG_COMPOUND);
        byte[] blocksMetadata = structure.getByteArray("Data");
        final byte[] addBlocks = structure.getByteArray("AddBlocks");
        byte[] blocksID = structure.getByteArray("Blocks");
        short[] blocks = compose(blocksID, addBlocks);
        // Skin marks cells that must NOT be pasted (exterior air/water shell).
        BitSet skin = BitSet.valueOf(structure.getByteArray("Skin"));
        /* Prepare tiles */
        Random random = new Random(seed);
        ArrayList<ResourceLocation> lootTables = new ArrayList<ResourceLocation>() {{
            add(LootTableList.CHESTS_ABANDONED_MINESHAFT);
            add(LootTableList.CHESTS_JUNGLE_TEMPLE);
            add(LootTableList.CHESTS_SIMPLE_DUNGEON);
            add(LootTableList.CHESTS_SPAWN_BONUS_CHEST);
            add(LootTableList.CHESTS_STRONGHOLD_CORRIDOR);
            add(LootTableList.CHESTS_STRONGHOLD_CROSSING);
            add(LootTableList.CHESTS_STRONGHOLD_LIBRARY);
        }};
        if (flags.getString("Method").equalsIgnoreCase("Village")) {
            lootTables.add(LootTableList.CHESTS_VILLAGE_BLACKSMITH);
        }
        // NOTE(review): "Biome" was written with setInteger(...) in the constructor, but is read
        // here with getString(...), which returns "" for a non-string tag — so none of the four
        // biome-specific loot tables below can ever be added. Likely should compare
        // flags.getInteger("Biome") against the corresponding Biome.Style values; confirm and fix.
        if (flags.getString("Biome").equalsIgnoreCase("Sand")) {
            lootTables.add(LootTableList.CHESTS_DESERT_PYRAMID);
        }
        if (flags.getString("Biome").equalsIgnoreCase("Snow")) {
            lootTables.add(LootTableList.CHESTS_IGLOO_CHEST);
        }
        if (flags.getString("Biome").equalsIgnoreCase("Nether")) {
            lootTables.add(LootTableList.CHESTS_NETHER_BRIDGE);
        }
        if (flags.getString("Biome").equalsIgnoreCase("End")) {
            lootTables.add(LootTableList.CHESTS_END_CITY_TREASURE);
        }
        /* Paste */
        int width = posture.getWidth();
        int height = posture.getHeight();
        int length = posture.getLength();
        int startChunkX = posture.getStartChunkX();
        int startChunkZ = posture.getStartChunkZ();
        int sizeChunkX = posture.getEndChunkX() - startChunkX + 1;
        int sizeChunkZ = posture.getEndChunkZ() - startChunkZ + 1;
        // Grab direct references to each affected chunk's block-storage sections so blocks
        // can be written without per-block world overhead.
        ExtendedBlockStorage[][][] storage = new ExtendedBlockStorage[sizeChunkX][sizeChunkZ][16];
        for (int cx = 0; cx < sizeChunkX; ++cx) {
            for (int cz = 0; cz < sizeChunkZ; ++cz) {
                Chunk chunk = world.getChunkFromChunkCoords(cx + startChunkX, cz + startChunkZ);
                for (int sy = 0; sy < 256; sy += 16) {
                    // Set-then-restore a block in every 16-block section: forces the chunk to
                    // allocate an ExtendedBlockStorage for sections that would otherwise be null.
                    IBlockState state = chunk.getBlockState(new BlockPos(0, sy, 0));
                    chunk.setBlockState(new BlockPos(0, sy, 0), Blocks.log.getDefaultState());
                    chunk.setBlockState(new BlockPos(0, sy, 0), state);
                }
                ExtendedBlockStorage[] stack = chunk.getBlockStorageArray();
                System.arraycopy(stack, 0, storage[cx][cz], 0, 16);
            }
        }
        // Identity map over vanilla block IDs, then selectively remapped by config below.
        int [] blockReplaces = new int[256];
        for (int blockID = 0; blockID < 256; ++blockID) {
            blockReplaces[blockID] = blockID;
        }
        if (Decorator.balanceMode) {
            // Balance mode: swap valuable/decorative blocks for mundane equivalents.
            blockReplaces[Block.getIdFromBlock(Blocks.bedrock)] = Block.getIdFromBlock(Blocks.cobblestone);
            blockReplaces[Block.getIdFromBlock(Blocks.iron_block)] = Block.getIdFromBlock(Blocks.mossy_cobblestone);
            blockReplaces[Block.getIdFromBlock(Blocks.gold_block)] = Block.getIdFromBlock(Blocks.stonebrick);
            blockReplaces[Block.getIdFromBlock(Blocks.diamond_block)] = Block.getIdFromBlock(Blocks.mossy_cobblestone);
            blockReplaces[Block.getIdFromBlock(Blocks.lapis_block)] = Block.getIdFromBlock(Blocks.stonebrick);
            blockReplaces[Block.getIdFromBlock(Blocks.emerald_block)] = Block.getIdFromBlock(Blocks.mossy_cobblestone);
            blockReplaces[Block.getIdFromBlock(Blocks.wool)] = Block.getIdFromBlock(Blocks.log);
            blockReplaces[Block.getIdFromBlock(Blocks.beacon)] = Block.getIdFromBlock(Blocks.quartz_block);
        }
        if (Decorator.preventCommandBlock) {
            blockReplaces[Block.getIdFromBlock(Blocks.command_block)] = Block.getIdFromBlock(Blocks.mossy_cobblestone);
        }
        if (Decorator.preventMobSpawners) {
            blockReplaces[Block.getIdFromBlock(Blocks.mob_spawner)] = Block.getIdFromBlock(Blocks.mossy_cobblestone);
        }
        double lootChance = Decorator.lootChance;
        String[] mobs = new String[]{"Enderman", "CaveSpider", "Chicken", "Creeper", "Witch", "Slime", "Spider", "Sheep", "Blaze", "Bat", "PigZombie", "Ghast", "Cow", "SnowMan", "LavaSlime", "Zombie", "Skeleton", "Pig"};
        Block[] vanillaBlocks = Decorator.vanillaBlocks;
        boolean allowOnlyVanillaBlocks = Decorator.allowOnlyVanillaBlocks;
        // getStateFromMeta -> IllegalArgumentException
        // Iteration order matches the schematic layout: index = x + z*width + y*width*length.
        for (int y = 0, index = 0; y < height; ++y) {
            for (int z = 0; z < length; ++z) {
                for (int x = 0; x < width ; ++x, ++index) {
                    if (skin.get(index)) {
                        continue;  // exterior shell cell — leave the world block untouched
                    }
                    BlockPos blockPos = posture.getWorldPos(x, y, z);
                    if (blockPos.getY() < 0 || blockPos.getY() > 255) {
                        continue;  // outside vanilla build height
                    }
                    int blockID = blocks[index];
                    Block block = null;
                    int meta = 0;
                    if (blockID >= 0 && blockID < 256) {
                        blockID = blockReplaces[blockID];
                        block = vanillaBlocks[blockID];
                    }
                    if (block == null) {
                        // Non-vanilla (modded) ID: resolve via the registry unless disallowed.
                        if (allowOnlyVanillaBlocks) {
                            continue;
                        }
                        block = Block.getBlockById(blockID);
                    } else {
                        // Vanilla block: rotate/mirror its metadata to match the posture.
                        meta = posture.getWorldMeta(block, blocksMetadata[index]);
                    }
                    IBlockState state = block.getDefaultState();
                    try {
                        state = block.getStateFromMeta(meta);
                    } catch (IllegalArgumentException ignore) {}  // fall back to default state on bad meta
                    int rx = blockPos.getX() - startChunkX * 16;
                    int ry = blockPos.getY();
                    int rz = blockPos.getZ() - startChunkZ * 16;
                    // Fast path: write straight into the chunk section; slow path via world otherwise.
                    ExtendedBlockStorage store = storage[rx / 16][rz / 16][ry / 16];
                    if (store != null) {
                        store.set(rx % 16, ry % 16, rz % 16, state);
                    } else {
                        world.setBlockState(blockPos, state);
                    }
                    //world.markBlockRangeForRenderUpdate(blockPos, blockPos);
                    //world.setBlockState(blockPos, state);
                    //chunk.setModified(true);
                    //world.setBlockState(blockPos, state, 2);
                    TileEntity blockTile = world.getTileEntity(blockPos);
                    if (blockTile != null) {
                        if (blockTile instanceof TileEntityChest && lootChance >= random.nextDouble()) {
                            TileEntityChest chest = (TileEntityChest) blockTile;
                            int id = Math.abs(random.nextInt() % lootTables.size());
                            chest.setLoot(lootTables.get(id), random.nextLong());
                        }
                        if (blockTile instanceof TileEntityMobSpawner) {
                            TileEntityMobSpawner spawner = (TileEntityMobSpawner) blockTile;
                            spawner.getSpawnerBaseLogic().setEntityName(mobs[Math.abs(random.nextInt()) % mobs.length]);
                        }
                    }
                }
            }
        }
        world.markBlockRangeForRenderUpdate(posture.getWorldPos(0, 0, 0), posture.getWorldPos(width, height, length));
        /* Populate village */
        if (flags.getString("Method").equalsIgnoreCase("Village")) {
            int count = (int) (1 + Math.cbrt(Math.abs(posture.getWidth() * posture.getLength()))) / 2;
            int maxTries = 16 + count * count;
            for (int i = 0; i < maxTries && count > 0; ++i) {
                // NOTE(review): Math.abs(random.nextInt()) is negative for Integer.MIN_VALUE
                // (1 in 2^32) — would produce a negative offset/profession here; confirm acceptable.
                int xPos = posture.getPosX() + Math.abs(random.nextInt()) % posture.getSizeX();
                int yPos = posture.getPosY() + Math.abs(random.nextInt()) % posture.getSizeY();
                int zPos = posture.getPosZ() + Math.abs(random.nextInt()) % posture.getSizeZ();
                // Need two stacked air blocks to stand in.
                if (!world.isAirBlock(new BlockPos(xPos, yPos, zPos)) || !world.isAirBlock(new BlockPos(xPos, yPos + 1, zPos))) {
                    continue;
                }
                EntityVillager villager = new EntityVillager(world, Math.abs(random.nextInt()) % 5);
                float facing = MathHelper.wrapAngleTo180_float(random.nextFloat() * 360.0F);
                villager.setLocationAndAngles(xPos + 0.5, yPos + 0.1, zPos + 0.5, facing, 0.0F);
                world.spawnEntityInWorld(villager);
                villager.playLivingSound();
                --count;
            }
        }
        /* Spawn entities */
    }

    /* Combine all 8b-blocksID and 8b-addBlocks to 16b-block.
     * AddBlocks packs two 4-bit high nibbles per byte (even index in the low nibble,
     * odd index in the high nibble); missing AddBlocks entries mean a plain 8-bit ID. */
    private short[] compose(byte[] blocksID, byte[] addBlocks) {
        short[] blocks = new short[blocksID.length];
        for (int index = 0; index < blocksID.length; index++) {
            if ((index >> 1) >= addBlocks.length) {
                blocks[index] = (short) (blocksID[index] & 0xFF);
            } else {
                if ((index & 1) == 0) {
                    blocks[index] = (short) (((addBlocks[index >> 1] & 0x0F) << 8) + (blocksID[index] & 0xFF));
                } else {
                    blocks[index] = (short) (((addBlocks[index >> 1] & 0xF0) << 4) + (blocksID[index] & 0xFF));
                }
            }
        }
        return blocks;
    }

    /* Get structure ground level (lift) to dig it down.
     * level[x][z] counts soil (and, when not underwater, liquid) blocks per column;
     * levelMax[x][z] records the topmost such block's y+1. The result is the max of the
     * average border-column top and the overall average count. */
    private int getLift(short[] blocks) {
        int width = flags.getShort("Width");
        int height = flags.getShort("Height");
        int length = flags.getShort("Length");
        int[][] level = new int[width][length];
        int[][] levelMax = new int[width][length];
        boolean[] liquid = Decorator.liquid;
        boolean[] soil = Decorator.soil;
        boolean dry = !flags.getString("Method").equalsIgnoreCase("Underwater");
        // Zero-positioned posture used purely as an index<->(x,y,z) converter.
        Posture posture = new Posture(0, 0, 0, 0, 0, 0, false, false, false, width, height, length);
        for (int index = 0; index < blocks.length; ++index) {
            if (blocks[index] >= 0 && blocks[index] < 256) {
                if (soil[blocks[index]] || (dry && liquid[blocks[index]])) {
                    level[posture.getX(index)][posture.getZ(index)] += 1;
                    levelMax[posture.getX(index)][posture.getZ(index)] = posture.getY(index) + 1;
                }
            }
        }
        long borders = 0, totals = 0;
        for (int x = 0; x < width; ++x) {
            for (int z = 0; z < length; ++z) {
                totals += level[x][z];
                if (x == 0 || z == 0 || x == width - 1 || z == length - 1) {
                    borders += levelMax[x][z];
                }
            }
        }
        int borderLevel = (int) Math.round(borders / ((width + length) * 2.0));
        // NOTE(review): totals / (width * length) is integer division (fraction truncated
        // before rounding) — presumably a floating-point division was intended; confirm.
        int wholeLevel = Math.round(totals / (width * length));
        return Math.max(borderLevel, wholeLevel);
    }

    /* Generate schematic skin as bitset of possible(0) and restricted(1) to spawn blocks.
     * Seeds a BFS from air (and, for water methods, water) cells on the X/Z boundary faces,
     * then floods inward; 'clipped' tracks per-cell direction flags so the fill only continues
     * along the direction it entered from, carving the exterior shell without eating interiors.
     * A final pass extends the skin downward through contiguous skin-type blocks. */
    private BitSet getSkin(short[] blocks) {
        // One bit flag per propagation direction.
        final byte Y_INC = 1;
        final byte Y_DEC = 32;
        final byte X_INC = 4;
        final byte X_DEC = 2;
        final byte Z_INC = 16;
        final byte Z_DEC = 8;
        int width = flags.getShort("Width");
        int height = flags.getShort("Height");
        int length = flags.getShort("Length");
        Posture posture = new Posture(0, 0, 0, 0, 0, 0, false, false, false, width, height, length);
        // Block IDs considered "transparent" for skin purposes.
        HashSet<Integer> skinBlocks = new HashSet<Integer>();
        skinBlocks.add(Block.getIdFromBlock(Blocks.air));
        if (
                flags.getString("Method").equalsIgnoreCase("Water") ||
                flags.getString("Method").equalsIgnoreCase("Underwater")) {
            skinBlocks.add(Block.getIdFromBlock(Blocks.water));
            skinBlocks.add(Block.getIdFromBlock(Blocks.flowing_water));
        }
        Queue<Integer> indexQueue = new LinkedList<Integer>();
        byte[] clipped = new byte[width * height * length];
        BitSet working = new BitSet(width * height * length);
        BitSet skin = new BitSet(width * height * length);
        // Seed the BFS from both X faces (dir 0/1) and both Z faces at every height.
        for (int dir = 0; dir <= 1; ++dir) {
            for (int y = 0; y < height; ++y) {
                for (int z = 0; z < length; ++z) {
                    int index = dir == 0 ? posture.getIndex(0, y, z) : posture.getIndex(width - 1, y, z);
                    int flag = dir == 0 ? X_INC : X_DEC;
                    if (skinBlocks.contains((int) blocks[index])) {
                        if (!working.get(index)) {
                            indexQueue.add(index);
                            working.set(index);
                        }
                        clipped[index] |= flag;
                        skin.set(index);
                    }
                }
                for (int x = 0; x < width; ++x) {
                    int index = dir == 0 ? posture.getIndex(x, y, 0) : posture.getIndex(x, y, length - 1);
                    int flag = dir == 0 ? Z_INC : Z_DEC;
                    if (skinBlocks.contains((int) blocks[index])) {
                        if (!working.get(index)) {
                            indexQueue.add(index);
                            working.set(index);
                        }
                        clipped[index] |= flag;
                        skin.set(index);
                    }
                }
            }
        }
        // headID[k] is the flag carried forward along direction k; backID[k] its opposite.
        byte[] headID = {Y_INC, Y_DEC, X_INC, X_DEC, Z_INC, Z_DEC};
        byte[] backID = {Y_DEC, Y_INC, X_DEC, X_INC, Z_DEC, Z_INC};
        while (!indexQueue.isEmpty()) {
            int index = indexQueue.remove();
            working.clear(index);
            int x = posture.getX(index);
            int y = posture.getY(index);
            int z = posture.getZ(index);
            // Neighbor indices in the same order as headID/backID: +y, -y, +x, -x, +z, -z.
            int[] idx = {
                    posture.getIndex(x, y + 1, z), posture.getIndex(x, y - 1, z),
                    posture.getIndex(x + 1, y, z), posture.getIndex(x - 1, y, z),
                    posture.getIndex(x, y, z + 1), posture.getIndex(x, y, z - 1) };
            boolean[] cond = { y < height - 1, y > 0, x < width - 1, x > 0, z < length - 1, z > 0 };
            for (int k = 0; k < 6; ++k) {
                int next = idx[k];
                // Propagate only straight ahead, and only into cells not already visited
                // from this axis in either direction.
                if (cond[k] && (clipped[index] & headID[k]) > 0 && (clipped[next] & headID[k]) == 0 && (clipped[next] & backID[k]) == 0 && (skinBlocks.contains((int) blocks[next]))) {
                    if (!working.get(next)) {
                        working.set(next);
                        indexQueue.add(next);
                    }
                    clipped[next] |= headID[k];
                    skin.set(next);
                }
            }
        }
        // Drip-down pass: extend every skin cell downward through contiguous skin-type blocks.
        for (int index = 0; index < skin.size(); ++index) {
            if (skin.get(index)) {
                int x = posture.getX(index);
                int y = posture.getY(index);
                int z = posture.getZ(index);
                while (y-- > 0) {
                    int next = posture.getIndex(x, y, z);
                    if (!skin.get(next) && skinBlocks.contains((int) blocks[next])) {
                        skin.set(next);
                    } else {
                        break;
                    }
                }
            }
        }
        return skin;
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.testing; import com.facebook.presto.Session; import com.facebook.presto.spi.ConnectorPageSource; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.Page; import com.facebook.presto.spi.block.Block; import com.facebook.presto.spi.type.SqlDate; import com.facebook.presto.spi.type.SqlTime; import com.facebook.presto.spi.type.SqlTimeWithTimeZone; import com.facebook.presto.spi.type.SqlTimestamp; import com.facebook.presto.spi.type.SqlTimestampWithTimeZone; import com.facebook.presto.spi.type.Type; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.joda.time.DateTimeZone; import java.sql.Date; import java.sql.Time; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.TimeUnit; import static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; public class MaterializedResult implements Iterable<MaterializedRow> { public static final int DEFAULT_PRECISION = 5; private final List<MaterializedRow> rows; private final List<Type> types; private final Map<String, String> 
setSessionProperties; private final Set<String> resetSessionProperties; public MaterializedResult(List<MaterializedRow> rows, List<? extends Type> types) { this(rows, types, ImmutableMap.of(), ImmutableSet.of()); } public MaterializedResult(List<MaterializedRow> rows, List<? extends Type> types, Map<String, String> setSessionProperties, Set<String> resetSessionProperties) { this.rows = ImmutableList.copyOf(checkNotNull(rows, "rows is null")); this.types = ImmutableList.copyOf(checkNotNull(types, "types is null")); this.setSessionProperties = ImmutableMap.copyOf(checkNotNull(setSessionProperties, "setSessionProperties is null")); this.resetSessionProperties = ImmutableSet.copyOf(checkNotNull(resetSessionProperties, "resetSessionProperties is null")); } public int getRowCount() { return rows.size(); } @Override public Iterator<MaterializedRow> iterator() { return rows.iterator(); } public List<MaterializedRow> getMaterializedRows() { return rows; } public List<Type> getTypes() { return types; } public Map<String, String> getSetSessionProperties() { return setSessionProperties; } public Set<String> getResetSessionProperties() { return resetSessionProperties; } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if ((obj == null) || (getClass() != obj.getClass())) { return false; } MaterializedResult o = (MaterializedResult) obj; return Objects.equals(types, o.types) && Objects.equals(rows, o.rows) && Objects.equals(setSessionProperties, o.setSessionProperties) && Objects.equals(resetSessionProperties, o.resetSessionProperties); } @Override public int hashCode() { return Objects.hash(rows, types, setSessionProperties, resetSessionProperties); } @Override public String toString() { return toStringHelper(this) .add("rows", rows) .add("types", types) .add("setSessionProperties", setSessionProperties) .add("resetSessionProperties", resetSessionProperties) .toString(); } public MaterializedResult toJdbcTypes() { 
ImmutableList.Builder<MaterializedRow> jdbcRows = ImmutableList.builder(); for (MaterializedRow row : rows) { jdbcRows.add(convertToJdbcTypes(row)); } return new MaterializedResult(jdbcRows.build(), types, setSessionProperties, resetSessionProperties); } private static MaterializedRow convertToJdbcTypes(MaterializedRow prestoRow) { List<Object> jdbcValues = new ArrayList<>(); for (int field = 0; field < prestoRow.getFieldCount(); field++) { Object prestoValue = prestoRow.getField(field); Object jdbcValue; if (prestoValue instanceof SqlDate) { int days = ((SqlDate) prestoValue).getDays(); jdbcValue = new Date(TimeUnit.DAYS.toMillis(days)); } else if (prestoValue instanceof SqlTime) { jdbcValue = new Time(((SqlTime) prestoValue).getMillisUtc()); } else if (prestoValue instanceof SqlTimeWithTimeZone) { jdbcValue = new Time(((SqlTimeWithTimeZone) prestoValue).getMillisUtc()); } else if (prestoValue instanceof SqlTimestamp) { jdbcValue = new Timestamp(((SqlTimestamp) prestoValue).getMillisUtc()); } else if (prestoValue instanceof SqlTimestampWithTimeZone) { jdbcValue = new Timestamp(((SqlTimestampWithTimeZone) prestoValue).getMillisUtc()); } else { jdbcValue = prestoValue; } jdbcValues.add(jdbcValue); } return new MaterializedRow(prestoRow.getPrecision(), jdbcValues); } public MaterializedResult toTimeZone(DateTimeZone oldTimeZone, DateTimeZone newTimeZone) { ImmutableList.Builder<MaterializedRow> jdbcRows = ImmutableList.builder(); for (MaterializedRow row : rows) { jdbcRows.add(toTimeZone(row, oldTimeZone, newTimeZone)); } return new MaterializedResult(jdbcRows.build(), types); } private static MaterializedRow toTimeZone(MaterializedRow prestoRow, DateTimeZone oldTimeZone, DateTimeZone newTimeZone) { List<Object> values = new ArrayList<>(); for (int field = 0; field < prestoRow.getFieldCount(); field++) { Object value = prestoRow.getField(field); if (value instanceof Date) { long oldMillis = ((Date) value).getTime(); long newMillis = 
oldTimeZone.getMillisKeepLocal(newTimeZone, oldMillis); value = new Date(newMillis); } values.add(value); } return new MaterializedRow(prestoRow.getPrecision(), values); } public static MaterializedResult materializeSourceDataStream(Session session, ConnectorPageSource pageSource, List<Type> types) { return materializeSourceDataStream(session.toConnectorSession(), pageSource, types); } public static MaterializedResult materializeSourceDataStream(ConnectorSession session, ConnectorPageSource pageSource, List<Type> types) { MaterializedResult.Builder builder = resultBuilder(session, types); while (!pageSource.isFinished()) { Page outputPage = pageSource.getNextPage(); if (outputPage == null) { break; } builder.page(outputPage); } return builder.build(); } public static Builder resultBuilder(Session session, Type... types) { return resultBuilder(session.toConnectorSession(), types); } public static Builder resultBuilder(Session session, Iterable<? extends Type> types) { return resultBuilder(session.toConnectorSession(), types); } public static Builder resultBuilder(ConnectorSession session, Type... types) { return resultBuilder(session, ImmutableList.copyOf(types)); } public static Builder resultBuilder(ConnectorSession session, Iterable<? extends Type> types) { return new Builder(session, ImmutableList.copyOf(types)); } public static class Builder { private final ConnectorSession session; private final List<Type> types; private final ImmutableList.Builder<MaterializedRow> rows = ImmutableList.builder(); Builder(ConnectorSession session, List<Type> types) { this.session = session; this.types = ImmutableList.copyOf(types); } public Builder rows(List<MaterializedRow> rows) { this.rows.addAll(rows); return this; } public Builder row(Object... 
values) { rows.add(new MaterializedRow(DEFAULT_PRECISION, values)); return this; } public Builder rows(Object[][] rows) { for (Object[] row : rows) { row(row); } return this; } public Builder pages(Iterable<Page> pages) { for (Page page : pages) { this.page(page); } return this; } public Builder page(Page page) { checkNotNull(page, "page is null"); checkArgument(page.getChannelCount() == types.size(), "Expected a page with %s columns, but got %s columns", types.size(), page.getChannelCount()); for (int position = 0; position < page.getPositionCount(); position++) { List<Object> values = new ArrayList<>(page.getChannelCount()); for (int channel = 0; channel < page.getChannelCount(); channel++) { Type type = types.get(channel); Block block = page.getBlock(channel); values.add(type.getObjectValue(session, block, position)); } values = Collections.unmodifiableList(values); rows.add(new MaterializedRow(DEFAULT_PRECISION, values)); } return this; } public MaterializedResult build() { return new MaterializedResult(rows.build(), types); } } }
package org.edx.mobile.view; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.os.Parcelable; import android.support.v4.app.LoaderManager; import android.support.v4.content.Loader; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.CompoundButton; import android.widget.LinearLayout; import android.widget.ProgressBar; import android.widget.Switch; import android.widget.TextView; import com.facebook.Settings; import com.facebook.widget.FacebookDialog; import com.facebook.widget.LikeView; import org.apache.http.protocol.HTTP; import org.edx.mobile.R; import org.edx.mobile.base.CourseDetailBaseFragment; import org.edx.mobile.loader.AsyncTaskResult; import org.edx.mobile.loader.FriendsInCourseLoader; import org.edx.mobile.model.api.AnnouncementsModel; import org.edx.mobile.model.api.EnrolledCoursesResponse; import org.edx.mobile.module.facebook.FacebookSessionUtil; import org.edx.mobile.module.facebook.IUiLifecycleHelper; import org.edx.mobile.module.prefs.PrefManager; import org.edx.mobile.social.SocialMember; import org.edx.mobile.social.facebook.FacebookProvider; import org.edx.mobile.task.GetAnnouncementTask; import org.edx.mobile.util.FileUtil; import org.edx.mobile.util.SocialUtils; import org.edx.mobile.view.custom.EdxWebView; import org.edx.mobile.view.custom.SocialAffirmView; import org.edx.mobile.view.custom.SocialFacePileView; import org.edx.mobile.view.custom.SocialShareView; import org.edx.mobile.view.custom.URLInterceptorWebViewClient; import org.edx.mobile.view.dialog.InstallFacebookDialog; import java.io.IOException; import java.util.ArrayList; import java.util.List; public class CourseCombinedInfoFragment extends CourseDetailBaseFragment implements View.OnClickListener, LoaderManager.LoaderCallbacks<AsyncTaskResult<List<SocialMember>>> { static final String TAG = CourseCombinedInfoFragment.class.getCanonicalName(); private final int 
LOADER_ID = 0x416BED; private EdxWebView announcementWebView; private LinearLayout facePileContainer; private SocialFacePileView facePileView; private LayoutInflater inflater; private View certificateContainer; private TextView groupLauncher; private View notificationSettingRow; private Switch notificationSwitch; private EnrolledCoursesResponse courseData; private List<AnnouncementsModel> savedAnnouncements; private SocialAffirmView likeButton; private SocialShareView shareButton; private IUiLifecycleHelper uiHelper; private PrefManager featuresPref; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); logger.debug("created: " + getClass().getName()); featuresPref = new PrefManager(getActivity(), PrefManager.Pref.FEATURES); Settings.sdkInitialize(getActivity()); uiHelper = IUiLifecycleHelper.Factory.getInstance(getActivity(), null); uiHelper.onCreate(savedInstanceState); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View view = inflater.inflate(R.layout.fragment_course_combined_info, container, false); certificateContainer = view.findViewById(R.id.combined_course_certificate_container); likeButton = (SocialAffirmView) view.findViewById(R.id.course_affirm_btn); //Register clicks with the OnClickListener interface shareButton = (SocialShareView) view.findViewById(R.id.combined_course_social_share); shareButton.setOnClickListener(this); TextView handoutText = (TextView) view.findViewById(R.id.combined_course_handout_text); View handoutArrow = view.findViewById(R.id.next_arrow); if (environment.getConfig().isNewCourseNavigationEnabled() ) { handoutText.setVisibility(View.GONE); handoutArrow.setVisibility(View.GONE); } else { handoutText.setOnClickListener(this); } TextView certificateButton = (TextView) view.findViewById(R.id.view_cert_button); certificateButton.setOnClickListener(this); facePileContainer = (LinearLayout) 
view.findViewById(R.id.social_face_pile_container); facePileContainer.setOnClickListener(this); facePileView = (SocialFacePileView) facePileContainer.findViewById(R.id.combined_course_facepile); groupLauncher = (TextView) view.findViewById(R.id.combined_course_social_group); groupLauncher.setOnClickListener(this); announcementWebView = (EdxWebView) view.findViewById(R.id.announcement_webview); URLInterceptorWebViewClient client = new URLInterceptorWebViewClient( getActivity(), announcementWebView); // treat every link as external link in this view, so that all links will open in external browser client.setAllLinksAsExternal(true); notificationSettingRow = view.findViewById(R.id.notificaton_setting_row); notificationSwitch = (Switch) view.findViewById(R.id.notification_switch); return view; } @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); if (savedInstanceState != null) { try { savedAnnouncements = savedInstanceState.getParcelableArrayList(Router.EXTRA_ANNOUNCEMENTS); } catch (Exception ex) { logger.error(ex); } } try { final Bundle bundle = getArguments(); courseData = (EnrolledCoursesResponse) bundle.getSerializable(Router.EXTRA_ENROLLMENT); FacebookProvider fbProvider = new FacebookProvider(); if(courseData != null) { //Create the inflater used to create the announcement list inflater = (LayoutInflater) getActivity().getSystemService(Context.LAYOUT_INFLATER_SERVICE); if (savedAnnouncements == null) { loadAnnouncementData(courseData); } else { populateAnnouncements(savedAnnouncements); } String url = courseData.getCourse().getCourse_url(); SocialUtils.SocialType socialType = SocialUtils.SocialType.NONE; if (fbProvider.isLoggedIn()){ socialType = SocialUtils.SocialType.FACEBOOK; } if (url != null) { likeButton.setSocialAffirmType(socialType, url); } shareButton.setSocialShareType(socialType); updateInteractiveVisibility(); } showSocialEnabled(fbProvider.isLoggedIn()); if ( 
// NOTE(review): this chunk begins mid-method — the enclosing `if (` and the method/class
// headers are above the visible region. Tokens below are unchanged; only comments added.
// Toggle the push-notification row: visible + wired up when notifications are enabled
// in config and we have a course; otherwise hidden with its listener cleared.
environment.getConfig().isNotificationEnabled() && courseData != null && courseData.getCourse() != null){
    notificationSettingRow.setVisibility(View.VISIBLE);
    final String courseId = courseData.getCourse().getId();
    final String subscriptionId = courseData.getCourse().getSubscription_id();
    // Reflect the persisted subscription state in the switch before attaching the listener.
    boolean isSubscribed = environment.getNotificationDelegate().isSubscribedByCourseId(courseId);
    notificationSwitch.setChecked(isSubscribed);
    notificationSwitch.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
        @Override
        public void onCheckedChanged(CompoundButton buttonView,boolean isChecked) {
            // Persist the user's new notification preference for this course.
            environment.getNotificationDelegate().changeNotificationSetting(
                    courseId, subscriptionId, isChecked);
        }
    });
} else {
    // Detach the listener so a recycled switch can't fire stale callbacks.
    notificationSwitch.setOnCheckedChangeListener(null);
    notificationSettingRow.setVisibility(View.GONE);
}
} catch (Exception ex) {
    logger.error(ex);
}
}

/**
 * Forwards activity results to the Facebook LikeView so its share/like flows complete.
 */
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    LikeView.handleOnActivityResult(getActivity(), requestCode, resultCode, data);
    // uiHelper.onActivityResult(requestCode, resultCode, data, null);
}

// Lifecycle passthroughs: the Facebook UiLifecycleHelper must track fragment lifecycle.
@Override
public void onResume() {
    super.onResume();
    uiHelper.onResume();
}

@Override
public void onPause() {
    super.onPause();
    uiHelper.onPause();
}

@Override
public void onDestroy() {
    super.onDestroy();
    uiHelper.onDestroy();
}

/**
 * Saves fetched announcements so a config change does not re-trigger the network task.
 */
@Override
public void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    if (savedAnnouncements != null) {
        outState.putParcelableArrayList(Router.EXTRA_ANNOUNCEMENTS, new ArrayList<Parcelable>(savedAnnouncements));
    }
    uiHelper.onSaveInstanceState(outState);
}

/**
 * Kicks off the async announcements fetch for the given enrollment.
 * On success the list is cached in {@code savedAnnouncements} and rendered;
 * on any failure the empty-state message is shown instead.
 */
private void loadAnnouncementData(EnrolledCoursesResponse enrollment) {
    GetAnnouncementTask task = new GetAnnouncementTask(getActivity(), enrollment) {
        @Override
        public void onException(Exception ex) {
            showEmptyAnnouncementMessage();
        }
        @Override
        public void onSuccess(List<AnnouncementsModel> announcementsList) {
            try {
                savedAnnouncements = announcementsList;
                populateAnnouncements(savedAnnouncements);
            } catch (Exception ex) {
                logger.error(ex);
                showEmptyAnnouncementMessage();
            }
        }
    };
    ProgressBar progressBar = (ProgressBar) getView().findViewById(R.id.api_spinner);
    task.setProgressDialog(progressBar);
    task.execute();
}

/**
 * Shows or hides the social (Facebook) UI. The ALLOW_SOCIAL_FEATURES preference
 * overrides the {@code enabled} argument; when enabled, lazily fetches the course
 * member list if it has not been loaded yet.
 */
private void showSocialEnabled(boolean enabled){
    View view = getView();
    if (view != null){
        boolean allowSocialFeatures = featuresPref.getBoolean(PrefManager.Key.ALLOW_SOCIAL_FEATURES, true);
        View loggedInLayout = view.findViewById(R.id.social_layout);
        View groupLinkView = view.findViewById(R.id.combined_course_social_group_container);
        if (!allowSocialFeatures) {
            // Preference wins over the caller-supplied flag.
            enabled = false;
            groupLinkView.setVisibility(View.GONE);
        } else {
            groupLinkView.setVisibility(View.VISIBLE);
        }
        loggedInLayout.setVisibility(enabled ? View.VISIBLE : View.GONE);
        if (enabled) {
            if (courseData.getCourse().getMembers_list() == null) {
                fetchCourseMembers();
            } else {
                populateFacePile();
            }
        }
    }
}

/**
 * Starts (or restarts) the loader that fetches friends enrolled in this course,
 * authenticating with the current Facebook access token.
 */
private void fetchCourseMembers(){
    Bundle args = new Bundle();
    args.putString(FriendsInCourseLoader.TAG_COURSE_ID, courseData.getCourse().getId());
    args.putString(FriendsInCourseLoader.TAG_COURSE_OAUTH, FacebookSessionUtil.getAccessToken());
    getLoaderManager().restartLoader(LOADER_ID, args, this);
}

/**
 * Renders the avatar "face pile" from the cached member list, hiding the
 * container entirely when there are no members.
 */
private void populateFacePile(){
    List<SocialMember> courseFriends = courseData.getCourse().getMembers_list();
    facePileView.clearAvatars();
    if (courseFriends != null && courseFriends.size() > 0) {
        facePileView.setMemberList(courseFriends);
        facePileContainer.setVisibility(View.VISIBLE);
    } else {
        facePileContainer.setVisibility(View.GONE);
    }
}

/**
 * Builds an HTML document from the announcement list (header date + content per
 * item, styled by the bundled CSS asset) and loads it into the WebView.
 * Shows the empty-state message when the list is null or empty.
 */
private void populateAnnouncements(List<AnnouncementsModel> announcementsList) {
    if (announcementsList != null && announcementsList.size() > 0) {
        hideEmptyAnnouncementMessage();
        StringBuffer buff = new StringBuffer();
        buff.append("<head>");
        // add meta viewport
        buff.append("<meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">");
        try {
            String cssFileContent =
                    FileUtil.loadTextFileFromAssets(getActivity(), "css/handouts-announcements.css");
            // add css file
            buff.append("<style>");
            buff.append(cssFileContent);
            buff.append("</style>");
        } catch (IOException e) {
            // Missing CSS is non-fatal: render unstyled rather than fail.
            logger.error(e);
        }
        buff.append("</head>");
        buff.append("<body>");
        for (AnnouncementsModel m : announcementsList) {
            buff.append("<div class=\"announcement-header\">");
            buff.append(m.getDate());
            buff.append("</div>");
            buff.append("<div class=\"announcement-separator\"></div>");
            buff.append("<div class=\"announcement\">");
            buff.append(m.getContent());
            buff.append("</div>");
        }
        buff.append("</body>");
        announcementWebView.clearCache(true);
        // Base URL is the API host so relative links/images in announcement HTML resolve.
        announcementWebView.loadDataWithBaseURL(environment.getConfig().getApiHostURL(), buff.toString(), "text/html", HTTP.UTF_8, null);
    } else {
        showEmptyAnnouncementMessage();
    }
}

/**
 * Refreshes visibility of the certificate link (earned certificates only) and the
 * Facebook group launcher (only when the course exposes a Facebook group).
 */
private void updateInteractiveVisibility() {
    if (certificateContainer != null) {
        certificateContainer.setVisibility((courseData != null && courseData.isCertificateEarned()) ? View.VISIBLE : View.GONE);
    }
    if (groupLauncher != null) {
        groupLauncher.setVisibility((courseData != null && courseData.getCourse().isGroupAvailable(SocialUtils.SocialType.FACEBOOK)) ?
                View.VISIBLE : View.GONE);
    }
}

/** Shows the "no announcements" placeholder; swallows view-lookup failures. */
public void showEmptyAnnouncementMessage(){
    try{
        if(getView()!=null){
            getView().findViewById(R.id.no_announcement_tv).setVisibility(View.VISIBLE);
        }
    }catch(Exception e){
        logger.error(e);
    }
}

/** Hides the "no announcements" placeholder; swallows view-lookup failures. */
private void hideEmptyAnnouncementMessage(){
    try{
        if(getView()!=null){
            getView().findViewById(R.id.no_announcement_tv).setVisibility(View.GONE);
        }
    }catch(Exception e){
        logger.error(e);
    }
}

/**
 * Central click dispatcher for the handout link, certificate button, Facebook
 * share, face-pile (friends list), and Facebook group launcher.
 */
@Override
public void onClick(View view) {
    switch (view.getId()){
        case R.id.combined_course_handout_text:
            if (courseData != null) {
                environment.getRouter().showHandouts(getActivity(), courseData);
            }
            break;
        case R.id.view_cert_button:
            if (courseData != null) {
                Intent certificateIntent = new Intent(getActivity(),
                        CertificateActivity.class);
                certificateIntent.putExtra(CertificateFragment.ENROLLMENT, courseData);
                startActivity(certificateIntent);
            }
            break;
        case R.id.combined_course_social_share:
            FacebookProvider fbProvider = new FacebookProvider();
            FacebookDialog dialog = (FacebookDialog) fbProvider.shareCourse(getActivity(), courseData.getCourse());
            if (dialog != null) {
                // Analytics failures must not block the share flow.
                try{
                    environment.getSegment().courseShared(courseData.getCourse().getId(), SocialUtils.Values.FACEBOOK);
                }catch(Exception e){
                    logger.error(e);
                }
                uiHelper.trackPendingDialogCall(dialog.present());
            } else {
                // Native FB app unavailable — prompt the user to install it.
                new InstallFacebookDialog().show(getFragmentManager(), null);
            }
            break;
        case R.id.social_face_pile_container:
            if (courseData != null) {
                Intent friendsInGroupIntent = new Intent(getActivity(),
                        FriendsInCourseActivity.class);
                friendsInGroupIntent.putExtra(FriendsInCourseActivity.EXTRA_COURSE, courseData.getCourse());
                startActivity(friendsInGroupIntent);
            }
            break;
        case R.id.combined_course_social_group:
            try{
                environment.getSegment().courseGroupAccessed(courseData.getCourse().getId());
            }catch(Exception e){
                logger.error(e);
            }
            Intent groupLaunchIntent = SocialUtils.makeGroupLaunchIntent(getActivity(),
                    String.valueOf(courseData.getCourse().getCourseGroup(SocialUtils.SocialType.FACEBOOK)),
                    SocialUtils.SocialType.FACEBOOK);
            startActivity(groupLaunchIntent);
            break;
    }
}

// LoaderManager.LoaderCallbacks for the friends-in-course loader.
@Override
public Loader<AsyncTaskResult<List<SocialMember>>> onCreateLoader(int i, Bundle bundle) {
    return new FriendsInCourseLoader(getActivity(), bundle, environment);
}

@Override
public void onLoadFinished(Loader<AsyncTaskResult<List<SocialMember>>> objectLoader,
                           AsyncTaskResult<List<SocialMember>> result) {
    if (result.getResult() != null) {
        // Cache on the course so showSocialEnabled() won't refetch.
        courseData.getCourse().setMembers_list(result.getResult());
        populateFacePile();
    } else {
        //TODO Handle error
        populateFacePile();
    }
}

@Override
public void onLoaderReset(Loader<AsyncTaskResult<List<SocialMember>>> objectLoader) {
    facePileView.clearAvatars();
}
}
package com.rackspacecloud.blueflood.inputs.handlers;

import com.codahale.metrics.Meter;
import com.google.common.util.concurrent.ListenableFuture;
import com.rackspacecloud.blueflood.inputs.formats.JSONMetric;
import com.rackspacecloud.blueflood.inputs.formats.JSONMetricsContainer;
import com.rackspacecloud.blueflood.io.Instrumentation;
import com.rackspacecloud.blueflood.outputs.formats.ErrorResponse;
import com.rackspacecloud.blueflood.outputs.handlers.HandlerTestsBase;
import com.rackspacecloud.blueflood.service.Configuration;
import com.rackspacecloud.blueflood.service.CoreConfig;
import com.rackspacecloud.blueflood.utils.DefaultClockImpl;
import com.rackspacecloud.blueflood.utils.TimeValue;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.*;
import org.apache.commons.lang.StringUtils;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;

import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNull;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@code HttpMetricsIngestionHandler}: request-body validation
 * (malformed JSON, invalid metric fields, out-of-bounds collection times) and
 * per-tenant ingestion metering. The Netty channel pipeline is fully mocked;
 * each test captures the FullHttpResponse written to the channel and inspects
 * the deserialized {@link ErrorResponse}.
 */
public class HttpMetricsIngestionHandlerTest extends HandlerTestsBase {

    private HttpMetricsIngestionHandler handler;
    private HttpMetricsIngestionServer.Processor processor;

    private ChannelHandlerContext context;
    private Channel channel;
    private ChannelFuture channelFuture;
    // Global meters keyed by tenant; tests read deltas around each handle() call.
    private Meter ingestedMetrics;
    private Meter ingestedDelayedMetrics;

    private static final String TENANT = "tenant";

    /**
     * Wires a handler with a mocked processor and a mocked Netty channel whose
     * write() returns a mock future, then grabs the per-tenant meters.
     */
    @Before
    public void setup() {
        processor = mock(HttpMetricsIngestionServer.Processor.class);
        handler = new HttpMetricsIngestionHandler(processor, new TimeValue(5, TimeUnit.SECONDS));

        channel = mock(Channel.class);
        context = mock(ChannelHandlerContext.class);
        channelFuture = mock(ChannelFuture.class);
        when(context.channel()).thenReturn(channel);
        when(channel.write(anyString())).thenReturn(channelFuture);

        ingestedMetrics = Instrumentation.getIngestedMetricsMeter(TENANT);
        ingestedDelayedMetrics = Instrumentation.getIngestedDelayedMetricsMeter(TENANT);
    }

    /** An empty body must yield a 400 with a single "Cannot parse content" error. */
    @Test
    public void emptyRequest_shouldGenerateErrorResponse() throws IOException {
        String requestBody = "";
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 1, errorResponse.getErrors().size());
        assertEquals("Invalid error message", "Cannot parse content", errorResponse.getErrors().get(0).getMessage());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
    }

    /** A JSON object where an array is expected must be rejected as unparseable. */
    @Test
    public void testEmptyJsonRequest() throws IOException {
        String requestBody = "{}"; //causes JsonMappingException
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 1, errorResponse.getErrors().size());
        assertEquals("Invalid error message", "Cannot parse content", errorResponse.getErrors().get(0).getMessage());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
    }

    /** Syntactically invalid JSON must also produce the generic parse error. */
    @Test
    public void testInvalidJsonRequest() throws IOException {
        String requestBody = "{\"xxxx\": yyyy}"; //causes JsonMappingException
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 1, errorResponse.getErrors().size());
        assertEquals("Invalid error message", "Cannot parse content", errorResponse.getErrors().get(0).getMessage());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
    }

    /** An empty array parses fine but contains no usable metrics. */
    @Test
    public void testEmptyJsonArrayRequest() throws IOException {
        String requestBody = "[]"; //causes JsonMappingException
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 1, errorResponse.getErrors().size());
        assertEquals("Invalid error message", "No valid metrics", errorResponse.getErrors().get(0).getMessage());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
    }

    /** A single empty metric object fails validation on three fields at once. */
    @Test
    public void testEmptyMetricRequest() throws IOException {
        String requestBody = "[{}]"; //causes JsonMappingException
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 3, errorResponse.getErrors().size());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
    }

    /** Empty metric name -> single "may not be empty" validation error on metricName. */
    @Test
    public void testSingleMetricInvalidMetricName() throws IOException {
        String metricName = "";
        String singleMetric = createRequestBody(metricName,
                new DefaultClockImpl().now().getMillis(), 24 * 60 * 60, 1); //empty metric name
        String requestBody = "[" + singleMetric + "]";
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 1, errorResponse.getErrors().size());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid metric name", metricName, errorResponse.getErrors().get(0).getMetricName());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
        assertEquals("Invalid error source", "metricName", errorResponse.getErrors().get(0).getSource());
        assertEquals("Invalid error message", "may not be empty", errorResponse.getErrors().get(0).getMessage());
    }

    /** Collection time just past BEFORE_CURRENT_COLLECTIONTIME_MS -> out-of-bounds error. */
    @Test
    public void testSingleMetricCollectionTimeInPast() throws IOException {
        // 1s beyond the configured allowed look-back window.
        long collectionTimeInPast = new DefaultClockImpl().now().getMillis() - 1000
                - Configuration.getInstance().getLongProperty( CoreConfig.BEFORE_CURRENT_COLLECTIONTIME_MS );
        String metricName = "a.b.c";
        String singleMetric = createRequestBody(metricName, collectionTimeInPast, 24 * 60 * 60, 1); //collection in past
        String requestBody = "[" + singleMetric + "]";
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 1, errorResponse.getErrors().size());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid metric name", metricName, errorResponse.getErrors().get(0).getMetricName());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
        assertEquals("Invalid error source", "collectionTime", errorResponse.getErrors().get(0).getSource());
        // NOTE(review): message hard-codes 259200000/600000 ms — assumes default config values.
        assertEquals("Invalid error message", "Out of bounds. Cannot be more than 259200000 milliseconds into the past."
                + " Cannot be more than 600000 milliseconds into the future", errorResponse.getErrors().get(0).getMessage());
    }

    /** Collection time just past AFTER_CURRENT_COLLECTIONTIME_MS -> out-of-bounds error. */
    @Test
    public void testSingleMetricCollectionTimeInFuture() throws IOException {
        // 1s beyond the configured allowed look-ahead window.
        long collectionTimeInFuture = new DefaultClockImpl().now().getMillis() + 1000
                + Configuration.getInstance().getLongProperty( CoreConfig.AFTER_CURRENT_COLLECTIONTIME_MS );
        String metricName = "a.b.c";
        String singleMetric = createRequestBody(metricName, collectionTimeInFuture, 24 * 60 * 60, 1); //collection in future
        String requestBody = "[" + singleMetric + "]";
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 1, errorResponse.getErrors().size());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid metric name", metricName, errorResponse.getErrors().get(0).getMetricName());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
        assertEquals("Invalid error source", "collectionTime", errorResponse.getErrors().get(0).getSource());
        assertEquals("Invalid error message", "Out of bounds. Cannot be more than 259200000 milliseconds into the past."
                + " Cannot be more than 600000 milliseconds into the future", errorResponse.getErrors().get(0).getMessage());
    }

    /** TTL of 0 violates the 1..Integer.MAX_VALUE range constraint. */
    @Test
    public void testSingleMetricInvalidTTL() throws IOException {
        String metricName = "a.b.c";
        String singleMetric = createRequestBody(metricName,
                new DefaultClockImpl().now().getMillis(), 0, 1); //ttl of 0
        String requestBody = "[" + singleMetric + "]";
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 1, errorResponse.getErrors().size());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid metric name", metricName, errorResponse.getErrors().get(0).getMetricName());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
        assertEquals("Invalid error source", "ttlInSeconds", errorResponse.getErrors().get(0).getSource());
        assertEquals("Invalid error message", "must be between 1 and 2147483647", errorResponse.getErrors().get(0).getMessage());
    }

    /** A missing metric value leaves no valid metrics; error carries no source/name. */
    @Test
    public void testSingleMetricInvalidMetricValue() throws IOException {
        String metricName = "a.b.c";
        String singleMetric = createRequestBody(metricName,
                new DefaultClockImpl().now().getMillis(), 24 * 60 * 60, null); //empty metric value
        String requestBody = "[" + singleMetric + "]";
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);
        System.out.println(errorResponse);
        assertEquals("Number of errors invalid", 1, errorResponse.getErrors().size());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid metric name", "", errorResponse.getErrors().get(0).getMetricName());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
        assertNull("Invalid error source", errorResponse.getErrors().get(0).getSource());
        assertEquals("Invalid error message", "No valid metrics", errorResponse.getErrors().get(0).getMessage());
    }

    /** Two metrics with distinct violations -> two per-metric errors, in request order. */
    @Test
    public void testMultiMetricsInvalidRequest() throws IOException {
        String metricName1 = "a.b.c.1";
        String metricName2 = "a.b.c.2";

        FullHttpRequest request = createIngestRequest(generateInvalidMetrics(metricName1, metricName2));

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 2, errorResponse.getErrors().size());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());

        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid tenant", metricName1, errorResponse.getErrors().get(0).getMetricName());
        assertEquals("Invalid error source", "ttlInSeconds", errorResponse.getErrors().get(0).getSource());
        assertEquals("Invalid error message", "must be between 1 and 2147483647", errorResponse.getErrors().get(0).getMessage());

        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(1).getTenantId());
        assertEquals("Invalid tenant", metricName2, errorResponse.getErrors().get(1).getMetricName());
        assertEquals("Invalid error source", "collectionTime", errorResponse.getErrors().get(1).getSource());
        assertEquals("Invalid error message", "Out of bounds. Cannot be more than 259200000 milliseconds into the past."
                + " Cannot be more than 600000 milliseconds into the future", errorResponse.getErrors().get(1).getMessage());
    }

    /** With tracking on but an empty metric, nothing is recorded or metered. */
    @Test
    public void perTenantMetricsOn_emptyRequest_shouldNotRecordAnything() throws IOException {
        String requestBody = "[{}]";
        FullHttpRequest request = createIngestRequest(requestBody);

        // Meters are global; compare before/after deltas rather than absolute counts.
        long ingestedMetricsBefore = ingestedMetrics.getCount();
        long ingestedDelayedMetricsBefore = ingestedDelayedMetrics.getCount();

        // spy() lets us verify recordPerTenantMetrics while using real handler logic.
        HttpMetricsIngestionHandler handler = spy(new HttpMetricsIngestionHandler(processor, new TimeValue(5, TimeUnit.SECONDS), true));
        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        verify(handler, never()).recordPerTenantMetrics(eq(TENANT), anyInt(), anyInt());
        assertEquals("ingested metrics count", 0, ingestedMetrics.getCount() - ingestedMetricsBefore);
        assertEquals("ingested delayed metrics count", 0, ingestedDelayedMetrics.getCount() - ingestedDelayedMetricsBefore);
    }

    /** Invalid metrics never reach the recording path even with tracking on. */
    @Test
    public void perTenantMetricsOn_invalidMetrics_shouldNotRecordAnything() throws IOException {
        String m1 = "foo.bar";
        String m2 = "gee.wish";
        FullHttpRequest request = createIngestRequest(generateInvalidMetrics(m1, m2));

        long ingestedMetricsBefore = ingestedMetrics.getCount();
        long ingestedDelayedMetricsBefore = ingestedDelayedMetrics.getCount();

        HttpMetricsIngestionHandler handler = spy(new HttpMetricsIngestionHandler(processor, new TimeValue(5, TimeUnit.SECONDS), true));
        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        verify(handler, never()).recordPerTenantMetrics(eq(TENANT), anyInt(), anyInt());
        assertEquals("ingested metrics count", 0, ingestedMetrics.getCount() - ingestedMetricsBefore);
        assertEquals("ingested delayed metrics count", 0, ingestedDelayedMetrics.getCount() - ingestedDelayedMetricsBefore);
    }

    /** Metrics older than ROLLUP_DELAY_MILLIS count as delayed (0 on-time, 2 delayed). */
    @Test
    public void perTenantMetricsOn_shouldRecordDelayedMetrics() throws Exception {
        String delayedMetric1 = "delayed.me.1";
        String delayedMetric2 = "delayed.me.2";
        FullHttpRequest request = createIngestRequest(generateDelayedMetricsRequestString(delayedMetric1, delayedMetric2));

        long ingestedMetricsBefore = ingestedMetrics.getCount();
        long ingestedDelayedMetricsBefore = ingestedDelayedMetrics.getCount();

        // Stub the async write path so handle() sees a successful persist.
        ListenableFuture<List<Boolean>> futures = mock(ListenableFuture.class);
        List<Boolean> answers = new ArrayList<>();
        answers.add(Boolean.TRUE);
        when(processor.apply(any())).thenReturn(futures);
        when(futures.get(anyLong(), any())).thenReturn(answers);

        HttpMetricsIngestionHandler handler = spy(new HttpMetricsIngestionHandler(processor, new TimeValue(5, TimeUnit.SECONDS), true));
        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        verify(handler, times(1)).recordPerTenantMetrics(eq(TENANT), eq(0), eq(2));
        assertEquals("ingested metrics count", 0, ingestedMetrics.getCount() - ingestedMetricsBefore);
        assertEquals("ingested delayed metrics count", 2, ingestedDelayedMetrics.getCount() - ingestedDelayedMetricsBefore);
    }

    /** Current-timestamp metrics count as on-time (2 on-time, 0 delayed). */
    @Test
    public void perTenantMetricsOn_shouldRecordNonDelayedMetrics() throws Exception {
        String metric1 = "i.am.on.time";
        String metric2 = "i.am.on.time.again";
        FullHttpRequest request = createIngestRequest(generateNonDelayedMetricsRequestString(metric1, metric2));

        long ingestedMetricsBefore = ingestedMetrics.getCount();
        long ingestedDelayedMetricsBefore = ingestedDelayedMetrics.getCount();

        ListenableFuture<List<Boolean>> futures = mock(ListenableFuture.class);
        List<Boolean> answers = new ArrayList<>();
        answers.add(Boolean.TRUE);
        when(processor.apply(any())).thenReturn(futures);
        when(futures.get(anyLong(), any())).thenReturn(answers);

        HttpMetricsIngestionHandler handler = spy(new HttpMetricsIngestionHandler(processor, new TimeValue(5, TimeUnit.SECONDS), true));
        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        verify(handler, times(1)).recordPerTenantMetrics(eq(TENANT), eq(2), eq(0));
        assertEquals("ingested metrics count", 2, ingestedMetrics.getCount() - ingestedMetricsBefore);
        assertEquals("ingested delayed metrics count", 0, ingestedDelayedMetrics.getCount() - ingestedDelayedMetricsBefore);
    }

    /**
     * With tracking off the method is still invoked but must not move the meters.
     */
    @Test
    public void perTenantMetricsOff_shouldNotRecordMetrics() throws Exception {
        String metric1 = "i.am.on.time";
        String metric2 = "i.am.on.time.again";
        FullHttpRequest request = createIngestRequest(generateNonDelayedMetricsRequestString(metric1, metric2));

        long ingestedMetricsBefore = ingestedMetrics.getCount();
        long ingestedDelayedMetricsBefore = ingestedDelayedMetrics.getCount();

        ListenableFuture<List<Boolean>> futures = mock(ListenableFuture.class);
        List<Boolean> answers = new ArrayList<>();
        answers.add(Boolean.TRUE);
        when(processor.apply(any())).thenReturn(futures);
        when(futures.get(anyLong(), any())).thenReturn(answers);

        // turn off per tenant metrics tracking
        HttpMetricsIngestionHandler handler = spy(new HttpMetricsIngestionHandler(processor, new TimeValue(5, TimeUnit.SECONDS), false));
        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        verify(handler, times(1)).recordPerTenantMetrics(eq(TENANT), eq(2), eq(0));
        assertEquals("ingested metrics count", 0, ingestedMetrics.getCount() - ingestedMetricsBefore);
        assertEquals("ingested delayed metrics count", 0, ingestedDelayedMetrics.getCount() - ingestedDelayedMetricsBefore);
    }

    /**
     * Builds a two-element JSON array: first metric has a negative TTL, second has
     * a collection time past the allowed look-back window.
     */
    private String generateInvalidMetrics(String invalidTtlMetricName, String invalidCollectionMetricName) throws IOException {
        long collectionTimeInPast = new DefaultClockImpl().now().getMillis() - 1000
                - Configuration.getInstance().getLongProperty( CoreConfig.BEFORE_CURRENT_COLLECTIONTIME_MS );

        //invalid ttl value
        String invalidTtl = createRequestBody(invalidTtlMetricName,
                new DefaultClockImpl().now().getMillis(), -1, 1);

        // collection in the past
        String invalidCollection = createRequestBody(invalidCollectionMetricName, collectionTimeInPast, 24 * 60 * 60, 1);

        return "[" + invalidTtl + "," + invalidCollection + "]";
    }

    /** JSON array of valid metrics timestamped just beyond the rollup delay (delayed). */
    private String generateDelayedMetricsRequestString(String... metricNames) throws IOException {
        long delayedTime = new DefaultClockImpl().now().getMillis() - 100
                - Configuration.getInstance().getLongProperty(CoreConfig.ROLLUP_DELAY_MILLIS);

        List<String> jsonMetrics = new ArrayList<>();
        for (String metricName : metricNames) {
            jsonMetrics.add(createRequestBody(metricName, delayedTime, 24*60*60, 1));
        }
        return "[" + StringUtils.join(jsonMetrics, ",") + "]";
    }

    /** JSON array of valid metrics timestamped "now" (on-time). */
    private String generateNonDelayedMetricsRequestString(String... metricNames) throws IOException {
        long timestamp = new DefaultClockImpl().now().getMillis();

        List<String> jsonMetrics = new ArrayList<>();
        for (String metricName : metricNames) {
            jsonMetrics.add(createRequestBody(metricName, timestamp, 24*60*60, 1));
        }
        return "[" + StringUtils.join(jsonMetrics, ",") + "]";
    }

    /**
     * Serializes one JSONMetric; each field is only set when its argument is
     * "present" (non-empty name, positive time/ttl, non-null value), so callers
     * can omit fields to trigger specific validation errors.
     */
    private String createRequestBody(String metricName, long collectionTime, int ttl, Object metricValue) throws IOException {
        JSONMetric metric = new JSONMetric();

        if (!StringUtils.isEmpty(metricName))
            metric.setMetricName(metricName);

        if (collectionTime > 0)
            metric.setCollectionTime(collectionTime);

        if (ttl > 0)
            metric.setTtlInSeconds(ttl);

        if (metricValue != null)
            metric.setMetricValue(metricValue);

        return new ObjectMapper().writeValueAsString(metric);
    }

    /** Wraps the body in a POST to the v2.0 ingest endpoint for TENANT. */
    private FullHttpRequest createIngestRequest(String requestBody) {
        return super.createPostRequest("/v2.0/" + TENANT + "/ingest/", requestBody);
    }

    /** Helper for other tests: parses a raw JSON body into a metrics container. */
    public JSONMetricsContainer getContainer(String tenantId, String jsonBody) throws IOException {
        HttpMetricsIngestionHandler handler = new HttpMetricsIngestionHandler(null, new TimeValue(5, TimeUnit.SECONDS));
        return handler.createContainer(jsonBody, tenantId);
    }
}
/*L
 * Copyright Oracle Inc
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/cadsr-cgmdr-nci-uk/LICENSE.txt for details.
 */
package org.exist.xquery.modules.ngram;

import org.exist.dom.*;
import org.exist.indexing.ngram.NGramIndex;
import org.exist.indexing.ngram.NGramIndexWorker;
import org.exist.storage.ElementValue;
import org.exist.xquery.*;
import org.exist.xquery.NodeTest;
import org.exist.xquery.util.Error;
import org.exist.xquery.value.Item;
import org.exist.xquery.value.Sequence;
import org.exist.xquery.value.SequenceType;
import org.exist.xquery.value.Type;

import java.util.ArrayList;
import java.util.List;

/**
 * XQuery extension functions (ngram:contains / starts-with / ends-with / equals)
 * backed by eXist's n-gram index. The search key is split into distinct n-grams,
 * each n-gram is looked up in the index, and per-node Match chains are intersected
 * so only nodes containing the n-grams in sequence survive. Implements
 * {@link Optimizable} so the query engine can pre-select matches from the index
 * before evaluating the context path.
 */
public class NGramSearch extends Function implements Optimizable {

    // One signature per function name; all take (node()*, xs:string?) -> node()*.
    public final static FunctionSignature signatures[] = {
        new FunctionSignature(
            new QName("contains", NGramModule.NAMESPACE_URI, NGramModule.PREFIX),
            "Returns all nodes from $a containing the string $b at any position. " +
            "Strings are compared case-insensitive.",
            new SequenceType[] {
                new SequenceType(Type.NODE, Cardinality.ZERO_OR_MORE),
                new SequenceType(Type.STRING, Cardinality.ZERO_OR_ONE)
            },
            new SequenceType(Type.NODE, Cardinality.ZERO_OR_MORE)
        ),
        new FunctionSignature(
            new QName("ends-with", NGramModule.NAMESPACE_URI, NGramModule.PREFIX),
            "Returns all nodes from $a ending with the string $b. " +
            "Strings are compared case-insensitive.",
            new SequenceType[] {
                new SequenceType(Type.NODE, Cardinality.ZERO_OR_MORE),
                new SequenceType(Type.STRING, Cardinality.ZERO_OR_ONE)
            },
            new SequenceType(Type.NODE, Cardinality.ZERO_OR_MORE)
        ),
        new FunctionSignature(
            new QName("starts-with", NGramModule.NAMESPACE_URI, NGramModule.PREFIX),
            "Returns all nodes from $a starting with the string $b. " +
            "Strings are compared case-insensitive.",
            new SequenceType[] {
                new SequenceType(Type.NODE, Cardinality.ZERO_OR_MORE),
                new SequenceType(Type.STRING, Cardinality.ZERO_OR_ONE)
            },
            new SequenceType(Type.NODE, Cardinality.ZERO_OR_MORE)
        ),
        new FunctionSignature(
            new QName("equals", NGramModule.NAMESPACE_URI, NGramModule.PREFIX),
            "Returns all nodes from $a whose string content equals $b. " +
            "Strings are compared case-insensitive",
            new SequenceType[] {
                new SequenceType(Type.NODE, Cardinality.ZERO_OR_MORE),
                new SequenceType(Type.STRING, Cardinality.ZERO_OR_ONE)
            },
            new SequenceType(Type.NODE, Cardinality.ZERO_OR_MORE)
        )
    };

    // Step whose results will be preloaded with the pre-selected node set.
    private LocationStep contextStep = null;
    // QName of the element/attribute the index lookup is restricted to (null = not optimizable).
    protected QName contextQName = null;
    protected int axis = Constants.UNKNOWN_AXIS;
    // Result of preSelect(); when non-null, eval() reuses it instead of searching again.
    private NodeSet preselectResult = null;
    protected boolean optimizeSelf = false;

    public NGramSearch(XQueryContext context, FunctionSignature signature) {
        super(context, signature);
    }

    /**
     * Registers the two arguments: the node path unchanged, the search key
     * wrapped in a cardinality check (zero-or-one) and atomized if needed.
     */
    public void setArguments(List arguments) throws XPathException {
        Expression path = (Expression) arguments.get(0);
        steps.add(path);

        Expression arg = (Expression) arguments.get(1);
        arg = new DynamicCardinalityCheck(context, Cardinality.ZERO_OR_ONE, arg,
                new org.exist.xquery.util.Error(Error.FUNC_PARAM_CARDINALITY, "2", mySignature));
        if(!Type.subTypeOf(arg.returnsType(), Type.ATOMIC))
            arg = new Atomize(context, arg);
        steps.add(arg);
    }

    /* (non-Javadoc)
     * @see org.exist.xquery.PathExpr#analyze(org.exist.xquery.Expression)
     */
    // Inspects the first argument's location steps to decide whether the index
    // lookup can be restricted to a named element/attribute (contextQName).
    // Two cases: a lone self::node() step optimizes against the *outer* step
    // (optimizeSelf), otherwise the last step of the path is used.
    public void analyze(AnalyzeContextInfo contextInfo) throws XPathException {
        super.analyze(contextInfo);
        List steps = BasicExpressionVisitor.findLocationSteps(getArgument(0));
        if (!steps.isEmpty()) {
            LocationStep firstStep = (LocationStep) steps.get(0);
            LocationStep lastStep = (LocationStep) steps.get(steps.size() - 1);
            if (steps.size() == 1 && firstStep.getAxis() == Constants.SELF_AXIS) {
                // Pattern like foo[ngram:contains(., $key)] — optimize on the enclosing step.
                Expression outerExpr = contextInfo.getContextStep();
                if (outerExpr != null && outerExpr instanceof LocationStep) {
                    LocationStep outerStep = (LocationStep) outerExpr;
                    NodeTest test = outerStep.getTest();
                    if (!test.isWildcardTest() && test.getName() != null) {
                        contextQName = new QName(test.getName());
                        if (outerStep.getAxis() == Constants.ATTRIBUTE_AXIS || outerStep.getAxis() == Constants.DESCENDANT_ATTRIBUTE_AXIS)
                            contextQName.setNameType(ElementValue.ATTRIBUTE);
                        contextStep = firstStep;
                        axis = outerStep.getAxis();
                        optimizeSelf = true;
                    }
                }
            } else {
                // Use the final named step of the path as the index restriction.
                NodeTest test = lastStep.getTest();
                if (!test.isWildcardTest() && test.getName() != null) {
                    contextQName = new QName(test.getName());
                    if (lastStep.getAxis() == Constants.ATTRIBUTE_AXIS || lastStep.getAxis() == Constants.DESCENDANT_ATTRIBUTE_AXIS)
                        contextQName.setNameType(ElementValue.ATTRIBUTE);
                    axis = firstStep.getAxis();
                    contextStep = lastStep;
                }
            }
        }
    }

    /** Optimizable only when analyze() pinned down a concrete element/attribute QName. */
    public boolean canOptimize(Sequence contextSequence) {
        return contextQName != null;
    }

    public boolean optimizeOnSelf() {
        return optimizeSelf;
    }

    public int getOptimizeAxis() {
        return axis;
    }

    /**
     * Index-first evaluation: searches the n-gram index over the whole context
     * document set (restricted to contextQName) and caches the result for eval().
     */
    public NodeSet preSelect(Sequence contextSequence, boolean useContext) throws XPathException {
        // the expression can be called multiple times, so we need to clear the previous preselectResult
        preselectResult = null;
        NGramIndexWorker index = (NGramIndexWorker)
            context.getBroker().getIndexController().getWorkerByIndexId(NGramIndex.ID);
        DocumentSet docs = contextSequence.getDocumentSet();
        String key = getArgument(1).eval(contextSequence).getStringValue();
        String[] ngrams = index.getDistinctNGrams(key);
        List qnames = new ArrayList(1);
        qnames.add(contextQName);
        preselectResult = processMatches(index, docs, qnames, key, ngrams,
            useContext ? contextSequence.toNodeSet() : null, NodeSet.DESCENDANT);
        return preselectResult;
    }

    /**
     * Standard evaluation path. If preSelect() already ran, the first argument is
     * re-evaluated against the preloaded result; otherwise the index is searched
     * directly against the nodes produced by the first argument.
     */
    public Sequence eval(Sequence contextSequence, Item contextItem) throws XPathException {
        if (contextItem != null)
            contextSequence = contextItem.toSequence();

        NodeSet result;
        if (preselectResult == null) {
            Sequence input = getArgument(0).eval(contextSequence, contextItem);
            if (input.isEmpty())
                result = NodeSet.EMPTY_SET;
            else {
                NodeSet inNodes = input.toNodeSet();
                DocumentSet docs = inNodes.getDocumentSet();
                NGramIndexWorker index = (NGramIndexWorker)
                        context.getBroker().getIndexController().getWorkerByIndexId(NGramIndex.ID);
                //Alternate design
                //NGramIndexWorker index = (NGramIndexWorker)context.getBroker().getBrokerPool().getIndexManager().getIndexById(NGramIndex.ID).getWorker();
                String key = getArgument(1).eval(contextSequence, contextItem).getStringValue();
                String[] ngrams = index.getDistinctNGrams(key);
                // QName restriction is optional here (null = search all indexed nodes).
                List qnames = null;
                if (contextQName != null) {
                    qnames = new ArrayList(1);
                    qnames.add(contextQName);
                }
                result = processMatches(index, docs, qnames, key, ngrams, inNodes, NodeSet.ANCESTOR);
            }
        } else {
            // Reuse the pre-selected nodes: feed them to the context step and re-run the path.
            contextStep.setPreloadNodeSets(true);
            contextStep.setPreloadedData(contextSequence.getDocumentSet(), preselectResult);
            result = getArgument(0).eval(contextSequence).toNodeSet();
        }
        return result;
    }

    /**
     * Core matcher: looks up each n-gram of the key and intersects results so a
     * node survives only if consecutive n-grams match in order (tracked via the
     * per-node Match chains). Afterwards applies the function-specific filter
     * (starts-with / ends-with / equals).
     */
    private NodeSet processMatches(NGramIndexWorker index, DocumentSet docs, List qnames, String key,
                                   String[] ngrams, NodeSet nodeSet, int axis) throws TerminatedException {
        NodeSet result = null;
        for (int i = 0; i < ngrams.length; i++) {
            long start = System.currentTimeMillis();
            String ngram = ngrams[i];
            if (ngram.length() < index.getN() && i > 0) {
                // if this is the last ngram and its length is too small,
                // fill it up with characters from the previous ngram. too short
                // ngrams lead to a considerable performance loss.
                int fill = index.getN() - ngram.length();
                ngram = ngrams[i - 1].substring(index.getN() - fill) + ngram;
            }
            NodeSet nodes = index.search(getExpressionId(), docs, qnames, ngram, ngrams[i], context, nodeSet, axis);
            if (LOG.isTraceEnabled())
                LOG.trace("Found " + nodes.getLength() + " for " + ngram + " in " +
                    (System.currentTimeMillis() - start));
            if (result == null) {
                // First n-gram: seed the running result (starts-with filters immediately
                // so only anchor-position matches propagate through later intersections).
                if (isCalledAs("starts-with"))
                    result = startsWith(nodes);
                else
                    result = nodes;
            } else {
                // Intersect: keep a node only if one of its new matches directly
                // follows one of its previously accumulated matches.
                NodeSet temp = new ExtArrayNodeSet();
                for (NodeSetIterator iterator = nodes.iterator(); iterator.hasNext();) {
                    NodeProxy next = (NodeProxy) iterator.next();
                    NodeProxy before = result.get(next);
                    if (before != null) {
                        Match match = null;
                        boolean found = false;
                        // Cross-product scan of the two Match chains; isAfter()
                        // presumably yields a combined match when mn follows mb — confirm in Match API.
                        Match mb = before.getMatches();
                        while (mb != null && !found) {
                            Match mn = next.getMatches();
                            while (mn != null && !found) {
                                if ((match = mb.isAfter(mn)) != null) {
                                    found = true;
                                }
                                mn = mn.getNextMatch();
                            }
                            mb = mb.getNextMatch();
                        }
                        if (found) {
                            // Replace this expression's matches on the node with the
                            // single combined match, preserving other expressions' matches.
                            Match m = next.getMatches();
                            next.setMatches(null);
                            while (m != null) {
                                if (m.getContextId() != getExpressionId())
                                    next.addMatch(m);
                                m = m.getNextMatch();
                            }
                            next.addMatch(match);
                            temp.add(next);
                        }
                    }
                }
                result = temp;
            }
        }
        // Final per-function filtering over the intersected node set.
        if (isCalledAs("starts-with"))
            result = startsWith(result);
        else if (isCalledAs("ends-with"))
            result = endsWith(result);
        else if (isCalledAs("equals"))
            result = equals(key, result);
        return result;
    }

    /** Keeps nodes having a match anchored at string offset 0. */
    private NodeSet startsWith(NodeSet nodes) {
        NodeSet temp = new ExtArrayNodeSet();
        for (NodeSetIterator iterator = nodes.iterator(); iterator.hasNext();) {
            NodeProxy next = (NodeProxy) iterator.next();
            Match mn = next.getMatches();
            while (mn != null) {
                if (mn.hasMatchAt(0)) {
                    temp.add(next);
                    break;
                }
                mn = mn.getNextMatch();
            }
        }
        return temp;
    }

    /** Keeps nodes having a match reaching the end of their string value. */
    private NodeSet endsWith(NodeSet nodes) {
        NodeSet temp = new ExtArrayNodeSet();
        for (NodeSetIterator iterator = nodes.iterator(); iterator.hasNext();) {
            NodeProxy next = (NodeProxy) iterator.next();
            String data = next.getNodeValue();
            int len = data.length();
            Match mn = next.getMatches();
            while (mn != null) {
                // hasMatchAround(len): presumably a match ending at offset len — confirm in Match API.
                if (mn.hasMatchAround(len)) {
                    temp.add(next);
                    break;
                }
                mn = mn.getNextMatch();
            }
        }
        return temp;
    }

    /**
     * Keeps nodes whose full string value equals the key, case-insensitively.
     * (Not an override of Object.equals — different signature.)
     */
    private NodeSet equals(String key, NodeSet nodes) {
        NodeSet temp = new ExtArrayNodeSet();
        for (NodeSetIterator iterator = nodes.iterator(); iterator.hasNext();) {
            NodeProxy next = (NodeProxy) iterator.next();
            String data = next.getNodeValue();
            if (key.equalsIgnoreCase(data))
                temp.add(next);
        }
        return temp;
    }

    // NOTE(review): the local is named stringArg but holds argument 0 (the node input).
    public int getDependencies() {
        final Expression stringArg = getArgument(0);
        if (Type.subTypeOf(stringArg.returnsType(), Type.NODE) &&
            !Dependency.dependsOn(stringArg, Dependency.CONTEXT_ITEM)) {
            return Dependency.CONTEXT_SET;
        } else {
            return Dependency.CONTEXT_SET + Dependency.CONTEXT_ITEM;
        }
    }

    public int returnsType() {
        return Type.NODE;
    }
}
package com.izforge.izpack.util; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.security.KeyFactory; import java.security.KeyPair; import java.security.Security; import java.security.cert.CertificateFactory; import java.security.cert.PKIXCertPathBuilderResult; import java.security.cert.X509Certificate; import java.security.spec.RSAPrivateCrtKeySpec; import java.security.spec.RSAPublicKeySpec; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import javax.crypto.Cipher; import javax.crypto.spec.IvParameterSpec; import javax.crypto.spec.SecretKeySpec; import org.bouncycastle.openssl.PEMDecryptorProvider; import org.bouncycastle.openssl.PEMEncryptedKeyPair; import org.bouncycastle.openssl.PEMKeyPair; import org.bouncycastle.openssl.PEMParser; import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter; import org.bouncycastle.openssl.jcajce.JcePEMDecryptorProviderBuilder; import org.bouncycastle.util.io.pem.PemHeader; import org.bouncycastle.util.io.pem.PemObject; import org.bouncycastle.util.io.pem.PemReader; import com.izforge.izpack.installer.AutomatedInstallData; import com.izforge.izpack.installer.DataValidator; import com.izforge.izpack.installer.DataValidator.Status; import com.izforge.izpack.util.ssl.CertificateVerifier; public class CheckCertificateDataValidator implements com.izforge.izpack.installer.DataValidator { private String strMessage = ""; public static final String strMessageId = "messageid"; public static final String strMessageValue = "message.oldvalue"; // not to be stored public Status validateData(AutomatedInstallData adata) { Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()); String strCertPath = adata.getVariable("mongodb.dir.certs"); String hostname = 
adata.getVariable("HOST_NAME"); // notcreatecert + notupdate // at least pemkeyfile and certfile must be provided String fieldPemCertFile = adata.getVariable("mongodb.ssl.certfile"); String fieldPemKeyFile = adata.getVariable("mongodb.ssl.pemkeyfile"); String fieldPemKeyPassword = adata.getVariable("mongodb.ssl.pemkeypassword"); String fieldPemCaFile = adata.getVariable("mongodb.ssl.pemcafile"); Boolean useCaFile = false; // notcreatecert + update // pem has //String fieldPemCertFile = adata.getVariable("mongodb.ssl.certfile"); //String fieldPemKeyFile = adata.getVariable("mongodb.ssl.pemkeyfile"); //String fieldPemKeyPassword = adata.getVariable("mongodb.ssl.pemkeypassword"); //String fieldPemCaFile = adata.getVariable("mongodb.ssl.pemcafile"); //createcert //String str try { InputStream inPemKeyFile = new FileInputStream(fieldPemKeyFile); InputStream inPemCertFile = new FileInputStream(fieldPemCertFile); // first check the certificate CertificateFactory factory = CertificateFactory.getInstance("X.509"); X509Certificate cert = (X509Certificate) factory.generateCertificate(inPemCertFile); // if a CA was provided then we need to check the validity of our certificate if (fieldPemCaFile!=null && !fieldPemCaFile.trim().equals("")) { InputStream inPemCaFile = new FileInputStream(fieldPemCaFile); Collection<X509Certificate> certCAChain = (Collection<X509Certificate>) factory.generateCertificates(inPemCaFile); // cert should be part of the path to be validated certCAChain.add(cert); PKIXCertPathBuilderResult verifiedCertChain = CertificateVerifier.verifyCertificate(cert, new HashSet<X509Certificate> (certCAChain)); adata.setVariable("mongodb.ssl.usecafile", "true"); useCaFile = true; } // Then check the private key PEMParser pemParser = new PEMParser(new InputStreamReader(inPemKeyFile)); Object object = pemParser.readObject(); JcaPEMKeyConverter converter = new JcaPEMKeyConverter().setProvider("BC"); KeyPair kp; if (object instanceof PEMEncryptedKeyPair) { // Encrypted 
key - we will use provided password PEMEncryptedKeyPair ckp = (PEMEncryptedKeyPair) object; PEMDecryptorProvider decProv = new JcePEMDecryptorProviderBuilder().build(fieldPemKeyPassword.toCharArray()); kp = converter.getKeyPair(ckp.decryptKeyPair(decProv)); } else { // Unencrypted key - no password needed PEMKeyPair ukp = (PEMKeyPair) object; kp = converter.getKeyPair(ukp); } byte[] input = "1234567890ABCDEF".getBytes(); //System.out.println("input: " + new String(input)); Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding"); cipher.init(Cipher.ENCRYPT_MODE, cert.getPublicKey() ); byte[] cipherText = cipher.doFinal(input); //System.out.println("cipher: " + new String(cipherText)); cipher.init(Cipher.DECRYPT_MODE, kp.getPrivate()); byte[] decrypted = cipher.doFinal(cipherText); //System.out.println("plain : " + new String(decrypted)); if (Arrays.equals(decrypted, input)) { File certpath = new File(strCertPath); if (!certpath.exists()) certpath.mkdirs(); // we need to copy things File pemKeyFile = new File(strCertPath + File.separator + hostname + ".pem"); File certFile = new File(fieldPemCertFile); File privKeyFile = new File(fieldPemKeyFile); File destCertFile = new File(strCertPath + File.separator + hostname + ".crt"); Files.copy(certFile.toPath(), destCertFile.toPath(), StandardCopyOption.REPLACE_EXISTING); File destKeyFile = new File(strCertPath + File.separator + hostname + ".key"); Files.copy(privKeyFile.toPath(), destKeyFile.toPath(), StandardCopyOption.REPLACE_EXISTING); KeyPairGeneratorDataValidator.mergeFiles(new File[]{certFile,privKeyFile}, pemKeyFile); if (useCaFile) { File caPath = new File (fieldPemCaFile); File certCaPath = new File (strCertPath+File.separator+"ca.cacrt"); Files.copy(caPath.toPath(), certCaPath.toPath(), StandardCopyOption.REPLACE_EXISTING); } CheckCertificateP12Validator.writeP12File(fieldPemKeyPassword,adata); // we need to says that this step was done at least one time adata.setVariable("mongodb.ssl.alreadydone", "true"); 
return Status.OK; } else { strMessage = "Unknow error"; adata.setVariable(strMessageValue, strMessage); return Status.ERROR; } // RSA // KeyFactory keyFac = KeyFactory.getInstance("RSA"); // RSAPrivateCrtKeySpec privateKey = keyFac.getKeySpec(kp.getPrivate(), RSAPrivateCrtKeySpec.class); // RSAPublicKeySpec publicKey = keyFac.getKeySpec(cert.getPublicKey(), RSAPublicKeySpec.class); } catch (Exception ex) { strMessage = ex.getMessage(); adata.setVariable(strMessageValue, strMessage); return Status.ERROR; } // strMessage = "OS error #" + error + " - " ; // adata.setVariable(strMessageValue, strMessage); // return Status.WARNING; } public String getErrorMessageId() { return strMessageId; } public String getWarningMessageId() { return strMessageId; } public boolean getDefaultAnswer() { // By default do not continue if an error occurs return false; } }
/* * Copyright 2021 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config; import com.thoughtworks.go.config.remote.ConfigOrigin; import com.thoughtworks.go.config.remote.FileConfigOrigin; import com.thoughtworks.go.config.remote.RepoConfigOrigin; import com.thoughtworks.go.config.remote.UIConfigOrigin; import com.thoughtworks.go.domain.ConfigErrors; import com.thoughtworks.go.domain.EnvironmentPipelineMatcher; import com.thoughtworks.go.helper.GoConfigMother; import com.thoughtworks.go.util.command.EnvironmentVariableContext; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Optional; import static com.thoughtworks.go.config.ConfigSaveValidationContext.forChain; import static com.thoughtworks.go.helper.EnvironmentConfigMother.environment; import static com.thoughtworks.go.helper.EnvironmentConfigMother.remote; import static com.thoughtworks.go.util.command.EnvironmentVariableContext.GO_ENVIRONMENT_NAME; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatCode; import static org.junit.jupiter.api.Assertions.*; class BasicEnvironmentConfigTest extends EnvironmentConfigTestBase { @BeforeEach void setUp() throws Exception { 
environmentConfig = new BasicEnvironmentConfig(new CaseInsensitiveString("UAT")); } @Test void shouldReturnEmptyForRemotePipelinesWhenIsLocal() { environmentConfig.addPipeline(new CaseInsensitiveString("pipe")); assertThat(environmentConfig.getRemotePipelines().isEmpty()).isTrue(); } @Test void shouldReturnAllPipelinesForRemotePipelinesWhenIsRemote() { environmentConfig.setOrigins(new RepoConfigOrigin()); environmentConfig.addPipeline(new CaseInsensitiveString("pipe")); assertThat(environmentConfig.getRemotePipelines().isEmpty()).isFalse(); } @Test void shouldReturnTrueThatLocalWhenOriginIsNotSet() { environmentConfig.setOrigins(null); assertThat(environmentConfig.isLocal()).isTrue(); } @Test void shouldSetOriginSetOriginForEnvConfigAndEnvVariables(){ environmentConfig.addEnvironmentVariable("var1", "value1"); environmentConfig.addEnvironmentVariable("var2", "value2"); FileConfigOrigin fileOrigin = new FileConfigOrigin(); environmentConfig.setOrigins(fileOrigin); assertThat(environmentConfig.getOrigin()).isEqualTo(fileOrigin); assertThat(environmentConfig.getVariables().size() == 2); environmentConfig.getVariables().forEach(var -> assertThat(var.getOrigin()).isEqualTo(fileOrigin)); } @Test void shouldSetOriginSetOriginToNullForEnvConfigAndEnvVariables(){ environmentConfig.addEnvironmentVariable("var1", "value1"); environmentConfig.addEnvironmentVariable("var2", "value2"); environmentConfig.setOrigins(null); assertThat(environmentConfig.getOrigin()).isNull(); assertThat(environmentConfig.getVariables().size() == 2); environmentConfig.getVariables().forEach(var -> assertThat(var.getOrigin()).isNull()); } @Test void shouldReturnTrueThatLocalWhenOriginIsFile() { environmentConfig.setOrigins(new FileConfigOrigin()); assertThat(environmentConfig.isLocal()).isTrue(); } @Test void shouldReturnFalseThatLocalWhenOriginIsConfigRepo() { environmentConfig.setOrigins(new RepoConfigOrigin()); assertThat(environmentConfig.isLocal()).isFalse(); } @Test void 
shouldReturnSelfAsLocalPartWhenOriginIsFile() { environmentConfig.setOrigins(new FileConfigOrigin()); assertThat(environmentConfig.getLocal()).isSameAs(environmentConfig); } @Test void shouldReturnSelfAsLocalPartWhenOriginIsUI() { environmentConfig.setOrigins(new UIConfigOrigin()); assertThat(environmentConfig.getLocal()).isSameAs(environmentConfig); } @Test void shouldReturnNullAsLocalPartWhenOriginIsConfigRepo() { environmentConfig.setOrigins(new RepoConfigOrigin()); assertThat(environmentConfig.getLocal()).isNull(); } @Test void shouldUpdateName() { environmentConfig.setConfigAttributes(Collections.singletonMap(BasicEnvironmentConfig.NAME_FIELD, "PROD")); assertThat(environmentConfig.name()).isEqualTo(new CaseInsensitiveString("PROD")); } @Nested class Validate { @Test void shouldReturnTrueIfAssociatedAgentUUIDsAreFromSpecifiedSetOfUUIDs(){ environmentConfig.addAgent("uuid1"); environmentConfig.addAgent("uuid2"); environmentConfig.addAgent("uuid3"); boolean result = environmentConfig.validateContainsAgentUUIDsFrom(new HashSet<>(asList("uuid1", "uuid2", "uuid3", "uuid4"))); assertThat(result).isTrue(); } @Test void shouldReturnFalseIfAssociatedAgentUUIDsAreNotFromSpecifiedSetOfUUIDs(){ environmentConfig.addAgent("uuid1"); environmentConfig.addAgent("uuid2"); environmentConfig.addAgent("uuid3"); boolean result = environmentConfig.validateContainsAgentUUIDsFrom(new HashSet<>(asList("uuid1", "uuid2", "uuid4"))); assertThat(result).isFalse(); } @Test void shouldReturnFalseIfAssociatedAgentUUIDsAreNotFromSpecifiedSetOfUUIDsBecauseSpecifiedSetIsEmpty(){ environmentConfig.addAgent("uuid1"); environmentConfig.addAgent("uuid2"); environmentConfig.addAgent("uuid3"); boolean result = environmentConfig.validateContainsAgentUUIDsFrom(new HashSet<>(emptyList())); assertThat(result).isFalse(); } @Test void shouldNotAllowToReferencePipelineDefinedInConfigRepo_WhenEnvironmentDefinedInFile() { ConfigOrigin pipelineOrigin = new RepoConfigOrigin(); ConfigOrigin envOrigin = new 
FileConfigOrigin(); BasicCruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("pipe1"); cruiseConfig.getPipelineConfigByName(new CaseInsensitiveString("pipe1")).setOrigin(pipelineOrigin); BasicEnvironmentConfig environmentConfig = (BasicEnvironmentConfig) BasicEnvironmentConfigTest.this.environmentConfig; environmentConfig.setOrigins(envOrigin); environmentConfig.addPipeline(new CaseInsensitiveString("pipe1")); cruiseConfig.addEnvironment(environmentConfig); environmentConfig.validate(forChain(cruiseConfig, environmentConfig)); EnvironmentPipelineConfig reference = environmentConfig.getPipelines().first(); assertThat(reference.errors()).isNotEmpty(); assertThat(reference.errors().on(EnvironmentPipelineConfig.ORIGIN)).startsWith("Environment defined in"); } @Test void shouldAllowToReferencePipelineDefinedInConfigRepo_WhenEnvironmentDefinedInConfigRepo() { ConfigOrigin pipelineOrigin = new RepoConfigOrigin(); ConfigOrigin envOrigin = new RepoConfigOrigin(); passReferenceValidationHelper(pipelineOrigin, envOrigin); } @Test void shouldAllowToReferencePipelineDefinedInFile_WhenEnvironmentDefinedInFile() { ConfigOrigin pipelineOrigin = new FileConfigOrigin(); ConfigOrigin envOrigin = new FileConfigOrigin(); passReferenceValidationHelper(pipelineOrigin, envOrigin); } @Test void shouldAllowToReferencePipelineDefinedInFile_WhenEnvironmentDefinedInConfigRepo() { ConfigOrigin pipelineOrigin = new FileConfigOrigin(); ConfigOrigin envOrigin = new RepoConfigOrigin(); passReferenceValidationHelper(pipelineOrigin, envOrigin); } } @Test void shouldReturnPipelineNames() { CaseInsensitiveString pipeline1 = new CaseInsensitiveString("Pipeline1"); CaseInsensitiveString pipeline2 = new CaseInsensitiveString("Pipeline2"); environmentConfig.addPipeline(pipeline1); environmentConfig.addPipeline(pipeline2); assertThat(environmentConfig.getPipelineNames()).isEqualTo(asList(pipeline1, pipeline2)); } @Test void shouldReturnEmptyListAsPipelineNamesWhenThereAreNoPipelinesAssociated() { 
assertThat(environmentConfig.getPipelineNames()).isEqualTo(emptyList()); } @Test void shouldValidateWhenPipelineNotFound() { ConfigOrigin pipelineOrigin = new RepoConfigOrigin(); ConfigOrigin envOrigin = new FileConfigOrigin(); BasicCruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("pipe1"); cruiseConfig.getPipelineConfigByName(new CaseInsensitiveString("pipe1")).setOrigin(pipelineOrigin); BasicEnvironmentConfig environmentConfig = (BasicEnvironmentConfig) this.environmentConfig; environmentConfig.setOrigins(envOrigin); environmentConfig.addPipeline(new CaseInsensitiveString("unknown")); cruiseConfig.addEnvironment(environmentConfig); environmentConfig.validate(forChain(cruiseConfig, environmentConfig)); EnvironmentPipelineConfig reference = environmentConfig.getPipelines().first(); assertThat(reference.errors().isEmpty()).isTrue(); } @Nested class ValidateTree { @Test void shouldNotAllowToReferencePipelineDefinedInConfigRepo_WhenEnvironmentDefinedInFile() { ConfigOrigin pipelineOrigin = new RepoConfigOrigin(); ConfigOrigin envOrigin = new FileConfigOrigin(); BasicCruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("pipe1"); cruiseConfig.getPipelineConfigByName(new CaseInsensitiveString("pipe1")).setOrigin(pipelineOrigin); BasicEnvironmentConfig environmentConfig = (BasicEnvironmentConfig) BasicEnvironmentConfigTest.this.environmentConfig; environmentConfig.setOrigins(envOrigin); environmentConfig.addPipeline(new CaseInsensitiveString("pipe1")); cruiseConfig.addEnvironment(environmentConfig); environmentConfig.validateTree(forChain(cruiseConfig, environmentConfig), cruiseConfig); EnvironmentPipelineConfig reference = environmentConfig.getPipelines().first(); assertThat(reference.errors()).isNotEmpty(); assertThat(reference.errors().on(EnvironmentPipelineConfig.ORIGIN)).startsWith("Environment defined in"); } @Test void shouldAllowToReferencePipelineDefinedInConfigRepo_WhenEnvironmentDefinedInConfigRepo() { ConfigOrigin pipelineOrigin = new 
RepoConfigOrigin(); ConfigOrigin envOrigin = new RepoConfigOrigin(); passReferenceValidationHelperForValidateTree(pipelineOrigin, envOrigin); } @Test void shouldAllowToReferencePipelineDefinedInFile_WhenEnvironmentDefinedInFile() { ConfigOrigin pipelineOrigin = new FileConfigOrigin(); ConfigOrigin envOrigin = new FileConfigOrigin(); passReferenceValidationHelperForValidateTree(pipelineOrigin, envOrigin); } @Test void shouldAllowToReferencePipelineDefinedInFile_WhenEnvironmentDefinedInConfigRepo() { ConfigOrigin pipelineOrigin = new FileConfigOrigin(); ConfigOrigin envOrigin = new RepoConfigOrigin(); passReferenceValidationHelperForValidateTree(pipelineOrigin, envOrigin); } @Test void shouldValidateEnvVariables() { ConfigOrigin repoConfigOrigin = new RepoConfigOrigin(); BasicCruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("pipe1"); cruiseConfig.getPipelineConfigByName(new CaseInsensitiveString("pipe1")).setOrigin(repoConfigOrigin); BasicEnvironmentConfig environmentConfig = (BasicEnvironmentConfig) BasicEnvironmentConfigTest.this.environmentConfig; environmentConfig.setOrigins(repoConfigOrigin); environmentConfig.addPipeline(new CaseInsensitiveString("pipe1")); environmentConfig.addEnvironmentVariable(" ", "bar"); cruiseConfig.addEnvironment(environmentConfig); boolean validate = environmentConfig.validateTree(forChain(cruiseConfig, environmentConfig), cruiseConfig); List<ConfigErrors> configErrors = environmentConfig.getAllErrors(); assertThat(validate).isFalse(); assertThat(configErrors).isNotEmpty(); assertThat(configErrors.get(0).on("name")).isEqualTo("Environment Variable cannot have an empty name for environment 'UAT'."); } @Test void shouldValidateViaValidateTreeWhenPipelineNotFound() { ConfigOrigin pipelineOrigin = new RepoConfigOrigin(); ConfigOrigin envOrigin = new FileConfigOrigin(); BasicCruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("pipe1"); cruiseConfig.getPipelineConfigByName(new 
CaseInsensitiveString("pipe1")).setOrigin(pipelineOrigin); BasicEnvironmentConfig environmentConfig = (BasicEnvironmentConfig) BasicEnvironmentConfigTest.this.environmentConfig; environmentConfig.setOrigins(envOrigin); environmentConfig.addPipeline(new CaseInsensitiveString("unknown")); cruiseConfig.addEnvironment(environmentConfig); environmentConfig.validate(forChain(cruiseConfig, environmentConfig)); EnvironmentPipelineConfig reference = environmentConfig.getPipelines().first(); assertThat(reference.errors().isEmpty()).isTrue(); } } @Test void shouldReturnEnvironmentContextWithGO_ENVIRONMENT_NAMEVariableWhenNoEnvironmentVariablesAreDefined() { EnvironmentVariableContext environmentContext = environmentConfig.createEnvironmentContext(); assertThat(environmentContext.getProperties()).hasSize(1); assertThat(environmentContext.getProperty(GO_ENVIRONMENT_NAME)).isEqualTo(environmentConfig.name().toString()); } @Test void shouldReturnEnvironmentContextWithGO_ENVIRONMENT_NAMEVariableWhenEnvironmentVariablesAreDefined() { environmentConfig.addEnvironmentVariable("foo", "bar"); EnvironmentVariableContext environmentContext = environmentConfig.createEnvironmentContext(); assertThat(environmentContext.getProperties()).hasSize(2); assertThat(environmentContext.getProperty(GO_ENVIRONMENT_NAME)).isEqualTo(environmentConfig.name().toString()); assertThat(environmentContext.getProperty("foo")).isEqualTo("bar"); } @Test void shouldAddErrorToTheConfig() { assertTrue(environmentConfig.errors().isEmpty()); environmentConfig.addError("field-name", "some error message."); assertThat(environmentConfig.errors().size()).isEqualTo(1); assertThat(environmentConfig.errors().on("field-name")).isEqualTo("some error message."); } @Test void shouldReturnMatchersWithTheProperties() { environmentConfig.addPipeline(new CaseInsensitiveString("pipeline-1")); environmentConfig.addAgent("agent-1"); EnvironmentPipelineMatcher matcher = environmentConfig.createMatcher(); assertNotNull(matcher); 
assertThat(matcher.name()).isEqualTo(environmentConfig.name()); assertTrue(matcher.hasPipeline("pipeline-1")); assertTrue(matcher.match("pipeline-1", "agent-1")); assertFalse(matcher.hasPipeline("non-existent-pipeline")); } @Test void shouldNotThrowExceptionIfAllThePipelinesArePresent() { CaseInsensitiveString p1 = new CaseInsensitiveString("pipeline-1"); CaseInsensitiveString p2 = new CaseInsensitiveString("pipeline-2"); environmentConfig.addPipeline(p1); environmentConfig.addPipeline(p2); assertThatCode(() -> environmentConfig.validateContainsOnlyPipelines(asList(p1, p2))) .doesNotThrowAnyException(); } @Test void shouldThrowExceptionIfOneOfThePipelinesAreNotPassed() { CaseInsensitiveString p1 = new CaseInsensitiveString("pipeline-1"); CaseInsensitiveString p2 = new CaseInsensitiveString("pipeline-2"); CaseInsensitiveString p3 = new CaseInsensitiveString("pipeline-3"); environmentConfig.addPipeline(p1); environmentConfig.addPipeline(p2); assertThatCode(() -> environmentConfig.validateContainsOnlyPipelines(asList(p1, p3))) .isInstanceOf(RuntimeException.class) .hasMessage("Environment 'UAT' refers to an unknown pipeline 'pipeline-2'."); } @Test void shouldReturnTrueIsChildConfigContainsNoPipelineAgentsAndVariables() { assertTrue(environmentConfig.isEnvironmentEmpty()); } @Test void shouldReturnFalseIfNotEmpty() { environmentConfig.addPipeline(new CaseInsensitiveString("pipeline1")); assertFalse(environmentConfig.isEnvironmentEmpty()); } @Nested class ContainsAgentRemotely { @Test void shouldReturnTrueIfTheEnvAgentAssociationIsFromConfigRepo() { BasicEnvironmentConfig environmentConfig = remote("env1"); String uuid = "uuid"; environmentConfig.addAgent(uuid); assertThat(environmentConfig.containsAgentRemotely(uuid)).isTrue(); } @Test void shouldReturnFalseIfTheAgentIsNotPresentInTheEnvFromConfigRepo() { BasicEnvironmentConfig environmentConfig = remote("env1"); String uuid = "uuid"; assertThat(environmentConfig.containsAgentRemotely(uuid)).isFalse(); } @Test void 
shouldReturnFalseIfTheEnvAgentAssociationIsFromConfigXml() { BasicEnvironmentConfig environmentConfig = environment("env1"); String uuid = "uuid"; environmentConfig.addAgent(uuid); assertThat(environmentConfig.containsAgentRemotely(uuid)).isFalse(); } @Test void shouldReturnFalseIfTheOriginIsNull() { BasicEnvironmentConfig environmentConfig = new BasicEnvironmentConfig(new CaseInsensitiveString("env1")); String uuid = "uuid"; environmentConfig.addAgent(uuid); assertThat(environmentConfig.getOrigin()).isNull(); assertThat(environmentConfig.containsAgentRemotely(uuid)).isFalse(); } } @Nested class ContainsPipelineRemotely { @Test void shouldReturnTrueIfTheEnvPipelineAssociationIsFromConfigRepo() { BasicEnvironmentConfig environmentConfig = remote("env1"); CaseInsensitiveString pipeline = new CaseInsensitiveString("Pipeline"); environmentConfig.addPipeline(pipeline); assertThat(environmentConfig.containsPipelineRemotely(pipeline)).isTrue(); } @Test void shouldReturnFalseIfThePipelineIsNotPresentInTheEnvFromConfigRepo() { BasicEnvironmentConfig environmentConfig = remote("env1"); CaseInsensitiveString pipeline = new CaseInsensitiveString("Pipeline"); assertThat(environmentConfig.containsPipelineRemotely(pipeline)).isFalse(); } @Test void shouldReturnFalseIfTheEnvPipelineAssociationIsFromConfigXml() { BasicEnvironmentConfig environmentConfig = environment("env1"); assertThat(environmentConfig.getRemotePipelines()).isEqualTo(emptyList()); assertThat(environmentConfig.getPipelines()).size().isEqualTo(1); environmentConfig.getPipelineNames().forEach(pipeline -> assertThat(environmentConfig.containsPipelineRemotely(pipeline)).isFalse()); } @Test void shouldReturnFalseIfTheOriginIsNull() { BasicEnvironmentConfig environmentConfig = new BasicEnvironmentConfig(new CaseInsensitiveString("env1")); CaseInsensitiveString pipeline = new CaseInsensitiveString("Pipeline"); environmentConfig.addPipeline(pipeline); assertThat(environmentConfig.getOrigin()).isNull(); 
assertThat(environmentConfig.containsPipelineRemotely(pipeline)).isFalse(); } } @Nested class ContainsEnvironmentVariablesRemotely { @Test void shouldReturnTrueIfTheEnvVarAssociationIsFromConfigRepo() { BasicEnvironmentConfig environmentConfig = remote("env1"); environmentConfig.addEnvironmentVariable("var1", "value1"); assertThat(environmentConfig.containsEnvironmentVariableRemotely("var1")).isTrue(); } @Test void shouldReturnFalseIfTheVarIsNotPresentInTheEnvFromConfigRepo() { BasicEnvironmentConfig environmentConfig = remote("env1"); assertThat(environmentConfig.containsEnvironmentVariableRemotely("var1")).isFalse(); } @Test void shouldReturnFalseIfTheEnvVarAssociationIsFromConfigXml() { BasicEnvironmentConfig environmentConfig = environment("env1"); environmentConfig.addEnvironmentVariable("var1", "value1"); environmentConfig.getVariables().forEach(var -> assertThat(environmentConfig.containsEnvironmentVariableRemotely("var1")).isFalse()); } @Test void shouldReturnFalseIfTheOriginIsNull() { BasicEnvironmentConfig environmentConfig = new BasicEnvironmentConfig(new CaseInsensitiveString("env1")); environmentConfig.addEnvironmentVariable("var1", "value1"); assertThat(environmentConfig.getOrigin()).isNull(); assertThat(environmentConfig.containsEnvironmentVariableRemotely("var1")).isFalse(); } } @Test void shouldReturnEmptyOptionalIfEnvDoesNotContainTheAgent() { Optional<ConfigOrigin> originForAgent = this.environmentConfig.originForAgent("uuid"); assertThat(originForAgent.isPresent()).isFalse(); } @Test void shouldReturnOriginIfEnvContainsTheAgent() { String uuid = "uuid"; this.environmentConfig.addAgent(uuid); this.environmentConfig.setOrigins(new FileConfigOrigin()); Optional<ConfigOrigin> originForAgent = this.environmentConfig.originForAgent(uuid); assertThat(originForAgent.isPresent()).isTrue(); assertThat(originForAgent.get().displayName()).isEqualTo("cruise-config.xml"); } @Test void shouldReturnFalseIfEnvDoesNotContainTheSpecifiedAgentUuid() { 
assertThat(this.environmentConfig.hasAgent("uuid")).isFalse(); } @Test void shouldReturnTrueIfEnvContainsTheSpecifiedAgentUuid() { this.environmentConfig.addAgent("uuid"); assertThat(this.environmentConfig.hasAgent("uuid")).isTrue(); } private void passReferenceValidationHelper(ConfigOrigin pipelineOrigin, ConfigOrigin envOrigin) { BasicCruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("pipe1"); cruiseConfig.getPipelineConfigByName(new CaseInsensitiveString("pipe1")).setOrigin(pipelineOrigin); BasicEnvironmentConfig environmentConfig = (BasicEnvironmentConfig) this.environmentConfig; environmentConfig.setOrigins(envOrigin); environmentConfig.addPipeline(new CaseInsensitiveString("pipe1")); cruiseConfig.addEnvironment(environmentConfig); environmentConfig.validate(forChain(cruiseConfig, environmentConfig)); EnvironmentPipelineConfig reference = environmentConfig.getPipelines().first(); assertThat(reference.errors().isEmpty()).isTrue(); } private void passReferenceValidationHelperForValidateTree(ConfigOrigin pipelineOrigin, ConfigOrigin envOrigin) { BasicCruiseConfig cruiseConfig = GoConfigMother.configWithPipelines("pipe1"); cruiseConfig.getPipelineConfigByName(new CaseInsensitiveString("pipe1")).setOrigin(pipelineOrigin); BasicEnvironmentConfig environmentConfig = (BasicEnvironmentConfig) this.environmentConfig; environmentConfig.setOrigins(envOrigin); environmentConfig.addPipeline(new CaseInsensitiveString("pipe1")); cruiseConfig.addEnvironment(environmentConfig); environmentConfig.validateTree(forChain(cruiseConfig, environmentConfig), cruiseConfig); EnvironmentPipelineConfig reference = environmentConfig.getPipelines().first(); assertThat(reference.errors().isEmpty()).isTrue(); } }
package net.ggelardi.uoccin.serv;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

import net.ggelardi.uoccin.R;
import net.ggelardi.uoccin.serv.Commons.PK;
import net.ggelardi.uoccin.serv.Commons.SN;
import net.ggelardi.uoccin.serv.Commons.SR;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.res.Resources;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import android.util.Log;

import com.commonsware.cwac.wakeful.WakefulIntentService;
import com.squareup.okhttp.Interceptor;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Response;
import com.squareup.picasso.OkHttpDownloader;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.RequestCreator;

/**
 * Process-wide singleton that centralizes shared state for the app:
 * preferences access, the SQLite connection, the shared Picasso instance,
 * alarm (re)registration and the Google Drive sync bookkeeping.
 * <p>
 * NOTE(review): {@link #getInstance(Context)} is not synchronized — it assumes
 * first access happens on the main thread (the usual Android pattern); confirm
 * if any background component touches it first.
 */
public class Session implements OnSharedPreferenceChangeListener {
	private static final String TAG = "Session";

	private static Session singleton;

	/** Returns the lazily-created singleton bound to the application context. */
	public static Session getInstance(Context context) {
		if (singleton == null)
			singleton = new Session(context);
		return singleton;
	}

	private final Context acntx;          // application context, never an Activity
	private final SharedPreferences prefs;
	private final Storage dbhlp;          // SQLiteOpenHelper
	private SQLiteDatabase dbconn;        // lazily opened writable connection
	private Picasso picasso;              // lazily built shared image loader

	public Session(Context context) {
		acntx = context.getApplicationContext();
		prefs = PreferenceManager.getDefaultSharedPreferences(acntx);
		dbhlp = new Storage(acntx);
		// Ensure a device UUID exists before anything may read it.
		driveDeviceID();
		prefs.registerOnSharedPreferenceChangeListener(this);
	}

	/**
	 * Reacts to preference flips that require rescheduling alarms, and kicks
	 * off an immediate Drive sync (or a failure broadcast) when sync is
	 * enabled without/with a configured account.
	 */
	@Override
	public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
		if (key.equals(PK.TVDBFEED))
			registerAlarms();
		else if (key.equals(PK.GDRVSYNC)) {
			registerAlarms();
			if (driveSyncEnabled()) {
				if (!driveAccountSet())
					acntx.sendBroadcast(new Intent(SN.CONNECT_FAIL));
				else
					WakefulIntentService.sendWakefulWork(acntx, new Intent(acntx, Service.class).setAction(
						SR.GDRIVE_SYNCNOW));
			}
		} else if (key.equals(PK.GDRVUUID)) {
			// ?
		}
	}

	public Context getContext() {
		return acntx;
	}

	public SharedPreferences getPrefs() {
		return prefs;
	}

	public Resources getRes() {
		return acntx.getResources();
	}

	public String getString(int id) {
		return acntx.getResources().getString(id);
	}

	public String[] getStringArray(int id) {
		return acntx.getResources().getStringArray(id);
	}

	/** Lazily opens (and then caches) the writable database connection. */
	public SQLiteDatabase getDB() {
		if (dbconn == null)
			dbconn = dbhlp.getWritableDatabase();
		return dbconn;
	}

	/** True when any network is connected or in the process of connecting. */
	public boolean isConnected() {
		ConnectivityManager cm = (ConnectivityManager) acntx.getSystemService(Context.CONNECTIVITY_SERVICE);
		NetworkInfo ni = cm.getActiveNetworkInfo();
		return ni != null && ni.isConnectedOrConnecting();
	}

	/** True when the active (or connecting) network is WiFi. */
	public boolean isOnWIFI() {
		ConnectivityManager cm = (ConnectivityManager) acntx.getSystemService(Context.CONNECTIVITY_SERVICE);
		NetworkInfo ni = cm.getActiveNetworkInfo();
		return ni != null && ni.isConnectedOrConnecting() && ni.getType() == ConnectivityManager.TYPE_WIFI;
	}

	/**
	 * Builds the broadcast PendingIntent for a Receiver action.
	 * With create == false FLAG_NO_CREATE is used, so a null return means
	 * "no such alarm registered yet".
	 */
	private PendingIntent getPI(String action, boolean create) {
		return PendingIntent.getBroadcast(acntx, 0, new Intent(acntx, Receiver.class).setAction(action), create ?
			PendingIntent.FLAG_UPDATE_CURRENT : PendingIntent.FLAG_NO_CREATE);
	}

	/**
	 * Registers (or cancels) the repeating alarms: hourly DB cache cleanup,
	 * hourly TVDB feed check (if enabled) and the periodic Drive sync
	 * (if enabled and an account is set). Idempotent: existing alarms are
	 * detected via FLAG_NO_CREATE and left untouched.
	 */
	public void registerAlarms() {
		Log.d(TAG, "registerAlarms() begin");
		AlarmManager am = (AlarmManager) acntx.getSystemService(Context.ALARM_SERVICE);
		PendingIntent cc = getPI(SR.CLEAN_DB_CACHE, false);
		if (cc == null) {
			cc = getPI(SR.CLEAN_DB_CACHE, true);
			am.setInexactRepeating(AlarmManager.ELAPSED_REALTIME, 60000, AlarmManager.INTERVAL_HOUR, cc);
			Log.d(TAG, "CLEAN_DB_CACHE alarm set");
		}
		PendingIntent tv = getPI(SR.CHECK_TVDB_RSS, false);
		if (tvdbCheckFeed() && tv == null) {
			tv = getPI(SR.CHECK_TVDB_RSS, true);
			am.setInexactRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP, 5 * 60000, AlarmManager.INTERVAL_HOUR, tv);
			Log.d(TAG, "CHECK_TVDB_RSS alarm set");
		} else if (!tvdbCheckFeed() && tv != null) {
			am.cancel(tv);
			tv.cancel();
			Log.d(TAG, "CHECK_TVDB_RSS alarm canceled");
		}
		PendingIntent gd = getPI(SR.GDRIVE_SYNCNOW, false);
		boolean should = driveSyncEnabled() && driveAccountSet();
		if (should && gd == null) {
			gd = getPI(SR.GDRIVE_SYNCNOW, true);
			am.setInexactRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP, 2 * 60000, driveSyncInterval(), gd);
			Log.d(TAG, "GDRIVE_SYNCNOW alarm set");
		} else if (!should && gd != null) {
			am.cancel(gd);
			gd.cancel();
			Log.d(TAG, "GDRIVE_SYNCNOW alarm canceled");
		}
		Log.d(TAG, "registerAlarms() end");
	}

	/** The manifest versionName, or "Unknown" if the package cannot be resolved. */
	public String versionName() {
		try {
			return acntx.getPackageManager().getPackageInfo(acntx.getPackageName(), 0).versionName;
		} catch (NameNotFoundException e) {
			return "Unknown";
		}
	}

	// user preferences

	public String language() {
		return prefs.getString(PK.LANGUAGE, Locale.getDefault().getLanguage());
	}

	public boolean specials() {
		return prefs.getBoolean(PK.SPECIALS, false);
	}

	public boolean autorefrWifiOnly() {
		return prefs.getBoolean(PK.METAWIFI, true);
	}

	public boolean tvdbCheckFeed() {
		return prefs.getBoolean(PK.TVDBFEED, false);
	}

	// NOTE(review): an unset/empty preference yields a list with one empty
	// string (split("") behavior) — preserved, callers may rely on it.
	public List<String> tvdbGenreFilter() {
		return new ArrayList<String>(Arrays.asList(prefs.getString(PK.TVDBGFLT,
			"").toLowerCase(Locale.getDefault()).split(",")));
	}

	public List<String> tvdbNetworkFilter() {
		return new ArrayList<String>(Arrays.asList(prefs.getString(PK.TVDBGNET,
			"").toLowerCase(Locale.getDefault()).split(",")));
	}

	public long tvdbLastCheck() {
		return prefs.getLong(PK.TVDBLAST, 0);
	}

	public boolean notificationSound() {
		return prefs.getBoolean(PK.NOTIFSND, false);
	}

	public boolean notifyMovWlst() {
		return prefs.getBoolean(PK.NOTMOVWL, true);
	}

	public boolean notifyMovColl() {
		return prefs.getBoolean(PK.NOTMOVCO, true);
	}

	public boolean notifySerWlst() {
		return prefs.getBoolean(PK.NOTSERWL, true);
	}

	public boolean notifySerColl() {
		return prefs.getBoolean(PK.NOTSERCO, true);
	}

	public boolean blockSpoilers() {
		return prefs.getBoolean(PK.SPLRPROT, true);
	}

	public boolean driveSyncEnabled() {
		return prefs.getBoolean(PK.GDRVSYNC, false);
	}

	public boolean driveSyncWifiOnly() {
		return prefs.getBoolean(PK.GDRVWIFI, true);
	}

	/** Sync interval in milliseconds (preference is stored in minutes). */
	public long driveSyncInterval() {
		// 60000L forces the multiplication to happen in long arithmetic
		// instead of overflowing int first and then widening.
		return prefs.getInt(PK.GDRVINTV, 30) * 60000L;
	}

	// app saved stuff

	/** Returns the stable per-device UUID, generating and persisting it on first use. */
	public String driveDeviceID() {
		String res = getPrefs().getString(PK.GDRVUUID, "");
		if (TextUtils.isEmpty(res)) {
			res = UUID.randomUUID().toString();
			SharedPreferences.Editor editor = prefs.edit();
			editor.putString(PK.GDRVUUID, res);
			editor.commit();
		}
		return res;
	}

	public boolean driveAccountSet() {
		return !TextUtils.isEmpty(driveAccountName());
	}

	public String driveAccountName() {
		return prefs.getString(PK.GDRVAUTH, "");
	}

	public long driveLastChangeID() {
		return prefs.getLong(PK.GDRVLCID, 0);
	}

	public long driveLastFileUpdate(String filename) {
		return prefs.getLong("pk_lastupd_" + filename, 0);
	}

	/** Same as {@link #driveLastFileUpdate} but converted from the local timezone to UTC. */
	public long driveLastFileUpdateUTC(String filename) {
		long res = driveLastFileUpdate(filename);
		if (res > 0 && !Calendar.getInstance().getTimeZone().getID().equals("UTC"))
			res = Commons.convertTZ(res, Calendar.getInstance().getTimeZone().getID(), "UTC");
		return res;
	}

	public void setDriveUserAccount(String value) {
		Log.v(TAG, "Account selected: " + value);
		SharedPreferences.Editor editor = prefs.edit();
		editor.putString(PK.GDRVAUTH, value);
		editor.commit();
	}

	public void setDriveLastChangeID(long value) {
		SharedPreferences.Editor editor = prefs.edit();
		editor.putLong(PK.GDRVLCID, value);
		editor.commit();
	}

	public void setDriveLastFileUpdate(String filename, long datetime) {
		SharedPreferences.Editor editor = prefs.edit();
		editor.putLong("pk_lastupd_" + filename, datetime);
		editor.commit();
	}

	// utilities

	/** Lazily builds the shared Picasso instance over an OkHttp client with 15s timeouts. */
	private Picasso getPicasso() {
		if (picasso == null) {
			OkHttpClient client = new OkHttpClient();
			client.setConnectTimeout(15, TimeUnit.SECONDS);
			client.setReadTimeout(15, TimeUnit.SECONDS);
			client.setWriteTimeout(15, TimeUnit.SECONDS);
			client.interceptors().add(new Interceptor() {
				@Override
				public Response intercept(Chain chain) throws IOException {
					// Stamp every image request with the app's User-Agent.
					return chain.proceed(chain.request().newBuilder().addHeader("User-Agent",
						Commons.USER_AGENT).build());
				}
			});
			picasso = new Picasso.Builder(acntx).downloader(new OkHttpDownloader(client)).build();
		}
		return picasso;
	}

	public Picasso picasso() {
		return getPicasso();
	}

	public RequestCreator picasso(String path) {
		return getPicasso().load(path).noPlaceholder();
	}

	public RequestCreator picasso(String path, int placeholder) {
		return getPicasso().load(path).placeholder(placeholder);
	}

	public RequestCreator picasso(String path, boolean placeholder) {
		if (!placeholder)
			return picasso(path);
		return getPicasso().load(path).placeholder(R.drawable.ic_action_image);
	}

	/** Returns {@code value}, or the string resource {@code resId} when value is empty. */
	public String defaultText(String value, int resId) {
		if (TextUtils.isEmpty(value))
			return acntx.getResources().getString(resId);
		return value;
	}

	/**
	 * Returns every distinct tag used on movies or series, without empty/null
	 * entries, de-duplicated and sorted.
	 * <p>
	 * Fixed: the previous implementation sorted {@code res.subList(1, res.size())},
	 * which (a) left the arbitrary first element of the de-duplicated set out of
	 * order and (b) threw IndexOutOfBoundsException when no tags existed.
	 */
	public List<String> getAllTags() {
		List<String> res = new ArrayList<String>();
		collectDistinctTags("movtag", res);
		collectDistinctTags("sertag", res);
		res.removeAll(Arrays.asList("", null));
		Set<String> hs = new HashSet<String>(res);
		res.clear();
		res.addAll(hs);
		Collections.sort(res);
		return res;
	}

	/** Reads the distinct values of the "tag" column of {@code table} into {@code out}. */
	private void collectDistinctTags(String table, List<String> out) {
		Cursor cr = getDB().query(true, table, new String[] { "tag" }, null, null, null, null, null, null, null);
		try {
			while (cr.moveToNext())
				out.add(cr.getString(0));
		} finally {
			cr.close();
		}
	}

	public static final String QUEUE_MOVIE = "movie";
	public static final String QUEUE_SERIES = "series";

	/**
	 * Appends one pending change to the Drive outbound queue table; no-op when
	 * Drive sync is disabled.
	 *
	 * @param target QUEUE_MOVIE or QUEUE_SERIES
	 * @param title  item identifier
	 * @param field  changed field name
	 * @param value  new value, serialized as text
	 */
	public synchronized void driveQueue(String target, String title, String field, String value) {
		if (!driveSyncEnabled())
			return;
		ContentValues cv = new ContentValues();
		cv.put("timestamp", System.currentTimeMillis());
		cv.put("target", target);
		cv.put("title", title);
		cv.put("field", field);
		cv.put("value", value);
		getDB().insertOrThrow("queue_out", null, cv);
	}
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.painless;

import java.lang.invoke.LambdaConversionException;
import java.time.Instant;

import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.endsWith;

/**
 * Tests method references ({@code Foo::bar}) in Painless scripts: static,
 * virtual, constructor, array-constructor and capturing references, against
 * both statically typed and {@code def} receivers, plus the failure cases
 * (missing methods/classes, arity mismatches, void returns, invalid static
 * captures).
 */
public class FunctionRefTests extends ScriptTestCase {

    public void testStaticMethodReference() {
        assertEquals(1, exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(Integer::compare); return l.get(0);"));
    }

    public void testStaticMethodReferenceDef() {
        assertEquals(1, exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(Integer::compare); return l.get(0);"));
    }

    public void testVirtualMethodReference() {
        assertEquals(2, exec("List l = new ArrayList(); l.add(1); l.add(1); return l.stream().mapToInt(Integer::intValue).sum();"));
    }

    public void testVirtualMethodReferenceDef() {
        assertEquals(2, exec("def l = new ArrayList(); l.add(1); l.add(1); return l.stream().mapToInt(Integer::intValue).sum();"));
    }

    public void testQualifiedStaticMethodReference() {
        assertEquals(true,
                exec("List l = [true]; l.stream().map(org.elasticsearch.painless.FeatureTestObject::overloadedStatic).findFirst().get()"));
    }

    public void testQualifiedStaticMethodReferenceDef() {
        assertEquals(true,
                exec("def l = [true]; l.stream().map(org.elasticsearch.painless.FeatureTestObject::overloadedStatic).findFirst().get()"));
    }

    public void testQualifiedVirtualMethodReference() {
        long instant = randomLong();
        assertEquals(instant, exec(
                "List l = [params.d]; return l.stream().mapToLong(Instant::toEpochMilli).sum()",
                singletonMap("d", Instant.ofEpochMilli(instant)), true));
    }

    public void testQualifiedVirtualMethodReferenceDef() {
        long instant = randomLong();
        assertEquals(instant, exec(
                "def l = [params.d]; return l.stream().mapToLong(Instant::toEpochMilli).sum()",
                singletonMap("d", Instant.ofEpochMilli(instant)), true));
    }

    // Constructor references (Type::new) used as the supplier of a collect().
    public void testCtorMethodReference() {
        assertEquals(3.0D,
                exec("List l = new ArrayList(); l.add(1.0); l.add(2.0); " +
                        "DoubleStream doubleStream = l.stream().mapToDouble(Double::doubleValue);" +
                        "DoubleSummaryStatistics stats = doubleStream.collect(DoubleSummaryStatistics::new, " +
                        "DoubleSummaryStatistics::accept, " +
                        "DoubleSummaryStatistics::combine); " +
                        "return stats.getSum()"));
    }

    public void testCtorMethodReferenceDef() {
        assertEquals(3.0D,
                exec("def l = new ArrayList(); l.add(1.0); l.add(2.0); " +
                        "def doubleStream = l.stream().mapToDouble(Double::doubleValue);" +
                        "def stats = doubleStream.collect(DoubleSummaryStatistics::new, " +
                        "DoubleSummaryStatistics::accept, " +
                        "DoubleSummaryStatistics::combine); " +
                        "return stats.getSum()"));
    }

    public void testCtorWithParams() {
        assertArrayEquals(new Object[] { "foo", "bar" },
                (Object[]) exec("List l = new ArrayList(); l.add('foo'); l.add('bar'); " +
                        "Stream stream = l.stream().map(StringBuilder::new);" +
                        "return stream.map(Object::toString).toArray()"));
    }

    public void testArrayCtorMethodRef() {
        assertEquals(1.0D,
                exec("List l = new ArrayList(); l.add(1.0); l.add(2.0); " +
                        "def[] array = l.stream().toArray(Double[]::new);" +
                        "return array[0];"));
    }

    public void testArrayCtorMethodRefDef() {
        assertEquals(1.0D,
                exec("def l = new ArrayList(); l.add(1.0); l.add(2.0); " +
                        "def[] array = l.stream().toArray(Double[]::new);" +
                        "return array[0];"));
    }

    // Capturing references (instance::method) over a local variable.
    public void testCapturingMethodReference() {
        assertEquals("5", exec("Integer x = Integer.valueOf(5); return Optional.empty().orElseGet(x::toString);"));
        assertEquals("[]", exec("List l = new ArrayList(); return Optional.empty().orElseGet(l::toString);"));
    }

    public void testCapturingMethodReferenceDefImpl() {
        assertEquals("5", exec("def x = Integer.valueOf(5); return Optional.empty().orElseGet(x::toString);"));
        assertEquals("[]", exec("def l = new ArrayList(); return Optional.empty().orElseGet(l::toString);"));
    }

    public void testCapturingMethodReferenceDefInterface() {
        assertEquals("5", exec("Integer x = Integer.valueOf(5); def opt = Optional.empty(); return opt.orElseGet(x::toString);"));
        assertEquals("[]", exec("List l = new ArrayList(); def opt = Optional.empty(); return opt.orElseGet(l::toString);"));
    }

    public void testCapturingMethodReferenceDefEverywhere() {
        assertEquals("5", exec("def x = Integer.valueOf(5); def opt = Optional.empty(); return opt.orElseGet(x::toString);"));
        assertEquals("[]", exec("def l = new ArrayList(); def opt = Optional.empty(); return opt.orElseGet(l::toString);"));
    }

    public void testCapturingMethodReferenceMultipleLambdas() {
        assertEquals("testingcdefg", exec(
                "String x = 'testing';" +
                "String y = 'abcdefg';" +
                "org.elasticsearch.painless.FeatureTestObject test = new org.elasticsearch.painless.FeatureTestObject(2,3);" +
                "return test.twoFunctionsOfX(x::concat, y::substring);"));
    }

    public void testCapturingMethodReferenceMultipleLambdasDefImpls() {
        assertEquals("testingcdefg", exec(
                "def x = 'testing';" +
                "def y = 'abcdefg';" +
                "org.elasticsearch.painless.FeatureTestObject test = new org.elasticsearch.painless.FeatureTestObject(2,3);" +
                "return test.twoFunctionsOfX(x::concat, y::substring);"));
    }

    public void testCapturingMethodReferenceMultipleLambdasDefInterface() {
        assertEquals("testingcdefg", exec(
                "String x = 'testing';" +
                "String y = 'abcdefg';" +
                "def test = new org.elasticsearch.painless.FeatureTestObject(2,3);" +
                "return test.twoFunctionsOfX(x::concat, y::substring);"));
    }

    public void testCapturingMethodReferenceMultipleLambdasDefEverywhere() {
        assertEquals("testingcdefg", exec(
                "def x = 'testing';" +
                "def y = 'abcdefg';" +
                "def test = new org.elasticsearch.painless.FeatureTestObject(2,3);" +
                "return test.twoFunctionsOfX(x::concat, y::substring);"));
    }

    // References to user-defined functions of the script itself (this::fn).
    public void testOwnMethodReference() {
        assertEquals(2, exec("int mycompare(int i, int j) { j - i } " +
                "List l = new ArrayList(); l.add(2); l.add(1); l.sort(this::mycompare); return l.get(0);"));
    }

    public void testOwnMethodReferenceDef() {
        assertEquals(2, exec("int mycompare(int i, int j) { j - i } " +
                "def l = new ArrayList(); l.add(2); l.add(1); l.sort(this::mycompare); return l.get(0);"));
    }

    public void testInterfaceDefaultMethod() {
        assertEquals("bar", exec("String f(BiFunction function) { function.apply('foo', 'bar') }" +
                "Map map = new HashMap(); f(map::getOrDefault)"));
    }

    public void testInterfaceDefaultMethodDef() {
        assertEquals("bar", exec("String f(BiFunction function) { function.apply('foo', 'bar') }" +
                "def map = new HashMap(); f(map::getOrDefault)"));
    }

    public void testInterfaceStaticMethod() {
        assertEquals(-1, exec("Supplier get(Supplier supplier) { return supplier }" +
                "Supplier s = get(Comparator::naturalOrder); s.get().compare(1, 2)"));
    }

    // Error cases: the assertions below pin exact fragments of the compiler's
    // error messages; do not edit the expected strings casually.
    public void testMethodMissing() {
        Exception e = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("List l = [2, 1]; l.sort(Integer::bogus); return l.get(0);");
        });
        assertThat(e.getMessage(), containsString("function reference [Integer::bogus/2] matching [java.util.Comparator"));
    }

    public void testQualifiedMethodMissing() {
        Exception e = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("List l = [2, 1]; l.sort(java.time.Instant::bogus); return l.get(0);", false);
        });
        assertThat(e.getMessage(),
                containsString("function reference [java.time.Instant::bogus/2] matching [java.util.Comparator, compare/2"));
    }

    public void testClassMissing() {
        Exception e = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("List l = [2, 1]; l.sort(Bogus::bogus); return l.get(0);", false);
        });
        assertThat(e.getMessage(), endsWith("variable [Bogus] is not defined"));
    }

    public void testQualifiedClassMissing() {
        Exception e = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("List l = [2, 1]; l.sort(org.joda.time.BogusDateTime::bogus); return l.get(0);", false);
        });
        assertEquals("variable [org.joda.time.BogusDateTime] is not defined", e.getMessage());
    }

    public void testNotFunctionalInterface() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("List l = new ArrayList(); l.add(2); l.add(1); l.add(Integer::bogus); return l.get(0);");
        });
        assertThat(expected.getMessage(),
                containsString("cannot convert function reference [Integer::bogus] to a non-functional interface [def]"));
    }

    public void testIncompatible() {
        expectScriptThrows(ClassCastException.class, () -> {
            exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::startsWith); return l.get(0);");
        });
    }

    public void testWrongArity() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("Optional.empty().orElseGet(String::startsWith);");
        });
        assertThat(expected.getMessage(),
                containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier"));
    }

    public void testWrongArityNotEnough() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("List l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);");
        });
        assertThat(expected.getMessage(), containsString(
                "function reference [String::isEmpty/2] matching [java.util.Comparator"));
    }

    public void testWrongArityDef() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("def y = Optional.empty(); return y.orElseGet(String::startsWith);");
        });
        assertThat(expected.getMessage(),
                containsString("function reference [String::startsWith/0] matching [java.util.function.Supplier"));
    }

    public void testWrongArityNotEnoughDef() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> {
            exec("def l = new ArrayList(); l.add(2); l.add(1); l.sort(String::isEmpty);");
        });
        assertThat(expected.getMessage(),
                containsString("function reference [String::isEmpty/2] matching [java.util.Comparator"));
    }

    public void testReturnVoid() {
        Throwable expected = expectScriptThrows(ClassCastException.class, () -> {
            exec("StringBuilder b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(b::setLength).sum();");
        });
        assertThat(expected.getMessage(), containsString("Cannot cast from [void] to [long]."));
    }

    public void testReturnVoidDef() {
        // With a def receiver the failure surfaces at link time as a
        // LambdaConversionException rather than a compile-time cast error.
        Exception expected = expectScriptThrows(LambdaConversionException.class, () -> {
            exec("StringBuilder b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(b::setLength);");
        });
        assertThat(expected.getMessage(), containsString("lambda expects return type [long], but found return type [void]"));

        expected = expectScriptThrows(LambdaConversionException.class, () -> {
            exec("def b = new StringBuilder(); def l = [1, 2]; l.stream().mapToLong(b::setLength);");
        });
        assertThat(expected.getMessage(), containsString("lambda expects return type [long], but found return type [void]"));

        expected = expectScriptThrows(LambdaConversionException.class, () -> {
            exec("def b = new StringBuilder(); List l = [1, 2]; l.stream().mapToLong(b::setLength);");
        });
        assertThat(expected.getMessage(), containsString("lambda expects return type [long], but found return type [void]"));
    }

    public void testPrimitiveMethodReferences() {
        assertEquals(true, exec("boolean test(Function s) {return s.apply(Boolean.valueOf(true));} return test(boolean::booleanValue);"));
        assertEquals(true, exec("boolean test(Supplier s) {return s.get();} boolean b = true; return test(b::booleanValue);"));
        assertEquals((byte)1, exec("byte test(Function s) {return s.apply(Byte.valueOf(1));} return test(byte::byteValue);"));
        assertEquals((byte)1, exec("byte test(Supplier s) {return s.get();} byte b = 1; return test(b::byteValue);"));
        assertEquals((short)1, exec("short test(Function s) {return s.apply(Short.valueOf(1));} return test(short::shortValue);"));
        assertEquals((short)1, exec("short test(Supplier s) {return s.get();} short s = 1; return test(s::shortValue);"));
        assertEquals((char)1, exec("char test(Function s) {return s.apply(Character.valueOf(1));} return test(char::charValue);"));
        assertEquals((char)1, exec("char test(Supplier s) {return s.get();} char c = 1; return test(c::charValue);"));
        assertEquals(1, exec("int test(Function s) {return s.apply(Integer.valueOf(1));} return test(int::intValue);"));
        assertEquals(1, exec("int test(Supplier s) {return s.get();} int i = 1; return test(i::intValue);"));
        assertEquals((long)1, exec("long test(Function s) {return s.apply(Long.valueOf(1));} return test(long::longValue);"));
        assertEquals((long)1, exec("long test(Supplier s) {return s.get();} long l = 1; return test(l::longValue);"));
        assertEquals((float)1, exec("float test(Function s) {return s.apply(Short.valueOf(1));} return test(float::floatValue);"));
        assertEquals((float)1, exec("float test(Supplier s) {return s.get();} float f = 1; return test(f::floatValue);"));
        assertEquals((double)1, exec("double test(Function s) {return s.apply(Double.valueOf(1));} return test(double::doubleValue);"));
        assertEquals((double)1, exec("double test(Supplier s) {return s.get();} double d = 1; return test(d::doubleValue);"));
    }

    public void testObjectMethodOverride() {
        assertEquals("s", exec("CharSequence test(Supplier s) {return s.get();} CharSequence s = 's'; return test(s::toString);"));
        assertEquals("s", exec("CharSequence test(Supplier s) {return s.get();} def s = 's'; return test(s::toString);"));
        assertEquals("s", exec("CharSequence test(Function s) {return s.apply('s');} return test(CharSequence::toString);"));
    }

    public void testInvalidStaticCaptureMethodReference() {
        IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class,
                () -> exec("int test(Function f, String s) {return f.apply(s);} Integer i = Integer.valueOf(1); test(i::parseInt, '1')")
        );
        assertThat(expected.getMessage(), containsString("cannot use a static method as a function reference"));
    }
}
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.qpid.jms.provider.amqp.message; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.UUID; import org.apache.qpid.jms.exceptions.IdConversionException; import org.apache.qpid.jms.test.QpidJmsTestCase; import org.apache.qpid.proton.amqp.Binary; import org.apache.qpid.proton.amqp.UnsignedLong; import org.junit.Test; public class AmqpMessageIdHelperTest extends QpidJmsTestCase { /** * Test that {@link AmqpMessageIdHelper#hasMessageIdPrefix(String)} returns true for strings that begin "ID:" */ @Test public void testHasIdPrefixWithPrefix() { String myId = "ID:something"; assertTrue("'ID:' prefix should have been identified", AmqpMessageIdHelper.hasMessageIdPrefix(myId)); } /** * Test that {@link AmqpMessageIdHelper#hasMessageIdPrefix(String)} returns false for string beings "ID" without colon. 
*/ @Test public void testHasIdPrefixWithIDButNoColonPrefix() { String myIdNoColon = "IDsomething"; assertFalse("'ID' prefix should not have been identified without trailing colon", AmqpMessageIdHelper.hasMessageIdPrefix(myIdNoColon)); } /** * Test that {@link AmqpMessageIdHelper#hasMessageIdPrefix(String)} returns false for null */ @Test public void testHasIdPrefixWithNull() { String nullString = null; assertFalse("null string should not result in identification as having the prefix", AmqpMessageIdHelper.hasMessageIdPrefix(nullString)); } /** * Test that {@link AmqpMessageIdHelper#hasMessageIdPrefix(String)} returns false for strings that doesnt have "ID:" anywhere */ @Test public void testHasIdPrefixWithoutPrefix() { String myNonId = "something"; assertFalse("string without 'ID:' anywhere should not have been identified as having the prefix", AmqpMessageIdHelper.hasMessageIdPrefix(myNonId)); } /** * Test that {@link AmqpMessageIdHelper#hasMessageIdPrefix(String)} returns false for strings has lowercase "id:" prefix */ @Test public void testHasIdPrefixWithLowercaseID() { String myLowerCaseNonId = "id:something"; assertFalse("lowercase 'id:' prefix should not result in identification as having 'ID:' prefix", AmqpMessageIdHelper.hasMessageIdPrefix(myLowerCaseNonId)); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns null if given null */ @Test public void testToMessageIdStringWithNull() { assertNull("null string should have been returned", AmqpMessageIdHelper.toMessageIdString(null)); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} throws an IAE if given an unexpected object type. 
*/ @Test public void testToMessageIdStringThrowsIAEWithUnexpectedType() { try { AmqpMessageIdHelper.toMessageIdString(new Object()); fail("expected exception not thrown"); } catch (IllegalArgumentException iae) { // expected } } private void doToMessageIdTestImpl(Object idObject, String expected) { String idString = AmqpMessageIdHelper.toMessageIdString(idObject); assertNotNull("null string should not have been returned", idString); assertEquals("expected id string was not returned", expected, idString); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns the given * basic "ID:content" string unchanged. */ @Test public void testToMessageIdStringWithString() { String stringId = "ID:myIdString"; doToMessageIdTestImpl(stringId, stringId); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns the given * basic string with the 'no prefix' prefix and "ID:" prefix. */ @Test public void testToMessageIdStringWithStringNoPrefix() { String stringId = "myIdStringNoPrefix"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_NO_PREFIX + stringId; doToMessageIdTestImpl(stringId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating lack of "ID:" prefix, when the given string happens to begin with * the {@link AmqpMessageIdHelper#AMQP_UUID_PREFIX}. */ @Test public void testToMessageIdStringWithStringBeginningWithEncodingPrefixForUUID() { String uuidStringMessageId = AmqpMessageIdHelper.AMQP_UUID_PREFIX + UUID.randomUUID(); String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_NO_PREFIX + uuidStringMessageId; doToMessageIdTestImpl(uuidStringMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating lack of "ID:" prefix, when the given string happens to begin with * the {@link AmqpMessageIdHelper#AMQP_ULONG_PREFIX}. 
*/ @Test public void testToMessageIdStringWithStringBeginningWithEncodingPrefixForLong() { String longStringMessageId = AmqpMessageIdHelper.AMQP_ULONG_PREFIX + Long.valueOf(123456789L); String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_NO_PREFIX + longStringMessageId; doToMessageIdTestImpl(longStringMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating lack of "ID:" prefix, when the given string happens to begin with * the {@link AmqpMessageIdHelper#AMQP_BINARY_PREFIX}. */ @Test public void testToMessageIdStringWithStringBeginningWithEncodingPrefixForBinary() { String binaryStringMessageId = AmqpMessageIdHelper.AMQP_BINARY_PREFIX + "0123456789ABCDEF"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_NO_PREFIX + binaryStringMessageId; doToMessageIdTestImpl(binaryStringMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating lack of "ID:" prefix, when the given string happens to begin with * the {@link AmqpMessageIdHelper#AMQP_STRING_PREFIX}. */ @Test public void testToMessageIdStringWithStringBeginningWithEncodingPrefixForString() { String stringMessageId = AmqpMessageIdHelper.AMQP_STRING_PREFIX + "myStringId"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_NO_PREFIX + stringMessageId; doToMessageIdTestImpl(stringMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating lack of "ID:" prefix, effectively twice, when the given string happens to * begin with the {@link AmqpMessageIdHelper#AMQP_NO_PREFIX}. 
*/ @Test public void testToMessageIdStringWithStringBeginningWithEncodingPrefixForNoIdPrefix() { String stringMessageId = AmqpMessageIdHelper.AMQP_NO_PREFIX + "myStringId"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_NO_PREFIX + stringMessageId; doToMessageIdTestImpl(stringMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating an AMQP encoded UUID when given a UUID object. */ @Test public void testToMessageIdStringWithUUID() { UUID uuidMessageId = UUID.randomUUID(); String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_UUID_PREFIX + uuidMessageId.toString(); doToMessageIdTestImpl(uuidMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating an AMQP encoded ulong when given a UnsignedLong object. */ @Test public void testToMessageIdStringWithUnsignedLong() { UnsignedLong uLongMessageId = UnsignedLong.valueOf(123456789L); String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_ULONG_PREFIX + uLongMessageId.toString(); doToMessageIdTestImpl(uLongMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating an AMQP encoded binary when given a Binary object. */ @Test public void testToMessageIdStringWithBinary() { byte[] bytes = new byte[] { (byte) 0x00, (byte) 0xAB, (byte) 0x09, (byte) 0xFF }; Binary binary = new Binary(bytes); String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_BINARY_PREFIX + "00AB09FF"; doToMessageIdTestImpl(binary, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating an escaped string, when given an input string that already has * the "ID:" prefix, but follows it with an encoding prefix, in this case * the {@link AmqpMessageIdHelper#AMQP_STRING_PREFIX}. 
*/ @Test public void testToMessageIdStringWithStringBeginningWithIdAndEncodingPrefixForString() { String unescapedStringPrefixMessageId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + "id-content"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + unescapedStringPrefixMessageId; doToMessageIdTestImpl(unescapedStringPrefixMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating an escaped string, when given an input string that already has * the "ID:" prefix, but follows it with an encoding prefix, in this case * the {@link AmqpMessageIdHelper#AMQP_UUID_PREFIX}. */ @Test public void testToMessageIdStringWithStringBeginningWithIdAndEncodingPrefixForUUID() { String unescapedUuidPrefixMessageId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_UUID_PREFIX + UUID.randomUUID(); String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + unescapedUuidPrefixMessageId; doToMessageIdTestImpl(unescapedUuidPrefixMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating an escaped string, when given an input string that already has * the "ID:" prefix, but follows it with an encoding prefix, in this case * the {@link AmqpMessageIdHelper#AMQP_ULONG_PREFIX}. 
*/ @Test public void testToMessageIdStringWithStringBeginningWithIdAndEncodingPrefixForUlong() { String unescapedUlongPrefixMessageId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_ULONG_PREFIX + "42"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + unescapedUlongPrefixMessageId; doToMessageIdTestImpl(unescapedUlongPrefixMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating an escaped string, when given an input string that already has * the "ID:" prefix, but follows it with an encoding prefix, in this case * the {@link AmqpMessageIdHelper#AMQP_BINARY_PREFIX}. */ @Test public void testToMessageIdStringWithStringBeginningWithIdAndEncodingPrefixForBinary() { String unescapedBinaryPrefixMessageId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_BINARY_PREFIX + "ABCDEF"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + unescapedBinaryPrefixMessageId; doToMessageIdTestImpl(unescapedBinaryPrefixMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toMessageIdString(Object)} returns a string * indicating an escaped string, when given an input string that already has * the "ID:" prefix, but follows it with an encoding prefix, in this case * the {@link AmqpMessageIdHelper#AMQP_NO_PREFIX}. 
*/ @Test public void testToMessageIdStringWithStringBeginningWithIdAndEncodingPrefixForNoIDPrefix() { String unescapedNoPrefixPrefixedMessageId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_NO_PREFIX + "id-content"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + unescapedNoPrefixPrefixedMessageId; doToMessageIdTestImpl(unescapedNoPrefixPrefixedMessageId, expected); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns null if given null */ @Test public void testToCorrelationIdStringWithNull() { assertNull("null string should have been returned", AmqpMessageIdHelper.toCorrelationIdString(null)); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} throws an IAE if given an unexpected object type. */ @Test public void testToCorrelationIdStringThrowsIAEWithUnexpectedType() { try { AmqpMessageIdHelper.toCorrelationIdString(new Object()); fail("expected exception not thrown"); } catch (IllegalArgumentException iae) { // expected } } private void doToCorrelationIDTestImpl(Object idObject, String expected) { String idString = AmqpMessageIdHelper.toCorrelationIdString(idObject); assertNotNull("null string should not have been returned", idString); assertEquals("expected id string was not returned", expected, idString); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns the given * basic string unchanged when it has the "ID:" prefix (but no others). 
*/ @Test public void testToCorrelationIdStringWithString() { String stringId = "ID:myCorrelationIdString"; doToCorrelationIDTestImpl(stringId, stringId); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns the given * basic string unchanged when it lacks the "ID:" prefix (and any others) */ @Test public void testToCorrelationIdStringWithStringNoPrefix() { String stringNoId = "myCorrelationIdString"; doToCorrelationIDTestImpl(stringNoId, stringNoId); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * unchanged when it lacks the "ID:" prefix but happens to already begin with * the {@link AmqpMessageIdHelper#AMQP_UUID_PREFIX}. */ @Test public void testToCorrelationIdStringWithStringBeginningWithEncodingPrefixForUUID() { String uuidPrefixStringCorrelationId = AmqpMessageIdHelper.AMQP_UUID_PREFIX + UUID.randomUUID(); doToCorrelationIDTestImpl(uuidPrefixStringCorrelationId, uuidPrefixStringCorrelationId); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * unchanged when it lacks the "ID:" prefix but happens to already begin with * the {@link AmqpMessageIdHelper#AMQP_ULONG_PREFIX}. */ @Test public void testToCorrelationIdStringWithStringBeginningWithEncodingPrefixForLong() { String ulongPrefixStringCorrelationId = AmqpMessageIdHelper.AMQP_ULONG_PREFIX + Long.valueOf(123456789L); doToCorrelationIDTestImpl(ulongPrefixStringCorrelationId, ulongPrefixStringCorrelationId); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * unchanged when it lacks the "ID:" prefix but happens to already begin with * the {@link AmqpMessageIdHelper#AMQP_BINARY_PREFIX}. 
*/ @Test public void testToCorrelationIdStringWithStringBeginningWithEncodingPrefixForBinary() { String binaryPrefixStringCorrelationId = AmqpMessageIdHelper.AMQP_BINARY_PREFIX + "0123456789ABCDEF"; doToCorrelationIDTestImpl(binaryPrefixStringCorrelationId, binaryPrefixStringCorrelationId); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * unchanged when it lacks the "ID:" prefix but happens to already begin with * the {@link AmqpMessageIdHelper#AMQP_STRING_PREFIX}. */ @Test public void testToCorrelationIdStringWithStringBeginningWithEncodingPrefixForString() { String stringPrefixCorrelationId = AmqpMessageIdHelper.AMQP_STRING_PREFIX + "myStringId"; doToCorrelationIDTestImpl(stringPrefixCorrelationId, stringPrefixCorrelationId); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * unchanged when it lacks the "ID:" prefix but happens to already begin with * the {@link AmqpMessageIdHelper#AMQP_NO_PREFIX}. */ @Test public void testToCorrelationIdStringWithStringBeginningWithEncodingPrefixForNoIdPrefix() { String noPrefixStringCorrelationId = AmqpMessageIdHelper.AMQP_NO_PREFIX + "myStringId"; doToCorrelationIDTestImpl(noPrefixStringCorrelationId, noPrefixStringCorrelationId); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * indicating an AMQP encoded UUID when given a UUID object. */ @Test public void testToCorrelationIdStringWithUUID() { UUID uuidCorrelationId = UUID.randomUUID(); String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_UUID_PREFIX + uuidCorrelationId.toString(); doToCorrelationIDTestImpl(uuidCorrelationId, expected); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * indicating an AMQP encoded ulong when given a UnsignedLong object. 
*/ @Test public void testToCorrelationIdStringWithUnsignedLong() { UnsignedLong uLongCorrelationId = UnsignedLong.valueOf(123456789L); String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_ULONG_PREFIX + uLongCorrelationId.toString(); doToCorrelationIDTestImpl(uLongCorrelationId, expected); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * indicating an AMQP encoded binary when given a Binary object. */ @Test public void testToCorrelationIdStringWithBinary() { byte[] bytes = new byte[] { (byte) 0x00, (byte) 0xAB, (byte) 0x09, (byte) 0xFF }; Binary binary = new Binary(bytes); String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_BINARY_PREFIX + "00AB09FF"; doToCorrelationIDTestImpl(binary, expected); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * indicating an escaped string, when given an input string that already has * the "ID:" prefix, but follows it with an encoding prefix, in this case * the {@link AmqpMessageIdHelper#AMQP_STRING_PREFIX}. */ @Test public void testToCorrelationIdStringWithStringBeginningWithIdAndEncodingPrefixForString() { String unescapedStringPrefixCorrelationId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + "id-content"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + unescapedStringPrefixCorrelationId; doToCorrelationIDTestImpl(unescapedStringPrefixCorrelationId, expected); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * indicating an escaped string, when given an input string that already has * the "ID:" prefix, but follows it with an encoding prefix, in this case * the {@link AmqpMessageIdHelper#AMQP_UUID_PREFIX}. 
*/ @Test public void testToCorrelationIdStringWithStringBeginningWithIdAndEncodingPrefixForUUID() { String unescapedUuidPrefixCorrelationId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_UUID_PREFIX + UUID.randomUUID(); String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + unescapedUuidPrefixCorrelationId; doToCorrelationIDTestImpl(unescapedUuidPrefixCorrelationId, expected); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * indicating an escaped string, when given an input string that already has * the "ID:" prefix, but follows it with an encoding prefix, in this case * the {@link AmqpMessageIdHelper#AMQP_ULONG_PREFIX}. */ @Test public void testToCorrelationIdStringWithStringBeginningWithIdAndEncodingPrefixForUlong() { String unescapedUlongPrefixCorrelationId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_ULONG_PREFIX + "42"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + unescapedUlongPrefixCorrelationId; doToCorrelationIDTestImpl(unescapedUlongPrefixCorrelationId, expected); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * indicating an escaped string, when given an input string that already has * the "ID:" prefix, but follows it with an encoding prefix, in this case * the {@link AmqpMessageIdHelper#AMQP_BINARY_PREFIX}. 
*/ @Test public void testToCorrelationIdStringWithStringBeginningWithIdAndEncodingPrefixForBinary() { String unescapedBinaryPrefixCorrelationId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_BINARY_PREFIX + "ABCDEF"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + unescapedBinaryPrefixCorrelationId; doToCorrelationIDTestImpl(unescapedBinaryPrefixCorrelationId, expected); } /** * Test that {@link AmqpMessageIdHelper#toCorrelationIdString(Object)} returns a string * indicating an escaped string, when given an input string that already has * the "ID:" prefix, but follows it with an encoding prefix, in this case * the {@link AmqpMessageIdHelper#AMQP_NO_PREFIX}. */ @Test public void testToCorrelationIdStringWithStringBeginningWithIdAndEncodingPrefixForNoIDPrefix() { String unescapedNoPrefixCorrelationId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_NO_PREFIX + "id-content"; String expected = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + unescapedNoPrefixCorrelationId; doToCorrelationIDTestImpl(unescapedNoPrefixCorrelationId, expected); } private void doToIdObjectTestImpl(String idString, Object expected) throws IdConversionException { Object idObject = AmqpMessageIdHelper.toIdObject(idString); assertNotNull("null object should not have been returned", idObject); assertEquals("expected id object was not returned", expected, idObject); } /** * Test that {@link AmqpMessageIdHelper#toIdObject(String)} returns an * UnsignedLong when given a string indicating an encoded AMQP ulong id. * * @throws Exception if an error occurs during the test. 
*/ @Test public void testToIdObjectWithEncodedUlong() throws Exception { UnsignedLong longId = UnsignedLong.valueOf(123456789L); String provided = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_ULONG_PREFIX + "123456789"; doToIdObjectTestImpl(provided, longId); } /** * Test that {@link AmqpMessageIdHelper#toIdObject(String)} returns a Binary * when given a string indicating an encoded AMQP binary id, using upper case hex characters * * @throws Exception if an error occurs during the test. */ @Test public void testToIdObjectWithEncodedBinaryUppercaseHexString() throws Exception { byte[] bytes = new byte[] { (byte) 0x00, (byte) 0xAB, (byte) 0x09, (byte) 0xFF }; Binary binaryId = new Binary(bytes); String provided = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_BINARY_PREFIX + "00AB09FF"; doToIdObjectTestImpl(provided, binaryId); } /** * Test that {@link AmqpMessageIdHelper#toIdObject(String)} returns null * when given null. * * @throws Exception if an error occurs during the test. */ @Test public void testToIdObjectWithNull() throws Exception { assertNull("null object should have been returned", AmqpMessageIdHelper.toIdObject(null)); } /** * Test that {@link AmqpMessageIdHelper#toIdObject(String)} returns a Binary * when given a string indicating an encoded AMQP binary id, using lower case hex characters. * * @throws Exception if an error occurs during the test. */ @Test public void testToIdObjectWithEncodedBinaryLowercaseHexString() throws Exception { byte[] bytes = new byte[] { (byte) 0x00, (byte) 0xAB, (byte) 0x09, (byte) 0xFF }; Binary binaryId = new Binary(bytes); String provided = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_BINARY_PREFIX + "00ab09ff"; doToIdObjectTestImpl(provided, binaryId); } /** * Test that {@link AmqpMessageIdHelper#toIdObject(String)} returns a UUID * when given a string indicating an encoded AMQP uuid id. * * @throws Exception if an error occurs during the test. 
*/ @Test public void testToIdObjectWithEncodedUuid() throws Exception { UUID uuid = UUID.randomUUID(); String provided = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_UUID_PREFIX + uuid.toString(); doToIdObjectTestImpl(provided, uuid); } /** * Test that {@link AmqpMessageIdHelper#toIdObject(String)} returns a string * unchanged when given a string without any prefix. * * @throws Exception if an error occurs during the test. */ @Test public void testToIdObjectWithAppSpecificString() throws Exception { String stringId = "myStringId"; doToIdObjectTestImpl(stringId, stringId); } /** * Test that {@link AmqpMessageIdHelper#toIdObject(String)} returns a string * unchanged when given a string with only the 'ID:' prefix. * * @throws Exception if an error occurs during the test. */ @Test public void testToIdObjectWithSimplIdString() throws Exception { String stringId = "ID:myStringId"; doToIdObjectTestImpl(stringId, stringId); } /** * Test that {@link AmqpMessageIdHelper#toIdObject(String)} returns the remainder of the * provided string after removing the 'ID:' and {@link AmqpMessageIdHelper#AMQP_NO_PREFIX} * prefix used to indicate it originally had no 'ID:' prefix [when arriving as a message id]. * * @throws Exception if an error occurs during the test. */ @Test public void testToIdObjectWithStringContainingEncodingPrefixForNoIdPrefix() throws Exception { String suffix = "myStringSuffix"; String stringId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_NO_PREFIX + suffix; doToIdObjectTestImpl(stringId, suffix); } /** * Test that {@link AmqpMessageIdHelper#toIdObject(String)} returns the remainder of the * provided string after removing the {@link AmqpMessageIdHelper#AMQP_STRING_PREFIX} prefix. * * @throws Exception if an error occurs during the test. 
*/ @Test public void testToIdObjectWithStringContainingIdStringEncodingPrefix() throws Exception { String suffix = "myStringSuffix"; String stringId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + suffix; doToIdObjectTestImpl(stringId, suffix); } /** * Test that when given a string with with the {@link AmqpMessageIdHelper#AMQP_STRING_PREFIX} prefix * and then additionally the {@link AmqpMessageIdHelper#AMQP_UUID_PREFIX}, the * {@link AmqpMessageIdHelper#toIdObject(String)} method returns the remainder of the provided string * after removing the {@link AmqpMessageIdHelper#AMQP_STRING_PREFIX} prefix. * * @throws Exception if an error occurs during the test. */ @Test public void testToIdObjectWithStringContainingIdStringEncodingPrefixAndThenUuidPrefix() throws Exception { String encodedUuidString = AmqpMessageIdHelper.AMQP_UUID_PREFIX + UUID.randomUUID().toString(); String stringId = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_STRING_PREFIX + encodedUuidString; doToIdObjectTestImpl(stringId, encodedUuidString); } /** * Test that {@link AmqpMessageIdHelper#toIdObject(String)} throws an * {@link IdConversionException} when presented with an encoded binary hex string * of uneven length (after the prefix) that thus can't be converted due to each * byte using 2 characters */ @Test public void testToIdObjectWithStringContainingBinaryHexThrowsICEWithUnevenLengthString() { String unevenHead = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_BINARY_PREFIX + "123"; try { AmqpMessageIdHelper.toIdObject(unevenHead); fail("expected exception was not thrown"); } catch (IdConversionException iae) { // expected String msg = iae.getCause().getMessage(); assertTrue("Message was not as expected: " + msg, msg.contains("even length")); } } /** * Test that {@link AmqpMessageIdHelper#toIdObject(String)} throws an * {@link IdConversionException} when presented with an encoded binary hex * string (after the prefix) that contains 
characters other than 0-9 * and A-F and a-f, and thus can't be converted */ @Test public void testToIdObjectWithStringContainingBinaryHexThrowsICEWithNonHexCharacters() { // char before '0' char nonHexChar = '/'; String nonHexString = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_BINARY_PREFIX + nonHexChar + nonHexChar; try { AmqpMessageIdHelper.toIdObject(nonHexString); fail("expected exception was not thrown"); } catch (IdConversionException ice) { // expected String msg = ice.getCause().getMessage(); assertTrue("Message was not as expected: " + msg, msg.contains("non-hex")); } // char after '9', before 'A' nonHexChar = ':'; nonHexString = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_BINARY_PREFIX + nonHexChar + nonHexChar; try { AmqpMessageIdHelper.toIdObject(nonHexString); fail("expected exception was not thrown"); } catch (IdConversionException ice) { // expected String msg = ice.getCause().getMessage(); assertTrue("Message was not as expected: " + msg, msg.contains("non-hex")); } // char after 'F', before 'a' nonHexChar = 'G'; nonHexString = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_BINARY_PREFIX + nonHexChar + nonHexChar; try { AmqpMessageIdHelper.toIdObject(nonHexString); fail("expected exception was not thrown"); } catch (IdConversionException ice) { // expected String msg = ice.getCause().getMessage(); assertTrue("Message was not as expected: " + msg, msg.contains("non-hex")); } // char after 'f' nonHexChar = 'g'; nonHexString = AmqpMessageIdHelper.JMS_ID_PREFIX + AmqpMessageIdHelper.AMQP_BINARY_PREFIX + nonHexChar + nonHexChar; try { AmqpMessageIdHelper.toIdObject(nonHexString); fail("expected exception was not thrown"); } catch (IdConversionException ice) { // expected String msg = ice.getCause().getMessage(); assertTrue("Message was not as expected: " + msg, msg.contains("non-hex")); } } }
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.content.browser; import android.content.Context; import android.os.Bundle; import android.os.IBinder; import android.os.ParcelFileDescriptor; import org.chromium.base.ContextUtils; import org.chromium.base.CpuFeatures; import org.chromium.base.Log; import org.chromium.base.VisibleForTesting; import org.chromium.base.annotations.CalledByNative; import org.chromium.base.annotations.JNINamespace; import org.chromium.base.library_loader.Linker; import org.chromium.base.process_launcher.ChildProcessCreationParams; import org.chromium.base.process_launcher.FileDescriptorInfo; import org.chromium.content.app.ChromiumLinkerParams; import org.chromium.content.common.ContentSwitches; import java.io.IOException; /** * This is the java counterpart to ChildProcessLauncherHelper. It is owned by native side and * has an explicit destroy method. * Each public or jni methods should have explicit documentation on what threads they are called. */ @JNINamespace("content::internal") public class ChildProcessLauncherHelper { private static final String TAG = "ChildProcLH"; // Represents an invalid process handle; same as base/process/process.h kNullProcessHandle. private static final int NULL_PROCESS_HANDLE = 0; // The IBinder provided to the created service. private final IBinder mIBinderCallback; // Note native pointer is only guaranteed live until nativeOnChildProcessStarted. private long mNativeChildProcessLauncherHelper; // The actual service connection. Set once we have connected to the service. 
private ChildProcessConnection mChildProcessConnection; @CalledByNative private static FileDescriptorInfo makeFdInfo( int id, int fd, boolean autoClose, long offset, long size) { assert LauncherThread.runningOnLauncherThread(); ParcelFileDescriptor pFd; if (autoClose) { // Adopt the FD, it will be closed when we close the ParcelFileDescriptor. pFd = ParcelFileDescriptor.adoptFd(fd); } else { try { pFd = ParcelFileDescriptor.fromFd(fd); } catch (IOException e) { Log.e(TAG, "Invalid FD provided for process connection, aborting connection.", e); return null; } } return new FileDescriptorInfo(id, pFd, offset, size); } @VisibleForTesting @CalledByNative public static ChildProcessLauncherHelper createAndStart(long nativePointer, int paramId, final String[] commandLine, FileDescriptorInfo[] filesToBeMapped) { assert LauncherThread.runningOnLauncherThread(); String processType = ContentSwitches.getSwitchValue(commandLine, ContentSwitches.SWITCH_PROCESS_TYPE); ChildProcessCreationParams params = ChildProcessCreationParams.get(paramId); if (paramId != ChildProcessCreationParams.DEFAULT_ID && params == null) { throw new RuntimeException("CreationParams id " + paramId + " not found"); } Context context = ContextUtils.getApplicationContext(); boolean sandboxed = true; boolean alwaysInForeground = false; if (!ContentSwitches.SWITCH_RENDERER_PROCESS.equals(processType)) { if (ContentSwitches.SWITCH_GPU_PROCESS.equals(processType)) { sandboxed = false; alwaysInForeground = true; } else { // We only support sandboxed utility processes now. 
assert ContentSwitches.SWITCH_UTILITY_PROCESS.equals(processType); } } ChildProcessLauncherHelper process_launcher = new ChildProcessLauncherHelper(nativePointer, processType); process_launcher.start( context, commandLine, filesToBeMapped, params, sandboxed, alwaysInForeground); return process_launcher; } private ChildProcessLauncherHelper(long nativePointer, String processType) { assert LauncherThread.runningOnLauncherThread(); mNativeChildProcessLauncherHelper = nativePointer; mIBinderCallback = ContentSwitches.SWITCH_GPU_PROCESS.equals(processType) ? new GpuProcessCallback() : null; initLinker(); } private void start(Context context, String[] commandLine, final FileDescriptorInfo[] filesToBeMapped, ChildProcessCreationParams params, boolean sandboxed, boolean alwaysInForeground) { boolean bindToCallerCheck = params == null ? false : params.getBindToCallerCheck(); Bundle serviceBundle = createServiceBundle(bindToCallerCheck); onBeforeConnectionAllocated(serviceBundle); Bundle connectionBundle = createConnectionBundle(commandLine, filesToBeMapped); ChildProcessLauncher.start(context, serviceBundle, connectionBundle, new ChildProcessLauncher.LaunchCallback() { @Override public void onChildProcessStarted(ChildProcessConnection connection) { mChildProcessConnection = connection; // Proactively close the FDs rather than waiting for the GC to do it. try { for (FileDescriptorInfo fileInfo : filesToBeMapped) { fileInfo.fd.close(); } } catch (IOException ioe) { Log.w(TAG, "Failed to close FD.", ioe); } if (mNativeChildProcessLauncherHelper != 0) { nativeOnChildProcessStarted( mNativeChildProcessLauncherHelper, getPid()); } mNativeChildProcessLauncherHelper = 0; } }, getIBinderCallback(), sandboxed, alwaysInForeground, params); } private int getPid() { return mChildProcessConnection == null ? NULL_PROCESS_HANDLE : mChildProcessConnection.getPid(); } // Called on client (UI or IO) thread. 
@CalledByNative private boolean isOomProtected() { // mChildProcessConnection is set on a different thread but does not change once it's been // set. So it is safe to test whether it's null from a different thread. if (mChildProcessConnection == null) { return false; } // We consider the process to be child protected if it has a strong or moderate binding and // the app is in the foreground. return ChildProcessLauncher.isApplicationInForeground() && !mChildProcessConnection.isWaivedBoundOnlyOrWasWhenDied(); } @CalledByNative private void setInForeground(int pid, boolean foreground, boolean boostForPendingViews) { assert LauncherThread.runningOnLauncherThread(); assert mChildProcessConnection != null; assert getPid() == pid; ChildProcessLauncher.getBindingManager().setPriority(pid, foreground, boostForPendingViews); } @CalledByNative private static void stop(int pid) { assert LauncherThread.runningOnLauncherThread(); ChildProcessLauncher.stop(pid); } // Called on UI thread. @CalledByNative private static int getNumberOfRendererSlots() { final ChildProcessCreationParams params = ChildProcessCreationParams.getDefault(); final Context context = ContextUtils.getApplicationContext(); final String packageName = ChildProcessLauncher.getPackageNameFromCreationParams( context, params, true /* inSandbox */); try { return ChildProcessLauncher.getNumberOfSandboxedServices(context, packageName); } catch (RuntimeException e) { // Unittest packages do not declare services. Some tests require a realistic number // to test child process policies, so pick a high-ish number here. return 65535; } } // Can be called on a number of threads, including launcher, and binder. 
private static native void nativeOnChildProcessStarted( long nativeChildProcessLauncherHelper, int pid); private static boolean sLinkerInitialized; private static long sLinkerLoadAddress; @VisibleForTesting static void initLinker() { assert LauncherThread.runningOnLauncherThread(); if (sLinkerInitialized) return; if (Linker.isUsed()) { sLinkerLoadAddress = Linker.getInstance().getBaseLoadAddress(); if (sLinkerLoadAddress == 0) { Log.i(TAG, "Shared RELRO support disabled!"); } } sLinkerInitialized = true; } private static ChromiumLinkerParams getLinkerParamsForNewConnection() { assert LauncherThread.runningOnLauncherThread(); assert sLinkerInitialized; if (sLinkerLoadAddress == 0) return null; // Always wait for the shared RELROs in service processes. final boolean waitForSharedRelros = true; if (Linker.areTestsEnabled()) { Linker linker = Linker.getInstance(); return new ChromiumLinkerParams(sLinkerLoadAddress, waitForSharedRelros, linker.getTestRunnerClassNameForTesting(), linker.getImplementationForTesting()); } else { return new ChromiumLinkerParams(sLinkerLoadAddress, waitForSharedRelros); } } /** * Creates the common bundle to be passed to child processes through the service binding intent. * If the service gets recreated by the framework the intent will be reused, so these parameters * should be common to all processes of that type. * * @param commandLine Command line params to be passed to the service. * @param linkerParams Linker params to start the service. */ // TODO(jcivelli): make private once warmup connection code is move from ChildProcessLauncher to // this class and remove initLinker call. 
/**
 * Builds the per-service-type bundle placed in the binding intent: the bind-to-caller
 * flag plus the linker parameters. Triggers linker initialization as a side effect
 * (see TODO above about removing that call).
 */
static Bundle createServiceBundle(boolean bindToCallerCheck) {
    initLinker();
    Bundle bundle = new Bundle();
    bundle.putBoolean(ChildProcessConstants.EXTRA_BIND_TO_CALLER, bindToCallerCheck);
    bundle.putParcelable(
            ChildProcessConstants.EXTRA_LINKER_PARAMS, getLinkerParamsForNewConnection());
    return bundle;
}

/**
 * Builds the per-connection bundle sent on connection setup: the command line, the
 * file descriptors to map into the child, CPU info, and the shared RELRO region.
 * Requires initLinker() to have run (asserted) so getSharedRelros() is meaningful.
 */
@VisibleForTesting
public static Bundle createConnectionBundle(
        String[] commandLine, FileDescriptorInfo[] filesToBeMapped) {
    assert sLinkerInitialized;

    Bundle bundle = new Bundle();
    bundle.putStringArray(ChildProcessConstants.EXTRA_COMMAND_LINE, commandLine);
    bundle.putParcelableArray(ChildProcessConstants.EXTRA_FILES, filesToBeMapped);
    // content specific parameters.
    bundle.putInt(ChildProcessConstants.EXTRA_CPU_COUNT, CpuFeatures.getCount());
    bundle.putLong(ChildProcessConstants.EXTRA_CPU_FEATURES, CpuFeatures.getMask());
    bundle.putBundle(Linker.EXTRA_LINKER_SHARED_RELROS, Linker.getInstance().getSharedRelros());
    return bundle;
}

// Below are methods that will eventually be moved to a content delegate class.

// Hook invoked before a connection is allocated; currently a no-op placeholder.
private void onBeforeConnectionAllocated(Bundle commonParameters) {
    // TODO(jcivelli): move createServiceBundle in there.
}

// Accessor for the IBinder callback handed to the child on setup.
private IBinder getIBinderCallback() {
    return mIBinderCallback;
}

// Testing only related methods.
/**
 * Test-only factory: constructs a helper and starts the child process immediately
 * using the application context. Mirrors the production start path but is callable
 * directly from tests.
 */
@VisibleForTesting
public static ChildProcessLauncherHelper createAndStartForTesting(long nativePointer,
        String[] commandLine, FileDescriptorInfo[] filesToBeMapped,
        ChildProcessCreationParams creationParams, boolean sandboxed,
        boolean alwaysInForeground) {
    String processType =
            ContentSwitches.getSwitchValue(commandLine, ContentSwitches.SWITCH_PROCESS_TYPE);
    ChildProcessLauncherHelper launcherHelper =
            new ChildProcessLauncherHelper(nativePointer, processType);
    launcherHelper.start(ContextUtils.getApplicationContext(), commandLine, filesToBeMapped,
            creationParams, sandboxed, alwaysInForeground);
    return launcherHelper;
}

// Test-only accessor for the underlying connection.
@VisibleForTesting
public ChildProcessConnection getChildProcessConnection() {
    return mChildProcessConnection;
}
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.highlight; import org.apache.lucene.search.Query; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.core.TextFieldMapper; import 
org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.highlight.HighlightBuilder.Field; import org.elasticsearch.search.highlight.HighlightBuilder.Order; import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.function.BiConsumer; import java.util.function.Function; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; public class HighlightBuilderTests extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; private static NamedWriteableRegistry namedWriteableRegistry; private static IndicesQueriesRegistry indicesQueriesRegistry; /** * setup for the whole base test class */ @BeforeClass public static void init() { namedWriteableRegistry = new NamedWriteableRegistry(); indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).buildQueryParserRegistry(); } @AfterClass public static void afterClass() throws Exception { namedWriteableRegistry = null; indicesQueriesRegistry = null; } /** * Test serialization and deserialization of the highlighter builder */ public void testSerialization() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { HighlightBuilder original = 
randomHighlighterBuilder(); HighlightBuilder deserialized = serializedCopy(original); assertEquals(deserialized, original); assertEquals(deserialized.hashCode(), original.hashCode()); assertNotSame(deserialized, original); } } /** * Test equality and hashCode properties */ public void testEqualsAndHashcode() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { HighlightBuilder firstBuilder = randomHighlighterBuilder(); assertFalse("highlighter is equal to null", firstBuilder.equals(null)); assertFalse("highlighter is equal to incompatible type", firstBuilder.equals("")); assertTrue("highlighter is not equal to self", firstBuilder.equals(firstBuilder)); assertThat("same highlighter's hashcode returns different values if called multiple times", firstBuilder.hashCode(), equalTo(firstBuilder.hashCode())); assertThat("different highlighters should not be equal", mutate(firstBuilder), not(equalTo(firstBuilder))); HighlightBuilder secondBuilder = serializedCopy(firstBuilder); assertTrue("highlighter is not equal to self", secondBuilder.equals(secondBuilder)); assertTrue("highlighter is not equal to its copy", firstBuilder.equals(secondBuilder)); assertTrue("equals is not symmetric", secondBuilder.equals(firstBuilder)); assertThat("highlighter copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(firstBuilder.hashCode())); HighlightBuilder thirdBuilder = serializedCopy(secondBuilder); assertTrue("highlighter is not equal to self", thirdBuilder.equals(thirdBuilder)); assertTrue("highlighter is not equal to its copy", secondBuilder.equals(thirdBuilder)); assertThat("highlighter copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); assertTrue("equals is not transitive", firstBuilder.equals(thirdBuilder)); assertThat("highlighter copy's hashcode is different from original hashcode", firstBuilder.hashCode(), equalTo(thirdBuilder.hashCode())); 
assertTrue("equals is not symmetric", thirdBuilder.equals(secondBuilder)); assertTrue("equals is not symmetric", thirdBuilder.equals(firstBuilder)); } } /** * creates random highlighter, renders it to xContent and back to new instance that should be equal to original */ public void testFromXContent() throws IOException { QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { HighlightBuilder highlightBuilder = randomHighlighterBuilder(); XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { builder.prettyPrint(); } highlightBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentParser parser = XContentHelper.createParser(builder.bytes()); context.reset(parser); parser.nextToken(); HighlightBuilder secondHighlightBuilder; try { secondHighlightBuilder = HighlightBuilder.fromXContent(context); } catch (RuntimeException e) { throw new RuntimeException("Error parsing " + highlightBuilder, e); } assertNotSame(highlightBuilder, secondHighlightBuilder); assertEquals(highlightBuilder, secondHighlightBuilder); assertEquals(highlightBuilder.hashCode(), secondHighlightBuilder.hashCode()); } } /** * test that unknown array fields cause exception */ public void testUnknownArrayNameExpection() throws IOException { { IllegalArgumentException e = expectParseThrows(IllegalArgumentException.class, "{\n" + " \"bad_fieldname\" : [ \"field1\" 1 \"field2\" ]\n" + "}\n"); assertEquals("[highlight] unknown field [bad_fieldname], parser not found", e.getMessage()); } { ParsingException e = expectParseThrows(ParsingException.class, "{\n" + " \"fields\" : {\n" + " \"body\" : {\n" + " \"bad_fieldname\" : [ \"field1\" , \"field2\" ]\n" + " }\n" + " }\n" + "}\n"); assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); assertEquals("[fields] failed to parse field 
[body]", e.getCause().getMessage()); assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); } } private <T extends Throwable> T expectParseThrows(Class<T> exceptionClass, String highlightElement) throws IOException { XContentParser parser = XContentFactory.xContent(highlightElement).createParser(highlightElement); QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); context.reset(parser); return expectThrows(exceptionClass, () -> HighlightBuilder.fromXContent(context)); } /** * test that unknown field name cause exception */ public void testUnknownFieldnameExpection() throws IOException { { IllegalArgumentException e = expectParseThrows(IllegalArgumentException.class, "{\n" + " \"bad_fieldname\" : \"value\"\n" + "}\n"); assertEquals("[highlight] unknown field [bad_fieldname], parser not found", e.getMessage()); } { ParsingException e = expectParseThrows(ParsingException.class, "{\n" + " \"fields\" : {\n" + " \"body\" : {\n" + " \"bad_fieldname\" : \"value\"\n" + " }\n" + " }\n" + "}\n"); assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); assertEquals("[fields] failed to parse field [body]", e.getCause().getMessage()); assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); } } /** * test that unknown field name cause exception */ public void testUnknownObjectFieldnameExpection() throws IOException { { IllegalArgumentException e = expectParseThrows(IllegalArgumentException.class, "{\n" + " \"bad_fieldname\" : { \"field\" : \"value\" }\n \n" + "}\n"); assertEquals("[highlight] unknown field [bad_fieldname], parser not found", e.getMessage()); } { ParsingException e = expectParseThrows(ParsingException.class, "{\n" + " \"fields\" : {\n" + " \"body\" : {\n" + " \"bad_fieldname\" : { \"field\" : \"value\" }\n" + " }\n" + " }\n" + "}\n"); assertEquals("[highlight] failed to parse field [fields]", 
e.getMessage()); assertEquals("[fields] failed to parse field [body]", e.getCause().getMessage()); assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); } } public void testStringInFieldsArray() throws IOException { ParsingException e = expectParseThrows(ParsingException.class, "{\"fields\" : [ \"junk\" ]}"); assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); assertEquals( "[fields] can be a single object with any number of fields or an array where each entry is an object with a single field", e.getCause().getMessage()); } public void testNoFieldsInObjectInFieldsArray() throws IOException { ParsingException e = expectParseThrows(ParsingException.class, "{\n" + " \"fields\" : [ {\n" + " }] \n" + "}\n"); assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); assertEquals( "[fields] can be a single object with any number of fields or an array where each entry is an object with a single field", e.getCause().getMessage()); } public void testTwoFieldsInObjectInFieldsArray() throws IOException { ParsingException e = expectParseThrows(ParsingException.class, "{\n" + " \"fields\" : [ {\n" + " \"body\" : {},\n" + " \"nope\" : {}\n" + " }] \n" + "}\n"); assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); assertEquals( "[fields] can be a single object with any number of fields or an array where each entry is an object with a single field", e.getCause().getMessage()); } /** * test that build() outputs a {@link SearchContextHighlight} that is has similar parameters * than what we have in the random {@link HighlightBuilder} */ public void testBuildSearchContextHighlight() throws IOException { Settings indexSettings = Settings.settingsBuilder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); Index index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_"); IndexSettings idxSettings = 
IndexSettingsModule.newIndexSettings(index, indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter QueryShardContext mockShardContext = new QueryShardContext(idxSettings, null, null, null, null, null, indicesQueriesRegistry, null, null) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); return builder.build(new Mapper.BuilderContext(idxSettings.getSettings(), new ContentPath(1))).fieldType(); } }; mockShardContext.setMapUnmappedFieldAsString(true); for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { HighlightBuilder highlightBuilder = randomHighlighterBuilder(); SearchContextHighlight highlight = highlightBuilder.build(mockShardContext); for (SearchContextHighlight.Field field : highlight.fields()) { String encoder = highlightBuilder.encoder() != null ? highlightBuilder.encoder() : HighlightBuilder.DEFAULT_ENCODER; assertEquals(encoder, field.fieldOptions().encoder()); final Field fieldBuilder = getFieldBuilderByName(highlightBuilder, field.field()); assertNotNull("expected a highlight builder for field " + field.field(), fieldBuilder); FieldOptions fieldOptions = field.fieldOptions(); BiConsumer<Function<AbstractHighlighterBuilder<?>, Object>, Function<FieldOptions, Object>> checkSame = mergeBeforeChek(highlightBuilder, fieldBuilder, fieldOptions); checkSame.accept(AbstractHighlighterBuilder::boundaryChars, FieldOptions::boundaryChars); checkSame.accept(AbstractHighlighterBuilder::boundaryMaxScan, FieldOptions::boundaryMaxScan); checkSame.accept(AbstractHighlighterBuilder::fragmentSize, FieldOptions::fragmentCharSize); checkSame.accept(AbstractHighlighterBuilder::fragmenter, FieldOptions::fragmenter); checkSame.accept(AbstractHighlighterBuilder::requireFieldMatch, FieldOptions::requireFieldMatch); checkSame.accept(AbstractHighlighterBuilder::noMatchSize, FieldOptions::noMatchSize); 
checkSame.accept(AbstractHighlighterBuilder::numOfFragments, FieldOptions::numberOfFragments); checkSame.accept(AbstractHighlighterBuilder::phraseLimit, FieldOptions::phraseLimit); checkSame.accept(AbstractHighlighterBuilder::highlighterType, FieldOptions::highlighterType); checkSame.accept(AbstractHighlighterBuilder::highlightFilter, FieldOptions::highlightFilter); checkSame.accept(AbstractHighlighterBuilder::preTags, FieldOptions::preTags); checkSame.accept(AbstractHighlighterBuilder::postTags, FieldOptions::postTags); checkSame.accept(AbstractHighlighterBuilder::options, FieldOptions::options); checkSame.accept(AbstractHighlighterBuilder::order, op -> op.scoreOrdered() ? Order.SCORE : Order.NONE); assertEquals(fieldBuilder.fragmentOffset, fieldOptions.fragmentOffset()); if (fieldBuilder.matchedFields != null) { String[] copy = Arrays.copyOf(fieldBuilder.matchedFields, fieldBuilder.matchedFields.length); Arrays.sort(copy); assertArrayEquals(copy, new TreeSet<String>(fieldOptions.matchedFields()).toArray(new String[fieldOptions.matchedFields().size()])); } else { assertNull(fieldOptions.matchedFields()); } Query expectedValue = null; if (fieldBuilder.highlightQuery != null) { expectedValue = QueryBuilder.rewriteQuery(fieldBuilder.highlightQuery, mockShardContext).toQuery(mockShardContext); } else if (highlightBuilder.highlightQuery != null) { expectedValue = QueryBuilder.rewriteQuery(highlightBuilder.highlightQuery, mockShardContext).toQuery(mockShardContext); } assertEquals(expectedValue, fieldOptions.highlightQuery()); } } } /** * Create a generic helper function that performs all the work of merging the global highlight builder parameter, * the (potential) overwrite on the field level and the default value from {@link HighlightBuilder#defaultOptions} * before making the assertion that the value in the highlight builder and the actual value in the {@link FieldOptions} * passed in is the same. 
* * @param highlightBuilder provides the (optional) global builder parameter * @param fieldBuilder provides the (optional) field level parameter, if present this overwrites the global value * @param options the target field options that are checked */ private static BiConsumer<Function<AbstractHighlighterBuilder<?>, Object>, Function<FieldOptions, Object>> mergeBeforeChek( HighlightBuilder highlightBuilder, Field fieldBuilder, FieldOptions options) { return (highlightBuilderParameterAccessor, fieldOptionsParameterAccessor) -> { Object expectedValue = null; Object globalLevelValue = highlightBuilderParameterAccessor.apply(highlightBuilder); Object fieldLevelValue = highlightBuilderParameterAccessor.apply(fieldBuilder); if (fieldLevelValue != null) { expectedValue = fieldLevelValue; } else if (globalLevelValue != null) { expectedValue = globalLevelValue; } else { expectedValue = fieldOptionsParameterAccessor.apply(HighlightBuilder.defaultOptions); } Object actualValue = fieldOptionsParameterAccessor.apply(options); if (actualValue instanceof String[]) { assertArrayEquals((String[]) expectedValue, (String[]) actualValue); } else if (actualValue instanceof Character[]) { if (expectedValue instanceof char[]) { assertArrayEquals(HighlightBuilder.convertCharArray((char[]) expectedValue), (Character[]) actualValue); } else { assertArrayEquals((Character[]) expectedValue, (Character[]) actualValue); } } else { assertEquals(expectedValue, actualValue); } }; } private static Field getFieldBuilderByName(HighlightBuilder highlightBuilder, String fieldName) { for (Field hbfield : highlightBuilder.fields()) { if (hbfield.name().equals(fieldName)) { return hbfield; } } return null; } /** * `tags_schema` is not produced by toXContent in the builder but should be parseable, so this * adds a simple json test for this. 
*/ public void testParsingTagsSchema() throws IOException { QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); String highlightElement = "{\n" + " \"tags_schema\" : \"styled\"\n" + "}\n"; XContentParser parser = XContentFactory.xContent(highlightElement).createParser(highlightElement); context.reset(parser); HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(context); assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.DEFAULT_STYLED_PRE_TAG, highlightBuilder.preTags()); assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlightBuilder.DEFAULT_STYLED_POST_TAGS, highlightBuilder.postTags()); highlightElement = "{\n" + " \"tags_schema\" : \"default\"\n" + "}\n"; parser = XContentFactory.xContent(highlightElement).createParser(highlightElement); context.reset(parser); highlightBuilder = HighlightBuilder.fromXContent(context); assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlightBuilder.DEFAULT_PRE_TAGS, highlightBuilder.preTags()); assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlightBuilder.DEFAULT_POST_TAGS, highlightBuilder.postTags()); ParsingException e = expectParseThrows(ParsingException.class, "{\n" + " \"tags_schema\" : \"somthing_else\"\n" + "}\n"); assertEquals("[highlight] failed to parse field [tags_schema]", e.getMessage()); assertEquals("Unknown tag schema [somthing_else]", e.getCause().getMessage()); } /** * test parsing empty highlight or empty fields blocks */ public void testParsingEmptyStructure() throws IOException { QueryParseContext context = new QueryParseContext(indicesQueriesRegistry); context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY)); String highlightElement = "{ }"; XContentParser parser = XContentFactory.xContent(highlightElement).createParser(highlightElement); context.reset(parser); HighlightBuilder 
highlightBuilder = HighlightBuilder.fromXContent(context); assertEquals("expected plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); highlightElement = "{ \"fields\" : { } }"; parser = XContentFactory.xContent(highlightElement).createParser(highlightElement); context.reset(parser); highlightBuilder = HighlightBuilder.fromXContent(context); assertEquals("defining no field should return plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); highlightElement = "{ \"fields\" : { \"foo\" : { } } }"; parser = XContentFactory.xContent(highlightElement).createParser(highlightElement); context.reset(parser); highlightBuilder = HighlightBuilder.fromXContent(context); assertEquals("expected HighlightBuilder with field", new HighlightBuilder().field(new Field("foo")), highlightBuilder); } public void testPreTagsWithoutPostTags() throws IOException { ParsingException e = expectParseThrows(ParsingException.class, "{\n" + " \"pre_tags\" : [\"<a>\"]\n" + "}\n"); assertEquals("pre_tags are set but post_tags are not set", e.getMessage()); e = expectParseThrows(ParsingException.class, "{\n" + " \"fields\" : {\n" + " \"body\" : {\n" + " \"pre_tags\" : [\"<a>\"]\n" + " }\n" + " }\n" + "}\n"); assertEquals("[highlight] failed to parse field [fields]", e.getMessage()); assertEquals("[fields] failed to parse field [body]", e.getCause().getMessage()); assertEquals("pre_tags are set but post_tags are not set", e.getCause().getCause().getMessage()); } /** * test ordinals of {@link Order}, since serialization depends on it */ public void testValidOrderOrdinals() { assertThat(Order.NONE.ordinal(), equalTo(0)); assertThat(Order.SCORE.ordinal(), equalTo(1)); } public void testOrderSerialization() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { Order.NONE.writeTo(out); try (StreamInput in = StreamInput.wrap(out.bytes())) { assertThat(in.readVInt(), equalTo(0)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { 
Order.SCORE.writeTo(out); try (StreamInput in = StreamInput.wrap(out.bytes())) { assertThat(in.readVInt(), equalTo(1)); } } } protected static XContentBuilder toXContent(HighlightBuilder highlight, XContentType contentType) throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(contentType); if (randomBoolean()) { builder.prettyPrint(); } highlight.toXContent(builder, ToXContent.EMPTY_PARAMS); return builder; } /** * create random highlight builder that is put under test */ public static HighlightBuilder randomHighlighterBuilder() { HighlightBuilder testHighlighter = new HighlightBuilder(); setRandomCommonOptions(testHighlighter); testHighlighter.useExplicitFieldOrder(randomBoolean()); if (randomBoolean()) { testHighlighter.encoder(randomFrom(Arrays.asList(new String[]{"default", "html"}))); } int numberOfFields = randomIntBetween(1,5); for (int i = 0; i < numberOfFields; i++) { Field field = new Field(i + "_" + randomAsciiOfLengthBetween(1, 10)); setRandomCommonOptions(field); if (randomBoolean()) { field.fragmentOffset(randomIntBetween(1, 100)); } if (randomBoolean()) { field.matchedFields(randomStringArray(0, 4)); } testHighlighter.field(field); } return testHighlighter; } @SuppressWarnings({ "rawtypes", "unchecked" }) private static void setRandomCommonOptions(AbstractHighlighterBuilder highlightBuilder) { if (randomBoolean()) { // need to set this together, otherwise parsing will complain highlightBuilder.preTags(randomStringArray(0, 3)); highlightBuilder.postTags(randomStringArray(0, 3)); } if (randomBoolean()) { highlightBuilder.fragmentSize(randomIntBetween(0, 100)); } if (randomBoolean()) { highlightBuilder.numOfFragments(randomIntBetween(0, 10)); } if (randomBoolean()) { highlightBuilder.highlighterType(randomAsciiOfLengthBetween(1, 10)); } if (randomBoolean()) { highlightBuilder.fragmenter(randomAsciiOfLengthBetween(1, 10)); } if (randomBoolean()) { QueryBuilder highlightQuery; switch (randomInt(2)) { case 0: highlightQuery = new 
MatchAllQueryBuilder(); break; case 1: highlightQuery = new IdsQueryBuilder(); break; default: case 2: highlightQuery = new TermQueryBuilder(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); break; } highlightQuery.boost((float) randomDoubleBetween(0, 10, false)); highlightBuilder.highlightQuery(highlightQuery); } if (randomBoolean()) { if (randomBoolean()) { highlightBuilder.order(randomFrom(Order.values())); } else { // also test the string setter highlightBuilder.order(randomFrom(Order.values()).toString()); } } if (randomBoolean()) { highlightBuilder.highlightFilter(randomBoolean()); } if (randomBoolean()) { highlightBuilder.forceSource(randomBoolean()); } if (randomBoolean()) { highlightBuilder.boundaryMaxScan(randomIntBetween(0, 10)); } if (randomBoolean()) { highlightBuilder.boundaryChars(randomAsciiOfLengthBetween(1, 10).toCharArray()); } if (randomBoolean()) { highlightBuilder.noMatchSize(randomIntBetween(0, 10)); } if (randomBoolean()) { highlightBuilder.phraseLimit(randomIntBetween(0, 10)); } if (randomBoolean()) { int items = randomIntBetween(0, 5); Map<String, Object> options = new HashMap<String, Object>(items); for (int i = 0; i < items; i++) { Object value = null; switch (randomInt(2)) { case 0: value = randomAsciiOfLengthBetween(1, 10); break; case 1: value = new Integer(randomInt(1000)); break; case 2: value = new Boolean(randomBoolean()); break; } options.put(randomAsciiOfLengthBetween(1, 10), value); } } if (randomBoolean()) { highlightBuilder.requireFieldMatch(randomBoolean()); } } @SuppressWarnings({ "unchecked", "rawtypes" }) private static void mutateCommonOptions(AbstractHighlighterBuilder highlightBuilder) { switch (randomIntBetween(1, 16)) { case 1: highlightBuilder.preTags(randomStringArray(4, 6)); break; case 2: highlightBuilder.postTags(randomStringArray(4, 6)); break; case 3: highlightBuilder.fragmentSize(randomIntBetween(101, 200)); break; case 4: highlightBuilder.numOfFragments(randomIntBetween(11, 20)); break; 
case 5: highlightBuilder.highlighterType(randomAsciiOfLengthBetween(11, 20)); break; case 6: highlightBuilder.fragmenter(randomAsciiOfLengthBetween(11, 20)); break; case 7: highlightBuilder.highlightQuery(new TermQueryBuilder(randomAsciiOfLengthBetween(11, 20), randomAsciiOfLengthBetween(11, 20))); break; case 8: if (highlightBuilder.order() == Order.NONE) { highlightBuilder.order(Order.SCORE); } else { highlightBuilder.order(Order.NONE); } break; case 9: highlightBuilder.highlightFilter(toggleOrSet(highlightBuilder.highlightFilter())); break; case 10: highlightBuilder.forceSource(toggleOrSet(highlightBuilder.forceSource())); break; case 11: highlightBuilder.boundaryMaxScan(randomIntBetween(11, 20)); break; case 12: highlightBuilder.boundaryChars(randomAsciiOfLengthBetween(11, 20).toCharArray()); break; case 13: highlightBuilder.noMatchSize(randomIntBetween(11, 20)); break; case 14: highlightBuilder.phraseLimit(randomIntBetween(11, 20)); break; case 15: int items = 6; Map<String, Object> options = new HashMap<String, Object>(items); for (int i = 0; i < items; i++) { options.put(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); } highlightBuilder.options(options); break; case 16: highlightBuilder.requireFieldMatch(toggleOrSet(highlightBuilder.requireFieldMatch())); break; } } private static Boolean toggleOrSet(Boolean flag) { if (flag == null) { return randomBoolean(); } else { return !flag.booleanValue(); } } /** * Create array of unique Strings. If not unique, e.g. 
duplicates field names * would be dropped in {@link FieldOptions.Builder#matchedFields(Set)}, resulting in test glitches */ private static String[] randomStringArray(int minSize, int maxSize) { int size = randomIntBetween(minSize, maxSize); Set<String> randomStrings = new HashSet<String>(size); for (int f = 0; f < size; f++) { randomStrings.add(randomAsciiOfLengthBetween(3, 10)); } return randomStrings.toArray(new String[randomStrings.size()]); } /** * mutate the given highlighter builder so the returned one is different in one aspect */ private static HighlightBuilder mutate(HighlightBuilder original) throws IOException { HighlightBuilder mutation = serializedCopy(original); if (randomBoolean()) { mutateCommonOptions(mutation); } else { switch (randomIntBetween(0, 2)) { // change settings that only exists on top level case 0: mutation.useExplicitFieldOrder(!original.useExplicitFieldOrder()); break; case 1: mutation.encoder(original.encoder() + randomAsciiOfLength(2)); break; case 2: if (randomBoolean()) { // add another field mutation.field(new Field(randomAsciiOfLength(10))); } else { // change existing fields List<Field> originalFields = original.fields(); Field fieldToChange = originalFields.get(randomInt(originalFields.size() - 1)); if (randomBoolean()) { fieldToChange.fragmentOffset(randomIntBetween(101, 200)); } else { fieldToChange.matchedFields(randomStringArray(5, 10)); } } break; } } return mutation; } private static HighlightBuilder serializedCopy(HighlightBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { return HighlightBuilder.PROTOTYPE.readFrom(in); } } } }
/* * This file is part of the Jikes RVM project (http://jikesrvm.org). * * This file is licensed to You under the Common Public License (CPL); * You may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.opensource.org/licenses/cpl1.0.php * * See the COPYRIGHT.txt file distributed with this work for information * regarding copyright ownership. */ package org.jikesrvm.compilers.opt.ssa; import static org.jikesrvm.compilers.opt.driver.OptConstants.SSA_SYNTH_BCI; import static org.jikesrvm.compilers.opt.ir.Operators.GUARD_MOVE; import static org.jikesrvm.compilers.opt.ir.Operators.PHI; import java.lang.reflect.Constructor; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.Stack; import org.jikesrvm.VM; import org.jikesrvm.classloader.TypeReference; import org.jikesrvm.compilers.opt.DefUse; import org.jikesrvm.compilers.opt.OptOptions; import org.jikesrvm.compilers.opt.OptimizingCompilerException; import org.jikesrvm.compilers.opt.controlflow.BranchOptimizations; import org.jikesrvm.compilers.opt.controlflow.DominatorTree; import org.jikesrvm.compilers.opt.controlflow.DominatorTreeNode; import org.jikesrvm.compilers.opt.controlflow.LTDominators; import org.jikesrvm.compilers.opt.driver.CompilerPhase; import org.jikesrvm.compilers.opt.ir.BasicBlock; import org.jikesrvm.compilers.opt.ir.IR; import org.jikesrvm.compilers.opt.ir.IRTools; import org.jikesrvm.compilers.opt.ir.Instruction; import org.jikesrvm.compilers.opt.ir.InstructionEnumeration; import org.jikesrvm.compilers.opt.ir.Move; import org.jikesrvm.compilers.opt.ir.OperandEnumeration; import org.jikesrvm.compilers.opt.ir.Phi; import org.jikesrvm.compilers.opt.ir.Register; import org.jikesrvm.compilers.opt.ir.RegisterOperandEnumeration; import org.jikesrvm.compilers.opt.ir.operand.ConstantOperand; import 
org.jikesrvm.compilers.opt.ir.operand.Operand; import org.jikesrvm.compilers.opt.ir.operand.RegisterOperand; import org.jikesrvm.compilers.opt.ir.operand.TrueGuardOperand; import org.jikesrvm.compilers.opt.ir.operand.UnreachableOperand; import org.jikesrvm.compilers.opt.liveness.LiveAnalysis; import org.jikesrvm.compilers.opt.liveness.LiveSet; import org.jikesrvm.compilers.opt.util.TreeNode; /** * This compiler phase translates out of SSA form. * * @see SSA * @see SSAOptions * @see LTDominators */ public class LeaveSSA extends CompilerPhase { /** * verbose debugging flag */ static final boolean DEBUG = false; // control bias between adding blocks or adding temporaries private static final boolean SplitBlockToAvoidRenaming = false; private static final boolean SplitBlockForLocalLive = true; private static final boolean SplitBlockIntoInfrequent = true; /** * The IR to manipulate */ private IR ir; private BranchOptimizations branchOpts = new BranchOptimizations(-1, true, true); private boolean splitSomeBlock = false; private final HashSet<Instruction> globalRenameTable = new HashSet<Instruction>(); private final HashSet<Register> globalRenamePhis = new HashSet<Register>(); /** * Should we perform this phase? * @param options controlling compiler options */ public final boolean shouldPerform(OptOptions options) { return options.SSA; } /** * Constructor for this compiler phase */ private static final Constructor<CompilerPhase> constructor = getCompilerPhaseConstructor(LeaveSSA.class); /** * Get a constructor object for this compiler phase * @return compiler phase constructor */ public Constructor<CompilerPhase> getClassConstructor() { return constructor; } /** * Return a string name for this phase. 
* @return "Leave SSA" */ public final String getName() { return "Leave SSA"; } /** * perform the main out-of-ssa transformation * @param ir the governing IR */ public final void perform(IR ir) { this.ir = ir; translateFromSSA(ir); // reset ir.SSADictionary ir.HIRInfo.dictionary = null; // reset ssa options ir.actualSSAOptions = null; branchOpts.perform(ir, true); ir.HIRInfo.dominatorsAreComputed = false; } /** * This class provides an abstraction over stacks of names * for registers. */ static class VariableStacks extends HashMap<Register, Stack<Operand>> { /** Support for map serialization */ static final long serialVersionUID = -5664504465082745314L; /** * Get the name at the top of the stack for a particular register * @param s the register in question * @return the name at the top of the stack for the register */ Operand peek(Register s) { Stack<Operand> stack = get(s); if (stack == null || stack.isEmpty()) { return null; } else { return stack.peek(); } } /** * Pop the name at the top of the stack for a particular register * @param s the register in question * @return the name at the top of the stack for the register */ Operand pop(Register s) { Stack<Operand> stack = get(s); if (stack == null) { throw new OptimizingCompilerException( "Failure in translating out of SSA form: trying to pop operand from non-existant stack"); } else { return stack.pop(); } } /** * Push a name at the top of the stack for a particular register * @param s the register in question * @param name the name to push on the stack */ void push(Register s, Operand name) { Stack<Operand> stack = get(s); if (stack == null) { stack = new Stack<Operand>(); put(s, stack); } stack.push(name); } } /** * An instance of this class represents a pending copy instruction * to be inserted. 
*/ static class Copy { /** * The right-hand side of the copy instruction */ final Operand source; /** * The left-hand side of the copy instruction */ final RegisterOperand destination; /** * The phi instruction which generated this copy instruction */ final Instruction phi; /** * Create a pending copy operation for an operand of a phi instruction * @param phi the phi instruction * @param index which operand of the instruction to copy */ Copy(Instruction phi, int index) { this.phi = phi; destination = Phi.getResult(phi).asRegister(); source = Phi.getValue(phi, index); } } /** * substitute variables renamed in control parents */ private void performRename(BasicBlock bb, DominatorTree dom, VariableStacks s) { if (DEBUG) VM.sysWriteln("performRename: " + bb); InstructionEnumeration e = bb.forwardRealInstrEnumerator(); while (e.hasMoreElements()) { Instruction i = e.next(); OperandEnumeration ee = i.getUses(); while (ee.hasMoreElements()) { Operand o = ee.next(); if (o instanceof RegisterOperand) { Register r1 = ((RegisterOperand) o).getRegister(); if (r1.isValidation()) continue; Operand r2 = s.peek(r1); if (r2 != null) { if (DEBUG) { VM.sysWriteln("replace operand in " + i + "(" + r2 + " for " + o); } i.replaceOperand(o, r2.copy()); } } } } // record renamings required in children e = bb.forwardRealInstrEnumerator(); while (e.hasMoreElements()) { Instruction i = e.next(); if (globalRenameTable.contains(i)) { Register original = Move.getVal(i).asRegister().getRegister(); RegisterOperand rename = Move.getResult(i); if (DEBUG) VM.sysWriteln("record rename " + rename + " for " + original); s.push(original, rename); } } // insert copies in control children Enumeration<TreeNode> children = dom.getChildren(bb); while (children.hasMoreElements()) { BasicBlock c = ((DominatorTreeNode) children.nextElement()).getBlock(); performRename(c, dom, s); } // pop renamings from this block off stack e = bb.forwardRealInstrEnumerator(); while (e.hasMoreElements()) { Instruction i = 
e.next(); if (globalRenameTable.contains(i)) { Register original = Move.getVal(i).asRegister().getRegister(); s.pop(original); } } } private boolean usedBelowCopy(BasicBlock bb, Register r) { InstructionEnumeration ie = bb.reverseRealInstrEnumerator(); while (ie.hasMoreElements()) { Instruction inst = ie.next(); if (inst.isBranch()) { OperandEnumeration oe = inst.getUses(); while (oe.hasMoreElements()) { Operand op = oe.next(); if (op.isRegister() && op.asRegister().getRegister() == r) { return true; } } } else { break; } } return false; } /** * Record pending copy operations needed to insert at the end of a basic * block. * TODO: this procedure is getting long and ugly. Rewrite or refactor * it. * @param bb the basic block to process * @param live valid liveness information for the IR */ private void scheduleCopies(BasicBlock bb, LiveAnalysis live) { if (DEBUG) VM.sysWrite("scheduleCopies: " + bb + "\n"); // compute out liveness from information in LiveAnalysis LiveSet out = new LiveSet(); for (Enumeration<BasicBlock> outBlocks = bb.getOut(); outBlocks.hasMoreElements();) { BasicBlock ob = outBlocks.nextElement(); LiveAnalysis.BBLiveElement le = live.getLiveInfo(ob); out.add(le.in()); } // usedByAnother represents the set of registers that appear on the // left-hand side of subsequent phi nodes. This is important, since // we be careful to order copies if the same register appears as the // source and dest of copies in the same basic block. HashSet<Register> usedByAnother = new HashSet<Register>(4); // for each basic block successor b of bb, if we make a block on the // critical edge bb->b, then store this critical block. HashMap<BasicBlock, BasicBlock> criticalBlocks = new HashMap<BasicBlock, BasicBlock>(4); // For each critical basic block b in which we are inserting copies: return the // mapping of registers to names implied by the copies that have // already been inserted into b. 
HashMap<BasicBlock, HashMap<Register, Register>> currentNames = new HashMap<BasicBlock, HashMap<Register, Register>>(4); // Additionally store the current names for the current basic block bb. HashMap<Register, Register> bbNames = new HashMap<Register, Register>(4); // copySet is a linked-list of copies we need to insert in this block. final LinkedList<Copy> copySet = new LinkedList<Copy>(); /* Worklist is actually used like a stack - should we make this an Stack ?? */ final LinkedList<Copy> workList = new LinkedList<Copy>(); // collect copies required in this block. These copies move // the appropriate rval into the lval of each phi node in // control children of the current block. Enumeration<BasicBlock> e = bb.getOut(); while (e.hasMoreElements()) { BasicBlock bbs = e.nextElement(); if (bbs.isExit()) continue; for (Instruction phi = bbs.firstInstruction(); phi != bbs.lastInstruction(); phi = phi.nextInstructionInCodeOrder()) { if (phi.operator() != PHI) continue; for (int index = 0; index < Phi.getNumberOfPreds(phi); index++) { if (Phi.getPred(phi, index).block != bb) continue; Operand rval = Phi.getValue(phi, index); if (rval.isRegister() && Phi.getResult(phi).asRegister().getRegister() == rval.asRegister().getRegister()) { continue; } Copy c = new Copy(phi, index); copySet.add(0, c); if (c.source instanceof RegisterOperand) { Register r = c.source.asRegister().getRegister(); usedByAnother.add(r); } } } } // the copies that need to be added to this block are processed // in a worklist that ensures that copies are inserted only // after the destination register has been read by any other copy // that needs it. // // initialize work list with all copies whose destination is not // the source for any other copy, and delete such copies from // the set of needed copies. 
for (Iterator<Copy> copySetIter = copySet.iterator(); copySetIter.hasNext();) { Copy c = copySetIter.next(); if (!usedByAnother.contains(c.destination.getRegister())) { workList.add(0, c); copySetIter.remove(); } } // while there is any more work to do. while (!workList.isEmpty() || !copySet.isEmpty()) { // while there are copies that can be correctly inserted. while (!workList.isEmpty()) { Copy c = workList.remove(0); Register r = c.destination.getRegister(); TypeReference tt = c.destination.getType(); if (VM.VerifyAssertions && tt == null) { tt = TypeReference.Int; VM.sysWrite("SSA, warning: null type in " + c.destination + "\n"); } Register rr = null; if (c.source.isRegister()) rr = c.source.asRegister().getRegister(); boolean shouldSplitBlock = !c.phi.getBasicBlock().isExceptionHandlerBasicBlock() && ((out.contains(r) && SplitBlockToAvoidRenaming) || (rr != null && usedBelowCopy(bb, rr) && SplitBlockForLocalLive)); if (SplitBlockIntoInfrequent) { if (!bb.getInfrequent() && c.phi.getBasicBlock().getInfrequent() && !c.phi.getBasicBlock().isExceptionHandlerBasicBlock()) { shouldSplitBlock = true; } } // this check captures cases when the result of a phi // in a control successor is live on exit of the current // block. this means it is incorrect to simply insert // a copy of the destination in the current block. so // we rename the destination to a new temporary, and // record the renaming so that dominator blocks get the // new name. 
if (out.contains(r) && !shouldSplitBlock) { if (!globalRenamePhis.contains(r)) { Register t = ir.regpool.getReg(r); Instruction save = SSA.makeMoveInstruction(ir, t, r, tt); if (DEBUG) { VM.sysWriteln("Inserting " + save + " before " + c.phi + " in " + c.phi.getBasicBlock()); } c.phi.insertAfter(save); globalRenamePhis.add(r); globalRenameTable.add(save); } } Instruction ci = null; // insert copy operation required to remove phi if (c.source instanceof ConstantOperand) { if (c.source instanceof UnreachableOperand) { ci = null; } else { ci = SSA.makeMoveInstruction(ir, r, (ConstantOperand) c.source); } } else if (c.source instanceof RegisterOperand) { if (shouldSplitBlock) { if (DEBUG) VM.sysWriteln("splitting edge: " + bb + "->" + c.phi.getBasicBlock()); BasicBlock criticalBlock = criticalBlocks.get(c.phi.getBasicBlock()); if (criticalBlock == null) { criticalBlock = IRTools.makeBlockOnEdge(bb, c.phi.getBasicBlock(), ir); if (c.phi.getBasicBlock().getInfrequent()) { criticalBlock.setInfrequent(); } splitSomeBlock = true; criticalBlocks.put(c.phi.getBasicBlock(), criticalBlock); HashMap<Register, Register> newNames = new HashMap<Register, Register>(4); currentNames.put(criticalBlock, newNames); } Register sr = c.source.asRegister().getRegister(); HashMap<Register, Register> criticalBlockNames = currentNames.get(criticalBlock); Register nameForSR = criticalBlockNames.get(sr); if (nameForSR == null) { nameForSR = bbNames.get(sr); if (nameForSR == null) nameForSR = sr; } if (DEBUG) VM.sysWriteln("dest(r): " + r); if (DEBUG) VM.sysWriteln("sr: " + sr + ", nameForSR: " + nameForSR); ci = SSA.makeMoveInstruction(ir, r, nameForSR, tt); criticalBlockNames.put(sr, r); criticalBlock.appendInstructionRespectingTerminalBranch(ci); } else { Register sr = c.source.asRegister().getRegister(); Register nameForSR = bbNames.get(sr); if (nameForSR == null) nameForSR = sr; if (DEBUG) VM.sysWriteln("not splitting edge: " + bb + "->" + c.phi.getBasicBlock()); if (DEBUG) 
VM.sysWriteln("dest(r): " + r); if (DEBUG) VM.sysWriteln("sr: " + sr + ", nameForSR: " + nameForSR); ci = SSA.makeMoveInstruction(ir, r, nameForSR, tt); bbNames.put(sr, r); SSA.addAtEnd(ir, bb, ci, c.phi.getBasicBlock().isExceptionHandlerBasicBlock()); } // ugly hack: having already added ci; set ci to null to skip remaining code; ci = null; } else { throw new OptimizingCompilerException("Unexpected phi operand " + c .source + " encountered during SSA teardown", true); } if (ci != null) { if (shouldSplitBlock) { if (DEBUG) VM.sysWriteln("splitting edge: " + bb + "->" + c.phi.getBasicBlock()); BasicBlock criticalBlock = criticalBlocks.get(c.phi.getBasicBlock()); if (criticalBlock == null) { criticalBlock = IRTools.makeBlockOnEdge(bb, c.phi.getBasicBlock(), ir); if (c.phi.getBasicBlock().getInfrequent()) { criticalBlock.setInfrequent(); } splitSomeBlock = true; criticalBlocks.put(c.phi.getBasicBlock(), criticalBlock); HashMap<Register, Register> newNames = new HashMap<Register, Register>(4); currentNames.put(criticalBlock, newNames); } criticalBlock.appendInstructionRespectingTerminalBranch(ci); } else { SSA.addAtEnd(ir, bb, ci, c.phi.getBasicBlock().isExceptionHandlerBasicBlock()); } } // source has been copied and so can now be overwritten // safely. so now add any copies _to_ the source of the // current copy to the work list. if (c.source instanceof RegisterOperand) { Register saved = c.source.asRegister().getRegister(); Iterator<Copy> copySetIter = copySet.iterator(); while (copySetIter.hasNext()) { Copy cc = copySetIter.next(); if (cc.destination.asRegister().getRegister() == saved) { workList.add(0, cc); copySetIter.remove(); } } } } // an empty work list with work remaining in the copy set // implies a cycle in the dependencies amongst copies. deal // with this: break the cycle by copying the destination // of an arbitrary member of the copy set into a temporary. // this destination has thus been saved, and can now be // safely overwritten. 
so, add that copy to the work list. if (!copySet.isEmpty()) { Copy c = copySet.remove(0); Register tt = ir.regpool.getReg(c.destination.getRegister()); SSA.addAtEnd(ir, bb, SSA.makeMoveInstruction(ir, tt, c.destination.getRegister(), c.destination.getType()), c.phi.getBasicBlock().isExceptionHandlerBasicBlock()); bbNames.put(c.destination.getRegister(), tt); workList.add(0, c); } } } /** * Insert copy instructions into a basic block to safely translate out * of SSA form. * * @param bb the basic block * @param dom a valid dominator tree for the IR * @param live valid liveness information for the IR */ private void insertCopies(BasicBlock bb, DominatorTree dom, LiveAnalysis live) { // add copies required in this block to remove phis. // (record renaming required by simultaneous liveness in global tables) scheduleCopies(bb, live); // insert copies in control children Enumeration<TreeNode> children = dom.getChildren(bb); while (children.hasMoreElements()) { BasicBlock c = ((DominatorTreeNode) children.nextElement()).getBlock(); insertCopies(c, dom, live); } } /** * Main driver to translate an IR out of SSA form. * * @param ir the IR in SSA form */ public void translateFromSSA(IR ir) { // 0. Deal with guards (validation registers) unSSAGuards(ir); // 1. re-compute dominator tree in case of control flow changes LTDominators.perform(ir, true, true); DominatorTree dom = new DominatorTree(ir, true); // 1.5 Perform Sreedhar's naive translation from TSSA to CSSA //if (ir.options.UNROLL_LOG == 0) normalizeSSA(ir); // 2. compute liveness LiveAnalysis live = new LiveAnalysis(false, // don't create GC maps true, // skip (final) local propagation step // of live analysis false, // don't store information at handlers false); // dont skip guards live.perform(ir); // 3. initialization VariableStacks s = new VariableStacks(); // 4. convert phi nodes into copies BasicBlock b = ((DominatorTreeNode) dom.getRoot()).getBlock(); insertCopies(b, dom, live); // 5. 
If necessary, recompute dominators to account for new control flow. if (splitSomeBlock) { LTDominators.perform(ir, true, true); dom = new DominatorTree(ir, true); } // 6. compensate for copies required by simulataneous liveness performRename(b, dom, s); // 7. phis are now redundant removeAllPhis(ir); } /** * Remove all phi instructions from the IR. * * @param ir the governing IR */ static void removeAllPhis(IR ir) { for (Instruction s = ir.firstInstructionInCodeOrder(), sentinel = ir.lastInstructionInCodeOrder(), nextInstr = null; s != sentinel; s = nextInstr) { // cache because remove nulls next/prev fields nextInstr = s.nextInstructionInCodeOrder(); if (Phi.conforms(s)) s.remove(); } } /** * Special treatment for guard registers: * Remove guard-phis by evaluating operands into same register. * If this target register is not unique, unite the alternatives. */ private void unSSAGuards(IR ir) { // 0. initialization unSSAGuardsInit(ir); // 1. Determine target registers unSSAGuardsDetermineReg(ir); // 2. Rename targets and remove Phis unSSAGuardsFinalize(ir); } Instruction guardPhis = null; /** * Initialization for removal of guard phis. */ private void unSSAGuardsInit(IR ir) { guardPhis = null; InstructionEnumeration e = ir.forwardInstrEnumerator(); // visit all instructions, looking for guard phis while (e.hasMoreElements()) { Instruction inst = e.next(); if (!Phi.conforms(inst)) continue; Operand res = Phi.getResult(inst); if (!(res instanceof RegisterOperand)) continue; Register r = res.asRegister().getRegister(); if (!r.isValidation()) continue; // force all operands of Phis into registers. 
inst.scratchObject = guardPhis; guardPhis = inst; int values = Phi.getNumberOfValues(inst); for (int i = 0; i < values; ++i) { Operand op = Phi.getValue(inst, i); if (!(op instanceof RegisterOperand)) { if (op instanceof TrueGuardOperand) { BasicBlock bb = Phi.getPred(inst, i).block; Instruction move = Move.create(GUARD_MOVE, res.asRegister().copyD2D(), new TrueGuardOperand()); move.position = ir.gc.inlineSequence; move.bcIndex = SSA_SYNTH_BCI; bb.appendInstructionRespectingTerminalBranchOrPEI(move); } else if (op instanceof UnreachableOperand) { // do nothing } else { if (VM.VerifyAssertions) VM._assert(false); } } } } // visit all guard registers, init union/find for (Register r = ir.regpool.getFirstSymbolicRegister(); r != null; r = r.getNext()) { if (!r.isValidation()) continue; r.scratch = 1; r.scratchObject = r; } } /** * Determine target register for guard phi operands */ private void unSSAGuardsDetermineReg(IR ir) { Instruction inst = guardPhis; while (inst != null) { Register r = Phi.getResult(inst).asRegister().getRegister(); int values = Phi.getNumberOfValues(inst); for (int i = 0; i < values; ++i) { Operand op = Phi.getValue(inst, i); if (op instanceof RegisterOperand) { guardUnion(op.asRegister().getRegister(), r); } else { if (VM.VerifyAssertions) { VM._assert(op instanceof TrueGuardOperand || op instanceof UnreachableOperand); } } } inst = (Instruction) inst.scratchObject; } } /** * Rename registers and delete Phis. 
*/ private void unSSAGuardsFinalize(IR ir) { DefUse.computeDU(ir); for (Register r = ir.regpool.getFirstSymbolicRegister(); r != null; r = r.getNext()) { if (!r.isValidation()) continue; Register nreg = guardFind(r); RegisterOperandEnumeration uses = DefUse.uses(r); while (uses.hasMoreElements()) { RegisterOperand use = uses.next(); use.setRegister(nreg); } RegisterOperandEnumeration defs = DefUse.defs(r); while (defs.hasMoreElements()) { RegisterOperand def = defs.next(); def.setRegister(nreg); } } Instruction inst = guardPhis; while (inst != null) { inst.remove(); inst = (Instruction) inst.scratchObject; } } /** * union step of union/find for guard registers during unSSA */ private Register guardUnion(Register from, Register to) { Register a = guardFind(from); Register b = guardFind(to); if (a == b) return a; if (a.scratch == b.scratch) { a.scratch++; b.scratchObject = a; return a; } if (a.scratch > b.scratch) { b.scratchObject = a; return a; } a.scratchObject = b; return b; } /** * find step of union/find for guard registers during unSSA */ private Register guardFind(Register r) { Register start = r; if (VM.VerifyAssertions) VM._assert(r.scratchObject != null); while (r.scratchObject != r) r = (Register) r.scratchObject; while (start.scratchObject != r) { start.scratchObject = r; start = (Register) start.scratchObject; } return r; } /** * Avoid potential lost copy and other associated problems by * Sreedhar's naive translation from TSSA to CSSA. Guards are rather * trivial to un-SSA so they have already been removed from the IR. * This algorithm is very wasteful of registers so needs good * coalescing. 
* @param ir the IR to work upon */ private static void normalizeSSA(IR ir) { for (Instruction s = ir.firstInstructionInCodeOrder(), sentinel = ir.lastInstructionInCodeOrder(), nextInstr = null; s != sentinel; s = nextInstr) { // cache so we don't process inserted instructions nextInstr = s.nextInstructionInCodeOrder(); if (Phi.conforms(s) && !s.getBasicBlock().isExceptionHandlerBasicBlock()) { // We ignore exception handler BBs as they cause problems when inserting copies if (DEBUG) VM.sysWriteln("Processing " + s + " of basic block " + s.getBasicBlock()); // Does the phi instruction have an unreachable operand? boolean hasUnreachable = false; // 1. Naively copy source operands into predecessor blocks for (int index = 0; index < Phi.getNumberOfPreds(s); index++) { Operand op = Phi.getValue(s, index); if (op.isRegister()) { // Get rval Register rval = op.asRegister().getRegister(); if (rval.isValidation()) { continue; // ignore guards } else { // Make rval' Register rvalPrime = ir.regpool.getReg(rval); // Make copy instruction Instruction copy = SSA.makeMoveInstruction(ir, rvalPrime, rval, op.getType()); // Insert a copy of rval to rval' in predBlock BasicBlock pred = Phi.getPred(s, index).block; pred.appendInstructionRespectingTerminalBranch(copy); if (DEBUG) VM.sysWriteln("Inserted rval copy of " + copy + " into basic block " + pred); // Rename rval to rval' in phi instruction op.asRegister().setRegister(rvalPrime); } } else if (op instanceof UnreachableOperand) { hasUnreachable = true; } } // 2. 
Naively copy the result if there were no unreachable operands if (!hasUnreachable) { Operand op = Phi.getResult(s); if (!op.isRegister()) { // ignore heap operands } else { // Get lval Register lval = op.asRegister().getRegister(); // Make lval' Register lvalPrime = ir.regpool.getReg(lval); // Make copy instruction Instruction copy = SSA.makeMoveInstruction(ir, lval, lvalPrime, op.getType()); // Insert a copy of lval' to lval after phi instruction s.insertAfter(copy); // Rename lval to lval' in phi instruction op.asRegister().setRegister(lvalPrime); if (DEBUG) VM.sysWriteln("Inserted lval copy of " + copy + " after " + s); } } } } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.webproxy; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.OutputStream; import java.io.PrintWriter; import java.net.InetAddress; import java.net.URI; import java.net.URISyntaxException; import java.net.URLEncoder; import java.util.Arrays; import java.util.EnumSet; import java.util.Enumeration; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.core.UriBuilder; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Apps; import org.apache.hadoop.yarn.util.StringHelper; import org.apache.hadoop.yarn.util.TrackingUriPlugin; import org.apache.hadoop.yarn.webapp.MimeType; import 
org.apache.hadoop.yarn.webapp.hamlet.Hamlet; import org.apache.hadoop.yarn.webapp.util.WebAppUtils; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.NameValuePair; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.params.ClientPNames; import org.apache.http.client.params.CookiePolicy; import org.apache.http.client.utils.URLEncodedUtils; import org.apache.http.conn.params.ConnRoutePNames; import org.apache.http.impl.client.DefaultHttpClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class WebAppProxyServlet extends HttpServlet { private static final long serialVersionUID = 1L; private static final Logger LOG = LoggerFactory.getLogger( WebAppProxyServlet.class); private static final Set<String> passThroughHeaders = new HashSet<>(Arrays.asList( "User-Agent", "Accept", "Accept-Encoding", "Accept-Language", "Accept-Charset")); public static final String PROXY_USER_COOKIE_NAME = "proxy-user"; private transient List<TrackingUriPlugin> trackingUriPlugins; private final String rmAppPageUrlBase; private transient YarnConfiguration conf; private static class _ implements Hamlet._ { //Empty } private static class Page extends Hamlet { Page(PrintWriter out) { super(out, 0, false); } public HTML<WebAppProxyServlet._> html() { return new HTML<>("html", null, EnumSet.of(EOpt.ENDTAG)); } } /** * Default constructor */ public WebAppProxyServlet() { super(); conf = new YarnConfiguration(); this.trackingUriPlugins = conf.getInstances(YarnConfiguration.YARN_TRACKING_URL_GENERATOR, TrackingUriPlugin.class); this.rmAppPageUrlBase = StringHelper.pjoin( WebAppUtils.getResolvedRMWebAppURLWithScheme(conf), "cluster", "app"); } /** * Output 404 with appropriate message. * @param resp the http response. * @param message the message to include on the page. * @throws IOException on any error. 
*/ private static void notFound(HttpServletResponse resp, String message) throws IOException { ProxyUtils.notFound(resp, message); } /** * Warn the user that the link may not be safe! * @param resp the http response * @param link the link to point to * @param user the user that owns the link. * @throws IOException on any error. */ private static void warnUserPage(HttpServletResponse resp, String link, String user, ApplicationId id) throws IOException { //Set the cookie when we warn which overrides the query parameter //This is so that if a user passes in the approved query parameter without //having first visited this page then this page will still be displayed resp.addCookie(makeCheckCookie(id, false)); resp.setContentType(MimeType.HTML); Page p = new Page(resp.getWriter()); p.html(). h1("WARNING: The following page may not be safe!"). h3(). _("click ").a(link, "here"). _(" to continue to an Application Master web interface owned by ", user). _(). _(); } /** * Download link and have it be the response. * @param req the http request * @param resp the http response * @param link the link to download * @param c the cookie to set if any * @throws IOException on any error. */ private static void proxyLink(HttpServletRequest req, HttpServletResponse resp, URI link, Cookie c, String proxyHost) throws IOException { DefaultHttpClient client = new DefaultHttpClient(); client .getParams() .setParameter(ClientPNames.COOKIE_POLICY, CookiePolicy.BROWSER_COMPATIBILITY) .setBooleanParameter(ClientPNames.ALLOW_CIRCULAR_REDIRECTS, true); // Make sure we send the request from the proxy address in the config // since that is what the AM filter checks against. IP aliasing or // similar could cause issues otherwise. 
InetAddress localAddress = InetAddress.getByName(proxyHost); if (LOG.isDebugEnabled()) { LOG.debug("local InetAddress for proxy host: {}", localAddress); } client.getParams() .setParameter(ConnRoutePNames.LOCAL_ADDRESS, localAddress); HttpGet httpGet = new HttpGet(link); @SuppressWarnings("unchecked") Enumeration<String> names = req.getHeaderNames(); while(names.hasMoreElements()) { String name = names.nextElement(); if(passThroughHeaders.contains(name)) { String value = req.getHeader(name); if (LOG.isDebugEnabled()) { LOG.debug("REQ HEADER: {} : {}", name, value); } httpGet.setHeader(name, value); } } String user = req.getRemoteUser(); if (user != null && !user.isEmpty()) { httpGet.setHeader("Cookie", PROXY_USER_COOKIE_NAME + "=" + URLEncoder.encode(user, "ASCII")); } OutputStream out = resp.getOutputStream(); try { HttpResponse httpResp = client.execute(httpGet); resp.setStatus(httpResp.getStatusLine().getStatusCode()); for (Header header : httpResp.getAllHeaders()) { resp.setHeader(header.getName(), header.getValue()); } if (c != null) { resp.addCookie(c); } InputStream in = httpResp.getEntity().getContent(); if (in != null) { IOUtils.copyBytes(in, out, 4096, true); } } finally { httpGet.releaseConnection(); } } private static String getCheckCookieName(ApplicationId id){ return "checked_"+id; } private static Cookie makeCheckCookie(ApplicationId id, boolean isSet) { Cookie c = new Cookie(getCheckCookieName(id),String.valueOf(isSet)); c.setPath(ProxyUriUtils.getPath(id)); c.setMaxAge(60 * 60 * 2); //2 hours in seconds return c; } private boolean isSecurityEnabled() { Boolean b = (Boolean) getServletContext() .getAttribute(WebAppProxy.IS_SECURITY_ENABLED_ATTRIBUTE); return b != null ? 
  // NOTE(review): the two lines below are the tail of a method whose beginning
  // lies outside this chunk (a "... ? b : false;" return); kept byte-for-byte.
  b : false;
  }

  /**
   * Fetches the {@link ApplicationReport} for the given application id through
   * the {@code AppReportFetcher} stored in the servlet context under
   * {@code WebAppProxy.FETCHER_ATTRIBUTE}.
   *
   * @param id the application to look up
   * @return the report for {@code id}
   * @throws IOException on fetch failure
   * @throws YarnException on RM-side failure
   */
  private ApplicationReport getApplicationReport(ApplicationId id)
      throws IOException, YarnException {
    return ((AppReportFetcher) getServletContext()
        .getAttribute(WebAppProxy.FETCHER_ATTRIBUTE)).getApplicationReport(id);
  }

  /**
   * @return the proxy host name stored in the servlet context under
   *         {@code WebAppProxy.PROXY_HOST_ATTRIBUTE}.
   * @throws IOException declared for caller convenience; the lookup itself
   *         does no I/O
   */
  private String getProxyHost() throws IOException {
    return ((String) getServletContext()
        .getAttribute(WebAppProxy.PROXY_HOST_ATTRIBUTE));
  }

  /**
   * Proxies a GET request for an application's tracking UI.
   * <p>
   * Flow: parse {@code /<appId>/<rest...>} from the path info, resolve the
   * application report, fall back to tracking-URI plugins or the RM app page
   * when the app or its tracking URL is unavailable, interpose a warning page
   * for a secure cluster when the remote user is not the app owner and has not
   * yet approved the connection (tracked via a per-app cookie), and finally
   * either redirect (for completed apps) or proxy the request through to the
   * app master GUI.
   */
  @Override
  protected void doGet(HttpServletRequest req, HttpServletResponse resp)
      throws IOException{
    try {
      // "approved" may arrive as a query parameter (from the warning page)...
      String userApprovedParamS =
          req.getParameter(ProxyUriUtils.PROXY_APPROVAL_PARAM);
      boolean userWasWarned = false;
      boolean userApproved = Boolean.valueOf(userApprovedParamS);
      boolean securityEnabled = isSecurityEnabled();
      final String remoteUser = req.getRemoteUser();
      final String pathInfo = req.getPathInfo();

      // Path is expected to look like /<appId>[/<rest>]; limit 3 keeps
      // everything after the app id in one piece.
      String[] parts = pathInfo.split("/", 3);
      if (parts.length < 2) {
        LOG.warn("{} gave an invalid proxy path {}", remoteUser, pathInfo);
        notFound(resp, "Your path appears to be formatted incorrectly.");
        return;
      }
      //parts[0] is empty because path info always starts with a /
      String appId = parts[1];
      String rest = parts.length > 2 ? parts[2] : "";
      ApplicationId id = Apps.toAppID(appId);
      if (id == null) {
        LOG.warn("{} attempting to access {} that is invalid",
            remoteUser, appId);
        notFound(resp, appId + " appears to be formatted incorrectly.");
        return;
      }

      // ...or as a previously-set per-app cookie recording an earlier answer.
      if (securityEnabled) {
        String cookieName = getCheckCookieName(id);
        Cookie[] cookies = req.getCookies();
        if (cookies != null) {
          for (Cookie c : cookies) {
            if (cookieName.equals(c.getName())) {
              userWasWarned = true;
              userApproved = userApproved || Boolean.valueOf(c.getValue());
              break;
            }
          }
        }
      }

      // Only re-check the user on a secure cluster when they have not already
      // been warned and approved the connection.
      boolean checkUser = securityEnabled && (!userWasWarned || !userApproved);

      ApplicationReport applicationReport;
      try {
        applicationReport = getApplicationReport(id);
      } catch (ApplicationNotFoundException e) {
        // Treat "not found" like a null report so the plugin/history-server
        // fallback below applies.
        applicationReport = null;
      }
      if (applicationReport == null) {
        LOG.warn("{} attempting to access {} that was not found",
            remoteUser, id);

        // Give configured tracking-URI plugins a chance to supply a URL
        // before giving up.
        URI toFetch = ProxyUriUtils
            .getUriFromTrackingPlugins(id, this.trackingUriPlugins);
        if (toFetch != null) {
          ProxyUtils.sendRedirect(req, resp, toFetch.toString());
          return;
        }

        notFound(resp, "Application " + appId + " could not be found, "
            + "please try the history server");
        return;
      }

      String original = applicationReport.getOriginalTrackingUrl();
      URI trackingUri;
      // fallback to ResourceManager's app page if no tracking URI provided
      if(original == null || original.equals("N/A")) {
        ProxyUtils.sendRedirect(req, resp,
            StringHelper.pjoin(rmAppPageUrlBase, id.toString()));
        return;
      } else {
        // A scheme-less URL from the AM gets the cluster's configured
        // http/https prefix prepended.
        if (ProxyUriUtils.getSchemeFromUrl(original).isEmpty()) {
          trackingUri = ProxyUriUtils.getUriFromAMUrl(
              WebAppUtils.getHttpSchemePrefix(conf), original);
        } else {
          trackingUri = new URI(original);
        }
      }

      String runningUser = applicationReport.getUser();
      if(checkUser && !runningUser.equals(remoteUser)) {
        // Remote user is not the app owner: show the interstitial warning
        // page instead of proxying.
        LOG.info("Asking {} if they want to connect to the "
            + "app master GUI of {} owned by {}",
            remoteUser, appId, runningUser);
        warnUserPage(resp, ProxyUriUtils.getPathAndQuery(id, rest,
            req.getQueryString(), true), runningUser, id);
        return;
      }

      // Append the user-provided path and query parameter to the original
      // tracking url.
      List<NameValuePair> queryPairs =
          URLEncodedUtils.parse(req.getQueryString(), null);
      UriBuilder builder = UriBuilder.fromUri(trackingUri);
      for (NameValuePair pair : queryPairs) {
        builder.queryParam(pair.getName(), pair.getValue());
      }
      URI toFetch = builder.path(rest).build();

      LOG.info("{} is accessing unchecked {}"
          + " which is the app master GUI of {} owned by {}",
          remoteUser, toFetch, appId, runningUser);

      // Completed applications are redirected (their AM is gone); everything
      // else falls through to the live proxy below.
      switch (applicationReport.getYarnApplicationState()) {
        case KILLED:
        case FINISHED:
        case FAILED:
          ProxyUtils.sendRedirect(req, resp, toFetch.toString());
          return;
        default:
          // fall out of the switch
      }

      // Persist the user's approval in a cookie so the warning page is not
      // shown again for this application.
      Cookie c = null;
      if (userWasWarned && userApproved) {
        c = makeCheckCookie(id, true);
      }
      proxyLink(req, resp, toFetch, c, getProxyHost());

    } catch(URISyntaxException | YarnException e) {
      throw new IOException(e);
    }
  }

  /**
   * This method is used by Java object deserialization, to fill in the
   * transient {@link #trackingUriPlugins} field.
   * See {@link ObjectInputStream#defaultReadObject()}
   * <p>
   * <I>Do not remove</I>
   * <p>
   * Yarn isn't currently serializing this class, but findbugs
   * complains in its absence.
   *
   *
   * @param input source
   * @throws IOException IO failure
   * @throws ClassNotFoundException classloader fun
   */
  private void readObject(ObjectInputStream input)
      throws IOException, ClassNotFoundException {
    input.defaultReadObject();
    // Rebuild the transient plugin list from a fresh configuration, since it
    // cannot travel through serialization.
    conf = new YarnConfiguration();
    this.trackingUriPlugins =
        conf.getInstances(YarnConfiguration.YARN_TRACKING_URL_GENERATOR,
            TrackingUriPlugin.class);
  }
}
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.groovy.lang.psi.impl.statements.expressions;

import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiSubstitutorImpl;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.Function;
import com.intellij.util.containers.ComparatorUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.HashMap;
import gnu.trove.THashMap;
import gnu.trove.THashSet;
import gnu.trove.TIntObjectHashMap;
import gnu.trove.TObjectIntHashMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElement;
import org.jetbrains.plugins.groovy.lang.psi.api.GroovyResolveResult;
import org.jetbrains.plugins.groovy.lang.psi.api.SpreadState;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.annotation.GrAnnotation;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.annotation.GrAnnotationArrayInitializer;
import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.annotation.GrAnnotationMemberValue;
import org.jetbrains.plugins.groovy.lang.psi.api.signatures.GrClosureSignature;
import org.jetbrains.plugins.groovy.lang.psi.api.signatures.GrSignature;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrBinaryExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.impl.*;
import org.jetbrains.plugins.groovy.lang.psi.impl.signatures.GrClosureSignatureImpl;
import org.jetbrains.plugins.groovy.lang.psi.typeEnhancers.GrTypeConverter;
import org.jetbrains.plugins.groovy.lang.psi.util.GroovyCommonClassNames;
import org.jetbrains.plugins.groovy.lang.resolve.ResolveUtil;
import org.jetbrains.plugins.groovy.util.LightCacheKey;

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

import static com.intellij.psi.CommonClassNames.*;
import static org.jetbrains.plugins.groovy.lang.lexer.GroovyTokenTypes.*;
import static org.jetbrains.plugins.groovy.lang.parser.GroovyElementTypes.COMPOSITE_LSHIFT_SIGN;
import static org.jetbrains.plugins.groovy.lang.parser.GroovyElementTypes.COMPOSITE_RSHIFT_SIGN;
import static org.jetbrains.plugins.groovy.lang.psi.util.GroovyCommonClassNames.*;

/**
 * Static utilities for Groovy type computations: assignability (with and
 * without Groovy conversions), numeric promotion, least-upper-bound
 * calculation, boxing/unboxing of primitives, operator-to-method-name lookup,
 * and construction of common collection/class types.
 *
 * @author ven
 */
public class TypesUtil {
  // Caches the transitive super-class map per class (see getSuperClassesWithCache).
  private static final LightCacheKey<Map<String, PsiClass>> PARENT_CACHE_KEY = LightCacheKey.create();

  // Maps boxed-type FQNs (java.lang.Integer, ...) to their primitive PsiType.
  @NonNls
  public static final Map<String, PsiType> ourQNameToUnboxed = new HashMap<String, PsiType>();

  public static final PsiPrimitiveType[] PRIMITIVES = new PsiPrimitiveType[]{
    PsiType.BYTE,
    PsiType.CHAR,
    PsiType.DOUBLE,
    PsiType.FLOAT,
    PsiType.INT,
    PsiType.SHORT,
    PsiType.LONG,
    PsiType.BOOLEAN,
    PsiType.VOID
  };

  private TypesUtil() {
  }

  /**
   * Result type of a numeric binary expression: the "wider" of the two operand
   * types per {@link #getLeastUpperBoundForNumericType}, or null when either
   * operand type is unknown.
   */
  @Nullable
  public static PsiType getNumericResultType(GrBinaryExpression binaryExpression) {
    PsiType lType = binaryExpression.getLeftOperand().getType();
    final GrExpression rop = binaryExpression.getRightOperand();
    PsiType rType = rop == null ? null : rop.getType();
    if (lType == null || rType == null) return null;
    return getLeastUpperBoundForNumericType(lType, rType);
  }

  /**
   * Picks the higher-ranked of two numeric types (Float is widened to Double
   * first), or null when either type is not in the numeric rank table.
   */
  @Nullable
  private static PsiType getLeastUpperBoundForNumericType(@NotNull PsiType lType, @NotNull PsiType rType) {
    String lCanonical = lType.getCanonicalText();
    String rCanonical = rType.getCanonicalText();
    if (JAVA_LANG_FLOAT.equals(lCanonical)) lCanonical = JAVA_LANG_DOUBLE;
    if (JAVA_LANG_FLOAT.equals(rCanonical)) rCanonical = JAVA_LANG_DOUBLE;
    if (TYPE_TO_RANK.containsKey(lCanonical) && TYPE_TO_RANK.containsKey(rCanonical)) {
      return TYPE_TO_RANK.get(lCanonical) > TYPE_TO_RANK.get(rCanonical) ? lType : rType;
    }
    return null;
  }

  /** Candidates for a binary operator, resolved via the Groovy operator-method name (e.g. mPLUS -> "plus"). */
  @NotNull
  public static GroovyResolveResult[] getOverloadedOperatorCandidates(@NotNull PsiType thisType,
                                                                      IElementType tokenType,
                                                                      @NotNull GroovyPsiElement place,
                                                                      PsiType[] argumentTypes) {
    return getOverloadedOperatorCandidates(thisType, tokenType, place, argumentTypes, false);
  }

  @NotNull
  public static GroovyResolveResult[] getOverloadedOperatorCandidates(@NotNull PsiType thisType,
                                                                      IElementType tokenType,
                                                                      @NotNull GroovyPsiElement place,
                                                                      PsiType[] argumentTypes,
                                                                      boolean incompleteCode) {
    return ResolveUtil.getMethodCandidates(thisType, ourOperationsToOperatorNames.get(tokenType), place, true, incompleteCode, false,
                                           argumentTypes);
  }

  /** Candidates for a unary operator (e.g. mLNOT -> "asBoolean", mINC -> "next"). */
  public static GroovyResolveResult[] getOverloadedUnaryOperatorCandidates(@NotNull PsiType thisType,
                                                                           IElementType tokenType,
                                                                           @NotNull GroovyPsiElement place,
                                                                           PsiType[] argumentTypes) {
    return ResolveUtil.getMethodCandidates(thisType, ourUnaryOperationsToOperatorNames.get(tokenType), place, argumentTypes);
  }

  // Maps literal/keyword token types to the FQN of the boxed Java type they denote.
  private static final Map<IElementType, String> ourPrimitiveTypesToClassNames = new HashMap<IElementType, String>();
  private static final String NULL = "null";

  static {
    ourPrimitiveTypesToClassNames.put(mSTRING_LITERAL, JAVA_LANG_STRING);
    ourPrimitiveTypesToClassNames.put(mGSTRING_LITERAL, JAVA_LANG_STRING);
    ourPrimitiveTypesToClassNames.put(mREGEX_LITERAL, JAVA_LANG_STRING);
    ourPrimitiveTypesToClassNames.put(mDOLLAR_SLASH_REGEX_LITERAL, JAVA_LANG_STRING);
    ourPrimitiveTypesToClassNames.put(mNUM_INT, JAVA_LANG_INTEGER);
    ourPrimitiveTypesToClassNames.put(mNUM_LONG, JAVA_LANG_LONG);
    ourPrimitiveTypesToClassNames.put(mNUM_FLOAT, JAVA_LANG_FLOAT);
    ourPrimitiveTypesToClassNames.put(mNUM_DOUBLE, JAVA_LANG_DOUBLE);
    ourPrimitiveTypesToClassNames.put(mNUM_BIG_INT, JAVA_MATH_BIG_INTEGER);
    ourPrimitiveTypesToClassNames.put(mNUM_BIG_DECIMAL, JAVA_MATH_BIG_DECIMAL);
    ourPrimitiveTypesToClassNames.put(kFALSE, JAVA_LANG_BOOLEAN);
    ourPrimitiveTypesToClassNames.put(kTRUE, JAVA_LANG_BOOLEAN);
    ourPrimitiveTypesToClassNames.put(kNULL, NULL);

    ourPrimitiveTypesToClassNames.put(kINT, JAVA_LANG_INTEGER);
    ourPrimitiveTypesToClassNames.put(kLONG, JAVA_LANG_LONG);
    ourPrimitiveTypesToClassNames.put(kFLOAT, JAVA_LANG_FLOAT);
    ourPrimitiveTypesToClassNames.put(kDOUBLE, JAVA_LANG_DOUBLE);
    ourPrimitiveTypesToClassNames.put(kBOOLEAN, JAVA_LANG_BOOLEAN);
    ourPrimitiveTypesToClassNames.put(kCHAR, JAVA_LANG_CHARACTER);
    ourPrimitiveTypesToClassNames.put(kBYTE, JAVA_LANG_BYTE);
  }

  // Binary operator token -> Groovy operator-overload method name.
  private static final Map<IElementType, String> ourOperationsToOperatorNames = new HashMap<IElementType, String>();
  // Unary operator token -> Groovy operator-overload method name.
  private static final Map<IElementType, String> ourUnaryOperationsToOperatorNames = new HashMap<IElementType, String>();

  static {
    ourOperationsToOperatorNames.put(mPLUS, "plus");
    ourOperationsToOperatorNames.put(mMINUS, "minus");
    ourOperationsToOperatorNames.put(mBAND, "and");
    ourOperationsToOperatorNames.put(mBOR, "or");
    ourOperationsToOperatorNames.put(mBXOR, "xor");
    ourOperationsToOperatorNames.put(mDIV, "div");
    ourOperationsToOperatorNames.put(mMOD, "mod");
    ourOperationsToOperatorNames.put(mSTAR, "multiply");
    ourOperationsToOperatorNames.put(kAS, "asType");
    ourOperationsToOperatorNames.put(mCOMPARE_TO, "compareTo");
    ourOperationsToOperatorNames.put(mGT, "compareTo");
    ourOperationsToOperatorNames.put(mGE, "compareTo");
    ourOperationsToOperatorNames.put(mLT, "compareTo");
    ourOperationsToOperatorNames.put(mLE, "compareTo");
    ourOperationsToOperatorNames.put(mSTAR_STAR, "power");
    ourOperationsToOperatorNames.put(COMPOSITE_LSHIFT_SIGN, "leftShift");
    ourOperationsToOperatorNames.put(COMPOSITE_RSHIFT_SIGN, "rightShift");
    ourOperationsToOperatorNames.put(mEQUAL, "equals");
    ourOperationsToOperatorNames.put(mNOT_EQUAL, "equals");

    ourUnaryOperationsToOperatorNames.put(mLNOT, "asBoolean");
    ourUnaryOperationsToOperatorNames.put(mPLUS, "positive");
    ourUnaryOperationsToOperatorNames.put(mMINUS, "negative");
    ourUnaryOperationsToOperatorNames.put(mDEC, "previous");
    ourUnaryOperationsToOperatorNames.put(mINC, "next");
    ourUnaryOperationsToOperatorNames.put(mBNOT, "bitwiseNegate");
  }

  // Numeric promotion ranks: higher rank wins in getLeastUpperBoundForNumericType.
  private static final TObjectIntHashMap<String> TYPE_TO_RANK = new TObjectIntHashMap<String>();

  static {
    TYPE_TO_RANK.put(JAVA_LANG_BYTE, 1);
    TYPE_TO_RANK.put(JAVA_LANG_SHORT, 2);
    TYPE_TO_RANK.put(JAVA_LANG_CHARACTER, 2);
    TYPE_TO_RANK.put(JAVA_LANG_INTEGER, 3);
    TYPE_TO_RANK.put(JAVA_LANG_LONG, 4);
    TYPE_TO_RANK.put(JAVA_MATH_BIG_INTEGER, 5);
    TYPE_TO_RANK.put(JAVA_MATH_BIG_DECIMAL, 6);
    TYPE_TO_RANK.put(JAVA_LANG_FLOAT, 7);
    TYPE_TO_RANK.put(JAVA_LANG_DOUBLE, 8);
    TYPE_TO_RANK.put(JAVA_LANG_NUMBER, 9);
  }

  static {
    ourQNameToUnboxed.put(JAVA_LANG_BOOLEAN, PsiType.BOOLEAN);
    ourQNameToUnboxed.put(JAVA_LANG_BYTE, PsiType.BYTE);
    ourQNameToUnboxed.put(JAVA_LANG_CHARACTER, PsiType.CHAR);
    ourQNameToUnboxed.put(JAVA_LANG_SHORT, PsiType.SHORT);
    ourQNameToUnboxed.put(JAVA_LANG_INTEGER, PsiType.INT);
    ourQNameToUnboxed.put(JAVA_LANG_LONG, PsiType.LONG);
    ourQNameToUnboxed.put(JAVA_LANG_FLOAT, PsiType.FLOAT);
    ourQNameToUnboxed.put(JAVA_LANG_DOUBLE, PsiType.DOUBLE);
  }

  // Inverse-ish of TYPE_TO_RANK; note ranks 1..3 all normalize to Integer.
  private static final TIntObjectHashMap<String> RANK_TO_TYPE = new TIntObjectHashMap<String>();

  static {
    RANK_TO_TYPE.put(1, JAVA_LANG_INTEGER);
    RANK_TO_TYPE.put(2, JAVA_LANG_INTEGER);
    RANK_TO_TYPE.put(3, JAVA_LANG_INTEGER);
    RANK_TO_TYPE.put(4, JAVA_LANG_LONG);
    RANK_TO_TYPE.put(5, JAVA_MATH_BIG_INTEGER);
    RANK_TO_TYPE.put(6, JAVA_MATH_BIG_DECIMAL);
    RANK_TO_TYPE.put(7, JAVA_LANG_DOUBLE);
    RANK_TO_TYPE.put(8, JAVA_LANG_DOUBLE);
    RANK_TO_TYPE.put(9, JAVA_LANG_NUMBER);
  }

  /**
   * Groovy assignability: direct assignability first, then registered
   * {@link GrTypeConverter}s (method-call converters, then general ones after
   * boxing). Intersection types are decomposed: any conjunct of rType may
   * match; every conjunct of lType must accept rType.
   */
  public static boolean isAssignable(@Nullable PsiType lType, @Nullable PsiType rType, @NotNull PsiElement context) {
    if (lType == null || rType == null) {
      return false;
    }

    if (rType instanceof PsiIntersectionType) {
      for (PsiType child : ((PsiIntersectionType)rType).getConjuncts()) {
        if (isAssignable(lType, child, context)) {
          return true;
        }
      }
      return false;
    }
    if (lType instanceof PsiIntersectionType) {
      for (PsiType child : ((PsiIntersectionType)lType).getConjuncts()) {
        if (!isAssignable(child, rType, context)) {
          return false;
        }
      }
      return true;
    }

    if (isAssignableWithoutConversions(lType, rType, context)) {
      return true;
    }

    Boolean byConversionInMethodCall = isAssignableByConversion(lType, rType, context, true);
    if (byConversionInMethodCall != null) {
      return byConversionInMethodCall.booleanValue();
    }

    if (lType instanceof PsiPrimitiveType && rType == PsiType.NULL) {
      //check it because now we will wrap primitive type.
      return false;
    }

    final PsiManager manager = context.getManager();
    final GlobalSearchScope scope = context.getResolveScope();

    lType = boxPrimitiveType(lType, manager, scope);
    rType = boxPrimitiveType(rType, manager, scope);
    if (lType.isAssignableFrom(rType)) {
      return true;
    }

    Boolean byConversion = isAssignableByConversion(lType, rType, context, false);
    if (byConversion != null) {
      return byConversion.booleanValue();
    }

    return false;
  }

  /**
   * Like {@link #isAssignable} but restricted to method-call argument
   * conversions (no boxing-then-general-converter fallback).
   */
  public static boolean isAssignableByMethodCallConversion(@Nullable PsiType lType,
                                                           @Nullable PsiType rType,
                                                           @NotNull PsiElement context) {
    if (lType == null || rType == null) {
      return false;
    }

    if (rType instanceof PsiIntersectionType) {
      for (PsiType child : ((PsiIntersectionType)rType).getConjuncts()) {
        if (isAssignableByMethodCallConversion(lType, child, context)) {
          return true;
        }
      }
      return false;
    }
    if (lType instanceof PsiIntersectionType) {
      for (PsiType child : ((PsiIntersectionType)lType).getConjuncts()) {
        if (!isAssignableByMethodCallConversion(child, rType, context)) {
          return false;
        }
      }
      return true;
    }

    if (isAssignableWithoutConversions(lType, rType, context)) {
      return true;
    }

    Boolean byConversion = isAssignableByConversion(lType, rType, context, true);
    if (byConversion != null) {
      return byConversion.booleanValue();
    }

    return false;
  }

  /**
   * Consults GrTypeConverter extensions; null when no converter has an opinion
   * (callers then fall back to other checks).
   */
  @Nullable
  private static Boolean isAssignableByConversion(@NotNull PsiType lType,
                                                  @NotNull PsiType rType,
                                                  @NotNull PsiElement context,
                                                  boolean inMethodCall) {
    if (context instanceof GroovyPsiElement) {
      for (GrTypeConverter converter : GrTypeConverter.EP_NAME.getExtensions()) {
        if (inMethodCall == converter.isAllowedInMethodCall()) {
          final Boolean result = converter.isConvertible(lType, rType, (GroovyPsiElement)context);
          if (result != null) {
            return result;
          }
        }
      }
    }
    return null;
  }

  /**
   * Java-style assignability plus a few hard-wired Groovy rules: null is
   * assignable to any reference type, an empty list literal to arrays/List/Set,
   * GString to String, and any numeric type to any numeric type.
   */
  public static boolean isAssignableWithoutConversions(@Nullable PsiType lType,
                                                       @Nullable PsiType rType,
                                                       @NotNull PsiElement context) {
    if (lType == null || rType == null) return false;

    if (rType == PsiType.NULL) {
      return !(lType instanceof PsiPrimitiveType);
    }

    PsiManager manager = context.getManager();
    GlobalSearchScope scope = context.getResolveScope();

    if (rType instanceof GrTupleType && ((GrTupleType)rType).getComponentTypes().length == 0) {
      if (lType instanceof PsiArrayType ||
          InheritanceUtil.isInheritor(lType, JAVA_UTIL_LIST) ||
          InheritanceUtil.isInheritor(lType, JAVA_UTIL_SET)) {
        return true;
      }
    }

    if (isClassType(rType, GROOVY_LANG_GSTRING) && lType.equalsToText(JAVA_LANG_STRING)) {
      return true;
    }

    if (isNumericType(lType) && isNumericType(rType)) {
      // BigDecimal is treated as double on both sides for this check.
      lType = unboxPrimitiveTypeWrapper(lType);
      if (isClassType(lType, JAVA_MATH_BIG_DECIMAL)) lType = PsiType.DOUBLE;
      rType = unboxPrimitiveTypeWrapper(rType);
      if (isClassType(rType, JAVA_MATH_BIG_DECIMAL)) rType = PsiType.DOUBLE;
    }
    else {
      rType = boxPrimitiveType(rType, manager, scope);
      lType = boxPrimitiveType(lType, manager, scope);
    }

    if (rType instanceof GrMapType || rType instanceof GrTupleType) {
      Boolean result = isAssignableForNativeTypes(lType, (PsiClassType)rType, context);
      if (result != null && result.booleanValue()) return true;
    }

    if (TypeConversionUtil.isAssignable(lType, rType)) {
      return true;
    }

    return false;
  }

  /**
   * Structural assignability for Groovy native literal types (map/list):
   * walks the left class's type parameters against the right side's
   * substitutor. Null when lType is not a class type or either side fails to
   * resolve.
   */
  @Nullable
  private static Boolean isAssignableForNativeTypes(@NotNull PsiType lType,
                                                    @NotNull PsiClassType rType,
                                                    @NotNull PsiElement context) {
    if (!(lType instanceof PsiClassType)) return null;
    final PsiClassType.ClassResolveResult leftResult = ((PsiClassType)lType).resolveGenerics();
    final PsiClassType.ClassResolveResult rightResult = rType.resolveGenerics();
    final PsiClass leftClass = leftResult.getElement();
    PsiClass rightClass = rightResult.getElement();
    if (rightClass == null || leftClass == null) return null;

    if (!InheritanceUtil.isInheritorOrSelf(rightClass, leftClass, true)) return Boolean.FALSE;

    PsiSubstitutor rightSubstitutor = rightResult.getSubstitutor();

    if (!leftClass.hasTypeParameters()) return Boolean.TRUE;

    PsiSubstitutor leftSubstitutor = leftResult.getSubstitutor();

    if (!leftClass.getManager().areElementsEquivalent(leftClass, rightClass)) {
      // Project the right side onto the left class before comparing arguments.
      rightSubstitutor = TypeConversionUtil.getSuperClassSubstitutor(leftClass, rightClass, rightSubstitutor);
      rightClass = leftClass;
    }
    else if (!rightClass.hasTypeParameters()) return Boolean.TRUE;

    Iterator<PsiTypeParameter> li = PsiUtil.typeParametersIterator(leftClass);
    Iterator<PsiTypeParameter> ri = PsiUtil.typeParametersIterator(rightClass);

    while (li.hasNext()) {
      if (!ri.hasNext()) return Boolean.FALSE;
      PsiTypeParameter lp = li.next();
      PsiTypeParameter rp = ri.next();
      final PsiType typeLeft = leftSubstitutor.substitute(lp);
      if (typeLeft == null) continue;
      final PsiType typeRight = rightSubstitutor.substituteWithBoundsPromotion(rp);
      if (typeRight == null) {
        // A raw type on the right is considered assignable.
        return Boolean.TRUE;
      }
      if (!isAssignableWithoutConversions(typeLeft, typeRight, context)) return Boolean.FALSE;
    }
    return Boolean.TRUE;
  }

  /** True for primitive numeric types and boxed/Big* class types listed in TYPE_TO_RANK. */
  public static boolean isNumericType(@Nullable PsiType type) {
    if (type instanceof PsiClassType) {
      return TYPE_TO_RANK.contains(type.getCanonicalText());
    }

    return type instanceof PsiPrimitiveType && TypeConversionUtil.isNumericType(type);
  }

  public static PsiType unboxPrimitiveTypeWrapperAndEraseGenerics(PsiType result) {
    return TypeConversionUtil.erasure(unboxPrimitiveTypeWrapper(result));
  }

  /** Integer -> int etc.; returns the input unchanged when it is not a boxed wrapper. */
  public static PsiType unboxPrimitiveTypeWrapper(@Nullable PsiType type) {
    if (type instanceof PsiClassType) {
      final PsiClass psiClass = ((PsiClassType)type).resolve();
      if (psiClass != null) {
        PsiType unboxed = ourQNameToUnboxed.get(psiClass.getQualifiedName());
        if (unboxed != null) type = unboxed;
      }
    }
    return type;
  }

  /** int -> Integer etc.; void is only boxed when boxVoid is set. */
  public static PsiType boxPrimitiveType(@Nullable PsiType result,
                                         @NotNull PsiManager manager,
                                         @NotNull GlobalSearchScope resolveScope,
                                         boolean boxVoid) {
    if (result instanceof PsiPrimitiveType && (boxVoid || result != PsiType.VOID)) {
      PsiPrimitiveType primitive = (PsiPrimitiveType)result;
      String boxedTypeName = primitive.getBoxedTypeName();
      if (boxedTypeName != null) {
        return GroovyPsiManager.getInstance(manager.getProject()).createTypeByFQClassName(boxedTypeName, resolveScope);
      }
    }

    return result;
  }

  public static PsiType boxPrimitiveType(@Nullable PsiType result,
                                         @NotNull PsiManager manager,
                                         @NotNull GlobalSearchScope resolveScope) {
    return boxPrimitiveType(result, manager, resolveScope, false);
  }

  @NotNull
  public static PsiClassType createType(String fqName, @NotNull PsiElement context) {
    return createTypeByFQClassName(fqName, context);
  }

  @NotNull
  public static PsiClassType getJavaLangObject(@NotNull PsiElement context) {
    return PsiType.getJavaLangObject(context.getManager(), context.getResolveScope());
  }

  /** Null-tolerant LUB: a single null operand yields the other type. */
  @Nullable
  public static PsiType getLeastUpperBoundNullable(@Nullable PsiType type1, @Nullable PsiType type2, @NotNull PsiManager manager) {
    if (type1 == null) return type2;
    if (type2 == null) return type1;
    return getLeastUpperBound(type1, type2, manager);
  }

  /** Folds getLeastUpperBoundNullable over the collection; null for an empty collection. */
  @Nullable
  public static PsiType getLeastUpperBoundNullable(@NotNull Iterable<PsiType> collection, @NotNull PsiManager manager) {
    Iterator<PsiType> iterator = collection.iterator();
    if (!iterator.hasNext()) return null;
    PsiType result = iterator.next();
    while (iterator.hasNext()) {
      result = getLeastUpperBoundNullable(result, iterator.next(), manager);
    }
    return result;
  }

  /**
   * Least upper bound with Groovy-specific cases handled before delegating to
   * {@link GenericsUtil#getLeastUpperBound}: tuple/tuple (component-wise),
   * empty-list/list, map/map, closure/closure (signature LUB), GString/String,
   * and numeric promotion.
   */
  @Nullable
  public static PsiType getLeastUpperBound(@NotNull PsiType type1, @NotNull PsiType type2, PsiManager manager) {
    if (type1 instanceof GrTupleType && type2 instanceof GrTupleType) {
      GrTupleType tuple1 = (GrTupleType)type1;
      GrTupleType tuple2 = (GrTupleType)type2;
      PsiType[] components1 = tuple1.getComponentTypes();
      PsiType[] components2 = tuple2.getComponentTypes();

      if (components1.length == 0) return genNewListBy(type2, manager);
      if (components2.length == 0) return genNewListBy(type1, manager);

      PsiType[] components3 = new PsiType[Math.min(components1.length, components2.length)];
      for (int i = 0; i < components3.length; i++) {
        PsiType c1 = components1[i];
        PsiType c2 = components2[i];
        if (c1 == null || c2 == null) {
          components3[i] = null;
        }
        else {
          components3[i] = getLeastUpperBound(c1, c2, manager);
        }
      }
      return new GrTupleType(components3, JavaPsiFacade.getInstance(manager.getProject()),
                             tuple1.getScope().intersectWith(tuple2.getResolveScope()));
    }
    else if (checkEmptyListAndList(type1, type2)) {
      return genNewListBy(type2, manager);
    }
    else if (checkEmptyListAndList(type2, type1)) {
      return genNewListBy(type1, manager);
    }
    else if (type1 instanceof GrMapType && type2 instanceof GrMapType) {
      return GrMapType.merge(((GrMapType)type1), ((GrMapType)type2));
    }
    else if (checkEmptyMapAndMap(type1, type2)) {
      return genNewMapBy(type2, manager);
    }
    else if (checkEmptyMapAndMap(type2, type1)) {
      return genNewMapBy(type1, manager);
    }
    else if (type1 instanceof GrClosureType && type2 instanceof GrClosureType) {
      GrClosureType clType1 = (GrClosureType)type1;
      GrClosureType clType2 = (GrClosureType)type2;
      GrSignature signature1 = clType1.getSignature();
      GrSignature signature2 = clType2.getSignature();

      if (signature1 instanceof GrClosureSignature && signature2 instanceof GrClosureSignature) {
        if (((GrClosureSignature)signature1).getParameterCount() == ((GrClosureSignature)signature2).getParameterCount()) {
          final GrClosureSignature signature =
            GrClosureSignatureImpl.getLeastUpperBound(((GrClosureSignature)signature1), ((GrClosureSignature)signature2), manager);
          if (signature != null) {
            GlobalSearchScope scope = clType1.getResolveScope().intersectWith(clType2.getResolveScope());
            final LanguageLevel languageLevel = ComparatorUtil.max(clType1.getLanguageLevel(), clType2.getLanguageLevel());
            return GrClosureType.create(signature, scope, JavaPsiFacade.getInstance(manager.getProject()), languageLevel, true);
          }
        }
      }
    }
    else if (GroovyCommonClassNames.GROOVY_LANG_GSTRING.equals(type1.getCanonicalText()) &&
             CommonClassNames.JAVA_LANG_STRING.equals(type2.getInternalCanonicalText())) {
      return type2;
    }
    else if (GroovyCommonClassNames.GROOVY_LANG_GSTRING.equals(type2.getCanonicalText()) &&
             CommonClassNames.JAVA_LANG_STRING.equals(type1.getInternalCanonicalText())) {
      return type1;
    }

    final PsiType result = getLeastUpperBoundForNumericType(type1, type2);
    if (result != null) return result;

    return GenericsUtil.getLeastUpperBound(type1, type2, manager);
  }

  /** True when type1 is an empty tuple literal and type2 is a java.util.List inheritor. */
  private static boolean checkEmptyListAndList(PsiType type1, PsiType type2) {
    if (type1 instanceof GrTupleType) {
      PsiType[] types = ((GrTupleType)type1).getComponentTypes();
      if (types.length == 0 && InheritanceUtil.isInheritor(type2, JAVA_UTIL_LIST)) return true;
    }

    return false;
  }

  /** Builds List<E> where E is the iterable element type of genericOwner. */
  private static PsiType genNewListBy(PsiType genericOwner, PsiManager manager) {
    PsiClass list = JavaPsiFacade.getInstance(manager.getProject()).findClass(JAVA_UTIL_LIST, genericOwner.getResolveScope());
    PsiElementFactory factory = JavaPsiFacade.getElementFactory(manager.getProject());
    if (list == null) return factory.createTypeFromText(JAVA_UTIL_LIST, null);
    return factory.createType(list, PsiUtil.extractIterableTypeParameter(genericOwner, false));
  }

  /** True when type1 is an empty map literal and type2 is a java.util.Map inheritor. */
  private static boolean checkEmptyMapAndMap(PsiType type1, PsiType type2) {
    if (type1 instanceof GrMapType) {
      PsiType[] types = ((GrMapType)type1).getAllKeyTypes();
      if (types.length == 0 && InheritanceUtil.isInheritor(type2, JAVA_UTIL_MAP)) return true;
    }

    return false;
  }

  /** Builds Map<K,V> with K/V taken from genericOwner's Map substitution. */
  private static PsiType genNewMapBy(PsiType genericOwner, PsiManager manager) {
    PsiClass map = JavaPsiFacade.getInstance(manager.getProject()).findClass(JAVA_UTIL_MAP, genericOwner.getResolveScope());
    PsiElementFactory factory = JavaPsiFacade.getElementFactory(manager.getProject());
    if (map == null) return factory.createTypeFromText(JAVA_UTIL_MAP, null);

    final PsiType key = PsiUtil.substituteTypeParameter(genericOwner, JAVA_UTIL_MAP, 0, false);
    final PsiType value = PsiUtil.substituteTypeParameter(genericOwner, JAVA_UTIL_MAP, 1, false);
    return factory.createType(map, key, value);
  }

  /** Type denoted by a literal/keyword token, or null if the token is not in the table. */
  @Nullable
  public static PsiType getPsiType(PsiElement context, IElementType elemType) {
    if (elemType == kNULL) {
      return PsiType.NULL;
    }
    final String typeName = getBoxedTypeName(elemType);
    if (typeName != null) {
      return createTypeByFQClassName(typeName, context);
    }
    return null;
  }

  @Nullable
  public static String getBoxedTypeName(IElementType elemType) {
    return ourPrimitiveTypesToClassNames.get(elemType);
  }

  /** LUB of a set of classes; java.lang.Object for an empty array. */
  @NotNull
  public static PsiType getLeastUpperBound(PsiClass[] classes, PsiManager manager) {
    PsiElementFactory factory = JavaPsiFacade.getElementFactory(manager.getProject());

    if (classes.length == 0) return factory.createTypeByFQClassName(JAVA_LANG_OBJECT);

    PsiType type = factory.createType(classes[0]);

    for (int i = 1; i < classes.length; i++) {
      PsiType t = getLeastUpperBound(type, factory.createType(classes[i]), manager);
      if (t != null) {
        type = t;
      }
    }

    return type;
  }

  /** True when type resolves to a class with exactly the given qualified name. */
  public static boolean isClassType(@Nullable PsiType type, @NotNull String qName) {
    if (type instanceof PsiClassType) {
      final PsiClass psiClass = ((PsiClassType)type).resolve();
      return psiClass != null && qName.equals(psiClass.getQualifiedName());
    }
    return false;
  }

  /** (s2 . s1): applies s2 to every mapping of s1, then adds s2's own mappings not shadowed by s1. */
  public static PsiSubstitutor composeSubstitutors(PsiSubstitutor s1, PsiSubstitutor s2) {
    final Map<PsiTypeParameter, PsiType> map = s1.getSubstitutionMap();
    Map<PsiTypeParameter, PsiType> result = new THashMap<PsiTypeParameter, PsiType>(map.size());
    for (PsiTypeParameter parameter : map.keySet()) {
      result.put(parameter, s2.substitute(map.get(parameter)));
    }
    final Map<PsiTypeParameter, PsiType> map2 = s2.getSubstitutionMap();
    for (PsiTypeParameter parameter : map2.keySet()) {
      if (!result.containsKey(parameter)) {
        result.put(parameter, map2.get(parameter));
      }
    }
    return PsiSubstitutorImpl.createSubstitutor(result);
  }

  @NotNull
  public static PsiClassType createTypeByFQClassName(@NotNull String fqName, @NotNull PsiElement context) {
    return GroovyPsiManager.getInstance(context.getProject()).createTypeByFQClassName(fqName, context.getResolveScope());
  }

  /** Class<T> for the given T, or null when java.lang.Class is not in the scope. */
  @Nullable
  public static PsiType createJavaLangClassType(@Nullable PsiType type, Project project, GlobalSearchScope resolveScope) {
    final JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
    PsiType result = null;
    PsiClass javaLangClass = facade.findClass(JAVA_LANG_CLASS, resolveScope);
    if (javaLangClass != null) {
      PsiSubstitutor substitutor = PsiSubstitutor.EMPTY;
      final PsiTypeParameter[] typeParameters = javaLangClass.getTypeParameters();
      if (typeParameters.length == 1) {
        substitutor = substitutor.put(typeParameters[0], type);
      }
      result = facade.getElementFactory().createType(javaLangClass, substitutor);
    }
    return result;
  }

  // NOTE(review): because VOID is the last entry of PRIMITIVES, the in-loop
  // VOID check makes any typeText that matches none of the first eight
  // primitives return PsiType.VOID, and the assert below is unreachable —
  // looks intentional as a fallback, but confirm against callers before
  // changing.
  @NotNull
  public static PsiPrimitiveType getPrimitiveTypeByText(String typeText) {
    for (final PsiPrimitiveType primitive : PRIMITIVES) {
      if (PsiType.VOID.equals(primitive)) {
        return primitive;
      }
      if (primitive.getCanonicalText().equals(typeText)) {
        return primitive;
      }
    }

    assert false : "Unknown primitive type";
    return null;
  }

  /** List<E> for the given element class (raw-ish fallback when java.util.List is unresolved). */
  @NotNull
  public static PsiClassType createListType(@NotNull PsiClass elements) {
    JavaPsiFacade facade = JavaPsiFacade.getInstance(elements.getProject());
    GlobalSearchScope resolveScope = elements.getResolveScope();
    PsiClass listClass = facade.findClass(JAVA_UTIL_LIST, resolveScope);
    if (listClass == null) {
      return facade.getElementFactory().createTypeByFQClassName(JAVA_UTIL_LIST, resolveScope);
    }
    return facade.getElementFactory().createType(listClass, facade.getElementFactory().createType(elements));
  }

  /** Set<T> for the given element type (raw fallback when java.util.Set is unresolved or raw). */
  @NotNull
  public static PsiType createSetType(@NotNull PsiElement context, @NotNull PsiType type) {
    JavaPsiFacade facade = JavaPsiFacade.getInstance(context.getProject());
    GlobalSearchScope resolveScope = context.getResolveScope();
    PsiClass setClass = facade.findClass(JAVA_UTIL_SET, resolveScope);
    if (setClass != null && setClass.getTypeParameters().length == 1) {
      return facade.getElementFactory().createType(setClass, type);
    }
    return facade.getElementFactory().createTypeByFQClassName(JAVA_UTIL_SET, resolveScope);
  }

  /** True when the class or any of its (cached) supers carries the given annotation. */
  public static boolean isAnnotatedCheckHierarchyWithCache(@NotNull PsiClass aClass, @NotNull String annotationFQN) {
    Map<String, PsiClass> classMap = getSuperClassesWithCache(aClass);

    for (PsiClass psiClass : classMap.values()) {
      PsiModifierList modifierList = psiClass.getModifierList();
      if (modifierList != null) {
        if (modifierList.findAnnotation(annotationFQN) != null) {
          return true;
        }
      }
    }

    return false;
  }

  /** Qualified-name -> class map of aClass and all its super classes, cached on the class element. */
  public static Map<String, PsiClass> getSuperClassesWithCache(@NotNull PsiClass aClass) {
    Map<String, PsiClass> superClassNames = PARENT_CACHE_KEY.getCachedValue(aClass);
    if (superClassNames == null) {
      Set<PsiClass> superClasses = new THashSet<PsiClass>();
      superClasses.add(aClass);
      InheritanceUtil.getSuperClasses(aClass, superClasses, true);

      superClassNames = new LinkedHashMap<String, PsiClass>();
      for (PsiClass superClass : superClasses) {
        superClassNames.put(superClass.getQualifiedName(), superClass);
      }

      superClassNames = PARENT_CACHE_KEY.putCachedValue(aClass, superClassNames);
    }

    return superClassNames;
  }

  /** Substitutes, boxes, normalizes wildcards by position, and applies spread state — in that order. */
  @Nullable
  public static PsiType substituteBoxAndNormalizeType(@Nullable PsiType type,
                                                      @NotNull PsiSubstitutor substitutor,
                                                      @Nullable SpreadState state,
                                                      @NotNull GrExpression expression) {
    if (type == null) return null;
    GlobalSearchScope resolveScope = expression.getResolveScope();
    PsiManager manager = expression.getManager();
    type = substitutor.substitute(type);
    type = boxPrimitiveType(type, manager, resolveScope);
    if (type == null) return null;
    type = PsiImplUtil.normalizeWildcardTypeByPosition(type, expression);
    type = SpreadState.apply(type, state, expression.getProject());
    return type;
  }

  /** Element type of an array or iterable container; null otherwise. */
  @Nullable
  public static PsiType getItemType(@Nullable PsiType containerType) {
    if (containerType == null) return null;
    if (containerType instanceof PsiArrayType) return ((PsiArrayType)containerType).getComponentType();
    return PsiUtil.extractIterableTypeParameter(containerType, false);
  }

  /**
   * Picks a concrete collection type matching the given collection's interface
   * (SortedSet -> SortedSet, Set -> HashSet, List -> ArrayList, ...), defaulting
   * to ArrayList.
   */
  @Nullable
  public static PsiClassType createSimilarCollection(@Nullable PsiType collection, Project project, PsiType... itemType) {
    if (InheritanceUtil.isInheritor(collection, "java.util.SortedSet")) {
      return createCollection(project, "java.util.SortedSet", itemType);
    }
    if (InheritanceUtil.isInheritor(collection, "java.util.LinkedHashSet")) {
      return createCollection(project, "java.util.LinkedHashSet", itemType);
    }
    if (InheritanceUtil.isInheritor(collection, CommonClassNames.JAVA_UTIL_SET)) {
      return createCollection(project, "java.util.HashSet", itemType);
    }
    if (InheritanceUtil.isInheritor(collection, "java.util.LinkedList")) {
      // Fixed: the class name was misspelled "java.util.LInkedList", which made
      // findClass() fail inside createCollection() and this branch always
      // return null instead of a LinkedList<T> type.
      return createCollection(project, "java.util.LinkedList", itemType);
    }
    if (InheritanceUtil.isInheritor(collection, "java.util.Stack")) {
      return createCollection(project, "java.util.Stack", itemType);
    }
    if (InheritanceUtil.isInheritor(collection, "java.util.Vector")) {
      return createCollection(project, "java.util.Vector", itemType);
    }
    if (InheritanceUtil.isInheritor(collection, CommonClassNames.JAVA_UTIL_LIST)) {
      return createCollection(project, "java.util.ArrayList", itemType);
    }
    if (InheritanceUtil.isInheritor(collection, "java.util.Queue")) {
      return createCollection(project, "java.util.LinkedList", itemType);
    }

    return createCollection(project, "java.util.ArrayList", itemType);
  }

  /** Instantiates collectionName with the given type argument; null if the class is missing or not single-parameter. */
  @Nullable
  private static PsiClassType createCollection(Project project, String collectionName, PsiType... item) {
    PsiElementFactory factory = JavaPsiFacade.getElementFactory(project);
    PsiClass collection =
      JavaPsiFacade.getInstance(project).findClass(collectionName, GlobalSearchScope.allScope(project));
    if (collection == null) return null;

    PsiTypeParameter[] parameters = collection.getTypeParameters();
    if (parameters.length != 1) return null;

    return factory.createType(collection, item);
  }

  /** Type of an annotation member value: expression type, annotation class type, or tuple of array-initializer element types. */
  @Nullable
  public static PsiType inferAnnotationMemberValueType(GrAnnotationMemberValue value) {
    if (value instanceof GrExpression) {
      return ((GrExpression)value).getType();
    }
    else if (value instanceof GrAnnotation) {
      final PsiElement resolved = ((GrAnnotation)value).getClassReference().resolve();
      if (resolved instanceof PsiClass) {
        return JavaPsiFacade.getElementFactory(value.getProject()).createType((PsiClass)resolved, PsiSubstitutor.EMPTY);
      }

      return null;
    }
    else if (value instanceof GrAnnotationArrayInitializer) {
      return getTupleByAnnotationArrayInitializer((GrAnnotationArrayInitializer)value);
    }

    return null;
  }

  public static PsiType getTupleByAnnotationArrayInitializer(GrAnnotationArrayInitializer value) {
    final GrAnnotationMemberValue[] initializers = value.getInitializers();
    PsiType[] types = ContainerUtil.map(initializers, new Function<GrAnnotationMemberValue, PsiType>() {
      @Override
      public PsiType fun(GrAnnotationMemberValue value) {
        return inferAnnotationMemberValueType(value);
      }
    }, new PsiType[initializers.length]);

    return new GrTupleType(types, JavaPsiFacade.getInstance(value.getProject()), value.getResolveScope());
  }

  public static boolean resolvesTo(PsiType type, String fqn) {
    if (type instanceof PsiClassType) {
      final PsiClass resolved = ((PsiClassType)type).resolve();
      return resolved != null && fqn.equals(resolved.getQualifiedName());
    }
    return false;
  }

  /**
   * Replaces every parameterized/wildcard/captured type argument of a class
   * type with its raw type or bound; null when nothing changed or the type is
   * not a resolvable class type.
   */
  @Nullable
  public static PsiType rawSecondGeneric(PsiType type, Project project) {
    if (!(type instanceof PsiClassType)) return null;

    final PsiClassType.ClassResolveResult result = ((PsiClassType)type).resolveGenerics();
    final PsiClass element = result.getElement();
    if (element == null) return null;

    final PsiType[] parameters = ((PsiClassType)type).getParameters();
    boolean changed = false;
    for (int i = 0; i < parameters.length; i++) {
      PsiType parameter = parameters[i];
      if (parameter == null) continue;

      final Ref<PsiType> newParam = new Ref<PsiType>();
      parameter.accept(new PsiTypeVisitorEx<Object>() {
        @Nullable
        @Override
        public Object visitClassType(PsiClassType classType) {
          if (classType.getParameterCount() > 0) {
            newParam.set(classType.rawType());
          }
          return null;
        }

        @Nullable
        @Override
        public Object visitCapturedWildcardType(PsiCapturedWildcardType capturedWildcardType) {
          newParam.set(capturedWildcardType.getWildcard().getBound());
          return null;
        }

        @Nullable
        @Override
        public Object visitWildcardType(PsiWildcardType wildcardType) {
          newParam.set(wildcardType.getBound());
          return null;
        }
      });

      if (!newParam.isNull()) {
        changed = true;
        parameters[i] = newParam.get();
      }
    }
    if (!changed) return null;
    return JavaPsiFacade.getElementFactory(project).createType(element, parameters);
  }

  /** True when type resolves exactly to groovy.lang.Closure. */
  public static boolean isPsiClassTypeToClosure(PsiType type) {
    if (!(type instanceof PsiClassType)) return false;

    final PsiClass psiClass = ((PsiClassType)type).resolve();
    if (psiClass == null) return false;

    return GROOVY_LANG_CLOSURE.equals(psiClass.getQualifiedName());
  }
}
// Copyright 2018 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.query2; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.devtools.build.lib.analysis.AliasProvider; import com.google.devtools.build.lib.analysis.TargetAndConfiguration; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.config.transitions.TransitionFactory; import com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.TargetParsingException; import com.google.devtools.build.lib.cmdline.TargetPattern; import com.google.devtools.build.lib.collect.compacthashset.CompactHashSet; import com.google.devtools.build.lib.concurrent.MultisetSemaphore; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.ExtendedEventHandler; import com.google.devtools.build.lib.packages.AspectClass; import com.google.devtools.build.lib.packages.DependencyFilter; import 
com.google.devtools.build.lib.packages.NoSuchTargetException; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.packages.Target; import com.google.devtools.build.lib.pkgcache.FilteringPolicies; import com.google.devtools.build.lib.pkgcache.PackageManager; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.query2.common.AbstractBlazeQueryEnvironment; import com.google.devtools.build.lib.query2.engine.KeyExtractor; import com.google.devtools.build.lib.query2.engine.MinDepthUniquifier; import com.google.devtools.build.lib.query2.engine.QueryEnvironment; import com.google.devtools.build.lib.query2.engine.QueryEvalResult; import com.google.devtools.build.lib.query2.engine.QueryException; import com.google.devtools.build.lib.query2.engine.QueryExpression; import com.google.devtools.build.lib.query2.engine.QueryExpressionContext; import com.google.devtools.build.lib.query2.engine.QueryUtil.MinDepthUniquifierImpl; import com.google.devtools.build.lib.query2.engine.QueryUtil.MutableKeyExtractorBackedMapImpl; import com.google.devtools.build.lib.query2.engine.QueryUtil.UniquifierImpl; import com.google.devtools.build.lib.query2.engine.ThreadSafeOutputFormatterCallback; import com.google.devtools.build.lib.query2.engine.Uniquifier; import com.google.devtools.build.lib.rules.AliasConfiguredTarget; import com.google.devtools.build.lib.server.FailureDetails.ConfigurableQuery; import com.google.devtools.build.lib.skyframe.AspectValueKey.AspectKey; import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey; import com.google.devtools.build.lib.skyframe.ConfiguredTargetValue; import com.google.devtools.build.lib.skyframe.GraphBackedRecursivePackageProvider; import com.google.devtools.build.lib.skyframe.IgnoredPackagePrefixesValue; import com.google.devtools.build.lib.skyframe.PackageValue; import 
com.google.devtools.build.lib.skyframe.RecursivePackageProviderBackedTargetPatternResolver; import com.google.devtools.build.lib.skyframe.RecursivePkgValueRootPackageExtractor; import com.google.devtools.build.lib.skyframe.SkyFunctions; import com.google.devtools.build.lib.skyframe.SkyframeExecutor; import com.google.devtools.build.lib.skyframe.TargetPatternValue; import com.google.devtools.build.lib.skyframe.TargetPatternValue.TargetPatternKey; import com.google.devtools.build.lib.supplier.InterruptibleSupplier; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.WalkableGraph; import java.io.IOException; import java.io.OutputStream; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import javax.annotation.Nullable; /** * {@link QueryEnvironment} that runs queries based on results from the analysis phase. * * <p>This environment can theoretically be used for multiple queries, but currently is only ever * used for one over the course of its lifetime. If this ever changed to be used for multiple, the * {@link TargetAccessor} field should be initialized on a per-query basis not a per-environment * basis. * * <p>Aspects are followed if {@link * com.google.devtools.build.lib.query2.common.CommonQueryOptions#useAspects} is on. 
*/
public abstract class PostAnalysisQueryEnvironment<T> extends AbstractBlazeQueryEnvironment<T> {
  protected final TopLevelConfigurations topLevelConfigurations;
  protected final BuildConfiguration hostConfiguration;
  private final PathFragment parserPrefix;
  private final PathPackageLocator pkgPath;
  private final Supplier<WalkableGraph> walkableGraphSupplier;
  // Populated from walkableGraphSupplier in beforeEvaluateQuery(), i.e. only valid once a
  // query evaluation has started.
  protected WalkableGraph graph;

  private static final Function<SkyKey, ConfiguredTargetKey> SKYKEY_TO_CTKEY =
      skyKey -> (ConfiguredTargetKey) skyKey.argument();
  private static final ImmutableList<TargetPattern> ALL_PATTERNS =
      ImmutableList.of(TargetPattern.defaultParser().parseConstantUnchecked("//..."));

  // Also initialized in beforeEvaluateQuery(), per evaluation.
  protected RecursivePackageProviderBackedTargetPatternResolver resolver;

  public PostAnalysisQueryEnvironment(
      boolean keepGoing,
      ExtendedEventHandler eventHandler,
      Iterable<QueryFunction> extraFunctions,
      TopLevelConfigurations topLevelConfigurations,
      BuildConfiguration hostConfiguration,
      PathFragment parserPrefix,
      PathPackageLocator pkgPath,
      Supplier<WalkableGraph> walkableGraphSupplier,
      Set<Setting> settings) {
    super(keepGoing, true, Rule.ALL_LABELS, eventHandler, settings, extraFunctions);
    this.topLevelConfigurations = topLevelConfigurations;
    this.hostConfiguration = hostConfiguration;
    this.parserPrefix = parserPrefix;
    this.pkgPath = pkgPath;
    this.walkableGraphSupplier = walkableGraphSupplier;
  }

  public abstract ImmutableList<NamedThreadSafeOutputFormatterCallback<T>>
      getDefaultOutputFormatters(
          TargetAccessor<T> accessor,
          ExtendedEventHandler eventHandler,
          OutputStream outputStream,
          SkyframeExecutor skyframeExecutor,
          BuildConfiguration hostConfiguration,
          @Nullable TransitionFactory<Rule> trimmingTransitionFactory,
          PackageManager packageManager)
          throws QueryException, InterruptedException;

  public abstract String getOutputFormat();

  protected abstract KeyExtractor<T, ConfiguredTargetKey> getConfiguredTargetKeyExtractor();

  @Override
  public QueryEvalResult evaluateQuery(
      QueryExpression expr, ThreadSafeOutputFormatterCallback<T> callback)
      throws QueryException, InterruptedException, IOException {
    beforeEvaluateQuery();
    return super.evaluateQuery(expr, callback);
  }

  // Sets up the per-evaluation state (graph, target-pattern resolver) and validates settings.
  private void beforeEvaluateQuery() throws QueryException {
    graph = walkableGraphSupplier.get();
    GraphBackedRecursivePackageProvider graphBackedRecursivePackageProvider =
        new GraphBackedRecursivePackageProvider(
            graph, ALL_PATTERNS, pkgPath, new RecursivePkgValueRootPackageExtractor());
    resolver =
        new RecursivePackageProviderBackedTargetPatternResolver(
            graphBackedRecursivePackageProvider,
            eventHandler,
            FilteringPolicies.NO_FILTER,
            MultisetSemaphore.unbounded());
    checkSettings(settings);
  }

  // Check to make sure the settings requested are currently supported by this class
  private void checkSettings(Set<Setting> settings) throws QueryException {
    if (settings.contains(Setting.NO_NODEP_DEPS)
        || settings.contains(Setting.TESTS_EXPRESSION_STRICT)) {
      // Reassigned only so the error message lists the unsupported settings, excluding the
      // two supported filters.
      settings =
          Sets.difference(
              settings, ImmutableSet.of(Setting.ONLY_TARGET_DEPS, Setting.NO_IMPLICIT_DEPS));
      throw new QueryException(
          String.format(
              "The following filter(s) are not currently supported by configured query: %s",
              settings),
          ConfigurableQuery.Code.FILTERS_NOT_SUPPORTED);
    }
  }

  public BuildConfiguration getHostConfiguration() {
    return hostConfiguration;
  }

  // TODO(bazel-team): It's weird that this untemplated function exists. Fix? Or don't implement?
  @Override
  public Target getTarget(Label label) throws TargetNotFoundException, InterruptedException {
    try {
      return ((PackageValue)
              walkableGraphSupplier.get().getValue(PackageValue.key(label.getPackageIdentifier())))
          .getPackage()
          .getTarget(label.getName());
    } catch (NoSuchTargetException e) {
      throw new TargetNotFoundException(e, e.getDetailedExitCode());
    }
  }

  @Override
  public T getOrCreate(T target) {
    return target;
  }

  /**
   * This method has to exist because {@link AliasConfiguredTarget#getLabel()} returns the label of
   * the "actual" target instead of the alias target. Grr.
   */
  public abstract Label getCorrectLabel(T target);

  @Nullable
  protected abstract T getHostConfiguredTarget(Label label) throws InterruptedException;

  @Nullable
  protected abstract T getTargetConfiguredTarget(Label label) throws InterruptedException;

  @Nullable
  protected abstract T getNullConfiguredTarget(Label label) throws InterruptedException;

  @Nullable
  public ConfiguredTargetValue getConfiguredTargetValue(SkyKey key) throws InterruptedException {
    return (ConfiguredTargetValue) walkableGraphSupplier.get().getValue(key);
  }

  // NOTE(review): assumes getConfiguredTargetValue(key) is non-null for the given key; a missing
  // graph entry would NPE here.
  private boolean isAliasConfiguredTarget(ConfiguredTargetKey key) throws InterruptedException {
    return getConfiguredTargetValue(key).getConfiguredTarget().getProvider(AliasProvider.class)
        != null;
  }

  public InterruptibleSupplier<ImmutableSet<PathFragment>>
      getIgnoredPackagePrefixesPathFragments() {
    return () -> {
      IgnoredPackagePrefixesValue ignoredPackagePrefixesValue =
          (IgnoredPackagePrefixesValue)
              walkableGraphSupplier.get().getValue(IgnoredPackagePrefixesValue.key());
      // The value may be missing from the graph; treat that as "no ignored prefixes".
      return ignoredPackagePrefixesValue == null
          ? ImmutableSet.of()
          : ignoredPackagePrefixesValue.getPatterns();
    };
  }

  @Nullable
  protected abstract T getValueFromKey(SkyKey key) throws InterruptedException;

  /** Parses {@code pattern} (relative to {@code parserPrefix}) into a {@link TargetPattern}. */
  protected TargetPattern getPattern(String pattern) throws TargetParsingException {
    TargetPatternKey targetPatternKey =
        ((TargetPatternKey)
            TargetPatternValue.key(pattern, FilteringPolicies.NO_FILTER, parserPrefix).argument());
    return targetPatternKey.getParsedPattern();
  }

  public ThreadSafeMutableSet<T> getFwdDeps(Iterable<T> targets) throws InterruptedException {
    Map<SkyKey, T> targetsByKey = Maps.newHashMapWithExpectedSize(Iterables.size(targets));
    for (T target : targets) {
      targetsByKey.put(getSkyKey(target), target);
    }
    Map<SkyKey, ImmutableList<ClassifiedDependency<T>>> directDeps =
        targetifyValues(targetsByKey, graph.getDirectDeps(targetsByKey.keySet()));
    // Keys absent from the graph produce no entry in directDeps; warn rather than fail.
    if (targetsByKey.size() != directDeps.size()) {
      Iterable<ConfiguredTargetKey> missingTargets =
          Sets.difference(targetsByKey.keySet(), directDeps.keySet()).stream()
              .map(SKYKEY_TO_CTKEY)
              .collect(Collectors.toList());
      eventHandler.handle(Event.warn("Targets were missing from graph: " + missingTargets));
    }
    ThreadSafeMutableSet<T> result = createThreadSafeMutableSet();
    for (Map.Entry<SkyKey, ImmutableList<ClassifiedDependency<T>>> entry : directDeps.entrySet()) {
      result.addAll(filterFwdDeps(targetsByKey.get(entry.getKey()), entry.getValue()));
    }
    return result;
  }

  @Override
  public ThreadSafeMutableSet<T> getFwdDeps(Iterable<T> targets, QueryExpressionContext<T> context)
      throws InterruptedException {
    return getFwdDeps(targets);
  }

  private ImmutableList<T> filterFwdDeps(
      T configTarget, ImmutableList<ClassifiedDependency<T>> rawFwdDeps) {
    if (settings.isEmpty()) {
      return getDependencies(rawFwdDeps);
    }
    return getAllowedDeps(configTarget, rawFwdDeps);
  }

  @Override
  public Collection<T> getReverseDeps(Iterable<T> targets, QueryExpressionContext<T> context)
      throws InterruptedException {
    Map<SkyKey, T> targetsByKey = Maps.newHashMapWithExpectedSize(Iterables.size(targets));
    for (T target : targets) {
      targetsByKey.put(getSkyKey(target), target);
    }
    Map<SkyKey, ImmutableList<ClassifiedDependency<T>>> reverseDepsByKey =
        targetifyValues(targetsByKey, graph.getReverseDeps(targetsByKey.keySet()));
    if (targetsByKey.size() != reverseDepsByKey.size()) {
      Iterable<ConfiguredTargetKey> missingTargets =
          Sets.difference(targetsByKey.keySet(), reverseDepsByKey.keySet()).stream()
              .map(SKYKEY_TO_CTKEY)
              .collect(Collectors.toList());
      eventHandler.handle(Event.warn("Targets were missing from graph: " + missingTargets));
    }
    Map<T, ImmutableList<ClassifiedDependency<T>>> reverseDepsByCT = new HashMap<>();
    for (Map.Entry<SkyKey, ImmutableList<ClassifiedDependency<T>>> entry :
        reverseDepsByKey.entrySet()) {
      reverseDepsByCT.put(targetsByKey.get(entry.getKey()), entry.getValue());
    }
    return reverseDepsByCT.isEmpty() ? Collections.emptyList() : filterReverseDeps(reverseDepsByCT);
  }

  // Applies the dependency filter to reverse deps: rule-configured parents go through
  // getAllowedDeps; all other parents are kept unconditionally.
  private Collection<T> filterReverseDeps(
      Map<T, ImmutableList<ClassifiedDependency<T>>> rawReverseDeps) {
    Set<T> result = CompactHashSet.create();
    for (Map.Entry<T, ImmutableList<ClassifiedDependency<T>>> targetAndRdeps :
        rawReverseDeps.entrySet()) {
      ImmutableList.Builder<ClassifiedDependency<T>> ruleDeps = ImmutableList.builder();
      for (ClassifiedDependency<T> parent : targetAndRdeps.getValue()) {
        T dependency = parent.dependency;
        if (parent.dependency instanceof RuleConfiguredTarget
            && dependencyFilter != DependencyFilter.ALL_DEPS) {
          ruleDeps.add(parent);
        } else {
          result.add(dependency);
        }
      }
      result.addAll(getAllowedDeps(targetAndRdeps.getKey(), ruleDeps.build()));
    }
    return result;
  }

  /**
   * Filters {@code deps} according to the active query settings (tool-configuration and
   * implicit-dependency filtering).
   *
   * @param target source target
   * @param deps next level of deps to filter
   */
  private ImmutableList<T> getAllowedDeps(T target, Collection<ClassifiedDependency<T>> deps) {
    // It's possible to query on a target that's configured in the host configuration. In those
    // cases if --notool_deps is turned on, we only allow reachable targets that are ALSO in the
    // host config. This is somewhat counterintuitive and subject to change in the future but seems
    // like the best option right now.
    if (settings.contains(Setting.ONLY_TARGET_DEPS)) {
      BuildConfiguration currentConfig = getConfiguration(target);
      if (currentConfig != null && currentConfig.isToolConfiguration()) {
        deps =
            deps.stream()
                .filter(
                    dep ->
                        getConfiguration(dep.dependency) != null
                            && getConfiguration(dep.dependency).isToolConfiguration())
                .collect(Collectors.toList());
      } else {
        deps =
            deps.stream()
                .filter(
                    dep ->
                        // We include source files, which have null configuration, even though
                        // they can also appear on host-configured attributes like genrule#tools.
                        // While this may not be strictly correct, it's better to overapproximate
                        // than underapproximate the results.
                        getConfiguration(dep.dependency) == null
                            || !getConfiguration(dep.dependency).isToolConfiguration())
                .collect(Collectors.toList());
      }
    }
    if (settings.contains(Setting.NO_IMPLICIT_DEPS)) {
      RuleConfiguredTarget ruleConfiguredTarget = getRuleConfiguredTarget(target);
      if (ruleConfiguredTarget != null) {
        deps = deps.stream().filter(dep -> !dep.implicit).collect(Collectors.toList());
      }
    }
    return getDependencies(deps);
  }

  protected abstract RuleConfiguredTarget getRuleConfiguredTarget(T target);

  /**
   * Returns targetified dependencies wrapped as {@link ClassifiedDependency} objects which include
   * information on if the target is an implicit or explicit dependency.
   *
   * <p>A target may have toolchain dependencies and aspects attached to its deps that declare their
   * own dependencies through private attributes. All of these are considered implicit dependencies
   * of the target.
   *
   * @param parent Parent target that knows about its attribute-attached implicit deps. If this is
   *     null, that is a signal from the caller that all dependencies should be considered implicit.
   * @param dependencies dependencies to targetify
   * @param knownCtDeps the keys of configured target deps already added to the deps list. Outside
   *     callers should pass an empty set. This is used for recursive calls to prevent aspect and
   *     toolchain deps from duplicating the target's direct deps.
   * @param resolvedAspectClasses aspect classes that have already been examined for dependencies.
   *     Aspects can add dependencies through privately declared label-based attributes. Aspects may
   *     also propagate down the target's deps. So if an aspect of type C is attached to target T
   *     that depends on U and V, the aspect may depend on more type C aspects attached to U and V
   *     that themselves depend on type C aspects attached to U and V's deps and so on. Since C
   *     defines the aspect's deps, all of those aspect instances have the same deps, which makes
   *     examining each of them down T's transitive deps very wasteful. This parameter lets us
   *     avoid that redundancy.
   */
  private ImmutableList<ClassifiedDependency<T>> targetifyValues(
      @Nullable T parent,
      Iterable<SkyKey> dependencies,
      Set<SkyKey> knownCtDeps,
      Set<AspectClass> resolvedAspectClasses)
      throws InterruptedException {
    Collection<ConfiguredTargetKey> implicitDeps = null;
    if (parent != null) {
      RuleConfiguredTarget ruleConfiguredTarget = getRuleConfiguredTarget(parent);
      if (ruleConfiguredTarget != null) {
        implicitDeps = ruleConfiguredTarget.getImplicitDeps();
      }
    }
    ImmutableList.Builder<ClassifiedDependency<T>> values = ImmutableList.builder();
    // TODO(bazel-team): An even better approach would be to treat aspects and toolchains as
    // first-class query nodes just like targets. In other words, let query expressions reference
    // them (they also have identifying labels) and make the graph connections between targets,
    // aspects, and toolchains explicit. That would permit more detailed queries and eliminate the
    // per-key-type special casing below. The challenge is to generalize all query code that
    // currently assumes its inputs are configured targets. Toolchains may have additional caveats:
    // see b/148550864.
    for (SkyKey key : dependencies) {
      if (knownCtDeps.contains(key)) {
        continue;
      }
      if (key.functionName().equals(SkyFunctions.CONFIGURED_TARGET)) {
        T dependency = getValueFromKey(key);
        Preconditions.checkState(
            dependency != null,
            "query-requested node '%s' was unavailable in the query environment graph. If you"
                + " come across this error, please ping b/150301500 or contact the blaze"
                + " configurability team.",
            key);
        // A dep is explicit only when the parent is known and does not list it among its
        // attribute-attached implicit deps.
        boolean implicit =
            implicitDeps == null
                || implicitDeps.contains(
                    ConfiguredTargetKey.builder()
                        .setLabel(getCorrectLabel(dependency))
                        .setConfiguration(getConfiguration(dependency))
                        .build());
        values.add(new ClassifiedDependency<>(dependency, implicit));
        knownCtDeps.add(key);
      } else if (settings.contains(Setting.INCLUDE_ASPECTS)
          && key.functionName().equals(SkyFunctions.ASPECT)
          && !resolvedAspectClasses.contains(((AspectKey) key).getAspectClass())) {
        // When an aspect is attached to an alias configured target, it bypasses standard dependency
        // resolution and just Skyframe-loads the same aspect for the alias' referent. That means
        // the original aspect's attribute deps aren't Skyframe-resolved through AspectFunction's
        // usual call to ConfiguredTargetFunction.computeDependencies, so graph.getDirectDeps()
        // won't include them. So we defer "resolving" the aspect class to the non-alias version,
        // which properly reflects all dependencies. See AspectFunction for details.
        if (!isAliasConfiguredTarget(((AspectKey) key).getBaseConfiguredTargetKey())) {
          // Make sure we don't examine aspects of this type again. This saves us from unnecessarily
          // traversing a target's transitive deps because it propagates an aspect down those deps.
          // The deps added by the aspect are a function of the aspect's class, not the target it's
          // attached to. And they can't be configured because aspects have no UI for overriding
          // attribute defaults. So it's sufficient to examine only a single instance of a given
          // aspect class. This has real memory and performance consequences: see b/163052263.
          // Note the aspect could attach *another* aspect type to its deps. That will still get
          // examined through the recursive call.
          resolvedAspectClasses.add(((AspectKey) key).getAspectClass());
        }
        values.addAll(
            targetifyValues(null, graph.getDirectDeps(key), knownCtDeps, resolvedAspectClasses));
      } else if (key.functionName().equals(SkyFunctions.TOOLCHAIN_RESOLUTION)) {
        values.addAll(
            targetifyValues(null, graph.getDirectDeps(key), knownCtDeps, resolvedAspectClasses));
      }
    }
    return values.build();
  }

  // Batch variant: targetifies the dep keys of each entry in {@code input}, keyed by source key.
  private Map<SkyKey, ImmutableList<ClassifiedDependency<T>>> targetifyValues(
      Map<SkyKey, T> fromTargetsByKey, Map<SkyKey, ? extends Iterable<SkyKey>> input)
      throws InterruptedException {
    Map<SkyKey, ImmutableList<ClassifiedDependency<T>>> result = new HashMap<>();
    for (Map.Entry<SkyKey, ? extends Iterable<SkyKey>> entry : input.entrySet()) {
      SkyKey fromKey = entry.getKey();
      result.put(
          fromKey,
          targetifyValues(
              fromTargetsByKey.get(fromKey),
              entry.getValue(),
              /*knownCtDeps=*/ new HashSet<>(),
              /*resolvedAspectClasses=*/ new HashSet<>()));
    }
    return result;
  }

  /** A class to store a dependency with some information. */
  private static class ClassifiedDependency<T> {
    // True if this dependency is attached implicitly.
    boolean implicit;
    T dependency;

    private ClassifiedDependency(T dependency, boolean implicit) {
      this.implicit = implicit;
      this.dependency = dependency;
    }
  }

  private static <T> ImmutableList<T> getDependencies(
      Collection<ClassifiedDependency<T>> classifiedDependencies) {
    return classifiedDependencies.stream()
        .map(dep -> dep.dependency)
        .collect(ImmutableList.toImmutableList());
  }

  @Nullable
  protected abstract BuildConfiguration getConfiguration(T target);

  protected abstract ConfiguredTargetKey getSkyKey(T target);

  @Override
  public ThreadSafeMutableSet<T> getTransitiveClosure(
      ThreadSafeMutableSet<T> targets, QueryExpressionContext<T> context)
      throws InterruptedException {
    return SkyQueryUtils.getTransitiveClosure(
        targets, targets1 -> getFwdDeps(targets1, context), createThreadSafeMutableSet());
  }

  @Override
  public void buildTransitiveClosure(
      QueryExpression caller, ThreadSafeMutableSet<T> targetNodes, int maxDepth) {
    // TODO(bazel-team): implement this. Just needed for error-checking.
  }

  @Override
  public ImmutableList<T> getNodesOnPath(T from, T to, QueryExpressionContext<T> context)
      throws InterruptedException {
    return SkyQueryUtils.getNodesOnPath(
        from,
        to,
        targets -> getFwdDeps(targets, context),
        getConfiguredTargetKeyExtractor()::extractKey);
  }

  @Override
  public <V> MutableMap<T, V> createMutableMap() {
    return new MutableKeyExtractorBackedMapImpl<>(getConfiguredTargetKeyExtractor());
  }

  @Override
  public Uniquifier<T> createUniquifier() {
    return new UniquifierImpl<>(getConfiguredTargetKeyExtractor());
  }

  @Override
  public MinDepthUniquifier<T> createMinDepthUniquifier() {
    return new MinDepthUniquifierImpl<>(
        getConfiguredTargetKeyExtractor(), SkyQueryEnvironment.DEFAULT_THREAD_COUNT);
  }

  /** Target patterns are resolved on the fly so no pre-work to be done here. */
  @Override
  protected void preloadOrThrow(QueryExpression caller, Collection<String> patterns) {}

  @Override
  public ThreadSafeMutableSet<T> getBuildFiles(
      QueryExpression caller,
      ThreadSafeMutableSet<T> nodes,
      boolean buildFiles,
      boolean loads,
      QueryExpressionContext<T> context)
      throws QueryException {
    throw new QueryException(
        "buildfiles() doesn't make sense for the configured target graph",
        ConfigurableQuery.Code.BUILDFILES_FUNCTION_NOT_SUPPORTED);
  }

  @Override
  public Collection<T> getSiblingTargetsInPackage(T target) throws QueryException {
    throw new QueryException(
        "siblings() not supported for post analysis queries",
        ConfigurableQuery.Code.SIBLINGS_FUNCTION_NOT_SUPPORTED);
  }

  @Override
  public void close() {}

  /** A wrapper class for the set of top-level configurations in a query. */
  public static class TopLevelConfigurations {

    /** A map of non-null configured top-level targets sorted by configuration checksum. */
    private final ImmutableMap<Label, BuildConfiguration> nonNulls;
    /**
     * {@code nonNulls} may often have many duplicate values in its value set so we store a sorted
     * set of all the non-null configurations here.
     */
    private final ImmutableSortedSet<BuildConfiguration> nonNullConfigs;
    /** A list of null configured top-level targets. */
    private final ImmutableList<Label> nulls;

    public TopLevelConfigurations(
        Collection<TargetAndConfiguration> topLevelTargetsAndConfigurations) {
      ImmutableMap.Builder<Label, BuildConfiguration> nonNullsBuilder =
          ImmutableMap.builderWithExpectedSize(topLevelTargetsAndConfigurations.size());
      ImmutableList.Builder<Label> nullsBuilder = new ImmutableList.Builder<>();
      for (TargetAndConfiguration targetAndConfiguration : topLevelTargetsAndConfigurations) {
        if (targetAndConfiguration.getConfiguration() == null) {
          nullsBuilder.add(targetAndConfiguration.getLabel());
        } else {
          nonNullsBuilder.put(
              targetAndConfiguration.getLabel(), targetAndConfiguration.getConfiguration());
        }
      }
      nonNulls = nonNullsBuilder.build();
      nonNullConfigs =
          ImmutableSortedSet.copyOf(
              Comparator.comparing(BuildConfiguration::checksum), nonNulls.values());
      nulls = nullsBuilder.build();
    }

    public boolean isTopLevelTarget(Label label) {
      return nonNulls.containsKey(label) || nulls.contains(label);
    }

    // This method returns the configuration of a top-level target if it's not null-configured and
    // otherwise returns null (signifying it is null configured).
    @Nullable
    public BuildConfiguration getConfigurationForTopLevelTarget(Label label) {
      Preconditions.checkArgument(
          isTopLevelTarget(label),
          "Attempting to get top-level configuration for non-top-level target %s.",
          label);
      return nonNulls.get(label);
    }

    public Iterable<BuildConfiguration> getConfigurations() {
      if (nulls.isEmpty()) {
        return nonNullConfigs;
      } else {
        return Iterables.concat(nonNullConfigs, Collections.singletonList(null));
      }
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.configuration2.tree; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; /** * <p> * A class representing the various symbols that are supported in keys * recognized by {@link DefaultExpressionEngine}. * </p> * <p> * An instance of this class is associated with each instance of * {@code DefaultExpressionEngine}. It determines which concrete symbols are * used to define elements like separators, attributes, etc. within a * configuration key. * </p> * <p> * Instances are created using the nested {@code Builder} class. They are * immutable and can be shared between arbitrary components. * </p> * * @version $Id$ * @since 2.0 */ public final class DefaultExpressionEngineSymbols { /** Constant for the default property delimiter. */ public static final String DEFAULT_PROPERTY_DELIMITER = "."; /** Constant for the default escaped property delimiter. */ public static final String DEFAULT_ESCAPED_DELIMITER = DEFAULT_PROPERTY_DELIMITER + DEFAULT_PROPERTY_DELIMITER; /** Constant for the default attribute start marker. 
*/ public static final String DEFAULT_ATTRIBUTE_START = "[@"; /** Constant for the default attribute end marker. */ public static final String DEFAULT_ATTRIBUTE_END = "]"; /** Constant for the default index start marker. */ public static final String DEFAULT_INDEX_START = "("; /** Constant for the default index end marker. */ public static final String DEFAULT_INDEX_END = ")"; /** * An instance with default symbols. This instance is used by the default * instance of {@code DefaultExpressionEngine}. */ public static final DefaultExpressionEngineSymbols DEFAULT_SYMBOLS = createDefaultSmybols(); /** Stores the property delimiter. */ private final String propertyDelimiter; /** Stores the escaped property delimiter. */ private final String escapedDelimiter; /** Stores the attribute start marker. */ private final String attributeStart; /** Stores the attribute end marker. */ private final String attributeEnd; /** Stores the index start marker. */ private final String indexStart; /** stores the index end marker. */ private final String indexEnd; /** * Creates a new instance of {@code DefaultExpressionEngineSymbols}. * * @param b the builder for defining the properties of this instance */ private DefaultExpressionEngineSymbols(Builder b) { propertyDelimiter = b.propertyDelimiter; escapedDelimiter = b.escapedDelimiter; indexStart = b.indexStart; indexEnd = b.indexEnd; attributeStart = b.attributeStart; attributeEnd = b.attributeEnd; } /** * Returns the string used as delimiter in property keys. * * @return the property delimiter */ public String getPropertyDelimiter() { return propertyDelimiter; } /** * Returns the string representing an escaped property delimiter. * * @return the escaped property delimiter */ public String getEscapedDelimiter() { return escapedDelimiter; } /** * Returns the string representing an attribute start marker. 
     *
     * @return the attribute start marker
     */
    public String getAttributeStart() {
        return attributeStart;
    }

    /**
     * Returns the string representing an attribute end marker.
     *
     * @return the attribute end marker
     */
    public String getAttributeEnd() {
        return attributeEnd;
    }

    /**
     * Returns the string representing the start of an index in a property key.
     *
     * @return the index start marker
     */
    public String getIndexStart() {
        return indexStart;
    }

    /**
     * Returns the string representing the end of an index in a property key.
     *
     * @return the index end marker
     */
    public String getIndexEnd() {
        return indexEnd;
    }

    /**
     * Returns a hash code for this object.
     *
     * @return a hash code
     */
    @Override
    public int hashCode() {
        // Every symbol property participates, so objects that compare equal
        // (see equals below) always produce equal hash codes.
        return new HashCodeBuilder().append(getPropertyDelimiter())
                .append(getEscapedDelimiter()).append(getIndexStart())
                .append(getIndexEnd()).append(getAttributeStart())
                .append(getAttributeEnd()).toHashCode();
    }

    /**
     * Compares this object with another one. Two instances of
     * {@code DefaultExpressionEngineSymbols} are considered equal if all of
     * their properties are equal.
     *
     * @param obj the object to compare to
     * @return a flag whether these objects are equal
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DefaultExpressionEngineSymbols)) {
            return false;
        }

        DefaultExpressionEngineSymbols c = (DefaultExpressionEngineSymbols) obj;
        return new EqualsBuilder()
                .append(getPropertyDelimiter(), c.getPropertyDelimiter())
                .append(getEscapedDelimiter(), c.getEscapedDelimiter())
                .append(getIndexStart(), c.getIndexStart())
                .append(getIndexEnd(), c.getIndexEnd())
                .append(getAttributeStart(), c.getAttributeStart())
                .append(getAttributeEnd(), c.getAttributeEnd()).isEquals();
    }

    /**
     * Returns a string representation for this object. This string contains the
     * values of all properties.
     *
     * @return a string for this object
     */
    @Override
    public String toString() {
        return new ToStringBuilder(this)
                .append("propertyDelimiter", getPropertyDelimiter())
                .append("escapedDelimiter", getEscapedDelimiter())
                .append("indexStart", getIndexStart())
                .append("indexEnd", getIndexEnd())
                .append("attributeStart", getAttributeStart())
                .append("attributeEnd", getAttributeEnd()).toString();
    }

    /**
     * Creates the {@code DefaultExpressionEngineSymbols} object with default
     * symbols.
     *
     * NOTE(review): the method name contains a typo ("Smybols"); it is kept
     * unchanged because its caller lies outside this section of the file.
     *
     * @return the default symbols instance
     */
    private static DefaultExpressionEngineSymbols createDefaultSmybols() {
        return new Builder().setPropertyDelimiter(DEFAULT_PROPERTY_DELIMITER)
                .setEscapedDelimiter(DEFAULT_ESCAPED_DELIMITER)
                .setIndexStart(DEFAULT_INDEX_START)
                .setIndexEnd(DEFAULT_INDEX_END)
                .setAttributeStart(DEFAULT_ATTRIBUTE_START)
                .setAttributeEnd(DEFAULT_ATTRIBUTE_END).create();
    }

    /**
     * A builder class for creating instances of
     * {@code DefaultExpressionEngineSymbols}. All symbols are optional; unset
     * ones remain {@code null} in the created object.
     */
    public static class Builder {
        /** Stores the property delimiter. */
        private String propertyDelimiter;

        /** Stores the escaped property delimiter. */
        private String escapedDelimiter;

        /** Stores the attribute start marker. */
        private String attributeStart;

        /** Stores the attribute end marker. */
        private String attributeEnd;

        /** Stores the index start marker. */
        private String indexStart;

        /** Stores the index end marker. */
        private String indexEnd;

        /**
         * Creates a new, uninitialized instance of {@code Builder}. All symbols
         * are undefined.
         */
        public Builder() {
        }

        /**
         * Creates a new instance of {@code Builder} whose properties are
         * initialized from the passed in {@code DefaultExpressionEngineSymbols}
         * object. This is useful if symbols are to be created which are similar
         * to the passed in instance.
         *
         * @param c the {@code DefaultExpressionEngineSymbols} object serving as
         *        starting point for this builder
         */
        public Builder(DefaultExpressionEngineSymbols c) {
            propertyDelimiter = c.getPropertyDelimiter();
            escapedDelimiter = c.getEscapedDelimiter();
            indexStart = c.getIndexStart();
            indexEnd = c.getIndexEnd();
            attributeStart = c.getAttributeStart();
            attributeEnd = c.getAttributeEnd();
        }

        /**
         * Sets the string representing a delimiter for properties.
         *
         * @param d the property delimiter
         * @return a reference to this object for method chaining
         */
        public Builder setPropertyDelimiter(String d) {
            propertyDelimiter = d;
            return this;
        }

        /**
         * Sets the string representing an escaped property delimiter. With this
         * string a delimiter that belongs to the key of a property can be
         * escaped. If for instance &quot;.&quot; is used as property delimiter,
         * you can set the escaped delimiter to &quot;\.&quot; and can then
         * escape the delimiter with a back slash.
         *
         * @param ed the escaped property delimiter
         * @return a reference to this object for method chaining
         */
        public Builder setEscapedDelimiter(String ed) {
            escapedDelimiter = ed;
            return this;
        }

        /**
         * Sets the string representing the start of an index in a property key.
         * Index start and end marker are used together to detect indices in a
         * property key.
         *
         * @param is the index start
         * @return a reference to this object for method chaining
         */
        public Builder setIndexStart(String is) {
            indexStart = is;
            return this;
        }

        /**
         * Sets the string representing the end of an index in a property key.
         *
         * @param ie the index end
         * @return a reference to this object for method chaining
         */
        public Builder setIndexEnd(String ie) {
            indexEnd = ie;
            return this;
        }

        /**
         * Sets the string representing the start marker of an attribute in a
         * property key. Attribute start and end marker are used together to
         * detect attributes in a property key.
         *
         * @param as the attribute start marker
         * @return a reference to this object for method chaining
         */
        public Builder setAttributeStart(String as) {
            attributeStart = as;
            return this;
        }

        /**
         * Sets the string representing the end marker of an attribute in a
         * property key.
         *
         * @param ae the attribute end marker
         * @return a reference to this object for method chaining
         */
        public Builder setAttributeEnd(String ae) {
            attributeEnd = ae;
            return this;
        }

        /**
         * Creates the {@code DefaultExpressionEngineSymbols} instance based on
         * the properties set for this builder object. This method does not
         * change the state of this builder. So it is possible to change
         * properties and create another {@code DefaultExpressionEngineSymbols}
         * instance.
         *
         * @return the newly created {@code DefaultExpressionEngineSymbols}
         *         instance
         */
        public DefaultExpressionEngineSymbols create() {
            return new DefaultExpressionEngineSymbols(this);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. * */ package org.apache.hadoop.hdds.scm.node; import com.google.common.base.Preconditions; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos; import org.apache.hadoop.hdds.protocol.proto. StorageContainerDatanodeProtocolProtos.StorageReportProto; import org.apache.hadoop.hdds.scm.exceptions.SCMException; import org.apache.hadoop.metrics2.util.MBeans; import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.container.common.impl.StorageLocationReport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.management.ObjectName; import java.io.IOException; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.stream.Collectors; import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes.DUPLICATE_DATANODE; import static org.apache.hadoop.hdds.scm.exceptions.SCMException.ResultCodes.NO_SUCH_DATANODE; /** * This data structure maintains the disk space capacity, disk usage and free * space availability per Datanode. 
* This information is built from the DN node reports. */ public class SCMNodeStorageStatMap implements SCMNodeStorageStatMXBean { static final Logger LOG = LoggerFactory.getLogger(SCMNodeStorageStatMap.class); private final double warningUtilizationThreshold; private final double criticalUtilizationThreshold; private final Map<UUID, Set<StorageLocationReport>> scmNodeStorageReportMap; // NodeStorageInfo MXBean private ObjectName scmNodeStorageInfoBean; /** * constructs the scmNodeStorageReportMap object. */ public SCMNodeStorageStatMap(OzoneConfiguration conf) { // scmNodeStorageReportMap = new ConcurrentHashMap<>(); scmNodeStorageReportMap = new ConcurrentHashMap<>(); warningUtilizationThreshold = conf.getDouble( OzoneConfigKeys. HDDS_DATANODE_STORAGE_UTILIZATION_WARNING_THRESHOLD, OzoneConfigKeys. HDDS_DATANODE_STORAGE_UTILIZATION_WARNING_THRESHOLD_DEFAULT); criticalUtilizationThreshold = conf.getDouble( OzoneConfigKeys. HDDS_DATANODE_STORAGE_UTILIZATION_CRITICAL_THRESHOLD, OzoneConfigKeys. HDDS_DATANODE_STORAGE_UTILIZATION_CRITICAL_THRESHOLD_DEFAULT); } /** * Enum that Describes what we should do at various thresholds. */ public enum UtilizationThreshold { NORMAL, WARN, CRITICAL; } /** * Returns true if this a datanode that is already tracked by * scmNodeStorageReportMap. * * @param datanodeID - UUID of the Datanode. * @return True if this is tracked, false if this map does not know about it. */ public boolean isKnownDatanode(UUID datanodeID) { Preconditions.checkNotNull(datanodeID); return scmNodeStorageReportMap.containsKey(datanodeID); } public List<UUID> getDatanodeList( UtilizationThreshold threshold) { return scmNodeStorageReportMap.entrySet().stream().filter( entry -> (isThresholdReached(threshold, getScmUsedratio(getUsedSpace(entry.getKey()), getCapacity(entry.getKey()))))) .map(Map.Entry::getKey) .collect(Collectors.toList()); } /** * Insert a new datanode into Node2Container Map. 
* * @param datanodeID -- Datanode UUID * @param report - set if StorageReports. */ public void insertNewDatanode(UUID datanodeID, Set<StorageLocationReport> report) throws SCMException { Preconditions.checkNotNull(report); Preconditions.checkState(report.size() != 0); Preconditions.checkNotNull(datanodeID); synchronized (scmNodeStorageReportMap) { if (isKnownDatanode(datanodeID)) { throw new SCMException("Node already exists in the map", DUPLICATE_DATANODE); } scmNodeStorageReportMap.putIfAbsent(datanodeID, report); } } //TODO: This should be called once SCMNodeManager gets Started. private void registerMXBean() { this.scmNodeStorageInfoBean = MBeans.register("StorageContainerManager", "scmNodeStorageInfo", this); } //TODO: Unregister call should happen as a part of SCMNodeManager shutdown. private void unregisterMXBean() { if(this.scmNodeStorageInfoBean != null) { MBeans.unregister(this.scmNodeStorageInfoBean); this.scmNodeStorageInfoBean = null; } } /** * Updates the Container list of an existing DN. * * @param datanodeID - UUID of DN. * @param report - set of Storage Reports for the Datanode. * @throws SCMException - if we don't know about this datanode, for new DN * use insertNewDatanode. 
*/ public void updateDatanodeMap(UUID datanodeID, Set<StorageLocationReport> report) throws SCMException { Preconditions.checkNotNull(datanodeID); Preconditions.checkNotNull(report); Preconditions.checkState(report.size() != 0); synchronized (scmNodeStorageReportMap) { if (!scmNodeStorageReportMap.containsKey(datanodeID)) { throw new SCMException("No such datanode", NO_SUCH_DATANODE); } scmNodeStorageReportMap.put(datanodeID, report); } } public StorageReportResult processNodeReport(UUID datanodeID, StorageContainerDatanodeProtocolProtos.NodeReportProto nodeReport) throws IOException { Preconditions.checkNotNull(datanodeID); Preconditions.checkNotNull(nodeReport); long totalCapacity = 0; long totalRemaining = 0; long totalScmUsed = 0; Set<StorageLocationReport> storagReportSet = new HashSet<>(); Set<StorageLocationReport> fullVolumeSet = new HashSet<>(); Set<StorageLocationReport> failedVolumeSet = new HashSet<>(); List<StorageReportProto> storageReports = nodeReport.getStorageReportList(); for (StorageReportProto report : storageReports) { StorageLocationReport storageReport = StorageLocationReport.getFromProtobuf(report); storagReportSet.add(storageReport); if (report.hasFailed() && report.getFailed()) { failedVolumeSet.add(storageReport); } else if (isThresholdReached(UtilizationThreshold.CRITICAL, getScmUsedratio(report.getScmUsed(), report.getCapacity()))) { fullVolumeSet.add(storageReport); } totalCapacity += report.getCapacity(); totalRemaining += report.getRemaining(); totalScmUsed += report.getScmUsed(); } if (!isKnownDatanode(datanodeID)) { insertNewDatanode(datanodeID, storagReportSet); } else { updateDatanodeMap(datanodeID, storagReportSet); } if (isThresholdReached(UtilizationThreshold.CRITICAL, getScmUsedratio(totalScmUsed, totalCapacity))) { LOG.warn("Datanode {} is out of storage space. 
Capacity: {}, Used: {}", datanodeID, totalCapacity, totalScmUsed); return StorageReportResult.ReportResultBuilder.newBuilder() .setStatus(ReportStatus.DATANODE_OUT_OF_SPACE) .setFullVolumeSet(fullVolumeSet).setFailedVolumeSet(failedVolumeSet) .build(); } if (isThresholdReached(UtilizationThreshold.WARN, getScmUsedratio(totalScmUsed, totalCapacity))) { LOG.warn("Datanode {} is low on storage space. Capacity: {}, Used: {}", datanodeID, totalCapacity, totalScmUsed); } if (failedVolumeSet.isEmpty() && !fullVolumeSet.isEmpty()) { return StorageReportResult.ReportResultBuilder.newBuilder() .setStatus(ReportStatus.STORAGE_OUT_OF_SPACE) .setFullVolumeSet(fullVolumeSet).build(); } if (!failedVolumeSet.isEmpty() && fullVolumeSet.isEmpty()) { return StorageReportResult.ReportResultBuilder.newBuilder() .setStatus(ReportStatus.FAILED_STORAGE) .setFailedVolumeSet(failedVolumeSet).build(); } if (!failedVolumeSet.isEmpty() && !fullVolumeSet.isEmpty()) { return StorageReportResult.ReportResultBuilder.newBuilder() .setStatus(ReportStatus.FAILED_AND_OUT_OF_SPACE_STORAGE) .setFailedVolumeSet(failedVolumeSet).setFullVolumeSet(fullVolumeSet) .build(); } return StorageReportResult.ReportResultBuilder.newBuilder() .setStatus(ReportStatus.ALL_IS_WELL).build(); } private boolean isThresholdReached(UtilizationThreshold threshold, double scmUsedratio) { switch (threshold) { case NORMAL: return scmUsedratio < warningUtilizationThreshold; case WARN: return scmUsedratio >= warningUtilizationThreshold && scmUsedratio < criticalUtilizationThreshold; case CRITICAL: return scmUsedratio >= criticalUtilizationThreshold; default: throw new RuntimeException("Unknown UtilizationThreshold value"); } } @Override public long getCapacity(UUID dnId) { long capacity = 0; Set<StorageLocationReport> reportSet = scmNodeStorageReportMap.get(dnId); for (StorageLocationReport report : reportSet) { capacity += report.getCapacity(); } return capacity; } @Override public long getRemainingSpace(UUID dnId) { long 
remaining = 0; Set<StorageLocationReport> reportSet = scmNodeStorageReportMap.get(dnId); for (StorageLocationReport report : reportSet) { remaining += report.getRemaining(); } return remaining; } @Override public long getUsedSpace(UUID dnId) { long scmUsed = 0; Set<StorageLocationReport> reportSet = scmNodeStorageReportMap.get(dnId); for (StorageLocationReport report : reportSet) { scmUsed += report.getScmUsed(); } return scmUsed; } @Override public long getTotalCapacity() { long capacity = 0; Set<UUID> dnIdSet = scmNodeStorageReportMap.keySet(); for (UUID id : dnIdSet) { capacity += getCapacity(id); } return capacity; } @Override public long getTotalSpaceUsed() { long scmUsed = 0; Set<UUID> dnIdSet = scmNodeStorageReportMap.keySet(); for (UUID id : dnIdSet) { scmUsed += getUsedSpace(id); } return scmUsed; } @Override public long getTotalFreeSpace() { long remaining = 0; Set<UUID> dnIdSet = scmNodeStorageReportMap.keySet(); for (UUID id : dnIdSet) { remaining += getRemainingSpace(id); } return remaining; } /** * removes the dataNode from scmNodeStorageReportMap. * @param datanodeID * @throws SCMException in case the dataNode is not found in the map. */ public void removeDatanode(UUID datanodeID) throws SCMException { Preconditions.checkNotNull(datanodeID); synchronized (scmNodeStorageReportMap) { if (!scmNodeStorageReportMap.containsKey(datanodeID)) { throw new SCMException("No such datanode", NO_SUCH_DATANODE); } scmNodeStorageReportMap.remove(datanodeID); } } /** * Returns the set of storage volumes for a Datanode. * @param datanodeID * @return set of storage volumes. */ @Override public Set<StorageLocationReport> getStorageVolumes(UUID datanodeID) { return scmNodeStorageReportMap.get(datanodeID); } /** * Truncate to 4 digits since uncontrolled precision is some times * counter intuitive to what users expect. * @param value - double. * @return double. 
*/ private double truncateDecimals(double value) { final int multiplier = 10000; return (double) ((long) (value * multiplier)) / multiplier; } /** * get the scmUsed ratio. */ public double getScmUsedratio(long scmUsed, long capacity) { double scmUsedRatio = truncateDecimals(scmUsed / (double) capacity); return scmUsedRatio; } /** * Results possible from processing a Node report by * Node2ContainerMapper. */ public enum ReportStatus { ALL_IS_WELL, DATANODE_OUT_OF_SPACE, STORAGE_OUT_OF_SPACE, FAILED_STORAGE, FAILED_AND_OUT_OF_SPACE_STORAGE } }
/* * Copyright (C) 2013 Priboi Tiberiu * Copyright (C) 2013 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.cwp.chart; import com.cwp.cmoneycharge.R; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.LinearGradient; import android.graphics.Matrix; import android.graphics.Paint; import android.graphics.Paint.Style; import android.graphics.Rect; import android.graphics.Shader.TileMode; import android.util.AttributeSet; import android.view.View; import android.view.ViewGroup; /** * The folding layout where the number of folds, the anchor point and the * orientation of the fold can be specified. Each of these parameters can be * modified individually and updates and resets the fold to a default (unfolded) * state. The fold factor varies between 0 (completely unfolded flat image) to * 1.0 (completely folded, non-visible image). * * This layout throws an exception if there is more than one child added to the * view. For more complicated view hierarchy's inside the folding layout, the * views should all be nested inside 1 parent layout. * * This layout folds the contents of its child in real time. By applying matrix * transformations when drawing to canvas, the contents of the child may change * as the fold takes place. 
 * It is important to note that there are jagged edges
 * about the perimeter of the layout as a result of applying transformations to
 * a rectangle. This can be avoided by having the child of this layout wrap its
 * content inside a 1 pixel transparent border. This will cause an anti-aliasing
 * like effect and smoothen out the edges.
 */
public class BaseFoldingLayout extends ViewGroup {

    /*
     * A bug was introduced in Android 4.3 that ignores changes to the Canvas
     * state between multiple calls to super.dispatchDraw() when running with
     * hardware acceleration. To account for this bug, a slightly different
     * approach was taken to fold a static image whereby a bitmap of the
     * original contents is captured and drawn in segments onto the canvas.
     * However, this method does not permit the folding of a TextureView hosting
     * a live camera feed which continuously updates. Furthermore, the sepia
     * effect was removed from the bitmap variation of the demo to simplify the
     * logic when running with this workaround.
     */

    /** Axis along which the layout folds. */
    public static enum Orientation {
        VERTICAL, HORIZONTAL
    }

    // NOTE(review): message grammar is off ("can only 1 child"); it is kept
    // byte-identical here because it is runtime-visible text.
    private final String FOLDING_VIEW_EXCEPTION_MESSAGE = "Folding Layout can only 1 child at "
            + "most";

    /** Maximum darkness of the fold shadows (scaled by the fold factor). */
    private final float SHADING_ALPHA = 0.8f;
    /** Extent of the gradient shadow relative to a fold segment. */
    private final float SHADING_FACTOR = 0.5f;
    /** Perspective constant; larger values flatten the 3D effect. */
    private final int DEPTH_CONSTANT = 1500;
    /** Four (x, y) pairs passed to Matrix.setPolyToPoly per fold segment. */
    private final int NUM_OF_POLY_POINTS = 8;

    /** Source rectangle of each fold segment within the child view. */
    private Rect[] mFoldRectArray;
    /** Per-segment transformation matrix computed by calculateMatrices(). */
    private Matrix[] mMatrix;

    protected Orientation mOrientation = Orientation.VERTICAL;

    /** Anchor of the fold as a fraction of the folded dimension (0..1). */
    protected float mAnchorFactor = 0;
    /** 0 = fully unfolded, 1 = fully folded (invisible). */
    private float mFoldFactor = 0;

    private int mNumberOfFolds = 2;

    private boolean mIsHorizontal = true;

    private int mOriginalWidth = 0;
    private int mOriginalHeight = 0;

    private float mFoldMaxWidth = 0;
    private float mFoldMaxHeight = 0;
    private float mFoldDrawWidth = 0;
    private float mFoldDrawHeight = 0;

    private boolean mIsFoldPrepared = false;
    private boolean mShouldDraw = true;

    private Paint mSolidShadow;
    private Paint mGradientShadow;
    private LinearGradient mShadowLinearGradient;
    private Matrix mShadowGradientMatrix;

    private float[] mSrc;
    private float[] mDst;

    private OnFoldListener mFoldListener;

    private float mPreviousFoldFactor = 0;

    // Snapshot of the child, used only on Android 4.3 (see workaround above).
    private Bitmap mFullBitmap;
    private Rect mDstRect;

    public BaseFoldingLayout(Context context) {
        super(context);
    }

    public BaseFoldingLayout(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs);
    }

    public BaseFoldingLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context, attrs);
    }

    /**
     * Reads the fold count from the XML attributes; values outside the range
     * (0, 7) fall back to the default of 2 folds.
     */
    public void init(Context context, AttributeSet attrs) {
        // now style everything!
        TypedArray ta = context.obtainStyledAttributes(attrs,
                R.styleable.FoldingMenu);
        int mFoldNumber = ta.getInt(R.styleable.FoldingMenu_foldNumber,
                mNumberOfFolds);
        if (mFoldNumber > 0 && mFoldNumber < 7) {
            mNumberOfFolds = mFoldNumber;
        } else {
            mNumberOfFolds = 2;
        }
        ta.recycle();
    }

    @Override
    protected boolean addViewInLayout(View child, int index,
            LayoutParams params, boolean preventRequestLayout) {
        // Enforce the single-child constraint before delegating to super.
        throwCustomException(getChildCount());
        boolean returnValue = super.addViewInLayout(child, index, params,
                preventRequestLayout);
        return returnValue;
    }

    @Override
    public void addView(View child, int index, LayoutParams params) {
        throwCustomException(getChildCount());
        super.addView(child, index, params);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        View child = getChildAt(0);
        measureChild(child, widthMeasureSpec, heightMeasureSpec);
        setMeasuredDimension(widthMeasureSpec, heightMeasureSpec);
    }

    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        View child = getChildAt(0);
        child.layout(0, 0, child.getMeasuredWidth(), child.getMeasuredHeight());
        updateFold();
    }

    /**
     * The custom exception to be thrown so as to limit the number of views in
     * this layout to at most one.
     */
    private class NumberOfFoldingLayoutChildrenException extends
            RuntimeException {
        private static final long serialVersionUID = 1L;

        public NumberOfFoldingLayoutChildrenException(String message) {
            super(message);
        }
    }

    /**
     * Throws an exception if the number of views added to this layout exceeds
     * one.
     */
    private void throwCustomException(int numOfChildViews) {
        if (numOfChildViews == 1) {
            throw new NumberOfFoldingLayoutChildrenException(
                    FOLDING_VIEW_EXCEPTION_MESSAGE);
        }
    }

    public void setFoldListener(OnFoldListener foldListener) {
        mFoldListener = foldListener;
    }

    /**
     * Sets the fold factor of the folding view and updates all the
     * corresponding matrices and values to account for the new fold factor.
     * Once that is complete, it redraws itself with the new fold.
     */
    public void setFoldFactor(float foldFactor) {
        if (foldFactor != mFoldFactor) {
            mFoldFactor = foldFactor;
            calculateMatrices();
            invalidate();
        }
    }

    public void setOrientation(Orientation orientation) {
        if (orientation != mOrientation) {
            mOrientation = orientation;
            updateFold();
        }
    }

    public void setAnchorFactor(float anchorFactor) {
        if (anchorFactor != mAnchorFactor) {
            mAnchorFactor = anchorFactor;
            updateFold();
        }
    }

    public void setNumberOfFolds(int numberOfFolds) {
        if (numberOfFolds != mNumberOfFolds) {
            mNumberOfFolds = numberOfFolds;
            updateFold();
        }
    }

    public float getAnchorFactor() {
        return mAnchorFactor;
    }

    public Orientation getOrientation() {
        return mOrientation;
    }

    public float getFoldFactor() {
        return mFoldFactor;
    }

    public int getNumberOfFolds() {
        return mNumberOfFolds;
    }

    /** Re-runs full fold preparation and redraws (used by the setters). */
    private void updateFold() {
        prepareFold(mOrientation, mAnchorFactor, mNumberOfFolds);
        calculateMatrices();
        invalidate();
    }

    /**
     * This method is called in order to update the fold's orientation, anchor
     * point and number of folds. This creates the necessary setup in order to
     * prepare the layout for a fold with the specified parameters. Some of the
     * dimensions required for the folding transformation are also acquired
     * here.
     *
     * After this method is called, it will be in a completely unfolded state by
     * default.
     */
    private void prepareFold(Orientation orientation, float anchorFactor,
            int numberOfFolds) {

        mSrc = new float[NUM_OF_POLY_POINTS];
        mDst = new float[NUM_OF_POLY_POINTS];

        mDstRect = new Rect();

        mFoldFactor = 0;
        mPreviousFoldFactor = 0;

        mIsFoldPrepared = false;

        mSolidShadow = new Paint();
        mGradientShadow = new Paint();

        mOrientation = orientation;
        mIsHorizontal = (orientation == Orientation.HORIZONTAL);

        if (mIsHorizontal) {
            mShadowLinearGradient = new LinearGradient(0, 0, SHADING_FACTOR, 0,
                    Color.BLACK, Color.TRANSPARENT, TileMode.CLAMP);
        } else {
            mShadowLinearGradient = new LinearGradient(0, 0, 0, SHADING_FACTOR,
                    Color.BLACK, Color.TRANSPARENT, TileMode.CLAMP);
        }

        mGradientShadow.setStyle(Style.FILL);
        mGradientShadow.setShader(mShadowLinearGradient);
        mShadowGradientMatrix = new Matrix();

        mAnchorFactor = anchorFactor;
        mNumberOfFolds = numberOfFolds;

        mOriginalWidth = getMeasuredWidth();
        mOriginalHeight = getMeasuredHeight();

        mFoldRectArray = new Rect[mNumberOfFolds];
        mMatrix = new Matrix[mNumberOfFolds];

        for (int x = 0; x < mNumberOfFolds; x++) {
            mMatrix[x] = new Matrix();
        }

        int h = mOriginalHeight;
        int w = mOriginalWidth;

        // Android 4.3 workaround: capture the child into a bitmap so it can
        // be drawn in segments (see the class-level note above).
        if (Util.IS_JBMR2 && h != 0 && w != 0) {
            mFullBitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
            Canvas canvas = new Canvas(mFullBitmap);
            getChildAt(0).draw(canvas);
        }

        int delta = Math.round(mIsHorizontal ? ((float) w)
                / ((float) mNumberOfFolds) : ((float) h)
                / ((float) mNumberOfFolds));

        /*
         * Loops through the number of folds and segments the full layout into a
         * number of smaller equal components. If the number of folds is odd,
         * then one of the components will be smaller than all the rest. Note
         * that deltap below handles the calculation for an odd number of folds.
         */
        for (int x = 0; x < mNumberOfFolds; x++) {
            if (mIsHorizontal) {
                int deltap = (x + 1) * delta > w ? w - x * delta : delta;
                mFoldRectArray[x] = new Rect(x * delta, 0, x * delta + deltap,
                        h);
            } else {
                int deltap = (x + 1) * delta > h ? h - x * delta : delta;
                mFoldRectArray[x] = new Rect(0, x * delta, w, x * delta
                        + deltap);
            }
        }

        if (mIsHorizontal) {
            mFoldMaxHeight = h;
            mFoldMaxWidth = delta;
        } else {
            mFoldMaxHeight = delta;
            mFoldMaxWidth = w;
        }

        mIsFoldPrepared = true;
    }

    /*
     * Calculates the transformation matrices used to draw each of the separate
     * folding segments from this view.
     */
    private void calculateMatrices() {

        mShouldDraw = true;

        if (!mIsFoldPrepared) {
            return;
        }

        /*
         * If the fold factor is 1 than the folding view should not be seen and
         * the canvas can be left completely empty.
         */
        if (mFoldFactor == 1) {
            mShouldDraw = false;
            return;
        }

        // Notify the listener: near-closed and moving closed -> onEndFold,
        // near-open and moving open -> onStartFold, otherwise progress.
        if (mFoldFactor > 0.9f && mFoldFactor > mPreviousFoldFactor
                && mFoldListener != null) {
            mFoldListener.onEndFold(mFoldFactor);
        } else if (mFoldFactor < 0.1f && mFoldFactor < mPreviousFoldFactor
                && mFoldListener != null) {
            mFoldListener.onStartFold(mFoldFactor);
        } else if (mFoldListener != null) {
            mFoldListener.onFoldingState(mFoldFactor, mFoldDrawHeight);
        }

        mPreviousFoldFactor = mFoldFactor;

        /*
         * Reset all the transformation matrices back to identity before
         * computing the new transformation
         */
        for (int x = 0; x < mNumberOfFolds; x++) {
            mMatrix[x].reset();
        }

        float cTranslationFactor = 1 - mFoldFactor;

        float translatedDistance = mIsHorizontal ? mOriginalWidth
                * cTranslationFactor : mOriginalHeight * cTranslationFactor;

        float translatedDistancePerFold = Math.round(translatedDistance
                / mNumberOfFolds);

        /*
         * For an odd number of folds, the rounding error may cause the
         * translatedDistancePerFold to be greater than the max fold width or
         * height.
         */
        mFoldDrawWidth = mFoldMaxWidth < translatedDistancePerFold ? translatedDistancePerFold
                : mFoldMaxWidth;
        mFoldDrawHeight = mFoldMaxHeight < translatedDistancePerFold ? translatedDistancePerFold
                : mFoldMaxHeight;

        float translatedDistanceFoldSquared = translatedDistancePerFold
                * translatedDistancePerFold;

        /*
         * Calculate the depth of the fold into the screen using pythagorean
         * theorem.
         */
        float depth = mIsHorizontal ? (float) Math
                .sqrt((double) (mFoldDrawWidth * mFoldDrawWidth - translatedDistanceFoldSquared))
                : (float) Math
                        .sqrt((double) (mFoldDrawHeight * mFoldDrawHeight - translatedDistanceFoldSquared));

        /*
         * The size of some object is always inversely proportional to the
         * distance it is away from the viewpoint. The constant can be varied
         * to affect the amount of perspective.
         */
        float scaleFactor = DEPTH_CONSTANT / (DEPTH_CONSTANT + depth);

        float scaledWidth, scaledHeight, bottomScaledPoint, topScaledPoint, rightScaledPoint, leftScaledPoint;

        if (mIsHorizontal) {
            scaledWidth = mFoldDrawWidth * cTranslationFactor;
            scaledHeight = mFoldDrawHeight * scaleFactor;
        } else {
            scaledWidth = mFoldDrawWidth * scaleFactor;
            scaledHeight = mFoldDrawHeight * cTranslationFactor;
        }

        topScaledPoint = (mFoldDrawHeight - scaledHeight) / 2.0f;
        bottomScaledPoint = topScaledPoint + scaledHeight;

        leftScaledPoint = (mFoldDrawWidth - scaledWidth) / 2.0f;
        rightScaledPoint = leftScaledPoint + scaledWidth;

        float anchorPoint = mIsHorizontal ? mAnchorFactor * mOriginalWidth
                : mAnchorFactor * mOriginalHeight;

        /* The fold along which the anchor point is located. */
        float midFold = mIsHorizontal ? (anchorPoint / mFoldDrawWidth)
                : anchorPoint / mFoldDrawHeight;

        // Source polygon: the four corners of one unfolded segment
        // (top-left, bottom-left, top-right, bottom-right).
        mSrc[0] = 0;
        mSrc[1] = 0;
        mSrc[2] = 0;
        mSrc[3] = mFoldDrawHeight;
        mSrc[4] = mFoldDrawWidth;
        mSrc[5] = 0;
        mSrc[6] = mFoldDrawWidth;
        mSrc[7] = mFoldDrawHeight;

        /*
         * Computes the transformation matrix for each fold using the values
         * calculated above.
         */
        for (int x = 0; x < mNumberOfFolds; x++) {

            boolean isEven = (x % 2 == 0);

            if (mIsHorizontal) {
                mDst[0] = (anchorPoint > x * mFoldDrawWidth) ? anchorPoint
                        + (x - midFold) * scaledWidth : anchorPoint
                        - (midFold - x) * scaledWidth;
                mDst[1] = isEven ? 0 : topScaledPoint;
                mDst[2] = mDst[0];
                mDst[3] = isEven ? mFoldDrawHeight : bottomScaledPoint;
                mDst[4] = (anchorPoint > (x + 1) * mFoldDrawWidth) ? anchorPoint
                        + (x + 1 - midFold) * scaledWidth
                        : anchorPoint - (midFold - x - 1) * scaledWidth;
                mDst[5] = isEven ? topScaledPoint : 0;
                mDst[6] = mDst[4];
                mDst[7] = isEven ? bottomScaledPoint : mFoldDrawHeight;
            } else {
                mDst[0] = isEven ? 0 : leftScaledPoint;
                mDst[1] = (anchorPoint > x * mFoldDrawHeight) ? anchorPoint
                        + (x - midFold) * scaledHeight : anchorPoint
                        - (midFold - x) * scaledHeight;
                mDst[2] = isEven ? leftScaledPoint : 0;
                mDst[3] = (anchorPoint > (x + 1) * mFoldDrawHeight) ? anchorPoint
                        + (x + 1 - midFold) * scaledHeight
                        : anchorPoint - (midFold - x - 1) * scaledHeight;
                mDst[4] = isEven ? mFoldDrawWidth : rightScaledPoint;
                mDst[5] = mDst[1];
                mDst[6] = isEven ? rightScaledPoint : mFoldDrawWidth;
                mDst[7] = mDst[3];
            }

            /*
             * Pixel fractions are present for odd number of folds which need to
             * be rounded off here.
             */
            for (int y = 0; y < 8; y++) {
                mDst[y] = Math.round(mDst[y]);
            }

            /*
             * If it so happens that any of the folds have reached a point where
             * the width or height of that fold is 0, then nothing needs to be
             * drawn onto the canvas because the view is essentially completely
             * folded.
             */
            if (mIsHorizontal) {
                if (mDst[4] <= mDst[0] || mDst[6] <= mDst[2]) {
                    mShouldDraw = false;
                    return;
                }
            } else {
                if (mDst[3] <= mDst[1] || mDst[7] <= mDst[5]) {
                    mShouldDraw = false;
                    return;
                }
            }

            /* Sets the shadow and bitmap transformation matrices. */
            mMatrix[x].setPolyToPoly(mSrc, 0, mDst, 0, NUM_OF_POLY_POINTS / 2);
        }

        /*
         * The shadows on the folds are split into two parts: Solid shadows and
         * gradients. Every other fold has a solid shadow which overlays the
         * whole fold. Similarly, the folds in between these alternating folds
         * also have an overlaying shadow. However, it is a gradient that takes
         * up part of the fold as opposed to a solid shadow overlaying the whole
         * fold.
         */

        /* Solid shadow paint object. */
        int alpha = (int) (mFoldFactor * 255 * SHADING_ALPHA);

        mSolidShadow.setColor(Color.argb(alpha, 0, 0, 0));

        if (mIsHorizontal) {
            mShadowGradientMatrix.setScale(mFoldDrawWidth, 1);
            mShadowLinearGradient.setLocalMatrix(mShadowGradientMatrix);
        } else {
            mShadowGradientMatrix.setScale(1, mFoldDrawHeight);
            mShadowLinearGradient.setLocalMatrix(mShadowGradientMatrix);
        }

        mGradientShadow.setAlpha(alpha);
    }

    @Override
    protected void dispatchDraw(Canvas canvas) {
        /*
         * If prepareFold has not been called or if preparation has not
         * completed yet, then no custom drawing will take place so only need to
         * invoke super's onDraw and return.
         */
        if (!mIsFoldPrepared || mFoldFactor == 0) {
            super.dispatchDraw(canvas);
            return;
        }

        if (!mShouldDraw) {
            return;
        }

        Rect src;
        /*
         * Draws the bitmaps and shadows on the canvas with the appropriate
         * transformations.
         */
        for (int x = 0; x < mNumberOfFolds; x++) {

            src = mFoldRectArray[x];
            /* The canvas is saved and restored for every individual fold */
            canvas.save();

            /*
             * Concatenates the canvas with the transformation matrix for the
             * the segment of the view corresponding to the actual image being
             * displayed.
             */
            canvas.concat(mMatrix[x]);
            if (Util.IS_JBMR2) {
                // Android 4.3 path: draw the captured bitmap segment.
                mDstRect.set(0, 0, src.width(), src.height());
                canvas.drawBitmap(mFullBitmap, src, mDstRect, null);
            } else {
                /*
                 * The same transformation matrix is used for both the shadow
                 * and the image segment. The canvas is clipped to account for
                 * the size of each fold and is translated so they are drawn in
                 * the right place. The shadow is then drawn on top of the
                 * different folds using the same transformation matrix.
                 */
                canvas.clipRect(0, 0, src.right - src.left, src.bottom
                        - src.top);

                if (mIsHorizontal) {
                    canvas.translate(-src.left, 0);
                } else {
                    canvas.translate(0, -src.top);
                }

                super.dispatchDraw(canvas);

                if (mIsHorizontal) {
                    canvas.translate(src.left, 0);
                } else {
                    canvas.translate(0, src.top);
                }
            }
            /* Draws the shadows corresponding to this specific fold. */
            if (x % 2 == 0) {
                canvas.drawRect(0, 0, mFoldDrawWidth, mFoldDrawHeight,
                        mSolidShadow);
            } else {
                canvas.drawRect(0, 0, mFoldDrawWidth, mFoldDrawHeight,
                        mGradientShadow);
            }

            canvas.restore();
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.bytecode.expression; import com.facebook.presto.bytecode.BytecodeBlock; import com.facebook.presto.bytecode.BytecodeNode; import com.facebook.presto.bytecode.MethodGenerationContext; import com.facebook.presto.bytecode.OpCode; import com.facebook.presto.bytecode.ParameterizedType; import com.google.common.collect.ImmutableList; import java.util.List; import static com.facebook.presto.bytecode.ParameterizedType.type; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.primitives.Primitives.wrap; import static java.lang.String.format; import static java.util.Objects.requireNonNull; class CastBytecodeExpression extends BytecodeExpression { private static final ParameterizedType OBJECT_TYPE = type(Object.class); private final BytecodeExpression instance; public CastBytecodeExpression(BytecodeExpression instance, ParameterizedType type) { super(type); this.instance = requireNonNull(instance, "instance is null"); checkArgument(type.getPrimitiveType() != void.class, "Type %s can not be cast to %s", instance.getType(), type); // Call generateBytecode to run the validation logic. The result is thrown away. // Duplicating the validation logic here is error-prone and introduces duplicate code. 
generateBytecode(instance.getType(), getType()); } @Override public BytecodeNode getBytecode(MethodGenerationContext generationContext) { return new BytecodeBlock() .append(instance.getBytecode(generationContext)) .append(generateBytecode(instance.getType(), getType())); } private static BytecodeBlock generateBytecode(ParameterizedType sourceType, ParameterizedType targetType) { BytecodeBlock block = new BytecodeBlock(); switch (getTypeKind(sourceType)) { case PRIMITIVE: switch (getTypeKind(targetType)) { case PRIMITIVE: castPrimitiveToPrimitive(block, sourceType.getPrimitiveType(), targetType.getPrimitiveType()); return block; case BOXED_PRIMITVE: checkArgument(sourceType.getPrimitiveType() == unwrapPrimitiveType(targetType), "Type %s can not be cast to %s", sourceType, targetType); return block.invokeStatic(targetType, "valueOf", targetType, sourceType); case OTHER: checkArgument(OBJECT_TYPE.equals(targetType), "Type %s can not be cast to %s", sourceType, targetType); Class<?> sourceClass = sourceType.getPrimitiveType(); return block .invokeStatic(wrap(sourceClass), "valueOf", wrap(sourceClass), sourceClass) .checkCast(targetType); } case BOXED_PRIMITVE: switch (getTypeKind(targetType)) { case PRIMITIVE: checkArgument(unwrapPrimitiveType(sourceType) == targetType.getPrimitiveType(), "Type %s can not be cast to %s", sourceType, targetType); return block.invokeVirtual(sourceType, targetType.getPrimitiveType().getSimpleName() + "Value", targetType); case BOXED_PRIMITVE: checkArgument(sourceType.equals(targetType), "Type %s can not be cast to %s", sourceType, targetType); return block; case OTHER: return block.checkCast(targetType); } case OTHER: switch (getTypeKind(targetType)) { case PRIMITIVE: checkArgument(OBJECT_TYPE.equals(sourceType), "Type %s can not be cast to %s", sourceType, targetType); return block .checkCast(wrap(targetType.getPrimitiveType())) .invokeVirtual(wrap(targetType.getPrimitiveType()), targetType.getPrimitiveType().getSimpleName() + "Value", 
targetType.getPrimitiveType()); case BOXED_PRIMITVE: case OTHER: return block.checkCast(targetType); } } throw new UnsupportedOperationException("unexpected enum value"); } private static BytecodeBlock castPrimitiveToPrimitive(BytecodeBlock block, Class<?> sourceType, Class<?> targetType) { if (sourceType == boolean.class) { if (targetType == boolean.class) { return block; } } if (sourceType == byte.class) { if (targetType == byte.class) { return block; } if (targetType == char.class) { return block; } if (targetType == short.class) { return block; } if (targetType == int.class) { return block; } if (targetType == long.class) { return block.append(OpCode.I2L); } if (targetType == float.class) { return block.append(OpCode.I2F); } if (targetType == double.class) { return block.append(OpCode.I2D); } } if (sourceType == char.class) { if (targetType == byte.class) { return block.append(OpCode.I2B); } if (targetType == char.class) { return block; } if (targetType == short.class) { return block; } if (targetType == int.class) { return block; } if (targetType == long.class) { return block.append(OpCode.I2L); } if (targetType == float.class) { return block.append(OpCode.I2F); } if (targetType == double.class) { return block.append(OpCode.I2D); } } if (sourceType == short.class) { if (targetType == byte.class) { return block.append(OpCode.I2B); } if (targetType == char.class) { return block.append(OpCode.I2C); } if (targetType == short.class) { return block; } if (targetType == int.class) { return block; } if (targetType == long.class) { return block.append(OpCode.I2L); } if (targetType == float.class) { return block.append(OpCode.I2F); } if (targetType == double.class) { return block.append(OpCode.I2D); } } if (sourceType == int.class) { if (targetType == boolean.class) { return block; } if (targetType == byte.class) { return block.append(OpCode.I2B); } if (targetType == char.class) { return block.append(OpCode.I2C); } if (targetType == short.class) { return 
block.append(OpCode.I2S); } if (targetType == int.class) { return block; } if (targetType == long.class) { return block.append(OpCode.I2L); } if (targetType == float.class) { return block.append(OpCode.I2F); } if (targetType == double.class) { return block.append(OpCode.I2D); } } if (sourceType == long.class) { if (targetType == byte.class) { return block.append(OpCode.L2I).append(OpCode.I2B); } if (targetType == char.class) { return block.append(OpCode.L2I).append(OpCode.I2C); } if (targetType == short.class) { return block.append(OpCode.L2I).append(OpCode.I2S); } if (targetType == int.class) { return block.append(OpCode.L2I); } if (targetType == long.class) { return block; } if (targetType == float.class) { return block.append(OpCode.L2F); } if (targetType == double.class) { return block.append(OpCode.L2D); } } if (sourceType == float.class) { if (targetType == byte.class) { return block.append(OpCode.F2I).append(OpCode.I2B); } if (targetType == char.class) { return block.append(OpCode.F2I).append(OpCode.I2C); } if (targetType == short.class) { return block.append(OpCode.F2I).append(OpCode.I2S); } if (targetType == int.class) { return block.append(OpCode.F2I); } if (targetType == long.class) { return block.append(OpCode.F2L); } if (targetType == float.class) { return block; } if (targetType == double.class) { return block.append(OpCode.F2D); } } if (sourceType == double.class) { if (targetType == byte.class) { return block.append(OpCode.D2I).append(OpCode.I2B); } if (targetType == char.class) { return block.append(OpCode.D2I).append(OpCode.I2C); } if (targetType == short.class) { return block.append(OpCode.D2I).append(OpCode.I2S); } if (targetType == int.class) { return block.append(OpCode.D2I); } if (targetType == long.class) { return block.append(OpCode.D2L); } if (targetType == float.class) { return block.append(OpCode.D2F); } if (targetType == double.class) { return block; } } throw new IllegalArgumentException(format("Type %s can not be cast to %s", 
sourceType, targetType)); } private static TypeKind getTypeKind(ParameterizedType type) { if (type.isPrimitive()) { return TypeKind.PRIMITIVE; } if (unwrapPrimitiveType(type) != null) { return TypeKind.BOXED_PRIMITVE; } return TypeKind.OTHER; } private static Class<?> unwrapPrimitiveType(ParameterizedType boxedPrimitiveType) { switch (boxedPrimitiveType.getJavaClassName()) { case "java.lang.Boolean": return boolean.class; case "java.lang.Byte": return byte.class; case "java.lang.Character": return char.class; case "java.lang.Short": return short.class; case "java.lang.Integer": return int.class; case "java.lang.Long": return long.class; case "java.lang.Float": return float.class; case "java.lang.Double": return double.class; default: return null; } } @Override protected String formatOneLine() { return "((" + getType().getSimpleName() + ") " + instance + ")"; } @Override public List<BytecodeNode> getChildNodes() { return ImmutableList.<BytecodeNode>of(instance); } private enum TypeKind { PRIMITIVE, BOXED_PRIMITVE, OTHER } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.http.netty; import java.nio.charset.StandardCharsets; import org.elasticsearch.cache.recycler.MockPageCacheRecycler; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.netty.pipelining.OrderedDownstreamChannelEvent; import org.elasticsearch.http.netty.pipelining.OrderedUpstreamMessageEvent; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelPipelineFactory; import org.jboss.netty.channel.ExceptionEvent; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.handler.codec.http.DefaultHttpResponse; import 
org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpResponse; import org.jboss.netty.handler.codec.http.QueryStringDecoder; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import static org.elasticsearch.common.settings.Settings.settingsBuilder; import static org.elasticsearch.http.netty.NettyHttpClient.returnHttpResponseBodies; import static org.elasticsearch.http.netty.NettyHttpServerTransport.HttpChannelPipelineFactory; import static org.hamcrest.Matchers.*; import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH; import static org.jboss.netty.handler.codec.http.HttpResponseStatus.OK; import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1; /** * This test just tests, if he pipelining works in general with out any connection the elasticsearch handler */ public class NettyHttpServerPipeliningTests extends ESTestCase { private NetworkService networkService; private ThreadPool threadPool; private MockPageCacheRecycler mockPageCacheRecycler; private MockBigArrays bigArrays; private CustomNettyHttpServerTransport httpServerTransport; @Before public void setup() throws Exception { networkService = new NetworkService(Settings.EMPTY); threadPool = new ThreadPool("test"); mockPageCacheRecycler = new MockPageCacheRecycler(Settings.EMPTY, threadPool); bigArrays = new MockBigArrays(mockPageCacheRecycler, new NoneCircuitBreakerService()); } @After public void shutdown() throws Exception { if (threadPool != null) { threadPool.shutdownNow(); } if (httpServerTransport != null) { httpServerTransport.close(); } } @Test public void testThatHttpPipeliningWorksWhenEnabled() throws Exception { Settings settings = settingsBuilder().put("http.pipelining", true).build(); httpServerTransport = new 
CustomNettyHttpServerTransport(settings); httpServerTransport.start(); InetSocketTransportAddress transportAddress = (InetSocketTransportAddress) httpServerTransport.boundAddress().boundAddress(); List<String> requests = Arrays.asList("/firstfast", "/slow?sleep=500", "/secondfast", "/slow?sleep=1000", "/thirdfast"); try (NettyHttpClient nettyHttpClient = new NettyHttpClient()) { Collection<HttpResponse> responses = nettyHttpClient.sendRequests(transportAddress.address(), requests.toArray(new String[]{})); Collection<String> responseBodies = returnHttpResponseBodies(responses); assertThat(responseBodies, contains("/firstfast", "/slow?sleep=500", "/secondfast", "/slow?sleep=1000", "/thirdfast")); } } @Test public void testThatHttpPipeliningCanBeDisabled() throws Exception { Settings settings = settingsBuilder().put("http.pipelining", false).build(); httpServerTransport = new CustomNettyHttpServerTransport(settings); httpServerTransport.start(); InetSocketTransportAddress transportAddress = (InetSocketTransportAddress) httpServerTransport.boundAddress().boundAddress(); List<String> requests = Arrays.asList("/slow?sleep=1000", "/firstfast", "/secondfast", "/thirdfast", "/slow?sleep=500"); try (NettyHttpClient nettyHttpClient = new NettyHttpClient()) { Collection<HttpResponse> responses = nettyHttpClient.sendRequests(transportAddress.address(), requests.toArray(new String[]{})); List<String> responseBodies = new ArrayList<>(returnHttpResponseBodies(responses)); // we cannot be sure about the order of the fast requests, but the slow ones should have to be last assertThat(responseBodies, hasSize(5)); assertThat(responseBodies.get(3), is("/slow?sleep=500")); assertThat(responseBodies.get(4), is("/slow?sleep=1000")); } } class CustomNettyHttpServerTransport extends NettyHttpServerTransport { private final ExecutorService executorService; public CustomNettyHttpServerTransport(Settings settings) { super(settings, NettyHttpServerPipeliningTests.this.networkService, 
NettyHttpServerPipeliningTests.this.bigArrays); this.executorService = Executors.newFixedThreadPool(5); } @Override public ChannelPipelineFactory configureServerChannelPipelineFactory() { return new CustomHttpChannelPipelineFactory(this, executorService); } @Override public HttpServerTransport stop() { executorService.shutdownNow(); return super.stop(); } } private class CustomHttpChannelPipelineFactory extends HttpChannelPipelineFactory { private final ExecutorService executorService; public CustomHttpChannelPipelineFactory(NettyHttpServerTransport transport, ExecutorService executorService) { super(transport, randomBoolean()); this.executorService = executorService; } @Override public ChannelPipeline getPipeline() throws Exception { ChannelPipeline pipeline = super.getPipeline(); pipeline.replace("handler", "handler", new PossiblySlowUpstreamHandler(executorService)); return pipeline; } } class PossiblySlowUpstreamHandler extends SimpleChannelUpstreamHandler { private final ExecutorService executorService; public PossiblySlowUpstreamHandler(ExecutorService executorService) { this.executorService = executorService; } @Override public void messageReceived(final ChannelHandlerContext ctx, final MessageEvent e) throws Exception { executorService.submit(new PossiblySlowRunnable(ctx, e)); } @Override public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) { e.getCause().printStackTrace(); e.getChannel().close(); } } class PossiblySlowRunnable implements Runnable { private ChannelHandlerContext ctx; private MessageEvent e; public PossiblySlowRunnable(ChannelHandlerContext ctx, MessageEvent e) { this.ctx = ctx; this.e = e; } @Override public void run() { HttpRequest request; OrderedUpstreamMessageEvent oue = null; if (e instanceof OrderedUpstreamMessageEvent) { oue = (OrderedUpstreamMessageEvent) e; request = (HttpRequest) oue.getMessage(); } else { request = (HttpRequest) e.getMessage(); } ChannelBuffer buffer = 
ChannelBuffers.copiedBuffer(request.getUri(), StandardCharsets.UTF_8); DefaultHttpResponse httpResponse = new DefaultHttpResponse(HTTP_1_1, OK); httpResponse.headers().add(CONTENT_LENGTH, buffer.readableBytes()); httpResponse.setContent(buffer); QueryStringDecoder decoder = new QueryStringDecoder(request.getUri()); final int timeout = request.getUri().startsWith("/slow") && decoder.getParameters().containsKey("sleep") ? Integer.valueOf(decoder.getParameters().get("sleep").get(0)) : 0; if (timeout > 0) { try { Thread.sleep(timeout); } catch (InterruptedException e1) { Thread.currentThread().interrupt(); throw new RuntimeException(); } } if (oue != null) { ctx.sendDownstream(new OrderedDownstreamChannelEvent(oue, 0, true, httpResponse)); } else { ctx.getChannel().write(httpResponse); } } } }
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Copyright 2006 Google Inc. All rights reserved. package com.google.devtools.build.lib.rules.cpp; import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.fail; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.FailAction; import com.google.devtools.build.lib.actions.extra.CppLinkInfo; import com.google.devtools.build.lib.actions.extra.ExtraActionInfo; import com.google.devtools.build.lib.actions.util.ActionsTestUtil; import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.OutputGroupInfo; import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException; import com.google.devtools.build.lib.analysis.test.InstrumentedFilesProvider; import com.google.devtools.build.lib.analysis.util.AnalysisMock; import com.google.devtools.build.lib.analysis.util.BuildViewTestCase; import com.google.devtools.build.lib.packages.ImplicitOutputsFunction; import com.google.devtools.build.lib.packages.util.MockCcSupport; import 
com.google.devtools.build.lib.skyframe.ConfiguredTargetAndData; import com.google.devtools.build.lib.testutil.TestRuleClassProvider; import com.google.devtools.build.lib.util.FileType; import com.google.devtools.build.lib.util.StringUtil; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.view.config.crosstool.CrosstoolConfig; import com.google.protobuf.TextFormat; import java.util.ArrayList; import java.util.List; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * "White-box" unit test of cc_library rule. */ @RunWith(JUnit4.class) public class CcLibraryConfiguredTargetTest extends BuildViewTestCase { private static final PathFragment STL_CPPMAP = PathFragment.create("stl.cppmap"); private static final PathFragment CROSSTOOL_CPPMAP = PathFragment.create("crosstool.cppmap"); @Override protected ConfiguredRuleClassProvider getRuleClassProvider() { ConfiguredRuleClassProvider.Builder builder = new ConfiguredRuleClassProvider.Builder(); TestRuleClassProvider.addStandardRules(builder); return builder.addRuleDefinition(new TestRuleClassProvider.MakeVariableTesterRule()).build(); } @Before public final void createFiles() throws Exception { scratch.file( "hello/BUILD", "cc_library(", " name = 'hello',", " srcs = ['hello.cc'],", ")", "cc_library(", " name = 'hello_static',", " srcs = ['hello.cc'],", " linkstatic = 1,", ")", "cc_library(", " name = 'hello_alwayslink',", " srcs = ['hello.cc'],", " alwayslink = 1,", ")", "cc_binary(", " name = 'hello_bin',", " srcs = ['hello_main.cc'],", ")"); scratch.file( "hello/hello.cc", "#include <stdio.h>", "int hello_world() { printf(\"Hello, world!\\n\"); }"); scratch.file( "hello/hello_main.cc", "#include <stdio.h>", "int main() { printf(\"Hello, world!\\n\"); }"); } private CppCompileAction getCppCompileAction(String label) throws Exception { return getCppCompileAction(getConfiguredTarget(label)); } private CppCompileAction 
getCppCompileAction(ConfiguredTarget target) throws Exception { List<CppCompileAction> compilationSteps = actionsTestUtil().findTransitivePrerequisitesOf( ActionsTestUtil.getFirstArtifactEndingWith(getFilesToBuild(target), ".a"), CppCompileAction.class); return compilationSteps.get(0); } private CppModuleMapAction getCppModuleMapAction(String label) throws Exception { ConfiguredTarget target = getConfiguredTarget(label); CppModuleMap cppModuleMap = target.get(CcCompilationInfo.PROVIDER).getCcCompilationContext().getCppModuleMap(); return (CppModuleMapAction) getGeneratingAction(cppModuleMap.getArtifact()); } private void assertNoCppModuleMapAction(String label) throws Exception { ConfiguredTarget target = getConfiguredTarget(label); assertThat(target.get(CcCompilationInfo.PROVIDER).getCcCompilationContext().getCppModuleMap()) .isNull(); } public void checkWrongExtensionInArtifactNamePattern( String categoryName, ImmutableList<String> correctExtensions) throws Exception { AnalysisMock.get() .ccSupport() .setupCrosstool( mockToolsConfig, MockCcSupport.COPY_DYNAMIC_LIBRARIES_TO_BINARY_CONFIGURATION, MockCcSupport.TARGETS_WINDOWS_CONFIGURATION, "supports_interface_shared_objects: true", "artifact_name_pattern {" + " category_name: '" + categoryName + "'" + " prefix: ''" + " extension: '.wrong_ext'" + "}"); try { useConfiguration(); fail("Should fail"); } catch (InvalidConfigurationException e) { assertThat(e) .hasMessageThat() .contains( String.format( "Unrecognized file extension '.wrong_ext', allowed " + "extensions are %s, please check artifact_name_pattern configuration for %s " + "in your CROSSTOOL.", StringUtil.joinEnglishList(correctExtensions, "or", "'"), categoryName)); } } @Test public void testDefinesAndMakeVariables() throws Exception { ConfiguredTarget l = scratchConfiguredTarget("a", "l", "cc_library(name='l', srcs=['l.cc'], defines=['V=$(FOO)'], toolchains=[':v'])", "make_variable_tester(name='v', variables={'FOO': 'BAR'})"); 
assertThat(l.get(CcCompilationInfo.PROVIDER).getCcCompilationContext().getDefines()) .contains("V=BAR"); } @Test public void testMisconfiguredCrosstoolRaisesErrorWhenLinking() throws Exception { AnalysisMock.get() .ccSupport() .setupCrosstool( mockToolsConfig, MockCcSupport.NO_LEGACY_FEATURES_FEATURE, MockCcSupport.EMPTY_COMPILE_ACTION_CONFIG, MockCcSupport.PIC_FEATURE); useConfiguration(); checkError( "test", "test", "Expected action_config for 'c++-link-static-library' to be configured", "cc_library(name = 'test', srcs = ['test.cc'])"); } @Test public void testMisconfiguredCrosstoolRaisesErrorWhenCompiling() throws Exception { AnalysisMock.get() .ccSupport() .setupCrosstool( mockToolsConfig, MockCcSupport.NO_LEGACY_FEATURES_FEATURE, MockCcSupport.EMPTY_STATIC_LIBRARY_ACTION_CONFIG, MockCcSupport.PIC_FEATURE); useConfiguration(); checkError( "test", "test", "Expected action_config for 'c++-compile' to be configured", "cc_library(name = 'test', srcs = ['test.cc'])"); } @Test public void testFilesToBuild() throws Exception { useConfiguration("--cpu=k8"); ConfiguredTarget hello = getConfiguredTarget("//hello:hello"); String cpu = getTargetConfiguration().getCpu(); Artifact archive = getBinArtifact("libhello.a", hello); Artifact implSharedObject = getBinArtifact("libhello.so", hello); Artifact implInterfaceSharedObject = getBinArtifact("libhello.ifso", hello); Artifact implSharedObjectLink = getSharedArtifact("_solib_" + cpu + "/libhello_Slibhello.so", hello); Artifact implInterfaceSharedObjectLink = getSharedArtifact("_solib_" + cpu + "/libhello_Slibhello.ifso", hello); assertThat(getFilesToBuild(hello)).containsExactly(archive, implSharedObject, implInterfaceSharedObject); assertThat(LinkerInputs.toLibraryArtifacts( hello.getProvider(CcNativeLibraryProvider.class).getTransitiveCcNativeLibraries())) .containsExactly(implInterfaceSharedObjectLink); assertThat( hello .get(CcLinkingInfo.PROVIDER) .getCcLinkParamsStore() .get(/* linkingStatically= */ false, /* 
linkShared= */ false) .getDynamicLibrariesForRuntime()) .containsExactly(implSharedObjectLink); } @Test public void testFilesToBuildWithoutDSO() throws Exception { CrosstoolConfig.CrosstoolRelease.Builder release = CrosstoolConfig.CrosstoolRelease.newBuilder() .mergeFrom(CrosstoolConfigurationHelper.simpleCompleteToolchainProto()); release.getToolchainBuilder(0) .setTargetCpu("k8") .setCompiler("compiler") .clearLinkingModeFlags(); scratch.file("crosstool/BUILD", "cc_toolchain_suite(", " name = 'crosstool',", " toolchains = {'k8|compiler': ':cc-compiler-k8'})", "filegroup(name = 'empty')", "cc_toolchain(", " name = 'cc-compiler-k8',", " output_licenses = ['unencumbered'],", " cpu = 'k8',", " ar_files = ':empty',", " as_files = ':empty',", " compiler_files = ':empty',", " dwp_files = ':empty',", " coverage_files = ':empty',", " linker_files = ':empty',", " strip_files = ':empty',", " objcopy_files = ':empty',", " static_runtime_libs = [':empty'],", " dynamic_runtime_libs = [':empty'],", " all_files = ':empty',", " licenses = ['unencumbered'])"); scratch.file("crosstool/CROSSTOOL", TextFormat.printToString(release)); // This is like the preceding test, but with a toolchain that can't build '.so' files useConfiguration("--crosstool_top=//crosstool:crosstool", "--compiler=compiler", "--cpu=k8", "--host_cpu=k8"); ConfiguredTarget hello = getConfiguredTarget("//hello:hello"); Artifact archive = getBinArtifact("libhello.a", hello); assertThat(getFilesToBuild(hello)).containsExactly(archive); } @Test public void testFilesToBuildWithInterfaceSharedObjects() throws Exception { useConfiguration("--interface_shared_objects"); useConfiguration("--cpu=k8"); ConfiguredTarget hello = getConfiguredTarget("//hello:hello"); String cpu = getTargetConfiguration().getCpu(); Artifact archive = getBinArtifact("libhello.a", hello); Artifact sharedObject = getBinArtifact("libhello.ifso", hello); Artifact implSharedObject = getBinArtifact("libhello.so", hello); Artifact sharedObjectLink = 
getSharedArtifact("_solib_" + cpu + "/libhello_Slibhello.ifso", hello); Artifact implSharedObjectLink = getSharedArtifact("_solib_" + cpu + "/libhello_Slibhello.so", hello); assertThat(getFilesToBuild(hello)).containsExactly(archive, sharedObject, implSharedObject); assertThat(LinkerInputs.toLibraryArtifacts( hello.getProvider(CcNativeLibraryProvider.class).getTransitiveCcNativeLibraries())) .containsExactly(sharedObjectLink); assertThat( hello .get(CcLinkingInfo.PROVIDER) .getCcLinkParamsStore() .get(/* linkingStatically= */ false, /* linkShared= */ false) .getDynamicLibrariesForRuntime()) .containsExactly(implSharedObjectLink); } @Test public void testEmptyLinkopts() throws Exception { ConfiguredTarget hello = getConfiguredTarget("//hello:hello"); assertThat( hello .get(CcLinkingInfo.PROVIDER) .getCcLinkParamsStore() .getCcLinkParams(false, false) .getLinkopts() .isEmpty()) .isTrue(); } @Test public void testSoName() throws Exception { // Without interface shared libraries. useConfiguration("--nointerface_shared_objects"); ConfiguredTarget hello = getConfiguredTarget("//hello:hello"); Artifact sharedObject = getOnlyElement(FileType.filter(getFilesToBuild(hello), CppFileTypes.SHARED_LIBRARY)); CppLinkAction action = (CppLinkAction) getGeneratingAction(sharedObject); for (String option : action.getLinkCommandLine().getLinkopts()) { assertThat(option).doesNotContain("-Wl,-soname"); } // With interface shared libraries. 
useConfiguration("--interface_shared_objects"); useConfiguration("--cpu=k8"); hello = getConfiguredTarget("//hello:hello"); sharedObject = FileType.filter(getFilesToBuild(hello), CppFileTypes.SHARED_LIBRARY).iterator().next(); action = (CppLinkAction) getGeneratingAction(sharedObject); assertThat(action.getLinkCommandLine().getLinkopts()) .contains("-Wl,-soname=libhello_Slibhello.so"); } @Test public void testCppLinkActionExtraActionInfoWithoutSharedLibraries() throws Exception { useConfiguration("--nointerface_shared_objects"); ConfiguredTarget hello = getConfiguredTarget("//hello:hello"); Artifact sharedObject = getOnlyElement(FileType.filter(getFilesToBuild(hello), CppFileTypes.SHARED_LIBRARY)); CppLinkAction action = (CppLinkAction) getGeneratingAction(sharedObject); ExtraActionInfo.Builder builder = action.getExtraActionInfo(actionKeyContext); ExtraActionInfo info = builder.build(); assertThat(info.getMnemonic()).isEqualTo("CppLink"); CppLinkInfo cppLinkInfo = info.getExtension(CppLinkInfo.cppLinkInfo); assertThat(cppLinkInfo).isNotNull(); Iterable<String> inputs = Artifact.asExecPaths(action.getLinkCommandLine().getLinkerInputArtifacts()); assertThat(cppLinkInfo.getInputFileList()).containsExactlyElementsIn(inputs); assertThat(cppLinkInfo.getOutputFile()) .isEqualTo(action.getPrimaryOutput().getExecPathString()); assertThat(cppLinkInfo.hasInterfaceOutputFile()).isFalse(); assertThat(cppLinkInfo.getLinkTargetType()) .isEqualTo(action.getLinkCommandLine().getLinkTargetType().name()); assertThat(cppLinkInfo.getLinkStaticness()) .isEqualTo(action.getLinkCommandLine().getLinkingMode().name()); Iterable<String> linkstamps = Artifact.asExecPaths(action.getLinkstampObjects()); assertThat(cppLinkInfo.getLinkStampList()).containsExactlyElementsIn(linkstamps); Iterable<String> buildInfoHeaderArtifacts = Artifact.asExecPaths(action.getBuildInfoHeaderArtifacts()); assertThat(cppLinkInfo.getBuildInfoHeaderArtifactList()) 
.containsExactlyElementsIn(buildInfoHeaderArtifacts);
    assertThat(cppLinkInfo.getLinkOptList()).containsExactlyElementsIn(action.getArguments());
  }

  // Verifies that the CppLink extra-action proto mirrors the state of the shared-library link
  // action: inputs, output, link target type, staticness, linkstamps, build-info headers, options.
  @Test
  public void testCppLinkActionExtraActionInfoWithSharedLibraries() throws Exception {
    useConfiguration("--cpu=k8");
    ConfiguredTarget hello = getConfiguredTarget("//hello:hello");
    Artifact sharedObject =
        FileType.filter(getFilesToBuild(hello), CppFileTypes.SHARED_LIBRARY).iterator().next();
    CppLinkAction action = (CppLinkAction) getGeneratingAction(sharedObject);

    ExtraActionInfo.Builder builder = action.getExtraActionInfo(actionKeyContext);
    ExtraActionInfo info = builder.build();
    assertThat(info.getMnemonic()).isEqualTo("CppLink");

    CppLinkInfo cppLinkInfo = info.getExtension(CppLinkInfo.cppLinkInfo);
    assertThat(cppLinkInfo).isNotNull();

    Iterable<String> inputs =
        Artifact.asExecPaths(action.getLinkCommandLine().getLinkerInputArtifacts());
    assertThat(cppLinkInfo.getInputFileList()).containsExactlyElementsIn(inputs);
    assertThat(cppLinkInfo.getOutputFile())
        .isEqualTo(action.getPrimaryOutput().getExecPathString());
    assertThat(cppLinkInfo.getLinkTargetType())
        .isEqualTo(action.getLinkCommandLine().getLinkTargetType().name());
    assertThat(cppLinkInfo.getLinkStaticness())
        .isEqualTo(action.getLinkCommandLine().getLinkingMode().name());
    Iterable<String> linkstamps = Artifact.asExecPaths(action.getLinkstampObjects());
    assertThat(cppLinkInfo.getLinkStampList()).containsExactlyElementsIn(linkstamps);
    Iterable<String> buildInfoHeaderArtifacts =
        Artifact.asExecPaths(action.getBuildInfoHeaderArtifacts());
    assertThat(cppLinkInfo.getBuildInfoHeaderArtifactList())
        .containsExactlyElementsIn(buildInfoHeaderArtifacts);
    assertThat(cppLinkInfo.getLinkOptList()).containsExactlyElementsIn(action.getArguments());
  }

  // A toolchain-tweaked static-link configuration can produce an archive with a non-default
  // extension (.lib); the archiver command line must still reference that output path.
  @Test
  public void testLinkActionCanConsumeArtifactExtensions() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.STATIC_LINK_TWEAKED_CONFIGURATION);
    useConfiguration("--features=" + Link.LinkTargetType.STATIC_LIBRARY.getActionName());
    ConfiguredTarget hello = getConfiguredTarget("//hello:hello");
    Artifact archive =
        FileType.filter(getFilesToBuild(hello), FileType.of(".lib")).iterator().next();
    CppLinkAction action = (CppLinkAction) getGeneratingAction(archive);
    assertThat(action.getArguments()).contains(archive.getExecPathString());
  }

  // artifact_name_pattern in the CROSSTOOL lets the toolchain rename object files (here .obj).
  @Test
  public void testObjectFileNamesCanBeSpecifiedInToolchain() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(
            mockToolsConfig,
            "artifact_name_pattern {"
                + " category_name: 'object_file'"
                + " prefix: ''"
                + " extension: '.obj'"
                + "}");
    useConfiguration();
    ConfiguredTarget hello = getConfiguredTarget("//hello:hello");
    assertThat(artifactByPath(getFilesToBuild(hello), ".a", ".obj")).isNotNull();
  }

  // Full set of Windows-style artifact name patterns (.obj/.lib/.lo.lib/.exe/.dll/.if.lib):
  // every artifact category produced for //hello targets must pick up its configured extension.
  @Test
  public void testWindowsFileNamePatternsCanBeSpecifiedInToolchain() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(
            mockToolsConfig,
            MockCcSupport.COPY_DYNAMIC_LIBRARIES_TO_BINARY_CONFIGURATION,
            MockCcSupport.TARGETS_WINDOWS_CONFIGURATION,
            "needsPic: false",
            "supports_interface_shared_objects: true",
            "artifact_name_pattern {"
                + " category_name: 'object_file'"
                + " prefix: ''"
                + " extension: '.obj'"
                + "}",
            "artifact_name_pattern {"
                + " category_name: 'static_library'"
                + " prefix: ''"
                + " extension: '.lib'"
                + "}",
            "artifact_name_pattern {"
                + " category_name: 'alwayslink_static_library'"
                + " prefix: ''"
                + " extension: '.lo.lib'"
                + "}",
            "artifact_name_pattern {"
                + " category_name: 'executable'"
                + " prefix: ''"
                + " extension: '.exe'"
                + "}",
            "artifact_name_pattern {"
                + " category_name: 'dynamic_library'"
                + " prefix: ''"
                + " extension: '.dll'"
                + "}",
            "artifact_name_pattern {"
                + " category_name: 'interface_library'"
                + " prefix: ''"
                + " extension: '.if.lib'"
                + "}");
    useConfiguration();

    ConfiguredTarget hello = getConfiguredTarget("//hello:hello");
    Artifact helloObj =
        getBinArtifact("_objs/hello/hello.obj", getConfiguredTarget("//hello:hello"));
    CppCompileAction helloObjAction = (CppCompileAction) getGeneratingAction(helloObj);
    assertThat(helloObjAction).isNotNull();

    Artifact helloLib =
        FileType.filter(getFilesToBuild(hello), CppFileTypes.ARCHIVE).iterator().next();
    assertThat(helloLib.getExecPathString()).endsWith("hello.lib");

    ConfiguredTarget helloAlwaysLink = getConfiguredTarget("//hello:hello_alwayslink");
    Artifact helloLibAlwaysLink =
        FileType.filter(getFilesToBuild(helloAlwaysLink), CppFileTypes.ALWAYS_LINK_LIBRARY)
            .iterator()
            .next();
    assertThat(helloLibAlwaysLink.getExecPathString()).endsWith("hello_alwayslink.lo.lib");

    ConfiguredTarget helloBin = getConfiguredTarget("//hello:hello_bin");
    Artifact helloBinExe = getFilesToBuild(helloBin).iterator().next();
    assertThat(helloBinExe.getExecPathString()).endsWith("hello_bin.exe");

    assertThat(
            artifactsToStrings(
                getOutputGroup(hello, CcLinkingHelper.DYNAMIC_LIBRARY_OUTPUT_GROUP_NAME)))
        .containsExactly("bin hello/hello.dll", "bin hello/hello.if.lib");
  }

  // The next six tests check that a disallowed extension in an artifact_name_pattern for each
  // artifact category is rejected (delegates to checkWrongExtensionInArtifactNamePattern,
  // defined elsewhere in this class).
  @Test
  public void testWrongObjectFileArtifactNamePattern() throws Exception {
    checkWrongExtensionInArtifactNamePattern(
        "object_file", ArtifactCategory.OBJECT_FILE.getAllowedExtensions());
  }

  @Test
  public void testWrongStaticLibraryArtifactNamePattern() throws Exception {
    checkWrongExtensionInArtifactNamePattern(
        "static_library", ArtifactCategory.STATIC_LIBRARY.getAllowedExtensions());
  }

  @Test
  public void testWrongAlwayslinkStaticLibraryArtifactNamePattern() throws Exception {
    checkWrongExtensionInArtifactNamePattern(
        "alwayslink_static_library",
        ArtifactCategory.ALWAYSLINK_STATIC_LIBRARY.getAllowedExtensions());
  }

  @Test
  public void testWrongExecutableArtifactNamePattern() throws Exception {
    checkWrongExtensionInArtifactNamePattern(
        "executable", ArtifactCategory.EXECUTABLE.getAllowedExtensions());
  }

  @Test
  public void testWrongDynamicLibraryArtifactNamePattern() throws Exception {
    checkWrongExtensionInArtifactNamePattern(
        "dynamic_library", ArtifactCategory.DYNAMIC_LIBRARY.getAllowedExtensions());
  }

  @Test
  public void testWrongInterfaceLibraryArtifactNamePattern() throws Exception {
    checkWrongExtensionInArtifactNamePattern(
        "interface_library", ArtifactCategory.INTERFACE_LIBRARY.getAllowedExtensions());
  }

  // The archive base name is templated from the target name (libhello.a) when static links
  // are configured to produce .a archives.
  @Test
  public void testArtifactSelectionBaseNameTemplating() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.STATIC_LINK_AS_DOT_A_CONFIGURATION);
    useConfiguration("--features=" + Link.LinkTargetType.STATIC_LIBRARY.getActionName());
    ConfiguredTarget hello = getConfiguredTarget("//hello:hello");
    Artifact archive =
        FileType.filter(getFilesToBuild(hello), CppFileTypes.ARCHIVE).iterator().next();
    assertThat(archive.getExecPathString()).endsWith("libhello.a");
  }

  @Test
  public void testArtifactsToAlwaysBuild() throws Exception {
    useConfiguration("--cpu=k8");
    // ArtifactsToAlwaysBuild should apply both for static libraries.
    ConfiguredTarget helloStatic = getConfiguredTarget("//hello:hello_static");
    assertThat(artifactsToStrings(getOutputGroup(helloStatic, OutputGroupInfo.HIDDEN_TOP_LEVEL)))
        .containsExactly("bin hello/_objs/hello_static/hello.pic.o");
    Artifact implSharedObject = getBinArtifact("libhello_static.so", helloStatic);
    assertThat(getFilesToBuild(helloStatic)).doesNotContain(implSharedObject);

    // And for shared libraries.
    ConfiguredTarget hello = getConfiguredTarget("//hello:hello");
    // NOTE(review): this assertion reuses helloStatic even though `hello` was just fetched —
    // looks like a copy/paste slip; presumably it was meant to check hello's output group.
    // Confirm before changing, since the test currently passes as written.
    assertThat(artifactsToStrings(getOutputGroup(helloStatic, OutputGroupInfo.HIDDEN_TOP_LEVEL)))
        .containsExactly("bin hello/_objs/hello_static/hello.pic.o");
    implSharedObject = getBinArtifact("libhello.so", hello);
    assertThat(getFilesToBuild(hello)).contains(implSharedObject);
  }

  // HIDDEN_TOP_LEVEL must contain the object files of the whole transitive closure when
  // linked statically.
  @Test
  public void testTransitiveArtifactsToAlwaysBuildStatic() throws Exception {
    useConfiguration("--cpu=k8");
    ConfiguredTarget x =
        scratchConfiguredTarget(
            "foo",
            "x",
            "cc_library(name = 'x', srcs = ['x.cc'], deps = [':y'], linkstatic = 1)",
            "cc_library(name = 'y', srcs = ['y.cc'], deps = [':z'])",
            "cc_library(name = 'z', srcs = ['z.cc'])");
    assertThat(artifactsToStrings(getOutputGroup(x, OutputGroupInfo.HIDDEN_TOP_LEVEL)))
        .containsExactly(
            "bin foo/_objs/x/x.pic.o", "bin foo/_objs/y/y.pic.o", "bin foo/_objs/z/z.pic.o");
  }

  // With header modules enabled, compilation prerequisites include module maps and the .pcm
  // of the dependency.
  @Test
  public void testBuildHeaderModulesAsPrerequisites() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.HEADER_MODULES_FEATURE_CONFIGURATION);
    useConfiguration("--cpu=k8");
    ConfiguredTarget x =
        scratchConfiguredTarget(
            "foo",
            "x",
            "package(features = ['header_modules'])",
            "cc_library(name = 'x', srcs = ['x.cc'], deps = [':y'])",
            "cc_library(name = 'y', hdrs = ['y.h'])");
    assertThat(
            ActionsTestUtil.baseNamesOf(
                getOutputGroup(x, OutputGroupInfo.COMPILATION_PREREQUISITES)))
        .isEqualTo("y.h y.cppmap stl.cppmap crosstool.cppmap x.cppmap y.pic.pcm x.cc");
  }

  // --collect_code_coverage yields a .gcno instrumentation metadata file per compiled source.
  @Test
  public void testCodeCoverage() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.HEADER_MODULES_FEATURE_CONFIGURATION);
    useConfiguration("--cpu=k8", "--collect_code_coverage");
    ConfiguredTarget x =
        scratchConfiguredTarget(
            "foo",
            "x",
            "package(features = ['header_modules'])",
            "cc_library(name = 'x', srcs = ['x.cc'])");
    assertThat(
            ActionsTestUtil.baseArtifactNames(
                x.getProvider(InstrumentedFilesProvider.class).getInstrumentationMetadataFiles()))
        .containsExactly("x.pic.gcno");
  }

  // A package that disables header_modules must not get module_name flags, even when it
  // depends on a package that builds modules.
  @Test
  public void testDisablingHeaderModulesWhenDependingOnModuleBuildTransitively()
      throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.HEADER_MODULES_FEATURE_CONFIGURATION);
    useConfiguration();
    scratch.file("module/BUILD",
        "package(features = ['header_modules'])",
        "cc_library(",
        "    name = 'module',",
        "    srcs = ['a.cc', 'a.h'],",
        ")");
    scratch.file("nomodule/BUILD",
        "package(features = ['-header_modules'])",
        "cc_library(",
        "    name = 'nomodule',",
        "    srcs = ['a.cc', 'a.h'],",
        "    deps = ['//module']",
        ")");
    CppCompileAction moduleAction = getCppCompileAction("//module:module");
    assertThat(moduleAction.getCompilerOptions()).contains("module_name://module:module");
    CppCompileAction noModuleAction = getCppCompileAction("//nomodule:nomodule");
    assertThat(noModuleAction.getCompilerOptions()).doesNotContain("module_name://module:module");
  }

  /**
   * Returns the non-system module maps in {@code input}.
   *
   * <p>"Non-system" here means any .cppmap except the stl/crosstool ones (filtered by the
   * {@code STL_CPPMAP}/{@code CROSSTOOL_CPPMAP} path suffixes).
   */
  private Iterable<Artifact> getNonSystemModuleMaps(Iterable<Artifact> input) {
    return Iterables.filter(input, new Predicate<Artifact>() {
      @Override
      public boolean apply(Artifact input) {
        PathFragment path = input.getExecPath();
        return CppFileTypes.CPP_MODULE_MAP.matches(path)
            && !path.endsWith(STL_CPPMAP)
            && !path.endsWith(CROSSTOOL_CPPMAP);
      }
    });
  }

  /**
   * Returns the header module artifacts in {@code input}.
   */
  private Iterable<Artifact> getHeaderModules(Iterable<Artifact> input) {
    return Iterables.filter(input, new Predicate<Artifact>() {
      @Override
      public boolean apply(Artifact input) {
        return CppFileTypes.CPP_MODULE.matches(input.getExecPath());
      }
    });
  }

  /**
   * Returns the flags in {@code input} that reference a header module.
   *
   * @return the base names of the matched module files, in encounter order
   */
  private Iterable<String> getHeaderModuleFlags(Iterable<String> input) {
    List<String> names = new ArrayList<>();
    for (String flag : input) {
      if (CppFileTypes.CPP_MODULE.matches(flag)) {
        names.add(PathFragment.create(flag).getBaseName());
      }
    }
    return names;
  }

  // Module compilation: header-only 'b' produces a .pcm covering both its hdrs and textual
  // hdrs; 'a' consumes b's module (transitively available) and its module map.
  @Test
  public void testCompileHeaderModules() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(
            mockToolsConfig,
            ""
                + "feature { name: 'header_modules' implies: 'use_header_modules' }"
                + "feature { name: 'module_maps' }"
                + "feature { name: 'use_header_modules' }");
    useConfiguration("--cpu=k8");
    scratch.file("module/BUILD",
        "package(features = ['header_modules'])",
        "cc_library(",
        "    name = 'a',",
        "    srcs = ['a.h', 'a.cc'],",
        "    deps = ['b']",
        ")",
        "cc_library(",
        "    name = 'b',",
        "    srcs = ['b.h'],",
        "    textual_hdrs = ['t.h'],",
        ")");
    ConfiguredTarget moduleB = getConfiguredTarget("//module:b");
    Artifact bModuleArtifact = getBinArtifact("_objs/b/b.pic.pcm", moduleB);
    CppCompileAction bModuleAction = (CppCompileAction) getGeneratingAction(bModuleArtifact);
    assertThat(bModuleAction.getIncludeScannerSources()).containsExactly(
        getSourceArtifact("module/b.h"), getSourceArtifact("module/t.h"));
    assertThat(bModuleAction.getInputs()).contains(getGenfilesArtifact("b.cppmap", moduleB));
    ConfiguredTarget moduleA = getConfiguredTarget("//module:a");
    Artifact aObjectArtifact = getBinArtifact("_objs/a/a.pic.o", moduleA);
    CppCompileAction aObjectAction = (CppCompileAction) getGeneratingAction(aObjectArtifact);
    assertThat(aObjectAction.getIncludeScannerSources()).containsExactly(
        getSourceArtifact("module/a.cc"));
    assertThat(aObjectAction.getCcCompilationContext().getTransitiveModules(true))
        .contains(getBinArtifact("_objs/b/b.pic.pcm", moduleB));
    assertThat(aObjectAction.getInputs()).contains(getGenfilesArtifact("b.cppmap", moduleB));
    assertNoEvents();
  }

  // Fixture: several sources (local, subpackage, filegroup) that share the base name "a"
  // (plus a differently-cased "A.c") to exercise object-file de-duplication.
  private void setupPackagesForSourcesWithSameBaseNameTests() throws Exception {
    scratch.file(
        "foo/BUILD",
        "cc_library(",
        "    name = 'lib',",
        "    srcs = ['a.cc', 'subpkg1/b.cc', 'subpkg1/a.c', '//bar:srcs', 'subpkg2/A.c'],",
        ")");
    scratch.file("bar/BUILD", "filegroup(name = 'srcs', srcs = ['a.cpp'])");
  }

  // Same-base-name sources are disambiguated into numbered object directories
  // (_objs/lib/0, /1, ...), each generated from the expected source.
  @Test
  public void testContainingSourcesWithSameBaseName() throws Exception {
    AnalysisMock.get().ccSupport().setup(mockToolsConfig);
    useConfiguration("--cpu=k8");
    setupPackagesForSourcesWithSameBaseNameTests();
    getConfiguredTarget("//foo:lib");

    Artifact a0 = getBinArtifact("_objs/lib/0/a.pic.o", getConfiguredTarget("//foo:lib"));
    Artifact a1 = getBinArtifact("_objs/lib/1/a.pic.o", getConfiguredTarget("//foo:lib"));
    Artifact a2 = getBinArtifact("_objs/lib/2/a.pic.o", getConfiguredTarget("//foo:lib"));
    Artifact a3 = getBinArtifact("_objs/lib/3/A.pic.o", getConfiguredTarget("//foo:lib"));
    Artifact b = getBinArtifact("_objs/lib/b.pic.o", getConfiguredTarget("//foo:lib"));

    assertThat(getGeneratingAction(a0)).isNotNull();
    assertThat(getGeneratingAction(a1)).isNotNull();
    assertThat(getGeneratingAction(a2)).isNotNull();
    assertThat(getGeneratingAction(a3)).isNotNull();
    assertThat(getGeneratingAction(b)).isNotNull();

    assertThat(getGeneratingAction(a0).getInputs()).contains(getSourceArtifact("foo/a.cc"));
    assertThat(getGeneratingAction(a1).getInputs()).contains(getSourceArtifact("foo/subpkg1/a.c"));
    assertThat(getGeneratingAction(a2).getInputs()).contains(getSourceArtifact("bar/a.cpp"));
    assertThat(getGeneratingAction(a3).getInputs()).contains(getSourceArtifact("foo/subpkg2/A.c"));
    assertThat(getGeneratingAction(b).getInputs()).contains(getSourceArtifact("foo/subpkg1/b.cc"));
  }

  // Fixture: a module-enabled package and a module-disabled package with a dependency chain
  // between them; useHeaderModules optionally turns on consumption of modules in //nomodule.
  private void setupPackagesForModuleTests(boolean useHeaderModules) throws Exception {
    scratch.file("module/BUILD",
        "package(features = ['header_modules'])",
        "cc_library(",
        "    name = 'b',",
        "    srcs = ['b.h'],",
        "    deps = ['//nomodule:a'],",
        ")",
        "cc_library(",
        "    name = 'g',",
        "    srcs = ['g.h', 'g.cc'],",
        "    deps = ['//nomodule:c'],",
        ")",
        "cc_library(",
        "    name = 'j',",
        "    srcs = ['j.h', 'j.cc'],",
        "    deps = ['//nomodule:c', '//nomodule:i'],",
        ")");
    scratch.file("nomodule/BUILD",
        "package(features = ['-header_modules'"
            + (useHeaderModules ? ", 'use_header_modules'" : "")
            + "])",
        "cc_library(",
        "    name = 'y',",
        "    srcs = ['y.h'],",
        ")",
        "cc_library(",
        "    name = 'z',",
        "    srcs = ['z.h'],",
        "    deps = [':y'],",
        ")",
        "cc_library(",
        "    name = 'a',",
        "    srcs = ['a.h'],",
        "    deps = [':z'],",
        ")",
        "cc_library(",
        "    name = 'c',",
        "    srcs = ['c.h', 'c.cc'],",
        "    deps = ['//module:b'],",
        ")",
        "cc_library(",
        "    name = 'd',",
        "    srcs = ['d.h', 'd.cc'],",
        "    deps = [':c'],",
        ")",
        "cc_library(",
        "    name = 'e',",
        "    srcs = ['e.h'],",
        "    deps = [':a'],",
        ")",
        "cc_library(",
        "    name = 'f',",
        "    srcs = ['f.h', 'f.cc'],",
        "    deps = [':e'],",
        ")",
        "cc_library(",
        "    name = 'h',",
        "    srcs = ['h.h', 'h.cc'],",
        "    deps = ['//module:g'],",
        ")",
        "cc_library(",
        "    name = 'i',",
        "    srcs = ['i.h', 'i.cc'],",
        "    deps = [':h'],",
        ")");
  }

  // Module-producing targets feed .pcm files and module maps only where the transitive
  // dependency structure requires them; no-module consumers get neither.
  @Test
  public void testCompileHeaderModulesTransitively() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.HEADER_MODULES_FEATURE_CONFIGURATION);
    useConfiguration("--cpu=k8");
    setupPackagesForModuleTests(/*useHeaderModules=*/false);

    // The //nomodule:f target only depends on non-module targets, thus it should be module-free.
    ConfiguredTarget nomoduleF = getConfiguredTarget("//nomodule:f");
    ConfiguredTarget nomoduleE = getConfiguredTarget("//nomodule:e");
    assertThat(getGeneratingAction(getBinArtifact("_objs/f/f.pic.pcm", nomoduleF))).isNull();
    Artifact fObjectArtifact = getBinArtifact("_objs/f/f.pic.o", nomoduleF);
    CppCompileAction fObjectAction = (CppCompileAction) getGeneratingAction(fObjectArtifact);
    // Only the module map of f itself itself and the direct dependencies are needed.
    assertThat(getNonSystemModuleMaps(fObjectAction.getInputs()))
        .containsExactly(
            getGenfilesArtifact("f.cppmap", nomoduleF),
            getGenfilesArtifact("e.cppmap", nomoduleE));
    assertThat(getHeaderModules(fObjectAction.getInputs())).isEmpty();
    assertThat(fObjectAction.getIncludeScannerSources()).containsExactly(
        getSourceArtifact("nomodule/f.cc"));
    assertThat(getHeaderModuleFlags(fObjectAction.getCompilerOptions())).isEmpty();

    // The //nomodule:c target will get the header module for //module:b, which is a direct
    // dependency.
    ConfiguredTarget nomoduleC = getConfiguredTarget("//nomodule:c");
    assertThat(getGeneratingAction(getBinArtifact("_objs/c/c.pic.pcm", nomoduleC))).isNull();
    Artifact cObjectArtifact = getBinArtifact("_objs/c/c.pic.o", nomoduleC);
    CppCompileAction cObjectAction = (CppCompileAction) getGeneratingAction(cObjectArtifact);
    assertThat(getNonSystemModuleMaps(cObjectAction.getInputs()))
        .containsExactly(
            getGenfilesArtifact("b.cppmap", "//module:b"),
            getGenfilesArtifact("c.cppmap", nomoduleC));
    assertThat(getHeaderModules(cObjectAction.getInputs())).isEmpty();
    // All headers of transitive dependencies that are built as modules are needed as entry points
    // for include scanning.
    assertThat(cObjectAction.getIncludeScannerSources()).containsExactly(
        getSourceArtifact("nomodule/c.cc"));
    assertThat(cObjectAction.getMainIncludeScannerSource()).isEqualTo(
        getSourceArtifact("nomodule/c.cc"));
    assertThat(getHeaderModuleFlags(cObjectAction.getCompilerOptions())).isEmpty();

    // The //nomodule:d target depends on //module:b via one indirection (//nomodule:c).
    getConfiguredTarget("//nomodule:d");
    assertThat(
            getGeneratingAction(
                getBinArtifact("_objs/d/d.pic.pcm", getConfiguredTarget("//nomodule:d"))))
        .isNull();
    Artifact dObjectArtifact =
        getBinArtifact("_objs/d/d.pic.o", getConfiguredTarget("//nomodule:d"));
    CppCompileAction dObjectAction = (CppCompileAction) getGeneratingAction(dObjectArtifact);
    // Module map 'c.cppmap' is needed because it is a direct dependency.
    assertThat(getNonSystemModuleMaps(dObjectAction.getInputs())).containsExactly(
        getGenfilesArtifact("c.cppmap", "//nomodule:c"),
        getGenfilesArtifact("d.cppmap", "//nomodule:d"));
    assertThat(getHeaderModules(dObjectAction.getInputs())).isEmpty();
    assertThat(dObjectAction.getIncludeScannerSources()).containsExactly(
        getSourceArtifact("nomodule/d.cc"));
    assertThat(getHeaderModuleFlags(dObjectAction.getCompilerOptions())).isEmpty();

    // The //module:j target depends on //module:g via //nomodule:h and on //module:b via
    // both //module:g and //nomodule:c.
    ConfiguredTarget moduleJ = getConfiguredTarget("//module:j");
    Artifact jObjectArtifact = getBinArtifact("_objs/j/j.pic.o", moduleJ);
    CppCompileAction jObjectAction = (CppCompileAction) getGeneratingAction(jObjectArtifact);
    assertThat(getHeaderModules(jObjectAction.getCcCompilationContext().getTransitiveModules(true)))
        .containsExactly(
            getBinArtifact("_objs/b/b.pic.pcm", getConfiguredTarget("//module:b")),
            getBinArtifact("_objs/g/g.pic.pcm", getConfiguredTarget("//module:g")));
    assertThat(jObjectAction.getIncludeScannerSources()).containsExactly(
        getSourceArtifact("module/j.cc"));
    assertThat(jObjectAction.getMainIncludeScannerSource()).isEqualTo(
        getSourceArtifact("module/j.cc"));
    assertThat(getHeaderModules(jObjectAction.getCcCompilationContext().getTransitiveModules(true)))
        .containsExactly(
            getBinArtifact("_objs/b/b.pic.pcm", getConfiguredTarget("//module:b")),
            getBinArtifact("_objs/g/g.pic.pcm", getConfiguredTarget("//module:g")));
  }

  // Same fixture with use_header_modules on in //nomodule: consumers now receive the
  // transitive .pcm of //module:b.
  @Test
  public void testCompileUsingHeaderModulesTransitively() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.HEADER_MODULES_FEATURE_CONFIGURATION);
    useConfiguration("--cpu=k8");
    setupPackagesForModuleTests(/*useHeaderModules=*/true);

    ConfiguredTarget nomoduleF = getConfiguredTarget("//nomodule:f");
    Artifact fObjectArtifact =
        getBinArtifact("_objs/f/f.pic.o", getConfiguredTarget("//nomodule:f"));
    CppCompileAction fObjectAction = (CppCompileAction) getGeneratingAction(fObjectArtifact);
    // Only the module map of f itself itself and the direct dependencies are needed.
    assertThat(getNonSystemModuleMaps(fObjectAction.getInputs()))
        .containsExactly(
            getGenfilesArtifact("f.cppmap", nomoduleF),
            getGenfilesArtifact("e.cppmap", "//nomodule:e"));

    getConfiguredTarget("//nomodule:c");
    Artifact cObjectArtifact =
        getBinArtifact("_objs/c/c.pic.o", getConfiguredTarget("//nomodule:c"));
    CppCompileAction cObjectAction = (CppCompileAction) getGeneratingAction(cObjectArtifact);
    assertThat(getNonSystemModuleMaps(cObjectAction.getInputs()))
        .containsExactly(
            getGenfilesArtifact("b.cppmap", "//module:b"),
            getGenfilesArtifact("c.cppmap", "//nomodule:c"));
    assertThat(getHeaderModules(cObjectAction.getCcCompilationContext().getTransitiveModules(true)))
        .containsExactly(getBinArtifact("_objs/b/b.pic.pcm", getConfiguredTarget("//module:b")));

    getConfiguredTarget("//nomodule:d");
    Artifact dObjectArtifact =
        getBinArtifact("_objs/d/d.pic.o", getConfiguredTarget("//nomodule:d"));
    CppCompileAction dObjectAction = (CppCompileAction) getGeneratingAction(dObjectArtifact);
    assertThat(getNonSystemModuleMaps(dObjectAction.getInputs()))
        .containsExactly(
            getGenfilesArtifact("c.cppmap", "//nomodule:c"),
            getGenfilesArtifact("d.cppmap", "//nomodule:d"));
    assertThat(getHeaderModules(dObjectAction.getCcCompilationContext().getTransitiveModules(true)))
        .containsExactly(getBinArtifact("_objs/b/b.pic.pcm", getConfiguredTarget("//module:b")));
  }

  // Minimal cc_library fixture used by the PIC/module-map tests below.
  private void writeSimpleCcLibrary() throws Exception {
    scratch.file("module/BUILD",
        "cc_library(",
        "    name = 'map',",
        "    srcs = ['a.cc', 'a.h'],",
        ")");
  }

  // A toolchain without PIC support must produce an analysis error when PIC is requested.
  @Test
  public void testPicNotAvailableError() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(
            mockToolsConfig,
            MockCcSupport.EMPTY_STATIC_LIBRARY_ACTION_CONFIG,
            MockCcSupport.EMPTY_COMPILE_ACTION_CONFIG,
            MockCcSupport.NO_LEGACY_FEATURES_FEATURE);
    useConfiguration("--cpu=k8");
    writeSimpleCcLibrary();
    reporter.removeHandler(failFastHandler);
    getConfiguredTarget("//module:map");
    assertContainsEvent("PIC compilation is requested but the toolchain does not support it");
  }

  // needsPic: false + no legacy features: building a binary with deps must succeed without PIC.
  @Test
  public void testToolchainWithoutPicForNoPicCompilation() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(
            mockToolsConfig,
            "needsPic: false",
            MockCcSupport.EMPTY_COMPILE_ACTION_CONFIG,
            MockCcSupport.EMPTY_EXECUTABLE_ACTION_CONFIG,
            MockCcSupport.EMPTY_DYNAMIC_LIBRARY_ACTION_CONFIG,
            MockCcSupport.EMPTY_TRANSITIVE_DYNAMIC_LIBRARY_ACTION_CONFIG,
            MockCcSupport.EMPTY_STATIC_LIBRARY_ACTION_CONFIG,
            MockCcSupport.EMPTY_STRIP_ACTION_CONFIG,
            MockCcSupport.NO_LEGACY_FEATURES_FEATURE);
    useConfiguration();
    scratchConfiguredTarget("a", "a",
        "cc_binary(name='a', srcs=['a.cc'], deps=[':b'])",
        "cc_library(name='b', srcs=['b.cc'])");
  }

  // Without the module_maps feature no CppModuleMapAction may be registered.
  @Test
  public void testNoCppModuleMap() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(
            mockToolsConfig,
            MockCcSupport.EMPTY_COMPILE_ACTION_CONFIG,
            MockCcSupport.EMPTY_EXECUTABLE_ACTION_CONFIG,
            MockCcSupport.EMPTY_STATIC_LIBRARY_ACTION_CONFIG,
            MockCcSupport.EMPTY_DYNAMIC_LIBRARY_ACTION_CONFIG,
            MockCcSupport.EMPTY_TRANSITIVE_DYNAMIC_LIBRARY_ACTION_CONFIG,
            MockCcSupport.NO_LEGACY_FEATURES_FEATURE,
            MockCcSupport.PIC_FEATURE);
    useConfiguration();
    writeSimpleCcLibrary();
    assertNoCppModuleMapAction("//module:map");
  }

  // With module_maps enabled: system maps are dependency artifacts, a.h is a private header,
  // and there are no public headers (a.h is in srcs, not hdrs).
  @Test
  public void testCppModuleMap() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, "feature { name: 'module_maps' }");
    useConfiguration();
    writeSimpleCcLibrary();
    CppModuleMapAction action = getCppModuleMapAction("//module:map");
    assertThat(ActionsTestUtil.baseArtifactNames(action.getDependencyArtifacts())).containsExactly(
        "stl.cppmap", "crosstool.cppmap");
    assertThat(artifactsToStrings(action.getPrivateHeaders()))
        .containsExactly("src module/a.h");
    assertThat(action.getPublicHeaders()).isEmpty();
  }

  /**
   * Historically, blaze hasn't added the pre-compiled libraries from srcs to the files to build.
   * This test ensures that we do not accidentally break that - we may do so intentionally.
   */
  @Test
  public void testFilesToBuildWithPrecompiledStaticLibrary() throws Exception {
    ConfiguredTarget hello = scratchConfiguredTarget("precompiled", "library",
        "cc_library(name = 'library', ",
        "           srcs = ['missing.a'])");
    assertThat(artifactsToStrings(getFilesToBuild(hello)))
        .doesNotContain("src precompiled/missing.a");
  }

  // The same non-compiled source (.so) may appear both directly and via a filegroup
  // without triggering a duplicate-source error.
  @Test
  public void testAllowDuplicateNonCompiledSources() throws Exception {
    ConfiguredTarget x =
        scratchConfiguredTarget(
            "x",
            "x",
            "filegroup(name = 'xso', srcs = ['x.so'])",
            "cc_library(name = 'x', srcs = ['x.so', ':xso'])");
    assertThat(x).isNotNull();
  }

  // A .cc file listed in hdrs must be parsed as a header, not compiled into an object file.
  @Test
  public void testDoNotCompileSourceFilesInHeaders() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.PARSE_HEADERS_FEATURE_CONFIGURATION);
    useConfiguration("--features=parse_headers");
    ConfiguredTarget x =
        scratchConfiguredTarget("x", "x", "cc_library(name = 'x', hdrs = ['x.cc'])");
    assertThat(getGeneratingAction(getBinArtifact("_objs/x/.pic.o", x))).isNull();
  }

  // --process_headers_in_dependencies: the dependency's processed header shows up in the
  // dependent's HIDDEN_TOP_LEVEL output group.
  @Test
  public void testProcessHeadersInDependencies() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.PARSE_HEADERS_FEATURE_CONFIGURATION);
    useConfiguration("--features=parse_headers", "--process_headers_in_dependencies");
    ConfiguredTarget x =
        scratchConfiguredTarget(
            "foo",
            "x",
            "cc_library(name = 'x', deps = [':y'])",
            "cc_library(name = 'y', hdrs = ['y.h'])");
    assertThat(ActionsTestUtil.baseNamesOf(getOutputGroup(x, OutputGroupInfo.HIDDEN_TOP_LEVEL)))
        .isEqualTo("y.h.processed");
  }

  // Same flag for a cc_binary: processed headers are propagated, object files are not.
  @Test
  public void testProcessHeadersInDependenciesOfBinaries() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.PARSE_HEADERS_FEATURE_CONFIGURATION);
    useConfiguration("--features=parse_headers", "--process_headers_in_dependencies");
    ConfiguredTarget x =
        scratchConfiguredTarget(
            "foo",
            "x",
            "cc_binary(name = 'x', deps = [':y', ':z'])",
            "cc_library(name = 'y', hdrs = ['y.h'])",
            "cc_library(name = 'z', srcs = ['z.cc'])");
    String hiddenTopLevel =
        ActionsTestUtil.baseNamesOf(getOutputGroup(x, OutputGroupInfo.HIDDEN_TOP_LEVEL));
    assertThat(hiddenTopLevel).contains("y.h.processed");
    assertThat(hiddenTopLevel).doesNotContain("z.pic.o");
  }

  // Without --process_headers_in_dependencies nothing is propagated.
  @Test
  public void testDoNotProcessHeadersInDependencies() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.PARSE_HEADERS_FEATURE_CONFIGURATION);
    useConfiguration("--features=parse_headers");
    ConfiguredTarget x =
        scratchConfiguredTarget(
            "foo",
            "x",
            "cc_library(name = 'x', deps = [':y'])",
            "cc_library(name = 'y', hdrs = ['y.h'])");
    assertThat(ActionsTestUtil.baseNamesOf(getOutputGroup(x, OutputGroupInfo.HIDDEN_TOP_LEVEL)))
        .isEmpty();
  }

  // FILES_TO_COMPILE of a header-only library contains its own processed header.
  @Test
  public void testProcessHeadersInCompileOnlyMode() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.PARSE_HEADERS_FEATURE_CONFIGURATION);
    useConfiguration("--features=parse_headers", "--process_headers_in_dependencies");
    ConfiguredTarget y =
        scratchConfiguredTarget(
            "foo",
            "y",
            "cc_library(name = 'x', deps = [':y'])",
            "cc_library(name = 'y', hdrs = ['y.h'])");
    assertThat(ActionsTestUtil.baseNamesOf(getOutputGroup(y, OutputGroupInfo.FILES_TO_COMPILE)))
        .isEqualTo("y.h.processed");
  }

  // Local -isystem paths (source, genfiles, bin roots) precede the inherited ones.
  @Test
  public void testIncludePathOrder() throws Exception {
    scratch.file("foo/BUILD",
        "cc_library(",
        "    name = 'bar',",
        "    includes = ['bar'],",
        ")",
        "cc_library(",
        "    name = 'foo',",
        "    srcs = ['foo.cc'],",
        "    includes = ['foo'],",
        "    deps = [':bar'],",
        ")");
    ConfiguredTarget target = getConfiguredTarget("//foo");
    CppCompileAction action = getCppCompileAction(target);
    String genfilesDir = getConfiguration(target).getGenfilesFragment().toString();
    String binDir = getConfiguration(target).getBinFragment().toString();
    // Local include paths come first.
    assertContainsSublist(
        action.getCompilerOptions(),
        ImmutableList.of(
            "-isystem",
            "foo/foo",
            "-isystem",
            genfilesDir + "/foo/foo",
            "-isystem",
            binDir + "/foo/foo",
            "-isystem",
            "foo/bar",
            "-isystem",
            genfilesDir + "/foo/bar",
            "-isystem",
            binDir + "/foo/bar"));
  }

  @Test
  public void testDefinesOrder() throws Exception {
    scratch.file("foo/BUILD",
        "cc_library(",
        "    name = 'bar',",
        "    defines = ['BAR'],",
        ")",
        "cc_library(",
        "    name = 'foo',",
        "    srcs = ['foo.cc'],",
        "    defines = ['FOO'],",
        "    deps = [':bar'],",
        ")");
    CppCompileAction action = getCppCompileAction("//foo");
    // Inherited defines come first.
    assertContainsSublist(action.getCompilerOptions(), ImmutableList.of("-DBAR", "-DFOO"));
  }

  // Regression test - setting "-shared" caused an exception when computing the link command.
  @Test
  public void testLinkOptsNotPassedToStaticLink() throws Exception {
    scratchConfiguredTarget("foo", "foo",
        "cc_library(",
        "    name = 'foo',",
        "    srcs = ['foo.cc'],",
        "    linkopts = ['-shared'],",
        ")");
  }

  // Crosstool features that add a distinct compiler flag per compilation mode, used by
  // getCompilationModeFlags/testCompilationModeFeatures below.
  private static final String COMPILATION_MODE_FEATURES = ""
      + "feature {"
      + "  name: 'dbg'"
      + "  flag_set {"
      + "    action: 'c++-compile'"
      + "    flag_group { flag: '-dbg' }"
      + "  }"
      + "}"
      + "feature {"
      + "  name: 'fastbuild'"
      + "  flag_set {"
      + "    action: 'c++-compile'"
      + "    flag_group { flag: '-fastbuild' }"
      + "  }"
      + "}"
      + "feature {"
      + "  name: 'opt'"
      + "  flag_set {"
      + "    action: 'c++-compile'"
      + "    flag_group { flag: '-opt' }"
      + "  }"
      + "}";

  // Builds //mode:a under the given flags and returns the resulting compile command line.
  private List<String> getCompilationModeFlags(String... flags) throws Exception {
    AnalysisMock.get().ccSupport().setupCrosstool(mockToolsConfig, COMPILATION_MODE_FEATURES);
    useConfiguration(flags);
    scratch.overwriteFile("mode/BUILD", "cc_library(name = 'a', srcs = ['a.cc'])");
    getConfiguredTarget("//mode:a");
    Artifact objectArtifact = getBinArtifact("_objs/a/a.pic.o", getConfiguredTarget("//mode:a"));
    CppCompileAction action = (CppCompileAction) getGeneratingAction(objectArtifact);
    return action.getCompilerOptions();
  }

  // Exactly one of -fastbuild/-opt/-dbg appears, matching --compilation_mode
  // (fastbuild is the default).
  @Test
  public void testCompilationModeFeatures() throws Exception {
    List<String> flags;
    flags = getCompilationModeFlags("--cpu=k8");
    assertThat(flags).contains("-fastbuild");
    assertThat(flags).containsNoneOf("-opt", "-dbg");

    flags = getCompilationModeFlags("--cpu=k8", "--compilation_mode=fastbuild");
    assertThat(flags).contains("-fastbuild");
    assertThat(flags).containsNoneOf("-opt", "-dbg");

    flags = getCompilationModeFlags("--cpu=k8", "--compilation_mode=opt");
    assertThat(flags).contains("-opt");
    assertThat(flags).containsNoneOf("-fastbuild", "-dbg");

    flags = getCompilationModeFlags("--cpu=k8", "--compilation_mode=dbg");
    assertThat(flags).contains("-dbg");
    assertThat(flags).containsNoneOf("-fastbuild", "-opt");
  }

  // Compiles //mode:a in either the host or the target configuration and returns the
  // compile command line (host builds are non-PIC here).
  private List<String> getHostAndTargetFlags(boolean useHost) throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.HOST_AND_NONHOST_CONFIGURATION);
    scratch.overwriteFile("mode/BUILD", "cc_library(name = 'a', srcs = ['a.cc'])");
    useConfiguration("--cpu=k8");
    ConfiguredTarget target;
    String objectPath;
    if (useHost) {
      target = getHostConfiguredTarget("//mode:a");
      objectPath = "_objs/a/a.o";
    } else {
      target = getConfiguredTarget("//mode:a");
      objectPath = "_objs/a/a.pic.o";
    }
    Artifact objectArtifact = getBinArtifact(objectPath, target);
    CppCompileAction action = (CppCompileAction) getGeneratingAction(objectArtifact);
    assertThat(action).isNotNull();
    return action.getCompilerOptions();
  }

  @Test
  public void testHostAndNonHostFeatures() throws Exception {
    List<String> flags;

    flags = getHostAndTargetFlags(true);
    assertThat(flags).contains("-host");
    assertThat(flags).doesNotContain("-nonhost");

    flags = getHostAndTargetFlags(false);
    assertThat(flags).contains("-nonhost");
    assertThat(flags).doesNotContain("-host");
  }

  // The next four tests verify that copts referencing paths outside the execution root
  // (relative or absolute, -I or -isystem) are rejected with an analysis error.
  @Test
  public void testIncludePathsOutsideExecutionRoot() throws Exception {
    checkError(
        "root",
        "a",
        "The include path '../somewhere' references a path outside of the execution root.",
        "cc_library(name='a', srcs=['a.cc'], copts=['-Id/../../somewhere'])");
  }

  @Test
  public void testAbsoluteIncludePathsOutsideExecutionRoot() throws Exception {
    checkError(
        "root",
        "a",
        "The include path '/somewhere' references a path outside of the execution root.",
        "cc_library(name='a', srcs=['a.cc'], copts=['-I/somewhere'])");
  }

  @Test
  public void testSystemIncludePathsOutsideExecutionRoot() throws Exception {
    checkError(
        "root",
        "a",
        "The include path '../system' references a path outside of the execution root.",
        "cc_library(name='a', srcs=['a.cc'], copts=['-isystem../system'])");
  }

  @Test
  public void testAbsoluteSystemIncludePathsOutsideExecutionRoot() throws Exception {
    checkError(
        "root",
        "a",
        "The include path '/system' references a path outside of the execution root.",
        "cc_library(name='a', srcs=['a.cc'], copts=['-isystem/system'])");
  }

  /**
   * Tests that configurable "srcs" doesn't crash because of orphaned implicit .so outputs.
   * (see {@link CcLibrary#appearsToHaveObjectFiles}).
   */
  @Test
  public void testConfigurableSrcs() throws Exception {
    scratch.file("foo/BUILD",
        "cc_library(",
        "    name = 'foo',",
        "    srcs = select({'//conditions:default': []}),",
        ")");
    ConfiguredTarget target = getConfiguredTarget("//foo:foo");
    Artifact soOutput = getBinArtifact("libfoo.so", target);
    // The orphaned implicit .so gets a FailAction instead of crashing analysis.
    assertThat(getGeneratingAction(soOutput)).isInstanceOf(FailAction.class);
  }

  // Building from sources yields the archive plus both shared-library outputs (.so/.ifso).
  @Test
  public void alwaysAddStaticAndDynamicLibraryToFilesToBuildWhenBuilding() throws Exception {
    useConfiguration("--cpu=k8");
    ConfiguredTarget target =
        scratchConfiguredTarget("a", "b", "cc_library(name = 'b', srcs = ['source.cc'])");

    assertThat(artifactsToStrings(getFilesToBuild(target)))
        .containsExactly("bin a/libb.a", "bin a/libb.ifso", "bin a/libb.so");
  }

  @Test
  public void addOnlyStaticLibraryToFilesToBuildWhenWrappingIffImplicitOutput() throws Exception {
    // This shared library has the same name as the archive generated by this rule, so it should
    // override said archive. However, said archive should still be put in files to build.
    ConfiguredTargetAndData target =
        scratchConfiguredTargetAndData("a", "b", "cc_library(name = 'b', srcs = ['libb.so'])");

    if (target.getTarget().getAssociatedRule().getImplicitOutputsFunction()
        != ImplicitOutputsFunction.NONE) {
      assertThat(artifactsToStrings(getFilesToBuild(target.getConfiguredTarget())))
          .containsExactly("bin a/libb.a");
    } else {
      assertThat(artifactsToStrings(getFilesToBuild(target.getConfiguredTarget()))).isEmpty();
    }
  }

  // The built archive is part of the static link params exported via CcLinkingInfo.
  @Test
  public void addStaticLibraryToStaticSharedLinkParamsWhenBuilding() throws Exception {
    ConfiguredTarget target =
        scratchConfiguredTarget("a", "foo", "cc_library(name = 'foo', srcs = ['foo.cc'])");

    Iterable<Artifact> libraries =
        LinkerInputs.toNonSolibArtifacts(
            target
                .get(CcLinkingInfo.PROVIDER)
                .getCcLinkParamsStore()
                .getCcLinkParams(true, true)
                .getLibraries());
    assertThat(artifactsToStrings(libraries)).contains("bin a/libfoo.a");
  }

  // Wrapping a prebuilt libfoo.so suppresses the would-be libfoo.a in link params; the
  // wrapped .so is exported instead.
  @Test
  public void dontAddStaticLibraryToStaticSharedLinkParamsWhenWrappingSameLibraryIdentifier()
      throws Exception {
    ConfiguredTarget target =
        scratchConfiguredTarget("a", "foo", "cc_library(name = 'foo', srcs = ['libfoo.so'])");

    Iterable<Artifact> libraries =
        LinkerInputs.toNonSolibArtifacts(
            target
                .get(CcLinkingInfo.PROVIDER)
                .getCcLinkParamsStore()
                .getCcLinkParams(true, true)
                .getLibraries());
    assertThat(artifactsToStrings(libraries)).doesNotContain("bin a/libfoo.a");
    assertThat(artifactsToStrings(libraries)).contains("src a/libfoo.so");
  }

  // With both libfoo.lo and libfoo.so wrapped, only one (the .lo) enters link params.
  @Test
  public void onlyAddOneWrappedLibraryWithSameLibraryIdentifierToLinkParams() throws Exception {
    ConfiguredTarget target =
        scratchConfiguredTarget(
            "a", "foo", "cc_library(name = 'foo', srcs = ['libfoo.lo', 'libfoo.so'])");

    Iterable<Artifact> libraries =
        LinkerInputs.toNonSolibArtifacts(
            target
                .get(CcLinkingInfo.PROVIDER)
                .getCcLinkParamsStore()
                .getCcLinkParams(true, true)
                .getLibraries());
    assertThat(artifactsToStrings(libraries)).doesNotContain("src a/libfoo.so");
    assertThat(artifactsToStrings(libraries)).contains("src a/libfoo.lo");
  }

  // With copy_dynamic_libraries_to_binary, runtime libraries are the bin-relative .so
  // (no interface library).
  @Test
  public void testCcLinkParamsHasDynamicLibrariesForRuntime() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(
            mockToolsConfig, MockCcSupport.COPY_DYNAMIC_LIBRARIES_TO_BINARY_CONFIGURATION);
    useConfiguration("--cpu=k8", "--features=copy_dynamic_libraries_to_binary");
    ConfiguredTarget target =
        scratchConfiguredTarget("a", "foo", "cc_library(name = 'foo', srcs = ['foo.cc'])");
    Iterable<Artifact> libraries =
        target
            .get(CcLinkingInfo.PROVIDER)
            .getCcLinkParamsStore()
            .getCcLinkParams(false, true)
            .getDynamicLibrariesForRuntime();
    assertThat(artifactsToStrings(libraries)).doesNotContain("bin a/libfoo.ifso");
    assertThat(artifactsToStrings(libraries)).contains("bin a/libfoo.so");
  }

  // Without the copy feature, runtime libraries live under the mangled _solib_<cpu> directory.
  @Test
  public void testCcLinkParamsHasDynamicLibrariesForRuntimeWithoutCopyFeature() throws Exception {
    useConfiguration("--cpu=k8");
    ConfiguredTarget target =
        scratchConfiguredTarget("a", "foo", "cc_library(name = 'foo', srcs = ['foo.cc'])");
    Iterable<Artifact> libraries =
        target
            .get(CcLinkingInfo.PROVIDER)
            .getCcLinkParamsStore()
            .getCcLinkParams(false, true)
            .getDynamicLibrariesForRuntime();
    assertThat(artifactsToStrings(libraries)).doesNotContain("bin _solib_k8/liba_Slibfoo.ifso");
    assertThat(artifactsToStrings(libraries)).contains("bin _solib_k8/liba_Slibfoo.so");
  }

  // linkstatic=1 suppresses runtime dynamic libraries entirely.
  @Test
  public void testCcLinkParamsDoNotHaveDynamicLibrariesForRuntime() throws Exception {
    useConfiguration("--cpu=k8");
    ConfiguredTarget target =
        scratchConfiguredTarget(
            "a", "foo", "cc_library(name = 'foo', srcs = ['foo.cc'], linkstatic=1)");
    Iterable<Artifact> libraries =
        target
            .get(CcLinkingInfo.PROVIDER)
            .getCcLinkParamsStore()
            .getCcLinkParams(false, true)
            .getDynamicLibrariesForRuntime();
    assertThat(artifactsToStrings(libraries)).isEmpty();
  }

  // Mixing compiled sources with a wrapped library whose identifier collides with the rule's
  // own outputs is an analysis error.
  @Test
  public void forbidBuildingAndWrappingSameLibraryIdentifier() throws Exception {
    useConfiguration("--cpu=k8");
    checkError(
        "a",
        "foo",
        "in cc_library rule //a:foo: Can't put libfoo.lo into the srcs of a cc_library with the "
            + "same name (foo) which also contains other code or objects to link; it shares a name "
            + "with libfoo.a, libfoo.ifso, libfoo.so (output compiled and linked from the "
            + "non-library sources of this rule), which could cause confusion",
        "cc_library(name = 'foo', srcs = ['foo.cc', 'libfoo.lo'])");
  }

  @Test
  public void testProcessedHeadersWithPicSharedLibsAndNoPicBinaries() throws Exception {
    AnalysisMock.get()
        .ccSupport()
        .setupCrosstool(mockToolsConfig, MockCcSupport.PARSE_HEADERS_FEATURE_CONFIGURATION);
    useConfiguration("--features=parse_headers", "-c", "opt");
    // Should not crash
    scratchConfiguredTarget("a", "a", "cc_library(name='a', hdrs=['a.h'])");
  }

  // --experimental_stl may point at an alias; analysis must resolve it to the actual STL target.
  @Test
  public void testStlWithAlias() throws Exception {
    scratch.file("a/BUILD",
        "cc_library(name='a')",
        "alias(name='stl', actual=':realstl')",
        "cc_library(name='realstl')");

    useConfiguration("--experimental_stl=//a:stl");
    getConfiguredTarget("//a:a");
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.query; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiDocValues; import org.apache.lucene.search.*; import org.apache.lucene.search.Filter; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.join.BitDocIdSetFilter; import org.elasticsearch.common.ParseField; import org.apache.lucene.search.join.JoinUtil; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.fielddata.IndexParentChildFieldData; import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.internal.ParentFieldMapper; import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper; import org.elasticsearch.index.query.support.XContentStructure; import 
org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;
import org.elasticsearch.index.search.child.ChildrenQuery;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SubSearchContext;

import java.io.IOException;

/**
 * Parser for the {@code has_child} query: matches parent documents that have at
 * least one child of the given type matching an inner query.
 */
public class HasChildQueryParser implements QueryParser {

    public static final String NAME = "has_child";
    // "query" is the canonical field; "filter" is accepted as a deprecated alias.
    private static final ParseField QUERY_FIELD = new ParseField("query", "filter");

    private final InnerHitsQueryParserHelper innerHitsQueryParserHelper;

    @Inject
    public HasChildQueryParser(InnerHitsQueryParserHelper innerHitsQueryParserHelper) {
        this.innerHitsQueryParserHelper = innerHitsQueryParserHelper;
    }

    @Override
    public String[] names() {
        // Registered under both snake_case and camelCase names.
        return new String[] { NAME, Strings.toCamelCase(NAME) };
    }

    /**
     * Parses a {@code has_child} query body into a Lucene {@link Query}.
     *
     * @param parseContext the query parse context supplying the XContent parser and mappings
     * @return the parsed query, or {@code null} if the inner query parsed to null
     * @throws QueryParsingException if required fields are missing or mappings are inconsistent
     */
    @Override
    public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        boolean queryFound = false;
        float boost = 1.0f;
        String childType = null;
        ScoreType scoreType = ScoreType.NONE;
        int minChildren = 0;
        int maxChildren = 0;
        // Legacy (pre-2.0) optimization threshold for the parent doc set.
        int shortCircuitParentDocSet = 8192;
        String queryName = null;
        Tuple<String, SubSearchContext> innerHits = null;

        String currentFieldName = null;
        XContentParser.Token token;
        XContentStructure.InnerQuery iq = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (parseContext.isDeprecatedSetting(currentFieldName)) {
                // skip
            } else if (token == XContentParser.Token.START_OBJECT) {
                // Usually, the query would be parsed here, but the child
                // type may not have been extracted yet, so use the
                // XContentStructure.<type> facade to parse if available,
                // or delay parsing if not.
                if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
                    iq = new XContentStructure.InnerQuery(parseContext, childType == null ? null : new String[] { childType });
                    queryFound = true;
                } else if ("inner_hits".equals(currentFieldName)) {
                    innerHits = innerHitsQueryParserHelper.parse(parseContext);
                } else {
                    throw new QueryParsingException(parseContext, "[has_child] query does not support [" + currentFieldName + "]");
                }
            } else if (token.isValue()) {
                if ("type".equals(currentFieldName) || "child_type".equals(currentFieldName) || "childType".equals(currentFieldName)) {
                    childType = parser.text();
                } else if ("score_type".equals(currentFieldName) || "scoreType".equals(currentFieldName)) {
                    scoreType = ScoreType.fromString(parser.text());
                } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
                    // score_mode is an alias for score_type; both feed the same setting.
                    scoreType = ScoreType.fromString(parser.text());
                } else if ("boost".equals(currentFieldName)) {
                    boost = parser.floatValue();
                } else if ("min_children".equals(currentFieldName) || "minChildren".equals(currentFieldName)) {
                    minChildren = parser.intValue(true);
                } else if ("max_children".equals(currentFieldName) || "maxChildren".equals(currentFieldName)) {
                    maxChildren = parser.intValue(true);
                } else if ("short_circuit_cutoff".equals(currentFieldName)) {
                    shortCircuitParentDocSet = parser.intValue();
                } else if ("_name".equals(currentFieldName)) {
                    queryName = parser.text();
                } else {
                    throw new QueryParsingException(parseContext, "[has_child] query does not support [" + currentFieldName + "]");
                }
            }
        }
        if (!queryFound) {
            throw new QueryParsingException(parseContext, "[has_child] requires 'query' field");
        }
        if (childType == null) {
            throw new QueryParsingException(parseContext, "[has_child] requires 'type' field");
        }

        // Now that the child type is known, the possibly-deferred inner query can be parsed.
        Query innerQuery = iq.asQuery(childType);

        if (innerQuery == null) {
            return null;
        }
        innerQuery.setBoost(boost);

        DocumentMapper childDocMapper = parseContext.mapperService().documentMapper(childType);
        if (childDocMapper == null) {
            // NOTE(review): doubled "for for" in this user-facing message; left untouched here.
            throw new QueryParsingException(parseContext, "[has_child] No mapping for for type [" + childType + "]");
        }
        ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
        if (parentFieldMapper.active() == false) {
            throw new QueryParsingException(parseContext, "[has_child] _parent field has no parent type configured");
        }

        if (innerHits != null) {
            ParsedQuery parsedQuery = new ParsedQuery(innerQuery, parseContext.copyNamedQueries());
            InnerHitsContext.ParentChildInnerHits parentChildInnerHits =
                    new InnerHitsContext.ParentChildInnerHits(innerHits.v2(), parsedQuery, null, parseContext.mapperService(), childDocMapper);
            // Fall back to the child type as the inner-hits name when none was given.
            String name = innerHits.v1() != null ? innerHits.v1() : childType;
            parseContext.addInnerHits(name, parentChildInnerHits);
        }

        String parentType = parentFieldMapper.type();
        DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
        if (parentDocMapper == null) {
            throw new QueryParsingException(parseContext, "[has_child] Type [" + childType
                    + "] points to a non existent parent type [" + parentType + "]");
        }

        if (maxChildren > 0 && maxChildren < minChildren) {
            throw new QueryParsingException(parseContext, "[has_child] 'max_children' is less than 'min_children'");
        }

        BitDocIdSetFilter nonNestedDocsFilter = null;
        if (parentDocMapper.hasNestedObjects()) {
            nonNestedDocsFilter = parseContext.bitsetFilter(Queries.newNonNestedFilter());
        }

        // wrap the query with type query
        innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter());

        final Query query;
        final ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper.fieldType());
        if (parseContext.indexVersionCreated().onOrAfter(Version.V_2_0_0_beta1)) {
            // 2.0+ indices use the Lucene join module via LateParsingQuery.
            query = joinUtilHelper(parentType, parentChildIndexFieldData, parentDocMapper.typeFilter(), scoreType, innerQuery, minChildren, maxChildren);
        } else {
            // TODO: use the query API
            Filter parentFilter = new QueryWrapperFilter(parentDocMapper.typeFilter());
            if (minChildren > 1 || maxChildren > 0 || scoreType != ScoreType.NONE) {
                query = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter, innerQuery, scoreType, minChildren,
                        maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter);
            } else {
                query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentType, childType, parentFilter,
                        shortCircuitParentDocSet, nonNestedDocsFilter);
            }
        }
        if (queryName != null) {
            parseContext.addNamedQuery(queryName, query);
        }
        // NOTE(review): boost is applied both to innerQuery above and to the outer
        // query here — looks intentional for BWC but worth confirming.
        query.setBoost(boost);
        return query;
    }

    /**
     * Builds the 2.0+ join query, translating the legacy {@link ScoreType} into the
     * Lucene join module's {@link ScoreMode}.
     */
    public static Query joinUtilHelper(String parentType, ParentChildIndexFieldData parentChildIndexFieldData, Query toQuery, ScoreType scoreType, Query innerQuery, int minChildren, int maxChildren) throws IOException {
        ScoreMode scoreMode;
        // TODO: move entirely over from ScoreType to org.apache.lucene.join.ScoreMode, when we drop the 1.x parent child code.
        switch (scoreType) {
            case NONE:
                scoreMode = ScoreMode.None;
                break;
            case MIN:
                scoreMode = ScoreMode.Min;
                break;
            case MAX:
                scoreMode = ScoreMode.Max;
                break;
            case SUM:
                scoreMode = ScoreMode.Total;
                break;
            case AVG:
                scoreMode = ScoreMode.Avg;
                break;
            default:
                throw new UnsupportedOperationException("score type [" + scoreType + "] not supported");
        }
        // 0 in pre 2.x p/c impl means unbounded
        if (maxChildren == 0) {
            maxChildren = Integer.MAX_VALUE;
        }
        return new LateParsingQuery(toQuery, innerQuery, minChildren, maxChildren, parentType, scoreMode, parentChildIndexFieldData);
    }

    /**
     * Defers building the actual join query until rewrite time, when an
     * IndexReader (and hence global ordinals) is available.
     */
    final static class LateParsingQuery extends Query {

        private final Query toQuery;
        private final Query innerQuery;
        private final int minChildren;
        private final int maxChildren;
        private final String parentType;
        private final ScoreMode scoreMode;
        private final ParentChildIndexFieldData parentChildIndexFieldData;
        // Identity token used for equals/hashCode, since real equality needs a reader.
        private final Object identity = new Object();

        LateParsingQuery(Query toQuery, Query innerQuery, int minChildren, int maxChildren, String parentType, ScoreMode scoreMode, ParentChildIndexFieldData parentChildIndexFieldData) {
            this.toQuery = toQuery;
            this.innerQuery = innerQuery;
            this.minChildren = minChildren;
            this.maxChildren = maxChildren;
            this.parentType = parentType;
            this.scoreMode = scoreMode;
            this.parentChildIndexFieldData = parentChildIndexFieldData;
        }

        @Override
        public Query rewrite(IndexReader reader) throws IOException {
            SearchContext searchContext = SearchContext.current();
            if (searchContext == null) {
                throw new IllegalArgumentException("Search context is required to be set");
            }

            String joinField = ParentFieldMapper.joinField(parentType);
            IndexReader indexReader = searchContext.searcher().getIndexReader();
            IndexSearcher indexSearcher = new IndexSearcher(indexReader);
            IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexReader);
            MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);
            return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren);
        }

        // Even though we only cache rewritten queries it is good to let all queries implement hashCode() and equals():
        // We can't check for actually equality here, since we need to IndexReader for this, but
        // that isn't available on all cases during query parse time, so instead rely on identity:
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            if (!super.equals(o)) return false;

            LateParsingQuery that = (LateParsingQuery) o;
            return identity.equals(that.identity);
        }

        @Override
        public int hashCode() {
            int result = super.hashCode();
            result = 31 * result + identity.hashCode();
            return result;
        }

        @Override
        public String toString(String s) {
            return "LateParsingQuery {parentType=" + parentType + "}";
        }
    }
}
// Copyright (c) 2006 - 2011, Markus Strauch. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. 
package net.sf.sdedit.ui.impl;

import java.awt.Component;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.TreeMap;

import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.Icon;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.border.Border;
import javax.swing.text.BadLocationException;
import javax.swing.text.Document;

import net.sf.sdedit.diagram.Diagram;
import net.sf.sdedit.diagram.Lifeline;
import net.sf.sdedit.editor.plugin.FileHandler;
import net.sf.sdedit.icons.Icons;
import net.sf.sdedit.message.ForwardMessage;
import net.sf.sdedit.text.TextBasedMessageData;
import net.sf.sdedit.ui.Tab;
import net.sf.sdedit.ui.components.Zoomable;
import net.sf.sdedit.ui.components.buttons.Activator;
import net.sf.sdedit.util.EF;
import net.sf.sdedit.util.Grep.Region;
import net.sf.sdedit.util.Pair;
import net.sf.sdedit.util.TableModelAdapter;
import net.sf.sdedit.util.TableModelAdapter.RowEditor;
import net.sf.sdedit.util.TableModelAdapter.RowExpansion;
import net.sf.sdedit.util.Utilities;
import net.sf.sdedit.util.collection.MultiMap;

import com.zookitec.layout.ContainerEF;
import com.zookitec.layout.ExplicitConstraints;
import com.zookitec.layout.ExplicitLayout;
import com.zookitec.layout.Expression;
import com.zookitec.layout.MathEF;

/**
 * A UI tab showing, for one class of a sequence diagram, two tables: the
 * lifelines (instances) of that class and the methods (messages) invoked on
 * them. Lifeline names can be edited in place, which rewrites the diagram text.
 */
public class ClassTab extends Tab implements RowExpansion, RowEditor {

    private static final long serialVersionUID = 7210793282951827425L;

    // Name of the class this tab describes.
    private String className;

    // Maps a method name to all forward messages invoking it (insertion-ordered
    // values, sorted keys).
    private MultiMap<String, ForwardMessage, LinkedList<?>, TreeMap<?, ?>> messages;

    private static final String[] LIFELINE_TABLE_COLUMN_NAMES = { "Name", "Anonymous", "External", "Thread" };

    private static final Class<?>[] LIFELINE_TABLE_COLUMN_TYPES = { String.class, Boolean.class, Boolean.class, Boolean.class };

    private static final String[] METHOD_TABLE_COLUMN_NAMES = { "Method", "#Occurrences" };

    private static final Class<?>[] METHOD_TABLE_COLUMN_TYPES = { String.class, String.class };

    // Tab stays open until forceClose() flips this flag.
    private boolean canClose;

    private DiagramTextTab diagramTextTab;

    private JPanel content;

    private JLabel nameLabel;

    private static final int NAME_LABEL_MARGIN = 3;

    public ClassTab(DiagramTextTab diagramTextTab, String className) {
        super(diagramTextTab.get_UI());
        this.diagramTextTab = diagramTextTab;
        this.className = className;
        canClose = false;
        this.messages = new MultiMap<String, ForwardMessage, LinkedList<?>, TreeMap<?, ?>>(
                LinkedList.class, TreeMap.class);
    }

    // Indexes a message under its method name (text up to the first '(').
    private void addMessage(ForwardMessage message) {
        String method = message.getText();
        if (method != null && method.length() > 0) {
            int br = method.indexOf('(');
            if (br > 0) {
                method = method.substring(0, br);
            }
        }
        messages.add(method, message);
    }

    // Adds the centered "Class <name>" header label to the content panel.
    private void addNameLabel() {
        nameLabel = new JLabel();
        nameLabel.setText("Class " + className);
        nameLabel.setBorder(BorderFactory.createEmptyBorder(NAME_LABEL_MARGIN,
                NAME_LABEL_MARGIN, NAME_LABEL_MARGIN, NAME_LABEL_MARGIN));
        ExplicitConstraints c = new ExplicitConstraints(nameLabel);
        c.setX(EF.centeredX(content, nameLabel));
        content.add(nameLabel, c);
    }

    // Wraps comp in a titled, bordered scroll pane positioned at (y, height)
    // inside the content panel.
    private JScrollPane addScrollPane(Component comp, Expression y,
            Expression height, String title, int borderWidth) {
        JScrollPane scrollPane = new JScrollPane(comp);
        ExplicitConstraints c = EF.inheritBounds(scrollPane, content);
        c.setY(y);
        c.setHeight(height);
        c.setX(ContainerEF.left(content));
        Border outer = BorderFactory.createEmptyBorder(borderWidth, borderWidth,
                borderWidth, borderWidth);
        Border inner = BorderFactory.createTitledBorder(title);
        scrollPane.setBorder(BorderFactory.createCompoundBorder(outer, inner));
        content.add(scrollPane, c);
        return scrollPane;
    }

    /**
     * Rebuilds the tab contents from the given lifelines and messages:
     * header label on top, instances table above, methods table below.
     */
    public void updateData(Collection<Lifeline> lifelines,
            Collection<ForwardMessage> collection) {
        getContentPanel().removeAll();
        getContentPanel().setLayout(new ExplicitLayout());
        messages.clear();
        content = new JPanel();
        content.setLayout(new ExplicitLayout());
        ExplicitConstraints c = EF.inheritBounds(content, getContentPanel());
        // Cap content width at 640px.
        c.setWidth(MathEF.min(MathEF.constant(640), ContainerEF.width(getContentPanel())));
        getContentPanel().add(content, c);
        addNameLabel();
        // Split the space under the label evenly between the two tables.
        Expression height = EF.underHeight(nameLabel, 0).divide(2);
        Expression objPaneY = EF.underY(nameLabel, 0);
        Expression metPaneY = objPaneY.add(height);
        TableModelAdapter tma = new TableModelAdapter(
                LIFELINE_TABLE_COLUMN_NAMES, LIFELINE_TABLE_COLUMN_TYPES, this, this);
        tma.setData(lifelines);
        JTable lifelineTable = new JTable(tma);
        addScrollPane(lifelineTable, objPaneY, height, "Instances", 5);
        for (ForwardMessage message : collection) {
            addMessage(message);
        }
        TableModelAdapter methodTableModelAdapter = new TableModelAdapter(
                METHOD_TABLE_COLUMN_NAMES, METHOD_TABLE_COLUMN_TYPES, this, this);
        methodTableModelAdapter.setData(this.messages.entries());
        JTable methodTable = new JTable(methodTableModelAdapter);
        addScrollPane(methodTable, metPaneY, height, "Methods", 5);
        getContentPanel().invalidate();
        getContentPanel().revalidate();
    }

    @Override
    public Icon getIcon() {
        return Icons.getIcon("class");
    }

    @Override
    protected Zoomable<? extends JComponent> getZoomable() {
        // This tab is not zoomable.
        return null;
    }

    @Override
    public boolean canClose() {
        return canClose;
    }

    @Override
    public boolean canGoHome() {
        return false;
    }

    @Override
    protected void _getContextActions(List<Action> actionList) {
    }

    @Override
    protected List<Pair<Action, Activator>> getOverloadedActions() {
        return null;
    }

    @Override
    public FileHandler getFileHandler() {
        return null;
    }

    /**
     * RowExpansion callback: converts a Lifeline or a method-map Entry into the
     * cell values of its table row.
     */
    public Object[] expand(Object row) {
        Object[] expanded = null;
        if (row instanceof Lifeline) {
            Lifeline line = (Lifeline) row;
            expanded = new Object[] { line.getName(), line.isAnonymous(),
                    line.isExternal(), line.hasThread() };
        } else if (row instanceof Entry) {
            @SuppressWarnings("unchecked")
            Entry<String, Collection<ForwardMessage>> entry = (Entry<String, Collection<ForwardMessage>>) row;
            expanded = new Object[] { entry.getKey(),
                    String.valueOf(entry.getValue().size()) };
        }
        return expanded;
    }

    // Unconditionally closes the tab, bypassing the canClose guard.
    public void forceClose() {
        canClose = true;
        close(false);
    }

    // RowEditor callback: only the lifeline name column (index 0) is editable.
    public boolean isEditable(Object row, int index) {
        if (row instanceof Lifeline) {
            return index == 0;
        }
        return false;
    }

    /**
     * RowEditor callback: renames a lifeline when its name cell is edited,
     * rejecting empty names and names already used by another lifeline.
     */
    public void setValue(Object row, int index, Object value) {
        if (row instanceof Lifeline) {
            Lifeline lifeline = (Lifeline) row;
            if (index == 0) {
                String newName = (String) value;
                if (newName != null && newName.length() > 0) {
                    for (Lifeline existing : diagramTextTab.getDiagram()
                            .getAllLifelines()) {
                        if (existing.getName().equals(newName)) {
                            // Name collision: silently ignore the edit.
                            return;
                        }
                    }
                    try {
                        renameLifeline(lifeline, newName);
                    } catch (BadLocationException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }

    // Skips leading whitespace starting at lineBegin and returns the offset of
    // the first non-whitespace character.
    // NOTE(review): reads at (lineBegin + w - 1), i.e. one char before the line
    // start on the first iteration — presumably intentional for the document's
    // offset convention, but worth confirming.
    private int addWhitespace(Document document, int lineBegin)
            throws BadLocationException {
        int w = 0;
        while (Character.isWhitespace(document.getText(lineBegin + w - 1, 1)
                .charAt(0))) {
            w++;
        }
        return lineBegin + w;
    }

    /**
     * Replaces the first occurrence of oldName by newName within the given
     * region of the line starting at lineBegin; returns the length difference
     * so subsequent offsets can be shifted.
     */
    private int replace(Document document, int lineBegin, Region region,
            String oldName, String newName) throws BadLocationException {
        int rs = region.getStart();
        String text = region.getText();
        int len = text.length();
        lineBegin = addWhitespace(document, lineBegin);
        String newText = Utilities.replaceFirst(text, oldName, newName);
        document.remove(lineBegin + rs - 1, len);
        document.insertString(lineBegin + rs - 1, newText, null);
        return newText.length() - text.length();
    }

    /**
     * Renames a lifeline in the underlying diagram text: first its declaration,
     * then every caller/callee occurrence in the messages, keeping a running
     * offset correction (diff) as replacements change the text length.
     */
    private void renameLifeline(Lifeline lifeline, String newName)
            throws BadLocationException {
        // Suppress change events while we rewrite the document.
        diagramTextTab.setIgnoreChanges(true);
        Diagram diagram = diagramTextTab.getDiagram();
        Document document = diagramTextTab.getTextArea().getDocument();
        int lineBegin = (Integer) diagram.getStateForDrawable(lifeline
                .getHead());
        int diff = replace(document, lineBegin, lifeline.getNameRegion(),
                lifeline.getName(), newName);
        // NOTE(review): the value returned by replace() above is immediately
        // overwritten here with the raw name-length delta — verify intended.
        diff = newName.length() - lifeline.getName().length();
        for (ForwardMessage msg : diagram.getMessages()) {
            lineBegin = diff + (Integer) diagram.getStateForDrawable(msg.getArrow());
            TextBasedMessageData md = (TextBasedMessageData) msg.getData();
            if (lifeline.getName().equals(md.getCaller())) {
                diff += replace(document, lineBegin, md.getRegion("caller"),
                        lifeline.getName(), newName);
            }
            lineBegin = diff + (Integer) diagram.getStateForDrawable(msg.getArrow());
            if (lifeline.getName().equals(md.getCallee())) {
                diff += replace(document, lineBegin, md.getRegion("callee"),
                        lifeline.getName(), newName);
            }
        }
        diagramTextTab.setIgnoreChanges(false);
        diagramTextTab.refresh(true);
    }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.tools.consistency;

import com.facebook.buck.log.thrift.rulekeys.FullRuleKey;
import com.facebook.buck.log.thrift.rulekeys.RuleKeyHash;
import com.facebook.buck.log.thrift.rulekeys.Value;
import com.facebook.buck.tools.consistency.DifferState.DiffResult;
import com.facebook.buck.tools.consistency.DifferState.MaxDifferencesException;
import com.facebook.buck.tools.consistency.RuleKeyFileParser.ParsedRuleKeyFile;
import com.facebook.buck.tools.consistency.RuleKeyFileParser.RuleKeyNode;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Prints out differences found between rule keys and their properties.
 *
 * <p>Normal usage is to instantiate a printer, call {@link #addTarget(String)} when encountering a
 * new RuleKey, then calling {@link #addProperty(String)} on that target scope when examining
 * properties of a RuleKey
 */
public class RuleKeyDiffPrinter {
  private final DiffPrinter diffPrinter;
  private final DifferState differState;

  /**
   * The scope for printing information about a target. Helps ensure that target information is
   * always printed if any differences are found within the target
   */
  public class TargetScope implements AutoCloseable {
    private final String newKey;
    private final String origKey;
    private final String target;
    // Stack of nested property names forming the path printed before each diff line.
    private List<String> pathComponents = new ArrayList<>();
    // Lazily set when the target header is first printed, so it prints at most once.
    private boolean printedTarget = false;

    private TargetScope(String target, String originalKey, String newKey) {
      this.target = target;
      this.origKey = originalKey;
      this.newKey = newKey;
    }

    @Override
    public void close() {}

    /** Prints out the target name and old/new hash */
    private void printHeader() {
      if (!printedTarget) {
        printedTarget = true;
        diffPrinter.printHeader(target, String.format(" (%s vs %s)", origKey, newKey));
      }
    }

    /**
     * Adds a new property to the target scope. If any changes are recorded on this object (e.g.
     * with {@link #added(ParsedRuleKeyFile, Value) added}), the details of the target will be
     * printed first.
     *
     * @param propertyName The path to the property to print at the beginning of the line. e.g. '0'
     *     for the first element of an array, or 'target_name' for a property named 'target_name' in
     *     the main rule key map.
     * @return A {@link PropertyScope} object that can record changes in the object.
     */
    public PropertyScope addProperty(String propertyName) {
      return new PropertyScope(propertyName);
    }

    /**
     * A scope representing a single property of a RuleKey, or a nested value of a RuleKey. This is
     * used to print differences between two different {@link Value} objects
     */
    public class PropertyScope implements AutoCloseable {
      private PropertyScope(String name) {
        pathComponents.add(name);
      }

      /** Pops the most recent property off of the stack of nested properties */
      @Override
      public void close() {
        pathComponents.remove(pathComponents.size() - 1);
      }

      /**
       * Creates a nested property scope that is one level deeper. Useful for printing values inside
       * of a map or list
       */
      public PropertyScope addNestedProperty(String newProperty) {
        return new PropertyScope(newProperty);
      }

      /** Make sure we've printed the header and haven't encountered too many differences */
      private void validateAndUpdateState() throws MaxDifferencesException {
        differState.incrementDifferenceCount();
        printHeader();
      }

      private void printAdd(ParsedRuleKeyFile file, Value value) {
        diffPrinter.printAdd(
            String.format(
                "%s: %s", String.join("/", pathComponents), valueAsReadableString(file, value)));
      }

      private void printRemove(ParsedRuleKeyFile file, Value value) {
        diffPrinter.printRemove(
            String.format(
                "%s: %s", String.join("/", pathComponents), valueAsReadableString(file, value)));
      }

      /**
       * Prints that a property was removed from the original rule key in the new one
       *
       * @param file The file object that has all of the rule keys from the original log file
       * @param value The value that was removed
       * @throws MaxDifferencesException Thrown if the maximum number of differences was found
       *     before this removal was printed
       */
      public void removed(ParsedRuleKeyFile file, Value value) throws MaxDifferencesException {
        validateAndUpdateState();
        printRemove(file, value);
      }

      /**
       * Prints that a property was added to the new rule key that wasn't present in the original
       * one
       *
       * @param file The file object that has all of the rule keys from the new log file
       * @param value The value that was added
       * @throws MaxDifferencesException Thrown if the maximum number of differences was found
       *     before this addition was printed
       */
      public void added(ParsedRuleKeyFile file, Value value) throws MaxDifferencesException {
        validateAndUpdateState();
        printAdd(file, value);
      }

      /**
       * Prints that there was a difference between two values that are at the same path. e.g. if
       * two instances of a rule key have different files at index 0 of their sources list, the
       * filenames would be printed out here.
       *
       * @param originalFile The file object that has all of the rule keys from the original log
       *     file
       * @param originalValue The value from the original rule key
       * @param newFile The file object that has all of the rule keys from the new log file
       * @param newValue The value from the new rule key
       * @throws MaxDifferencesException Thrown if the maximum number of differences was found
       *     before this change was printed
       */
      public void changed(
          ParsedRuleKeyFile originalFile,
          Value originalValue,
          ParsedRuleKeyFile newFile,
          Value newValue)
          throws MaxDifferencesException {
        validateAndUpdateState();
        printRemove(originalFile, originalValue);
        printAdd(newFile, newValue);
      }

      /**
       * Record a change that should not have an add/remove/change line printed. This ensures that
       * we count the difference and print the header, but don't force rule key differences to be
       * printed right before we recurse
       */
      public void recordEmptyChange() throws MaxDifferencesException {
        differState.incrementDifferenceCount();
        printHeader();
      }
    }
  }

  /**
   * Creates an instance of {@link RuleKeyDiffPrinter}
   *
   * @param diffPrinter An object that prints additions/removals
   * @param differState The state of the actual diffing
   */
  public RuleKeyDiffPrinter(DiffPrinter diffPrinter, DifferState differState) {
    this.diffPrinter = diffPrinter;
    this.differState = differState;
  }

  /**
   * Determines whether any changes have been printed
   *
   * @return whether any changes have been printed
   */
  public DiffResult hasChanges() {
    return differState.hasChanges();
  }

  /**
   * Adds a new {@link TargetScope} to this printer.
   *
   * @param target The target name
   * @param oldKey The old rule key hash
   * @param newKey The new rule key hash
   */
  public TargetScope addTarget(String target, String oldKey, String newKey) {
    return new TargetScope(target, oldKey, newKey);
  }

  /**
   * Gets a string representation of a Value to use when printing diffs
   *
   * @param file The file that this value came from. This is used primarily for looking up readable
   *     names should {@code value} be a {@link RuleKeyHash} type.
   * @param value The value to convert to a string
   * @return A string suitable for printing in a diff
   */
  public static String valueAsReadableString(ParsedRuleKeyFile file, Value value) {
    switch (value.getSetField()) {
      case STRING_VALUE:
        return value.getStringValue();
      case NUMBER_VALUE:
        return String.valueOf(value.getNumberValue());
      case BOOL_VALUE:
        return String.valueOf(value.getBoolValue());
      case NULL_VALUE:
        return "null";
      case HASHED_PATH:
        return String.format(
            "Path: %s, hash: %s", value.getHashedPath().path, value.getHashedPath().hash);
      case PATH:
        return String.format("Path: %s", value.getPath().path);
      case SHA1_HASH:
        return String.format("Sha1: %s", value.getSha1Hash().sha1);
      case PATTERN:
        return String.format("Regex Pattern: %s", value.getPattern().pattern);
      case BYTE_ARRAY:
        return String.format("Byte array length: %s", value.getByteArray().length);
      case CONTAINER_MAP:
        return String.format("Map: Length: %s", value.getContainerMap().size());
      case CONTAINER_LIST:
        return String.format("List: Length: %s", value.getContainerList().size());
      case RULE_KEY_HASH:
        // Resolve the hash to a readable rule name when the referenced rule is known.
        RuleKeyNode rule = file.rules.get(value.getRuleKeyHash().sha1);
        if (rule == null) {
          return String.format("RuleKey: %s", value.getRuleKeyHash().sha1);
        } else {
          return String.format(
              "RuleKey(%s) %s", value.getRuleKeyHash().sha1, getRuleKeyName(file, rule.ruleKey));
        }
      case ARCHIVE_MEMBER_PATH:
        return String.format(
            "ArchiveMemberPath: %s!%s, hash: %s",
            value.getArchiveMemberPath().archivePath,
            value.getArchiveMemberPath().memberPath,
            value.getArchiveMemberPath().hash);
      case BUILD_RULE_TYPE:
        return String.format("BuildRuleType: %s", value.getBuildRuleType().type);
      case WRAPPER:
        return String.format(
            "Wrapper: %s/%s",
            value.getWrapper().type, valueAsReadableString(file, value.getWrapper().value));
      case BUILD_TARGET:
        return String.format("BuildTarget: %s", value.getBuildTarget().name);
      case TARGET_PATH:
        return String.format("TargetPath: %s", value.getTargetPath().path);
      case KEY:
        // Fall through to the generic field-name rendering below.
        break;
    }
    return value.getSetField().getFieldName();
  }

  /**
   * Get a useful display name for the given rule key. This is done because sometimes the name is
   * not present for a rule key, and the only description is really available in one of its
   * properties, like the 'arg' property
   *
   * @param file The file that this rule key came from. This is used primarily for looking up
   *     readable names should identifying properties be a {@link RuleKeyHash} type.
   * @param ruleKey The rule key to get a name for
   * @return An appropriate rule key name, or "UNKNOWN NAME" if no name could be determined
   */
  public static String getRuleKeyName(ParsedRuleKeyFile file, FullRuleKey ruleKey) {
    String target = ruleKey.name;
    if (target != null && !target.isEmpty()) {
      return target;
    }
    // No name on the key itself: fall back to describing its 'arg' property.
    Value argName = ruleKey.values.get("arg");
    if (argName != null) {
      return String.format("argument: %s", getRuleKeyName(file, argName));
    }
    return "UNKNOWN NAME";
  }

  // Renders a Value as a short name fragment for getRuleKeyName; recurses into
  // containers, wrappers and referenced rule keys.
  private static String getRuleKeyName(ParsedRuleKeyFile file, Value value) {
    switch (value.getSetField()) {
      case STRING_VALUE:
        return value.getStringValue();
      case NUMBER_VALUE:
        return Double.toString(value.getNumberValue());
      case BOOL_VALUE:
        return Boolean.toString(value.getBoolValue());
      case NULL_VALUE:
        return "null";
      case HASHED_PATH:
        return value.getHashedPath().path;
      case PATH:
        return value.getPath().path;
      case SHA1_HASH:
        return value.getSha1Hash().sha1;
      case PATTERN:
        return value.getPattern().pattern;
      case BYTE_ARRAY:
        return String.format("ByteArray, length %s", value.getByteArray().length);
      case CONTAINER_MAP:
        return value.getContainerMap().entrySet().stream()
            .map(
                entry ->
                    String.format("%s: %s", entry.getKey(), getRuleKeyName(file, entry.getValue())))
            .collect(Collectors.joining(", "));
      case CONTAINER_LIST:
        return value.getContainerList().stream()
            .map(v -> getRuleKeyName(file, v))
            .collect(Collectors.joining(", "));
      case RULE_KEY_HASH:
        RuleKeyNode found = file.rules.get(value.getRuleKeyHash().sha1);
        if (found != null) {
          return getRuleKeyName(file, found.ruleKey);
        }
        return "UNKNOWN RULE KEY";
      case ARCHIVE_MEMBER_PATH:
        return String.format(
            "%s!%s",
            value.getArchiveMemberPath().archivePath, value.getArchiveMemberPath().memberPath);
      case BUILD_RULE_TYPE:
        return value.getBuildRuleType().type;
      case WRAPPER:
        return getRuleKeyName(file, value.getWrapper().value);
      case BUILD_TARGET:
        return value.getBuildTarget().name;
      case TARGET_PATH:
        return value.getTargetPath().path;
      case KEY:
        return value.getKey().key;
    }
    // Should not really happen since switch above covers all union members
    return "UNKNOWN UNION MEMBER";
  }
}
package com.gagnepain.cashcash.repository;

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

import org.springframework.social.connect.Connection;
import org.springframework.social.connect.ConnectionData;
import org.springframework.social.connect.ConnectionFactory;
import org.springframework.social.connect.ConnectionFactoryLocator;
import org.springframework.social.connect.ConnectionKey;
import org.springframework.social.connect.ConnectionRepository;
import org.springframework.social.connect.NoSuchConnectionException;
import org.springframework.social.connect.NotConnectedException;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;

import com.gagnepain.cashcash.domain.SocialUserConnection;

/**
 * {@link ConnectionRepository} implementation backed by a Spring Data
 * {@link SocialUserConnectionRepository}, scoped to a single user.
 *
 * <p>Each instance is bound to one {@code userId}; all queries and mutations
 * operate only on that user's persisted social connections.
 */
public class CustomSocialConnectionRepository implements ConnectionRepository {
	private final String userId;

	private final SocialUserConnectionRepository socialUserConnectionRepository;

	private final ConnectionFactoryLocator connectionFactoryLocator;

	/**
	 * @param userId the local user this repository is scoped to
	 * @param socialUserConnectionRepository persistence gateway for {@link SocialUserConnection} rows
	 * @param connectionFactoryLocator locator used to rebuild {@link Connection} objects from stored data
	 */
	public CustomSocialConnectionRepository(final String userId,
			final SocialUserConnectionRepository socialUserConnectionRepository,
			final ConnectionFactoryLocator connectionFactoryLocator) {
		this.userId = userId;
		this.socialUserConnectionRepository = socialUserConnectionRepository;
		this.connectionFactoryLocator = connectionFactoryLocator;
	}

	/**
	 * Returns all of the user's connections keyed by provider id. Every registered
	 * provider appears in the map (with an empty list when the user has no
	 * connection to it), matching the {@link ConnectionRepository} contract.
	 */
	@Override
	public MultiValueMap<String, Connection<?>> findAllConnections() {
		final List<SocialUserConnection> socialUserConnections =
				socialUserConnectionRepository.findAllByUserIdOrderByProviderIdAscRankAsc(userId);
		final List<Connection<?>> connections = socialUserConnectionsToConnections(socialUserConnections);
		final MultiValueMap<String, Connection<?>> connectionsByProviderId = new LinkedMultiValueMap<>();
		// Seed every registered provider with an (immutable) empty entry so callers can
		// tell "registered but not connected" apart from "unknown provider".
		for (final String registeredProviderId : connectionFactoryLocator.registeredProviderIds()) {
			connectionsByProviderId.put(registeredProviderId, Collections.emptyList());
		}
		for (final Connection<?> connection : connections) {
			final String providerId = connection.getKey()
					.getProviderId();
			final List<Connection<?>> existing = connectionsByProviderId.get(providerId);
			// Swap the immutable placeholder for a mutable list before the first add.
			// BUG FIX: the previous code called size() on the result of get() without a
			// null check and threw an NPE when a stored connection referenced a provider
			// that is no longer registered with the ConnectionFactoryLocator.
			if (existing == null || existing.isEmpty()) {
				connectionsByProviderId.put(providerId, new LinkedList<>());
			}
			connectionsByProviderId.add(providerId, connection);
		}
		return connectionsByProviderId;
	}

	/**
	 * Returns the user's connections for the given provider, ordered by rank.
	 *
	 * @param providerId the provider to look up (e.g. "facebook")
	 */
	@Override
	public List<Connection<?>> findConnections(final String providerId) {
		final List<SocialUserConnection> socialUserConnections =
				socialUserConnectionRepository.findAllByUserIdAndProviderIdOrderByRankAsc(userId, providerId);
		return socialUserConnectionsToConnections(socialUserConnections);
	}

	/**
	 * Returns the user's connections whose API binding is of the given type.
	 * The cast is safe because the provider id is derived from {@code apiType}.
	 */
	@Override
	@SuppressWarnings("unchecked")
	public <A> List<Connection<A>> findConnections(final Class<A> apiType) {
		final List<?> connections = findConnections(getProviderId(apiType));
		return (List<Connection<A>>) connections;
	}

	/**
	 * Finds connections matching the given provider-user ids, grouped by provider.
	 *
	 * @throws IllegalArgumentException if no provider users are supplied
	 */
	@Override
	public MultiValueMap<String, Connection<?>> findConnectionsToUsers(
			final MultiValueMap<String, String> providerUserIdsByProviderId) {
		if (providerUserIdsByProviderId == null || providerUserIdsByProviderId.isEmpty()) {
			throw new IllegalArgumentException("Unable to execute find: no providerUsers provided");
		}
		final MultiValueMap<String, Connection<?>> connectionsForUsers = new LinkedMultiValueMap<>();
		for (final Map.Entry<String, List<String>> entry : providerUserIdsByProviderId.entrySet()) {
			final String providerId = entry.getKey();
			final List<String> providerUserIds = entry.getValue();
			final List<Connection<?>> connections = providerUserIdsToConnections(providerId, providerUserIds);
			connections.forEach(connection -> connectionsForUsers.add(providerId, connection));
		}
		return connectionsForUsers;
	}

	/**
	 * Returns the connection identified by the given key.
	 *
	 * @throws NoSuchConnectionException if no such connection is stored for this user
	 */
	@Override
	public Connection<?> getConnection(final ConnectionKey connectionKey) {
		final SocialUserConnection socialUserConnection =
				socialUserConnectionRepository.findOneByUserIdAndProviderIdAndProviderUserId(userId,
						connectionKey.getProviderId(), connectionKey.getProviderUserId());
		return Optional.ofNullable(socialUserConnection)
				.map(this::socialUserConnectionToConnection)
				.orElseThrow(() -> new NoSuchConnectionException(connectionKey));
	}

	@Override
	@SuppressWarnings("unchecked")
	public <A> Connection<A> getConnection(final Class<A> apiType, final String providerUserId) {
		final String providerId = getProviderId(apiType);
		return (Connection<A>) getConnection(new ConnectionKey(providerId, providerUserId));
	}

	/**
	 * Returns the highest-ranked connection for the given API type.
	 *
	 * @throws NotConnectedException if the user has no connection for that provider
	 */
	@Override
	@SuppressWarnings("unchecked")
	public <A> Connection<A> getPrimaryConnection(final Class<A> apiType) {
		final String providerId = getProviderId(apiType);
		final Connection<A> connection = (Connection<A>) findPrimaryConnection(providerId);
		if (connection == null) {
			throw new NotConnectedException(providerId);
		}
		return connection;
	}

	@Override
	@SuppressWarnings("unchecked")
	public <A> Connection<A> findPrimaryConnection(final Class<A> apiType) {
		final String providerId = getProviderId(apiType);
		return (Connection<A>) findPrimaryConnection(providerId);
	}

	/**
	 * Persists a new connection, ranked after the user's existing connections
	 * for the same provider.
	 */
	@Override
	@Transactional
	public void addConnection(final Connection<?> connection) {
		final Long rank = getNewMaxRank(connection.getKey()
				.getProviderId()).longValue();
		final SocialUserConnection socialUserConnectionToSave = connectionToUserSocialConnection(connection, rank);
		socialUserConnectionRepository.save(socialUserConnectionToSave);
	}

	/**
	 * Updates the stored row for the given connection, keeping its rank and id.
	 * A no-op if the connection is not currently stored.
	 */
	@Override
	@Transactional
	public void updateConnection(final Connection<?> connection) {
		final SocialUserConnection socialUserConnection =
				socialUserConnectionRepository.findOneByUserIdAndProviderIdAndProviderUserId(userId,
						connection.getKey()
								.getProviderId(),
						connection.getKey()
								.getProviderUserId());
		if (socialUserConnection != null) {
			// Typo fix: local was previously named "socialUserConnectionToUdpate".
			final SocialUserConnection socialUserConnectionToUpdate =
					connectionToUserSocialConnection(connection, socialUserConnection.getRank());
			socialUserConnectionToUpdate.setId(socialUserConnection.getId());
			socialUserConnectionRepository.save(socialUserConnectionToUpdate);
		}
	}

	@Override
	@Transactional
	public void removeConnections(final String providerId) {
		socialUserConnectionRepository.deleteByUserIdAndProviderId(userId, providerId);
	}

	@Override
	@Transactional
	public void removeConnection(final ConnectionKey connectionKey) {
		socialUserConnectionRepository.deleteByUserIdAndProviderIdAndProviderUserId(userId,
				connectionKey.getProviderId(), connectionKey.getProviderUserId());
	}

	// Next rank = (max existing rank for the provider, or 0 when none) + 1.
	private Double getNewMaxRank(final String providerId) {
		final List<SocialUserConnection> socialUserConnections =
				socialUserConnectionRepository.findAllByUserIdAndProviderIdOrderByRankAsc(userId, providerId);
		return socialUserConnections.stream()
				.mapToDouble(SocialUserConnection::getRank)
				.max()
				.orElse(0D) + 1D;
	}

	// Returns the lowest-rank (primary) connection for the provider, or null.
	private Connection<?> findPrimaryConnection(final String providerId) {
		final List<SocialUserConnection> socialUserConnections =
				socialUserConnectionRepository.findAllByUserIdAndProviderIdOrderByRankAsc(userId, providerId);
		if (!socialUserConnections.isEmpty()) {
			return socialUserConnectionToConnection(socialUserConnections.get(0));
		} else {
			return null;
		}
	}

	// Maps a live Connection to its persistence representation at the given rank.
	private SocialUserConnection connectionToUserSocialConnection(final Connection<?> connection, final Long rank) {
		final ConnectionData connectionData = connection.createData();
		return new SocialUserConnection(userId, connection.getKey()
				.getProviderId(),
				connection.getKey()
						.getProviderUserId(),
				rank, connection.getDisplayName(), connection.getProfileUrl(), connection.getImageUrl(),
				connectionData.getAccessToken(), connectionData.getSecret(), connectionData.getRefreshToken(),
				connectionData.getExpireTime());
	}

	private List<Connection<?>> providerUserIdsToConnections(final String providerId,
			final List<String> providerUserIds) {
		final List<SocialUserConnection> socialUserConnections =
				socialUserConnectionRepository.findAllByUserIdAndProviderIdAndProviderUserIdIn(userId, providerId,
						providerUserIds);
		return socialUserConnectionsToConnections(socialUserConnections);
	}

	private List<Connection<?>> socialUserConnectionsToConnections(
			final List<SocialUserConnection> socialUserConnections) {
		return socialUserConnections.stream()
				.map(this::socialUserConnectionToConnection)
				.collect(Collectors.toList());
	}

	// Rebuilds a live Connection from a stored row via the provider's ConnectionFactory.
	private Connection<?> socialUserConnectionToConnection(final SocialUserConnection socialUserConnection) {
		final ConnectionData connectionData = new ConnectionData(socialUserConnection.getProviderId(),
				socialUserConnection.getProviderUserId(), socialUserConnection.getDisplayName(),
				socialUserConnection.getProfileURL(), socialUserConnection.getImageURL(),
				socialUserConnection.getAccessToken(), socialUserConnection.getSecret(),
				socialUserConnection.getRefreshToken(), socialUserConnection.getExpireTime());
		final ConnectionFactory<?> connectionFactory =
				connectionFactoryLocator.getConnectionFactory(connectionData.getProviderId());
		return connectionFactory.createConnection(connectionData);
	}

	private <A> String getProviderId(final Class<A> apiType) {
		return connectionFactoryLocator.getConnectionFactory(apiType)
				.getProviderId();
	}
}
package ai.elimu.web.content.multimedia.audio;

import java.io.File;
import java.io.IOException;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;

import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.ServletRequestDataBinder;
import org.springframework.web.bind.annotation.InitBinder;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.support.ByteArrayMultipartFileEditor;

import ai.elimu.dao.AudioContributionEventDao;
import ai.elimu.dao.AudioDao;
import ai.elimu.dao.EmojiDao;
import ai.elimu.dao.StoryBookParagraphDao;
import ai.elimu.dao.WordDao;
import ai.elimu.model.content.Emoji;
import ai.elimu.model.content.StoryBookParagraph;
import ai.elimu.model.content.Word;
import ai.elimu.model.content.multimedia.Audio;
import ai.elimu.model.contributor.AudioContributionEvent;
import ai.elimu.model.contributor.Contributor;
import ai.elimu.model.enums.ContentLicense;
import ai.elimu.model.enums.Platform;
import ai.elimu.model.v2.enums.content.AudioFormat;
import ai.elimu.model.v2.enums.content.LiteracySkill;
import ai.elimu.model.v2.enums.content.NumeracySkill;
import ai.elimu.util.DiscordHelper;
import ai.elimu.util.audio.AudioMetadataExtractionHelper;
import ai.elimu.web.context.EnvironmentContextLoaderListener;

/**
 * Web controller for creating new {@link Audio} content, optionally linked to a
 * {@link Word} or {@link StoryBookParagraph}. GET renders the creation form;
 * POST validates the uploaded file, persists the Audio and records an
 * {@link AudioContributionEvent}.
 */
@Controller
@RequestMapping("/content/multimedia/audio/create")
public class AudioCreateController {

    private final Logger logger = LogManager.getLogger();

    @Autowired
    private AudioDao audioDao;

    @Autowired
    private WordDao wordDao;

    @Autowired
    private StoryBookParagraphDao storyBookParagraphDao;

    @Autowired
    private EmojiDao emojiDao;

    @Autowired
    private AudioContributionEventDao audioContributionEventDao;

    /**
     * Renders the audio creation form, pre-selecting/pre-filling fields when the
     * corresponding request parameters are present.
     *
     * @param wordId optional id of the Word this audio belongs to
     * @param storyBookParagraphId optional id of the paragraph this audio belongs to
     * @param autoFillTitle optional title pre-fill
     * @param autoFillTranscription optional transcription pre-fill
     */
    @RequestMapping(method = RequestMethod.GET)
    public String handleRequest(
            Model model,
            @RequestParam(required = false) Long wordId,
            @RequestParam(required = false) Long storyBookParagraphId,
            @RequestParam(required = false) String autoFillTitle,
            @RequestParam(required = false) String autoFillTranscription
    ) {
        logger.info("handleRequest");

        Audio audio = new Audio();

        // Pre-select the Audio's corresponding Word
        if (wordId != null) {
            Word word = wordDao.read(wordId);
            audio.setWord(word);
        }

        // Pre-select the Audio's corresponding StoryBookParagraph
        if (storyBookParagraphId != null) {
            StoryBookParagraph storyBookParagraph = storyBookParagraphDao.read(storyBookParagraphId);
            audio.setStoryBookParagraph(storyBookParagraph);
        }

        // Pre-fill the Audio's title
        if (StringUtils.isNotBlank(autoFillTitle)) {
            audio.setTitle(autoFillTitle);
        }

        // Pre-fill the Audio's transcription
        if (StringUtils.isNotBlank(autoFillTranscription)) {
            audio.setTranscription(autoFillTranscription);
        }

        model.addAttribute("audio", audio);
        model.addAttribute("words", wordDao.readAllOrdered());
        model.addAttribute("storyBookParagraphs", storyBookParagraphDao.readAll());
        model.addAttribute("contentLicenses", ContentLicense.values());
        model.addAttribute("literacySkills", LiteracySkill.values());
        model.addAttribute("numeracySkills", NumeracySkill.values());
        // "timeStart" is echoed back on POST so the contribution's time-spent can be computed
        model.addAttribute("timeStart", System.currentTimeMillis());
        model.addAttribute("emojisByWordId", getEmojisByWordId());
        return "content/multimedia/audio/create";
    }

    /**
     * Handles the creation form submit: validates the uploaded audio bytes and
     * format, extracts the duration, persists the Audio plus a contribution
     * event, and notifies Discord.
     */
    @RequestMapping(method = RequestMethod.POST)
    public String handleSubmit(
            HttpServletRequest request,
            HttpSession session,
            /*@Valid*/ Audio audio,
            @RequestParam("bytes") MultipartFile multipartFile,
            BindingResult result,
            Model model) {
        logger.info("handleSubmit");

        try {
            byte[] bytes = multipartFile.getBytes();
            if (multipartFile.isEmpty() || (bytes == null) || (bytes.length == 0)) {
                result.rejectValue("bytes", "NotNull");
            } else {
                String originalFileName = multipartFile.getOriginalFilename();
                logger.info("originalFileName: " + originalFileName);
                // BUG FIX: getOriginalFilename() may return null; guard before dereferencing.
                // Locale.ROOT keeps extension matching stable regardless of the server's
                // default locale (e.g. Turkish dotless-i).
                String lowerCaseFileName = (originalFileName == null)
                        ? ""
                        : originalFileName.toLowerCase(Locale.ROOT);
                if (lowerCaseFileName.endsWith(".mp3")) {
                    audio.setAudioFormat(AudioFormat.MP3);
                } else if (lowerCaseFileName.endsWith(".ogg")) {
                    audio.setAudioFormat(AudioFormat.OGG);
                } else if (lowerCaseFileName.endsWith(".wav")) {
                    audio.setAudioFormat(AudioFormat.WAV);
                } else {
                    result.rejectValue("bytes", "typeMismatch");
                }

                if (audio.getAudioFormat() != null) {
                    String contentType = multipartFile.getContentType();
                    logger.info("contentType: " + contentType);
                    audio.setContentType(contentType);

                    audio.setBytes(bytes);

                    // TODO: convert to a default audio format?

                    // Convert from MultipartFile to File, and extract audio duration.
                    // SECURITY FIX: use only the base name of the client-supplied filename
                    // so a crafted name (e.g. "../../evil.mp3") cannot escape the temp dir.
                    String tmpDir = System.getProperty("java.io.tmpdir");
                    File tmpDirElimuAi = new File(tmpDir, "elimu-ai");
                    tmpDirElimuAi.mkdir();
                    File file = new File(tmpDirElimuAi, new File(originalFileName).getName());
                    logger.info("file: " + file);
                    multipartFile.transferTo(file);
                    Long durationMs = AudioMetadataExtractionHelper.getDurationInMilliseconds(file);
                    logger.info("durationMs: " + durationMs);
                    audio.setDurationMs(durationMs);
                }
            }
        } catch (IOException e) {
            logger.error(e);
        }

        if (result.hasErrors()) {
            model.addAttribute("words", wordDao.readAllOrdered());
            model.addAttribute("storyBookParagraphs", storyBookParagraphDao.readAll());
            model.addAttribute("contentLicenses", ContentLicense.values());
            model.addAttribute("literacySkills", LiteracySkill.values());
            model.addAttribute("numeracySkills", NumeracySkill.values());
            model.addAttribute("timeStart", request.getParameter("timeStart"));
            model.addAttribute("emojisByWordId", getEmojisByWordId());
            return "content/multimedia/audio/create";
        } else {
            // NOTE(review): title lowercasing intentionally keeps the default-locale
            // behavior of the original code; the content language may matter here.
            audio.setTitle(audio.getTitle().toLowerCase());
            audio.setTimeLastUpdate(Calendar.getInstance());
            audioDao.create(audio);

            AudioContributionEvent audioContributionEvent = new AudioContributionEvent();
            audioContributionEvent.setContributor((Contributor) session.getAttribute("contributor"));
            audioContributionEvent.setTime(Calendar.getInstance());
            audioContributionEvent.setAudio(audio);
            audioContributionEvent.setRevisionNumber(audio.getRevisionNumber());
            audioContributionEvent.setComment(StringUtils.abbreviate(request.getParameter("contributionComment"), 1000));
            audioContributionEvent.setTimeSpentMs(System.currentTimeMillis() - Long.valueOf(request.getParameter("timeStart")));
            audioContributionEvent.setPlatform(Platform.WEBAPP);
            audioContributionEventDao.create(audioContributionEvent);

            String contentUrl = "http://" + EnvironmentContextLoaderListener.PROPERTIES.getProperty("content.language").toLowerCase() + ".elimu.ai/content/multimedia/audio/edit/" + audio.getId();
            DiscordHelper.sendChannelMessage(
                    "Audio created: " + contentUrl,
                    "\"" + audio.getTranscription() + "\"",
                    "Comment: \"" + audioContributionEvent.getComment() + "\"",
                    null,
                    null
            );

            return "redirect:/content/multimedia/audio/list#" + audio.getId();
        }
    }

    /**
     * See http://www.mkyong.com/spring-mvc/spring-mvc-failed-to-convert-property-value-in-file-upload-form/
     * <p></p>
     * Fixes this error message:
     * "Cannot convert value of type [org.springframework.web.multipart.support.StandardMultipartHttpServletRequest$StandardMultipartFile] to required type [byte] for property 'bytes[0]'"
     */
    @InitBinder
    protected void initBinder(HttpServletRequest request, ServletRequestDataBinder binder) throws ServletException {
        logger.info("initBinder");
        binder.registerCustomEditor(byte[].class, new ByteArrayMultipartFileEditor());
    }

    /**
     * Builds a map from Word id to the concatenated emoji glyphs labeled for that
     * word. Words with no labeled emojis are omitted.
     */
    private Map<Long, String> getEmojisByWordId() {
        logger.info("getEmojisByWordId");

        Map<Long, String> emojisByWordId = new HashMap<>();

        for (Word word : wordDao.readAll()) {
            String emojiGlyphs = "";

            List<Emoji> emojis = emojiDao.readAllLabeled(word);
            for (Emoji emoji : emojis) {
                emojiGlyphs += emoji.getGlyph();
            }

            if (StringUtils.isNotBlank(emojiGlyphs)) {
                emojisByWordId.put(word.getId(), emojiGlyphs);
            }
        }

        return emojisByWordId;
    }
}
package org.davidmoten.rx.jdbc;

import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicInteger;

import org.davidmoten.rx.jdbc.exceptions.CannotForkTransactedConnection;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A reference-counted {@link Connection} wrapper used for transactions.
 *
 * <p>The shared {@code counter} tracks how many logical users hold this
 * underlying connection: {@link #fork()} increments it, while
 * {@link #commit()} and {@link #rollback()} decrement it and only perform the
 * real commit/rollback on the underlying connection when the count reaches
 * zero. {@link #close()} likewise only closes the delegate once the count is
 * zero. All other methods delegate directly to the wrapped connection.
 */
final class TransactedConnection implements Connection {

    private static final Logger log = LoggerFactory.getLogger(TransactedConnection.class);

    // The wrapped physical connection; all JDBC calls delegate to it.
    private final Connection con;
    // Shared reference count across all forks of this connection.
    private final AtomicInteger counter;

    TransactedConnection(Connection con, AtomicInteger counter) {
        log.debug("constructing TransactedConnection from {}, {}", con, counter);
        this.con = con;
        this.counter = counter;
    }

    // Fresh wrapper starts with a single reference.
    public TransactedConnection(Connection con) {
        this(con, new AtomicInteger(1));
    }

    /** Returns the current reference count (for tests/diagnostics). */
    public int counter() {
        return counter.get();
    }

    @Override
    public void abort(Executor executor) throws SQLException {
        con.abort(executor);
    }

    @Override
    public void clearWarnings() throws SQLException {
        con.clearWarnings();
    }

    /**
     * Returns a new wrapper sharing this connection and its counter, with the
     * count incremented.
     *
     * @throws CannotForkTransactedConnection if the count had already dropped
     *         to zero (connection logically closed)
     */
    public TransactedConnection fork() {
        log.debug("forking connection");
        // getAndIncrement > 0 means the connection was still live before the fork.
        if (counter.getAndIncrement() > 0) {
            return new TransactedConnection(con, counter);
        } else {
            throw new CannotForkTransactedConnection(
                    "cannot fork TransactedConnection because already closed");
        }
    }

    // Only closes the delegate once no references remain; does not decrement
    // itself (commit/rollback/decrementCounter own the decrements).
    @Override
    public void close() throws SQLException {
        log.debug("TransactedConnection attempt close");
        if (counter.get() == 0) {
            log.debug("TransactedConnection close");
            con.close();
        }
    }

    // Decrements the count; the last holder performs the real commit.
    @Override
    public void commit() throws SQLException {
        log.debug("TransactedConnection commit attempt, counter={}", counter.get());
        if (counter.decrementAndGet() == 0) {
            log.debug("TransactedConnection actual commit");
            con.commit();
        }
    }

    // Decrements the count; the last holder performs the real rollback.
    @Override
    public void rollback() throws SQLException {
        log.debug("TransactedConnection rollback attempt, counter={}", counter.get());
        if (counter.decrementAndGet() == 0) {
            log.debug("TransactedConnection actual rollback");
            con.rollback();
        }
    }

    @Override
    public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
        return con.createArrayOf(typeName, elements);
    }

    @Override
    public Blob createBlob() throws SQLException {
        return con.createBlob();
    }

    @Override
    public Clob createClob() throws SQLException {
        return con.createClob();
    }

    @Override
    public NClob createNClob() throws SQLException {
        return con.createNClob();
    }

    @Override
    public SQLXML createSQLXML() throws SQLException {
        return con.createSQLXML();
    }

    @Override
    public Statement createStatement() throws SQLException {
        return con.createStatement();
    }

    @Override
    public Statement createStatement(int resultSetType, int resultSetConcurrency,
            int resultSetHoldability) throws SQLException {
        return con.createStatement(resultSetType, resultSetConcurrency, resultSetHoldability);
    }

    @Override
    public Statement createStatement(int resultSetType, int resultSetConcurrency)
            throws SQLException {
        return con.createStatement(resultSetType, resultSetConcurrency);
    }

    @Override
    public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
        return con.createStruct(typeName, attributes);
    }

    @Override
    public boolean getAutoCommit() throws SQLException {
        return con.getAutoCommit();
    }

    @Override
    public String getCatalog() throws SQLException {
        return con.getCatalog();
    }

    @Override
    public Properties getClientInfo() throws SQLException {
        return con.getClientInfo();
    }

    @Override
    public String getClientInfo(String name) throws SQLException {
        return con.getClientInfo(name);
    }

    @Override
    public int getHoldability() throws SQLException {
        return con.getHoldability();
    }

    @Override
    public DatabaseMetaData getMetaData() throws SQLException {
        return con.getMetaData();
    }

    @Override
    public int getNetworkTimeout() throws SQLException {
        return con.getNetworkTimeout();
    }

    @Override
    public String getSchema() throws SQLException {
        return con.getSchema();
    }

    @Override
    public int getTransactionIsolation() throws SQLException {
        return con.getTransactionIsolation();
    }

    @Override
    public Map<String, Class<?>> getTypeMap() throws SQLException {
        return con.getTypeMap();
    }

    @Override
    public SQLWarning getWarnings() throws SQLException {
        return con.getWarnings();
    }

    @Override
    public boolean isClosed() throws SQLException {
        return con.isClosed();
    }

    @Override
    public boolean isReadOnly() throws SQLException {
        return con.isReadOnly();
    }

    @Override
    public boolean isValid(int timeout) throws SQLException {
        return con.isValid(timeout);
    }

    @Override
    public boolean isWrapperFor(Class<?> arg0) throws SQLException {
        return con.isWrapperFor(arg0);
    }

    @Override
    public String nativeSQL(String sql) throws SQLException {
        return con.nativeSQL(sql);
    }

    @Override
    public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency,
            int resultSetHoldability) throws SQLException {
        return con.prepareCall(sql, resultSetType, resultSetConcurrency, resultSetHoldability);
    }

    @Override
    public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency)
            throws SQLException {
        return con.prepareCall(sql, resultSetType, resultSetConcurrency);
    }

    @Override
    public CallableStatement prepareCall(String sql) throws SQLException {
        return con.prepareCall(sql);
    }

    // The prepareStatement overloads wrap the delegate's statement in a
    // TransactedPreparedStatement bound to this connection, so statement close
    // can participate in the connection's reference counting.

    @Override
    public TransactedPreparedStatement prepareStatement(String sql, int resultSetType,
            int resultSetConcurrency, int resultSetHoldability) throws SQLException {
        return new TransactedPreparedStatement(this,
                con.prepareStatement(sql, resultSetType, resultSetConcurrency, resultSetHoldability));
    }

    @Override
    public TransactedPreparedStatement prepareStatement(String sql, int resultSetType,
            int resultSetConcurrency) throws SQLException {
        return new TransactedPreparedStatement(this,
                con.prepareStatement(sql, resultSetType, resultSetConcurrency));
    }

    @Override
    public TransactedPreparedStatement prepareStatement(String sql, int autoGeneratedKeys)
            throws SQLException {
        return new TransactedPreparedStatement(this, con.prepareStatement(sql, autoGeneratedKeys));
    }

    @Override
    public TransactedPreparedStatement prepareStatement(String sql, int[] columnIndexes)
            throws SQLException {
        return new TransactedPreparedStatement(this, con.prepareStatement(sql, columnIndexes));
    }

    @Override
    public TransactedPreparedStatement prepareStatement(String sql, String[] columnNames)
            throws SQLException {
        return new TransactedPreparedStatement(this, con.prepareStatement(sql, columnNames));
    }

    @Override
    public TransactedPreparedStatement prepareStatement(String sql) throws SQLException {
        return new TransactedPreparedStatement(this, con.prepareStatement(sql));
    }

    @Override
    public void releaseSavepoint(Savepoint savepoint) throws SQLException {
        con.releaseSavepoint(savepoint);
    }

    @Override
    public void rollback(Savepoint savepoint) throws SQLException {
        con.rollback(savepoint);
    }

    @Override
    public void setAutoCommit(boolean autoCommit) throws SQLException {
        con.setAutoCommit(autoCommit);
    }

    @Override
    public void setCatalog(String catalog) throws SQLException {
        con.setCatalog(catalog);
    }

    @Override
    public void setClientInfo(Properties properties) throws SQLClientInfoException {
        con.setClientInfo(properties);
    }

    @Override
    public void setClientInfo(String name, String value) throws SQLClientInfoException {
        con.setClientInfo(name, value);
    }

    @Override
    public void setHoldability(int holdability) throws SQLException {
        con.setHoldability(holdability);
    }

    @Override
    public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
        con.setNetworkTimeout(executor, milliseconds);
    }

    @Override
    public void setReadOnly(boolean readOnly) throws SQLException {
        con.setReadOnly(readOnly);
    }

    @Override
    public Savepoint setSavepoint() throws SQLException {
        return con.setSavepoint();
    }

    @Override
    public Savepoint setSavepoint(String name) throws SQLException {
        return con.setSavepoint(name);
    }

    @Override
    public void setSchema(String schema) throws SQLException {
        con.setSchema(schema);
    }

    @Override
    public void setTransactionIsolation(int level) throws SQLException {
        con.setTransactionIsolation(level);
    }

    @Override
    public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
        con.setTypeMap(map);
    }

    @Override
    public <T> T unwrap(Class<T> arg0) throws SQLException {
        return con.unwrap(arg0);
    }

    /** Adds a reference without the already-live check that {@link #fork()} performs. */
    public void incrementCounter() {
        counter.incrementAndGet();
    }

    /** Drops a reference without committing/rolling back (unlike commit()/rollback()). */
    public void decrementCounter() {
        counter.decrementAndGet();
    }

    @Override
    public String toString() {
        return "TransactedConnection [con=" + con + ", counter=" + counter + "]";
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.iterative.task; import org.apache.flink.api.common.functions.Function; import org.apache.flink.api.common.operators.util.JoinHashMap; import org.apache.flink.api.common.typeutils.TypeComparator; import org.apache.flink.api.common.typeutils.TypeComparatorFactory; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.common.typeutils.TypeSerializerFactory; import org.apache.flink.core.io.IOReadableWritable; import org.apache.flink.core.memory.DataInputView; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.runtime.execution.Environment; import org.apache.flink.runtime.io.disk.InputViewIterator; import org.apache.flink.runtime.io.network.TaskEventDispatcher; import org.apache.flink.runtime.io.network.api.EndOfSuperstepEvent; import org.apache.flink.runtime.io.network.api.writer.RecordWriter; import org.apache.flink.runtime.io.network.api.writer.RecordWriterBuilder; import org.apache.flink.runtime.io.network.partition.ResultPartitionID; import org.apache.flink.runtime.iterative.concurrent.BlockingBackChannel; import org.apache.flink.runtime.iterative.concurrent.BlockingBackChannelBroker; import 
org.apache.flink.runtime.iterative.concurrent.Broker; import org.apache.flink.runtime.iterative.concurrent.IterationAggregatorBroker; import org.apache.flink.runtime.iterative.concurrent.SolutionSetBroker; import org.apache.flink.runtime.iterative.concurrent.SolutionSetUpdateBarrier; import org.apache.flink.runtime.iterative.concurrent.SolutionSetUpdateBarrierBroker; import org.apache.flink.runtime.iterative.concurrent.SuperstepBarrier; import org.apache.flink.runtime.iterative.concurrent.SuperstepKickoffLatch; import org.apache.flink.runtime.iterative.concurrent.SuperstepKickoffLatchBroker; import org.apache.flink.runtime.iterative.event.AllWorkersDoneEvent; import org.apache.flink.runtime.iterative.event.TerminationEvent; import org.apache.flink.runtime.iterative.event.WorkerDoneEvent; import org.apache.flink.runtime.iterative.io.SerializedUpdateBuffer; import org.apache.flink.runtime.operators.BatchTask; import org.apache.flink.runtime.operators.Driver; import org.apache.flink.runtime.operators.hash.CompactingHashTable; import org.apache.flink.runtime.operators.util.TaskConfig; import org.apache.flink.types.Value; import org.apache.flink.util.Collector; import org.apache.flink.util.MutableObjectIterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; import java.util.List; /** * The head is responsible for coordinating an iteration and can run a {@link Driver} inside. It * will read the initial input and establish a {@link BlockingBackChannel} to the iteration's tail. * After successfully processing the input, it will send EndOfSuperstep events to its outputs. It * must also be connected to a synchronization task and after each superstep, it will wait until it * receives an {@link AllWorkersDoneEvent} from the sync, which signals that all other heads have * also finished their iteration. 
Starting with the second iteration, the input for the head is the
 * output of the tail, transmitted through the backchannel. Once the iteration is done, the head
 * will send a {@link TerminationEvent} to all its connected tasks, signaling them to shutdown.
 *
 * <p>Assumption on the ordering of the outputs: - The first n output gates write to channels that
 * go to the tasks of the step function. - The next m output gates go to the tasks that consume the
 * final solution. - The last output gate connects to the synchronization task.
 *
 * @param <X> The type of the bulk partial solution / solution set and the final output.
 * @param <Y> The type of the feed-back data set (bulk partial solution / workset). For bulk
 *     iterations, {@code Y} is the same as {@code X}
 */
public class IterationHeadTask<X, Y, S extends Function, OT> extends AbstractIterativeTask<S, OT> {

    private static final Logger log = LoggerFactory.getLogger(IterationHeadTask.class);

    /** Collector that writes the final iteration result to the downstream consumers. */
    private Collector<X> finalOutputCollector;

    /** Serializer for the feed-back data set (workset / bulk partial solution). */
    private TypeSerializerFactory<Y> feedbackTypeSerializer;

    /** Serializer for the solution set / final output type. */
    private TypeSerializerFactory<X> solutionTypeSerializer;

    /** Writer on the gate that connects to the synchronization task. */
    private RecordWriter<IOReadableWritable> toSync;

    /** Partition id of the sync gate; used to subscribe the superstep barrier to sync events. */
    private ResultPartitionID toSyncPartitionId;

    private int feedbackDataInput; // workset or bulk partial solution

    // --------------------------------------------------------------------------------------------

    /**
     * Create an Invokable task and set its environment.
     *
     * @param environment The environment assigned to this invokable.
     */
    public IterationHeadTask(Environment environment) {
        super(environment);
    }

    // --------------------------------------------------------------------------------------------

    @Override
    protected int getNumTaskInputs() {
        // this task has an additional input in the workset case for the initial solution set
        boolean isWorkset = config.getIsWorksetIteration();
        return driver.getNumberOfInputs() + (isWorkset ? 1 : 0);
    }

    @Override
    protected void initOutputs() throws Exception {
        // initialize the regular outputs first (the ones into the step function).
        super.initOutputs();

        // at this time, the outputs to the step function are created
        // add the outputs for the final solution
        List<RecordWriter<?>> finalOutputWriters = new ArrayList<RecordWriter<?>>();
        final TaskConfig finalOutConfig = this.config.getIterationHeadFinalOutputConfig();
        final ClassLoader userCodeClassLoader = getUserCodeClassLoader();
        this.finalOutputCollector =
                BatchTask.getOutputCollector(
                        this,
                        finalOutConfig,
                        userCodeClassLoader,
                        finalOutputWriters,
                        config.getNumOutputs(),
                        finalOutConfig.getNumOutputs());

        // sanity check the setup: the sync gate must come directly after the step-function
        // gates and the final-result gates (see ordering assumption in the class javadoc)
        final int writersIntoStepFunction = this.eventualOutputs.size();
        final int writersIntoFinalResult = finalOutputWriters.size();
        final int syncGateIndex = this.config.getIterationHeadIndexOfSyncOutput();

        if (writersIntoStepFunction + writersIntoFinalResult != syncGateIndex) {
            throw new Exception(
                    "Error: Inconsistent head task setup - wrong mapping of output gates.");
        }
        // now, we can instantiate the sync gate
        this.toSync = new RecordWriterBuilder<>().build(getEnvironment().getWriter(syncGateIndex));
        this.toSyncPartitionId = getEnvironment().getWriter(syncGateIndex).getPartitionId();
    }

    /**
     * The iteration head prepares the backchannel: it allocates memory, instantiates a {@link
     * BlockingBackChannel} and hands it to the iteration tail via a {@link Broker} singleton.
     */
    private BlockingBackChannel initBackChannel() throws Exception {

        /* get the size of the memory available to the backchannel */
        int backChannelMemoryPages =
                getMemoryManager().computeNumberOfPages(this.config.getRelativeBackChannelMemory());

        /* allocate the memory available to the backchannel */
        List<MemorySegment> segments = new ArrayList<MemorySegment>();
        int segmentSize = getMemoryManager().getPageSize();
        getMemoryManager().allocatePages(this, segments, backChannelMemoryPages);

        /* instantiate the backchannel */
        BlockingBackChannel backChannel =
                new BlockingBackChannel(
                        new SerializedUpdateBuffer(segments, segmentSize, getIOManager()));

        /* hand the backchannel over to the iteration tail */
        Broker<BlockingBackChannel> broker = BlockingBackChannelBroker.instance();
        broker.handIn(brokerKey(), backChannel);

        return backChannel;
    }

    /**
     * Builds the managed (off-JVM-heap) solution set index. On failure, any partially created
     * hash table is closed and the allocated memory pages are released before the exception
     * propagates.
     */
    private <BT> CompactingHashTable<BT> initCompactingHashTable() throws Exception {
        // get some memory
        double hashjoinMemorySize = config.getRelativeSolutionSetMemory();
        final ClassLoader userCodeClassLoader = getUserCodeClassLoader();

        TypeSerializerFactory<BT> solutionTypeSerializerFactory =
                config.getSolutionSetSerializer(userCodeClassLoader);
        TypeComparatorFactory<BT> solutionTypeComparatorFactory =
                config.getSolutionSetComparator(userCodeClassLoader);

        TypeSerializer<BT> solutionTypeSerializer = solutionTypeSerializerFactory.getSerializer();
        TypeComparator<BT> solutionTypeComparator =
                solutionTypeComparatorFactory.createComparator();

        CompactingHashTable<BT> hashTable = null;
        List<MemorySegment> memSegments = null;
        boolean success = false;
        try {
            int numPages = getMemoryManager().computeNumberOfPages(hashjoinMemorySize);
            memSegments = getMemoryManager().allocatePages(getContainingTask(), numPages);
            hashTable =
                    new CompactingHashTable<BT>(
                            solutionTypeSerializer, solutionTypeComparator, memSegments);
            success = true;
            return hashTable;
        } finally {
            if (!success) {
                // cleanup in reverse order of acquisition; swallow secondary failures so the
                // original exception is the one that propagates
                if (hashTable != null) {
                    try {
                        hashTable.close();
                    } catch (Throwable t) {
                        log.error(
                                "Error closing the solution set hash table after unsuccessful creation.",
                                t);
                    }
                }
                if (memSegments != null) {
                    try {
                        getMemoryManager().release(memSegments);
                    } catch (Throwable t) {
                        log.error(
                                "Error freeing memory after error during solution set hash table creation.",
                                t);
                    }
                }
            }
        }
    }

    /** Builds the unmanaged (on-JVM-heap) solution set index. */
    private <BT> JoinHashMap<BT> initJoinHashMap() {
        TypeSerializerFactory<BT> solutionTypeSerializerFactory =
                config.getSolutionSetSerializer(getUserCodeClassLoader());
        TypeComparatorFactory<BT> solutionTypeComparatorFactory =
                config.getSolutionSetComparator(getUserCodeClassLoader());

        TypeSerializer<BT> solutionTypeSerializer = solutionTypeSerializerFactory.getSerializer();
        TypeComparator<BT> solutionTypeComparator =
                solutionTypeComparatorFactory.createComparator();

        return new JoinHashMap<BT>(solutionTypeSerializer, solutionTypeComparator);
    }

    /** Bulk-loads the managed solution set index from the initial solution set input. */
    private void readInitialSolutionSet(
            CompactingHashTable<X> solutionSet, MutableObjectIterator<X> solutionSetInput)
            throws IOException {
        solutionSet.open();
        solutionSet.buildTableWithUniqueKey(solutionSetInput);
    }

    /** Loads the unmanaged solution set index from the initial solution set input. */
    private void readInitialSolutionSet(
            JoinHashMap<X> solutionSet, MutableObjectIterator<X> solutionSetInput)
            throws IOException {
        TypeSerializer<X> serializer = solutionTypeSerializer.getSerializer();

        X next;
        while ((next = solutionSetInput.next(serializer.createInstance())) != null) {
            solutionSet.insertOrReplace(next);
        }
    }

    /**
     * Creates the superstep barrier and subscribes it to the sync task's events
     * (all-workers-done and termination) on the sync output partition.
     */
    private SuperstepBarrier initSuperstepBarrier() {
        SuperstepBarrier barrier = new SuperstepBarrier(getUserCodeClassLoader());
        TaskEventDispatcher taskEventDispatcher = getEnvironment().getTaskEventDispatcher();
        ResultPartitionID partitionId = toSyncPartitionId;
        taskEventDispatcher.subscribeToEvent(partitionId, barrier, AllWorkersDoneEvent.class);
        taskEventDispatcher.subscribeToEvent(partitionId, barrier, TerminationEvent.class);
        return barrier;
    }

    @Override
    public void run() throws Exception {
        final String brokerKey = brokerKey();
        final int workerIndex = getEnvironment().getTaskInfo().getIndexOfThisSubtask();

        final boolean objectSolutionSet = config.isSolutionSetUnmanaged();

        CompactingHashTable<X> solutionSet = null; // if workset iteration
        JoinHashMap<X> solutionSetObjectMap =
                null; // if workset iteration with unmanaged solution set

        boolean waitForSolutionSetUpdate = config.getWaitForSolutionSetUpdate();
        boolean isWorksetIteration = config.getIsWorksetIteration();

        try {
            /* used for receiving the current iteration result from iteration tail */
            SuperstepKickoffLatch nextStepKickoff = new SuperstepKickoffLatch();
            SuperstepKickoffLatchBroker.instance().handIn(brokerKey, nextStepKickoff);

            BlockingBackChannel backChannel = initBackChannel();
            SuperstepBarrier barrier = initSuperstepBarrier();
            SolutionSetUpdateBarrier solutionSetUpdateBarrier = null;

            feedbackDataInput = config.getIterationHeadPartialSolutionOrWorksetInputIndex();
            feedbackTypeSerializer = this.getInputSerializer(feedbackDataInput);
            excludeFromReset(feedbackDataInput);

            int initialSolutionSetInput;
            if (isWorksetIteration) {
                initialSolutionSetInput = config.getIterationHeadSolutionSetInputIndex();
                solutionTypeSerializer = config.getSolutionSetSerializer(getUserCodeClassLoader());

                // setup the index for the solution set
                @SuppressWarnings("unchecked")
                MutableObjectIterator<X> solutionSetInput =
                        (MutableObjectIterator<X>)
                                createInputIterator(
                                        inputReaders[initialSolutionSetInput],
                                        solutionTypeSerializer);

                // read the initial solution set
                if (objectSolutionSet) {
                    solutionSetObjectMap = initJoinHashMap();
                    readInitialSolutionSet(solutionSetObjectMap, solutionSetInput);
                    SolutionSetBroker.instance().handIn(brokerKey, solutionSetObjectMap);
                } else {
                    solutionSet = initCompactingHashTable();
                    readInitialSolutionSet(solutionSet, solutionSetInput);
                    SolutionSetBroker.instance().handIn(brokerKey, solutionSet);
                }

                if (waitForSolutionSetUpdate) {
                    solutionSetUpdateBarrier = new SolutionSetUpdateBarrier();
                    SolutionSetUpdateBarrierBroker.instance()
                            .handIn(brokerKey, solutionSetUpdateBarrier);
                }
            } else {
                // bulk iteration case
                @SuppressWarnings("unchecked")
                TypeSerializerFactory<X> solSer = (TypeSerializerFactory<X>) feedbackTypeSerializer;
                solutionTypeSerializer = solSer;

                // = termination Criterion tail
                if (waitForSolutionSetUpdate) {
                    solutionSetUpdateBarrier = new SolutionSetUpdateBarrier();
                    SolutionSetUpdateBarrierBroker.instance()
                            .handIn(brokerKey, solutionSetUpdateBarrier);
                }
            }

            // instantiate all aggregators and register them at the iteration global registry
            RuntimeAggregatorRegistry aggregatorRegistry =
                    new RuntimeAggregatorRegistry(
                            config.getIterationAggregators(getUserCodeClassLoader()));
            IterationAggregatorBroker.instance().handIn(brokerKey, aggregatorRegistry);

            DataInputView superstepResult = null;

            while (this.running && !terminationRequested()) {

                if (log.isInfoEnabled()) {
                    log.info(formatLogString("starting iteration [" + currentIteration() + "]"));
                }

                barrier.setup();

                if (waitForSolutionSetUpdate) {
                    solutionSetUpdateBarrier.setup();
                }

                if (!inFirstIteration()) {
                    feedBackSuperstepResult(superstepResult);
                }

                super.run();

                // signal to connected tasks that we are done with the superstep
                sendEndOfSuperstepToAllIterationOutputs();

                if (waitForSolutionSetUpdate) {
                    solutionSetUpdateBarrier.waitForSolutionSetUpdate();
                }

                // blocking call to wait for the result
                superstepResult = backChannel.getReadEndAfterSuperstepEnded();
                if (log.isInfoEnabled()) {
                    log.info(formatLogString("finishing iteration [" + currentIteration() + "]"));
                }

                sendEventToSync(
                        new WorkerDoneEvent(workerIndex, aggregatorRegistry.getAllAggregators()));

                if (log.isInfoEnabled()) {
                    log.info(
                            formatLogString(
                                    "waiting for other workers in iteration ["
                                            + currentIteration()
                                            + "]"));
                }

                barrier.waitForOtherWorkers();

                if (barrier.terminationSignaled()) {
                    if (log.isInfoEnabled()) {
                        log.info(
                                formatLogString(
                                        "head received termination request in iteration ["
                                                + currentIteration()
                                                + "]"));
                    }
                    requestTermination();
                    nextStepKickoff.signalTermination();
                } else {
                    incrementIterationCounter();

                    String[] globalAggregateNames = barrier.getAggregatorNames();
                    Value[] globalAggregates = barrier.getAggregates();
                    aggregatorRegistry.updateGlobalAggregatesAndReset(
                            globalAggregateNames, globalAggregates);

                    nextStepKickoff.triggerNextSuperstep();
                }
            }

            if (log.isInfoEnabled()) {
                log.info(
                        formatLogString(
                                "streaming out final result after ["
                                        + currentIteration()
                                        + "] iterations"));
            }

            if (isWorksetIteration) {
                if (objectSolutionSet) {
                    streamSolutionSetToFinalOutput(solutionSetObjectMap);
                } else {
                    streamSolutionSetToFinalOutput(solutionSet);
                }
            } else {
                streamOutFinalOutputBulk(
                        new InputViewIterator<X>(
                                superstepResult, this.solutionTypeSerializer.getSerializer()));
            }

            this.finalOutputCollector.close();

        } finally {
            // make sure we unregister everything from the broker:
            // - backchannel
            // - aggregator registry
            // - solution set index
            IterationAggregatorBroker.instance().remove(brokerKey);
            BlockingBackChannelBroker.instance().remove(brokerKey);
            SuperstepKickoffLatchBroker.instance().remove(brokerKey);
            SolutionSetBroker.instance().remove(brokerKey);
            SolutionSetUpdateBarrierBroker.instance().remove(brokerKey);

            if (solutionSet != null) {
                solutionSet.close();
            }
        }
    }

    /** Streams the bulk iteration's final partial solution to the final output collector. */
    private void streamOutFinalOutputBulk(MutableObjectIterator<X> results) throws IOException {
        final Collector<X> out = this.finalOutputCollector;
        X record = this.solutionTypeSerializer.getSerializer().createInstance();

        while ((record = results.next(record)) != null) {
            out.collect(record);
        }
    }

    /** Streams the managed solution set to the final output collector. */
    private void streamSolutionSetToFinalOutput(CompactingHashTable<X> hashTable)
            throws IOException {
        final MutableObjectIterator<X> results = hashTable.getEntryIterator();
        final Collector<X> output = this.finalOutputCollector;
        X record = solutionTypeSerializer.getSerializer().createInstance();

        while ((record = results.next(record)) != null) {
            output.collect(record);
        }
    }

    /** Streams the unmanaged (object-map) solution set to the final output collector. */
    @SuppressWarnings("unchecked")
    private void streamSolutionSetToFinalOutput(JoinHashMap<X> solutionSet) throws IOException {
        final Collector<X> output = this.finalOutputCollector;
        for (Object e : solutionSet.values()) {
            output.collect((X) e);
        }
    }

    /** Re-wires the feedback input to read from the tail's superstep result. */
    private void feedBackSuperstepResult(DataInputView superstepResult) {
        this.inputs[this.feedbackDataInput] =
                new InputViewIterator<Y>(
                        superstepResult, this.feedbackTypeSerializer.getSerializer());
    }

    /** Broadcasts {@link EndOfSuperstepEvent} on all iteration outputs. */
    private void sendEndOfSuperstepToAllIterationOutputs()
            throws IOException, InterruptedException {
        if (log.isDebugEnabled()) {
            log.debug(formatLogString("Sending end-of-superstep to all iteration outputs."));
        }

        for (RecordWriter<?> eventualOutput : this.eventualOutputs) {
            eventualOutput.broadcastEvent(EndOfSuperstepEvent.INSTANCE);
        }
    }

    /** Sends the given worker-done event to the synchronization task. */
    private void sendEventToSync(WorkerDoneEvent event)
            throws IOException, InterruptedException {
        if (log.isInfoEnabled()) {
            log.info(
                    formatLogString(
                            "sending " + WorkerDoneEvent.class.getSimpleName() + " to sync"));
        }
        this.toSync.broadcastEvent(event);
    }
}
/* * Copyright (c) 2014 Spotify AB. * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.spotify.helios.agent; import com.google.common.base.Throwables; import com.google.common.collect.Maps; import com.google.common.util.concurrent.AbstractIdleService; import com.spotify.helios.common.Json; import com.spotify.helios.common.descriptors.JobId; import com.spotify.helios.common.descriptors.Task; import com.spotify.helios.common.descriptors.TaskStatus; import com.spotify.helios.common.descriptors.TaskStatusEvent; import com.spotify.helios.servicescommon.KafkaRecord; import com.spotify.helios.servicescommon.KafkaSender; import com.spotify.helios.servicescommon.coordination.Paths; import com.spotify.helios.servicescommon.coordination.PersistentPathChildrenCache; import com.spotify.helios.servicescommon.coordination.ZooKeeperClient; import com.spotify.helios.servicescommon.coordination.ZooKeeperClientProvider; import com.spotify.helios.servicescommon.coordination.ZooKeeperUpdatingPersistentDirectory; import org.apache.curator.framework.state.ConnectionState; import org.apache.kafka.common.serialization.ByteArraySerializer; import org.apache.kafka.common.serialization.StringSerializer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; 
import java.io.IOException; import java.nio.file.Path; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import static com.google.common.base.Preconditions.checkNotNull; import static com.spotify.helios.common.descriptors.Descriptor.parse; /** * The Helios Agent's view into ZooKeeper. * * This caches ZK state to local disk so the agent can continue to function in the face of a ZK * outage. */ public class ZooKeeperAgentModel extends AbstractIdleService implements AgentModel { private static final Logger log = LoggerFactory.getLogger(ZooKeeperAgentModel.class); private static final String TASK_CONFIG_FILENAME = "task-config.json"; private static final String TASK_HISTORY_FILENAME = "task-history.json"; private static final String TASK_STATUS_FILENAME = "task-status.json"; private final PersistentPathChildrenCache<Task> tasks; private final ZooKeeperUpdatingPersistentDirectory taskStatuses; private final TaskHistoryWriter historyWriter; private final KafkaSender kafkaSender; private final String agent; private final CopyOnWriteArrayList<AgentModel.Listener> listeners = new CopyOnWriteArrayList<>(); public ZooKeeperAgentModel(final ZooKeeperClientProvider provider, final KafkaClientProvider kafkaProvider, final String host, final Path stateDirectory) throws IOException, InterruptedException { // TODO(drewc): we're constructing too many heavyweight things in the ctor, these kinds of // things should be passed in/provider'd/etc. 
final ZooKeeperClient client = provider.get("ZooKeeperAgentModel_ctor"); this.agent = checkNotNull(host); final Path taskConfigFile = stateDirectory.resolve(TASK_CONFIG_FILENAME); this.tasks = client.pathChildrenCache(Paths.configHostJobs(host), taskConfigFile, Json.type(Task.class)); tasks.addListener(new JobsListener()); final Path taskStatusFile = stateDirectory.resolve(TASK_STATUS_FILENAME); this.taskStatuses = ZooKeeperUpdatingPersistentDirectory.create("agent-model-task-statuses", provider, taskStatusFile, Paths.statusHostJobs(host)); this.historyWriter = new TaskHistoryWriter( host, client, stateDirectory.resolve(TASK_HISTORY_FILENAME)); this.kafkaSender = new KafkaSender( kafkaProvider.getProducer(new StringSerializer(), new ByteArraySerializer())); } @Override protected void startUp() throws Exception { tasks.startAsync().awaitRunning(); taskStatuses.startAsync().awaitRunning(); historyWriter.startAsync().awaitRunning(); } @Override protected void shutDown() throws Exception { tasks.stopAsync().awaitTerminated(); taskStatuses.stopAsync().awaitTerminated(); historyWriter.stopAsync().awaitTerminated(); } private JobId jobIdFromTaskPath(final String path) { final String prefix = Paths.configHostJobs(agent) + "/"; return JobId.fromString(path.replaceFirst(prefix, "")); } /** * Returns the tasks (basically, a pair of {@link JobId} and {@link Task}) for the current agent. */ @Override public Map<JobId, Task> getTasks() { final Map<JobId, Task> tasks = Maps.newHashMap(); for (Map.Entry<String, Task> entry : this.tasks.getNodes().entrySet()) { final JobId id = jobIdFromTaskPath(entry.getKey()); tasks.put(id, entry.getValue()); } return tasks; } /** * Returns the {@link TaskStatus}es for all tasks assigned to the current agent. 
*/ @Override public Map<JobId, TaskStatus> getTaskStatuses() { final Map<JobId, TaskStatus> statuses = Maps.newHashMap(); for (Map.Entry<String, byte[]> entry : this.taskStatuses.entrySet()) { try { final JobId id = JobId.fromString(entry.getKey()); final TaskStatus status = Json.read(entry.getValue(), TaskStatus.class); statuses.put(id, status); } catch (IOException e) { throw Throwables.propagate(e); } } return statuses; } /** * Set the {@link TaskStatus} for the job identified by {@code jobId}. */ @Override public void setTaskStatus(final JobId jobId, final TaskStatus status) throws InterruptedException { log.debug("setting task status: {}", status); taskStatuses.put(jobId.toString(), status.toJsonBytes()); try { historyWriter.saveHistoryItem(status); } catch (Exception e) { // Log error here and keep going as saving task history is not critical. // This is to prevent bad data in the queue from screwing up the actually important Helios // agent operations. log.error("Error saving task status {} to ZooKeeper: {}", status, e); } final TaskStatusEvent event = new TaskStatusEvent(status, System.currentTimeMillis(), agent); kafkaSender.send(KafkaRecord.of(TaskStatusEvent.KAFKA_TOPIC, event.toJsonBytes())); } /** * Get the {@link TaskStatus} for the job identified by {@code jobId}. */ @Override public TaskStatus getTaskStatus(final JobId jobId) { final byte[] data = taskStatuses.get(jobId.toString()); if (data == null) { return null; } try { return parse(data, TaskStatus.class); } catch (IOException e) { throw Throwables.propagate(e); } } /** * Remove the {@link TaskStatus} for the job identified by {@code jobId}. */ @Override public void removeTaskStatus(final JobId jobId) throws InterruptedException { taskStatuses.remove(jobId.toString()); } /** * Add a listener that will be notified when tasks are changed. 
*/ @Override public void addListener(final AgentModel.Listener listener) { listeners.add(listener); listener.tasksChanged(this); } /** * Remove a listener that will be notified when tasks are changed. */ @Override public void removeListener(final AgentModel.Listener listener) { listeners.remove(listener); } protected void fireTasksUpdated() { for (final AgentModel.Listener listener : listeners) { try { listener.tasksChanged(this); } catch (Exception e) { log.error("listener threw exception", e); } } } private class JobsListener implements PersistentPathChildrenCache.Listener { @Override public void nodesChanged(final PersistentPathChildrenCache<?> cache) { fireTasksUpdated(); } @Override public void connectionStateChanged(final ConnectionState state) { // ignore } } }
package com.fasterxml.jackson.datatype.jsr310.deser; import java.time.*; import java.time.format.DateTimeFormatter; import java.time.temporal.ChronoField; import java.time.temporal.ChronoUnit; import java.time.temporal.Temporal; import java.util.Map; import java.util.TimeZone; import com.fasterxml.jackson.annotation.JsonFormat; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.exc.MismatchedInputException; import com.fasterxml.jackson.datatype.jsr310.DecimalUtils; import com.fasterxml.jackson.datatype.jsr310.MockObjectConfiguration; import com.fasterxml.jackson.datatype.jsr310.ModuleTestBase; import org.junit.Test; import static org.junit.Assert.*; import static org.junit.Assert.assertNull; public class OffsetDateTimeDeserTest extends ModuleTestBase { private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ISO_OFFSET_DATE_TIME; private final TypeReference<Map<String, OffsetDateTime>> MAP_TYPE_REF = new TypeReference<Map<String, OffsetDateTime>>() { }; private static final ZoneId Z1 = ZoneId.of("America/Chicago"); private static final ZoneId Z2 = ZoneId.of("America/Anchorage"); private static final ZoneId Z3 = ZoneId.of("America/Los_Angeles"); final static class Wrapper { @JsonFormat( pattern="yyyy_MM_dd'T'HH:mm:ssZ", shape=JsonFormat.Shape.STRING) public OffsetDateTime value; public Wrapper() { } public Wrapper(OffsetDateTime v) { value = v; } } private ObjectMapper MAPPER = newMapper(); @Test public void testDeserializationAsFloat01WithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(0L), Z1); OffsetDateTime value = MAPPER.readValue("0.000000000", OffsetDateTime.class); assertNotNull("The value should not be null.", value); 
assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void testDeserializationAsFloat01WithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(0L), Z1); ObjectMapper m = newMapper() .setTimeZone(TimeZone.getDefault()); OffsetDateTime value = m.readValue("0.000000000", OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void testDeserializationAsFloat02WithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 183917322), Z2); OffsetDateTime value = MAPPER.readValue("123456789.183917322", OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void testDeserializationAsFloat02WithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 183917322), Z2); OffsetDateTime value = MAPPER.readerFor(OffsetDateTime.class) .with(TimeZone.getDefault()) .readValue("123456789.183917322"); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void testDeserializationAsFloat03WithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); OffsetDateTime value = MAPPER.readValue( DecimalUtils.toDecimal(date.toEpochSecond(), date.getNano()), OffsetDateTime.class ); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void testDeserializationAsFloat03WithTimeZone() throws Exception 
{ OffsetDateTime date = OffsetDateTime.now(Z3); ObjectMapper m = newMapper() .setTimeZone(TimeZone.getDefault()); OffsetDateTime value = m.readValue( DecimalUtils.toDecimal(date.toEpochSecond(), date.getNano()), OffsetDateTime.class ); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void testDeserializationAsInt01NanosecondsWithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(0L), Z1); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, true); OffsetDateTime value = m.readValue("0", OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void testDeserializationAsInt01NanosecondsWithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(0L), Z1); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, true) .setTimeZone(TimeZone.getDefault()); OffsetDateTime value = m.readValue("0", OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void testDeserializationAsInt01MillisecondsWithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(0L), Z1); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, false); OffsetDateTime value = m.readValue("0", OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void 
testDeserializationAsInt01MillisecondsWithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(0L), Z1); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, false) .setTimeZone(TimeZone.getDefault()); OffsetDateTime value = m.readValue("0", OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void testDeserializationAsInt02NanosecondsWithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 0), Z2); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, true); OffsetDateTime value = m.readValue("123456789", OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void testDeserializationAsInt02NanosecondsWithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 0), Z2); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, true) .setTimeZone(TimeZone.getDefault()); OffsetDateTime value = m.readValue("123456789", OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void testDeserializationAsInt02MillisecondsWithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 422000000), Z2); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, false); OffsetDateTime value = m.readValue("123456789422", 
OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void testDeserializationAsInt02MillisecondsWithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 422000000), Z2); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, false) .setTimeZone(TimeZone.getDefault()); OffsetDateTime value = m.readValue("123456789422", OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void testDeserializationAsInt03NanosecondsWithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); date = date.minus(date.getNano(), ChronoUnit.NANOS); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, true); OffsetDateTime value = m.readValue(Long.toString(date.toEpochSecond()), OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void testDeserializationAsInt03NanosecondsWithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); date = date.minus(date.getNano(), ChronoUnit.NANOS); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, true) .setTimeZone(TimeZone.getDefault()); OffsetDateTime value = m.readValue(Long.toString(date.toEpochSecond()), OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void 
testDeserializationAsInt03MillisecondsWithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); date = date.minus(date.getNano() - (date.get(ChronoField.MILLI_OF_SECOND) * 1_000_000L), ChronoUnit.NANOS); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, false); OffsetDateTime value = m.readValue(Long.toString(date.toInstant().toEpochMilli()), OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void testDeserializationAsInt03MillisecondsWithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); date = date.minus(date.getNano() - (date.get(ChronoField.MILLI_OF_SECOND) * 1_000_000L), ChronoUnit.NANOS); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, false) .setTimeZone(TimeZone.getDefault()); OffsetDateTime value = m.readValue(Long.toString(date.toInstant().toEpochMilli()), OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void testDeserializationAsString01WithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(0L), Z1); ObjectReader r = MAPPER.readerFor(OffsetDateTime.class) .with(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE); OffsetDateTime value = r.readValue('"' + FORMATTER.format(date) + '"'); assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void testDeserializationAsString01WithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(0L), Z1); ObjectReader r = MAPPER.readerFor(OffsetDateTime.class) 
.with(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE) .with(TimeZone.getDefault()); OffsetDateTime value = r.readValue('"' + FORMATTER.format(date) + '"'); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void testDeserializationAsString01WithTimeZoneTurnedOff() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(0L), Z1); ObjectReader r = MAPPER.readerFor(OffsetDateTime.class) .without(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE) .with(TimeZone.getDefault()); OffsetDateTime value = r.readValue('"' + FORMATTER.format(date) + '"'); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getOffset(value, Z1), value.getOffset()); } @Test public void testDeserializationAsString01WithTimeZoneColonless() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(0L), Z1); ObjectReader r = MAPPER.readerFor(OffsetDateTime.class) .without(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE); String sDate = offsetWithoutColon(FORMATTER.format(date)); OffsetDateTime value = r.readValue('"' + sDate + '"'); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getOffset(value, Z1), value.getOffset()); } @Test public void testDeserializationAsString02WithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 183917322), Z2); ObjectReader r = MAPPER.readerFor(OffsetDateTime.class) .with(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE); OffsetDateTime value = r.readValue('"' + FORMATTER.format(date) + '"'); assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void testDeserializationAsString02WithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 183917322), Z2); 
ObjectReader r = MAPPER.readerFor(OffsetDateTime.class) .with(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE) .with(TimeZone.getDefault()); OffsetDateTime value = r.readValue('"' + FORMATTER.format(date) + '"'); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void testDeserializationAsString02WithTimeZoneTurnedOff() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 183917322), Z2); ObjectMapper m = newMapper() .configure(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE, false) .setTimeZone(TimeZone.getDefault()); OffsetDateTime value = m.readValue('"' + FORMATTER.format(date) + '"', OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getOffset(value, Z2), value.getOffset()); } @Test public void testDeserializationAsString02WithTimeZoneColonless() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 183917322), Z2); ObjectMapper m = newMapper() .configure(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE, false); String sDate = offsetWithoutColon(FORMATTER.format(date)); OffsetDateTime value = m.readValue('"' + sDate + '"', OffsetDateTime.class); assertNotNull("The value should not be null.", value); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getOffset(value, Z2), value.getOffset()); } @Test public void testDeserializationAsString03WithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); ObjectReader r = MAPPER.readerFor(OffsetDateTime.class) .with(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE); OffsetDateTime value = r.readValue('"' + FORMATTER.format(date) + '"'); assertIsEqual(date, value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, value.getOffset()); } @Test public void 
testDeserializationAsString03WithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); ObjectReader r = MAPPER.readerFor(OffsetDateTime.class) .with(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE) .with(TimeZone.getDefault()); OffsetDateTime value = r.readValue('"' + FORMATTER.format(date) + '"'); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getDefaultOffset(date), value.getOffset()); } @Test public void testDeserializationAsString03WithTimeZoneTurnedOff() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); ObjectReader r = MAPPER.readerFor(OffsetDateTime.class) .without(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE) .with(TimeZone.getDefault()); OffsetDateTime value = r.readValue('"' + FORMATTER.format(date) + '"'); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getOffset(value, Z3), value.getOffset()); } @Test public void testDeserializationAsString03WithTimeZoneColonless() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); ObjectReader r = MAPPER.readerFor(OffsetDateTime.class) .without(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE); String sDate = offsetWithoutColon(FORMATTER.format(date)); OffsetDateTime value = r.readValue('"' + sDate + '"'); assertIsEqual(date, value); assertEquals("The time zone is not correct.", getOffset(value, Z3), value.getOffset()); } @Test public void testDeserializationWithTypeInfo01WithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 183917322), Z2); ObjectMapper m = newMapper() .addMixIn(Temporal.class, MockObjectConfiguration.class); Temporal value = m.readValue( "[\"" + OffsetDateTime.class.getName() + "\",123456789.183917322]", Temporal.class ); assertTrue("The value should be an OffsetDateTime.", value instanceof OffsetDateTime); assertIsEqual(date, (OffsetDateTime) value); assertEquals("The time zone is not correct.", 
ZoneOffset.UTC, ((OffsetDateTime) value).getOffset()); } @Test public void testDeserializationWithTypeInfo01WithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 183917322), Z2); ObjectMapper m = newMapper() .setTimeZone(TimeZone.getDefault()) .addMixIn(Temporal.class, MockObjectConfiguration.class); Temporal value = m.readValue( "[\"" + OffsetDateTime.class.getName() + "\",123456789.183917322]", Temporal.class ); assertTrue("The value should be an OffsetDateTime.", value instanceof OffsetDateTime); assertIsEqual(date, (OffsetDateTime) value); assertEquals("The time zone is not correct.", getDefaultOffset(date), ((OffsetDateTime) value).getOffset()); } @Test public void testDeserializationWithTypeInfo02WithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 0), Z2); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, true) .addMixIn(Temporal.class, MockObjectConfiguration.class); Temporal value = m.readValue( "[\"" + OffsetDateTime.class.getName() + "\",123456789]", Temporal.class ); assertTrue("The value should be an OffsetDateTime.", value instanceof OffsetDateTime); assertIsEqual(date, (OffsetDateTime) value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, ((OffsetDateTime) value).getOffset()); } @Test public void testDeserializationWithTypeInfo02WithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 0), Z2); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, true) .setTimeZone(TimeZone.getDefault()) .addMixIn(Temporal.class, MockObjectConfiguration.class); Temporal value = m.readValue( "[\"" + OffsetDateTime.class.getName() + "\",123456789]", Temporal.class ); assertTrue("The value should be an OffsetDateTime.", value instanceof OffsetDateTime); assertIsEqual(date, 
(OffsetDateTime) value); assertEquals("The time zone is not correct.", getDefaultOffset(date), ((OffsetDateTime) value).getOffset()); } @Test public void testDeserializationWithTypeInfo03WithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 422000000), Z2); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, false) .addMixIn(Temporal.class, MockObjectConfiguration.class); Temporal value = m.readValue( "[\"" + OffsetDateTime.class.getName() + "\",123456789422]", Temporal.class ); assertTrue("The value should be an OffsetDateTime.", value instanceof OffsetDateTime); assertIsEqual(date, (OffsetDateTime) value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, ((OffsetDateTime) value).getOffset()); } @Test public void testDeserializationWithTypeInfo03WithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.ofInstant(Instant.ofEpochSecond(123456789L, 422000000), Z2); ObjectMapper m = newMapper() .configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, false) .setTimeZone(TimeZone.getDefault()) .addMixIn(Temporal.class, MockObjectConfiguration.class); Temporal value = m.readValue( "[\"" + OffsetDateTime.class.getName() + "\",123456789422]", Temporal.class ); assertTrue("The value should be an OffsetDateTime.", value instanceof OffsetDateTime); assertIsEqual(date, (OffsetDateTime) value); assertEquals("The time zone is not correct.", getDefaultOffset(date), ((OffsetDateTime) value).getOffset()); } @Test public void testDeserializationWithTypeInfo04WithoutTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); ObjectMapper m = newMapper() .configure(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE, true) .addMixIn(Temporal.class, MockObjectConfiguration.class); Temporal value = m.readValue( "[\"" + OffsetDateTime.class.getName() + "\",\"" + FORMATTER.format(date) + "\"]", Temporal.class ); 
assertTrue("The value should be an OffsetDateTime.", value instanceof OffsetDateTime); assertIsEqual(date, (OffsetDateTime) value); assertEquals("The time zone is not correct.", ZoneOffset.UTC, ((OffsetDateTime) value).getOffset()); } @Test public void testDeserializationWithTypeInfo04WithTimeZone() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); ObjectMapper m = newMapper() .configure(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE, true) .setTimeZone(TimeZone.getDefault()) .addMixIn(Temporal.class, MockObjectConfiguration.class); Temporal value = m.readValue( "[\"" + OffsetDateTime.class.getName() + "\",\"" + FORMATTER.format(date) + "\"]", Temporal.class ); assertTrue("The value should be an OffsetDateTime.", value instanceof OffsetDateTime); assertIsEqual(date, (OffsetDateTime) value); assertEquals("The time zone is not correct.", getDefaultOffset(date), ((OffsetDateTime) value).getOffset()); } @Test public void testDeserializationWithTypeInfo04WithTimeZoneTurnedOff() throws Exception { OffsetDateTime date = OffsetDateTime.now(Z3); ObjectMapper m = newMapper() .configure(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE, false) .setTimeZone(TimeZone.getDefault()) .addMixIn(Temporal.class, MockObjectConfiguration.class); Temporal value = m.readValue( "[\"" + OffsetDateTime.class.getName() + "\",\"" + FORMATTER.format(date) + "\"]", Temporal.class ); assertTrue("The value should be an OffsetDateTime.", value instanceof OffsetDateTime); OffsetDateTime cast = (OffsetDateTime) value; assertIsEqual(date, cast); assertEquals("The time zone is not correct.", getOffset(cast, Z3), cast.getOffset()); } @Test public void testCustomPatternWithAnnotations() throws Exception { OffsetDateTime inputValue = OffsetDateTime.ofInstant(Instant.ofEpochSecond(0L), UTC); final Wrapper input = new Wrapper(inputValue); final ObjectMapper m = newMapper(); String json = m.writeValueAsString(input); assertEquals(a2q("{'value':'1970_01_01T00:00:00+0000'}"), 
json); Wrapper result = m.readValue(json, Wrapper.class); assertEquals(input.value, result.value); } // [datatype-jsr310#79] @Test public void testRoundTripOfOffsetDateTimeAndJavaUtilDate() throws Exception { ObjectMapper mapper = newMapper(); mapper.configure(SerializationFeature.WRITE_DATE_TIMESTAMPS_AS_NANOSECONDS, false); mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false); mapper.configure(DeserializationFeature.READ_DATE_TIMESTAMPS_AS_NANOSECONDS, false); Instant givenInstant = LocalDate.of(2016, 1, 1).atStartOfDay().atZone(ZoneOffset.UTC).toInstant(); String json = mapper.writeValueAsString(java.util.Date.from(givenInstant)); OffsetDateTime actual = mapper.readValue(json, OffsetDateTime.class); // this fails assertEquals(givenInstant.atOffset(ZoneOffset.UTC), actual); } /* /********************************************************** /* Tests for empty string handling /********************************************************** */ @Test public void testLenientDeserializeFromEmptyString() throws Exception { String key = "OffsetDateTime"; ObjectMapper mapper = newMapper(); ObjectReader objectReader = mapper.readerFor(MAP_TYPE_REF); String valueFromNullStr = mapper.writeValueAsString(asMap(key, null)); Map<String, OffsetDateTime> actualMapFromNullStr = objectReader.readValue(valueFromNullStr); OffsetDateTime actualDateFromNullStr = actualMapFromNullStr.get(key); assertNull(actualDateFromNullStr); String valueFromEmptyStr = mapper.writeValueAsString(asMap(key, "")); Map<String, OffsetDateTime> actualMapFromEmptyStr = objectReader.readValue(valueFromEmptyStr); OffsetDateTime actualDateFromEmptyStr = actualMapFromEmptyStr.get(key); assertEquals("empty string failed to deserialize to null with lenient setting", null, actualDateFromEmptyStr); } @Test ( expected = MismatchedInputException.class) public void testStrictDeserializeFromEmptyString() throws Exception { final String key = "OffsetDateTime"; final ObjectMapper mapper = 
mapperBuilder().build(); mapper.configOverride(OffsetDateTime.class) .setFormat(JsonFormat.Value.forLeniency(false)); final ObjectReader objectReader = mapper.readerFor(MAP_TYPE_REF); String valueFromNullStr = mapper.writeValueAsString(asMap(key, null)); Map<String, OffsetDateTime> actualMapFromNullStr = objectReader.readValue(valueFromNullStr); assertNull(actualMapFromNullStr.get(key)); String valueFromEmptyStr = mapper.writeValueAsString(asMap(key, "")); objectReader.readValue(valueFromEmptyStr); } // [module-java8#166] @Test public void testDeserializationNoAdjustIfMIN() throws Exception { OffsetDateTime date = OffsetDateTime.MIN; ObjectMapper m = newMapper() .configure(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE, true) .setTimeZone(TimeZone.getTimeZone(Z1)) .addMixIn(Temporal.class, MockObjectConfiguration.class); Temporal value = m.readValue( "[\"" + OffsetDateTime.class.getName() + "\",\"" + FORMATTER.format(date) + "\"]", Temporal.class ); assertNotNull("The value should not be null.", value); assertTrue("The value should be an OffsetDateTime.", value instanceof OffsetDateTime); OffsetDateTime actualValue = (OffsetDateTime) value; assertIsEqual(date, actualValue); assertEquals(date.getOffset(),actualValue.getOffset()); } @Test public void testDeserializationNoAdjustIfMAX() throws Exception { OffsetDateTime date = OffsetDateTime.MAX; ObjectMapper m = newMapper() .configure(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE, true) .setTimeZone(TimeZone.getTimeZone(Z1)) .addMixIn(Temporal.class, MockObjectConfiguration.class); Temporal value = m.readValue( "[\"" + OffsetDateTime.class.getName() + "\",\"" + FORMATTER.format(date) + "\"]", Temporal.class ); assertNotNull("The value should not be null.", value); assertTrue("The value should be an OffsetDateTime.", value instanceof OffsetDateTime); OffsetDateTime actualValue = (OffsetDateTime) value; assertIsEqual(date, actualValue); assertEquals(date.getOffset(),actualValue.getOffset()); } 
private static void assertIsEqual(OffsetDateTime expected, OffsetDateTime actual) { assertTrue("The value is not correct. Expected timezone-adjusted <" + expected + ">, actual <" + actual + ">.", expected.isEqual(actual)); } private static ZoneOffset getDefaultOffset(OffsetDateTime date) { return ZoneId.systemDefault().getRules().getOffset(date.toLocalDateTime()); } private static ZoneOffset getOffset(OffsetDateTime date, ZoneId zone) { return zone.getRules().getOffset(date.toLocalDateTime()); } private static String offsetWithoutColon(String string){ return new StringBuilder(string).deleteCharAt(string.lastIndexOf(":")).toString(); } }
/**
 * BSD-style license; for more info see http://pmd.sourceforge.net/license.html
 */

package net.sourceforge.pmd;

import static org.junit.Assert.assertEquals;

import java.io.File;
import java.util.List;

import org.junit.Test;

/**
 * Unit tests for {@link RuleSetReferenceId}: construction rules (internal vs. external
 * references, pairing), and parsing of single and comma-separated reference strings
 * (simple "language-name" ids, release ids, full paths, URLs, and rule-qualified forms).
 */
public class RuleSetReferenceIdTest {

    /**
     * Asserts every observable property of a {@link RuleSetReferenceId} in one call.
     *
     * @param expectedExternal whether the reference should point at an external ruleset file
     * @param expectedRuleSetFileName expected ruleset file name (may be {@code null})
     * @param expectedAllRules whether the reference should select all rules of the ruleset
     * @param expectedRuleName expected single rule name (may be {@code null})
     * @param expectedToString expected {@code toString()} rendering
     * @param reference the instance under test
     */
    private static void assertRuleSetReferenceId(final boolean expectedExternal, final String expectedRuleSetFileName,
            final boolean expectedAllRules, final String expectedRuleName, final String expectedToString,
            final RuleSetReferenceId reference) {

        assertEquals("Wrong external", expectedExternal, reference.isExternal());
        assertEquals("Wrong RuleSet file name", expectedRuleSetFileName, reference.getRuleSetFileName());
        assertEquals("Wrong all Rule reference", expectedAllRules, reference.isAllRules());
        assertEquals("Wrong Rule name", expectedRuleName, reference.getRuleName());
        assertEquals("Wrong toString()", expectedToString, reference.toString());
    }

    /** A single id must not contain a comma. */
    @Test(expected = IllegalArgumentException.class)
    public void testCommaInSingleId() {
        new RuleSetReferenceId("bad,id");
    }

    /** Pairing two internal references is invalid. */
    @Test(expected = IllegalArgumentException.class)
    public void testInternalWithInternal() {
        new RuleSetReferenceId("SomeRule", new RuleSetReferenceId("SomeOtherRule"));
    }

    /** Pairing two external references is invalid. */
    @Test(expected = IllegalArgumentException.class)
    public void testExternalWithExternal() {
        new RuleSetReferenceId("someruleset.xml/SomeRule", new RuleSetReferenceId("someruleset.xml/SomeOtherRule"));
    }

    /** An external reference cannot be paired with an internal one. */
    @Test(expected = IllegalArgumentException.class)
    public void testExternalWithInternal() {
        new RuleSetReferenceId("someruleset.xml/SomeRule", new RuleSetReferenceId("SomeOtherRule"));
    }

    /** An internal reference paired with an external one is the supported combination. */
    @Test
    public void testInternalWithExternal() { // renamed from testInteralWithExternal (typo)
        // This is okay
        new RuleSetReferenceId("SomeRule", new RuleSetReferenceId("someruleset.xml/SomeOtherRule"));
    }

    @Test
    public void testEmptyRuleSet() {
        // This is representative of how the Test framework creates RuleSetReferenceId from static RuleSet XMLs
        RuleSetReferenceId reference = new RuleSetReferenceId(null);
        assertRuleSetReferenceId(true, null, true, null, "anonymous all Rule", reference);
    }

    @Test
    public void testInternalWithExternalRuleSet() {
        // This is representative of how the RuleSetFactory temporarily pairs an internal reference
        // with an external reference.
        RuleSetReferenceId internalRuleSetReferenceId = new RuleSetReferenceId("MockRuleName");
        assertRuleSetReferenceId(false, null, false, "MockRuleName", "MockRuleName", internalRuleSetReferenceId);
        RuleSetReferenceId externalRuleSetReferenceId = new RuleSetReferenceId("rulesets/java/basic.xml");
        assertRuleSetReferenceId(true, "rulesets/java/basic.xml", true, null, "rulesets/java/basic.xml",
                externalRuleSetReferenceId);

        RuleSetReferenceId pairRuleSetReferenceId = new RuleSetReferenceId("MockRuleName", externalRuleSetReferenceId);
        assertRuleSetReferenceId(true, "rulesets/java/basic.xml", false, "MockRuleName",
                "rulesets/java/basic.xml/MockRuleName", pairRuleSetReferenceId);
    }

    @Test
    public void testOneSimpleRuleSet() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("java-basic");
        assertEquals(1, references.size());
        assertRuleSetReferenceId(true, "rulesets/java/basic.xml", true, null, "rulesets/java/basic.xml",
                references.get(0));
    }

    @Test
    public void testMultipleSimpleRuleSet() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("java-unusedcode,java-basic");
        assertEquals(2, references.size());
        assertRuleSetReferenceId(true, "rulesets/java/unusedcode.xml", true, null, "rulesets/java/unusedcode.xml",
                references.get(0));
        assertRuleSetReferenceId(true, "rulesets/java/basic.xml", true, null, "rulesets/java/basic.xml",
                references.get(1));
    }

    @Test
    public void testOneReleaseRuleSet() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("50");
        assertEquals(1, references.size());
        assertRuleSetReferenceId(true, "rulesets/releases/50.xml", true, null, "rulesets/releases/50.xml",
                references.get(0));
    }

    @Test
    public void testOneFullRuleSet() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("rulesets/java/unusedcode.xml");
        assertEquals(1, references.size());
        assertRuleSetReferenceId(true, "rulesets/java/unusedcode.xml", true, null, "rulesets/java/unusedcode.xml",
                references.get(0));
    }

    @Test
    public void testOneFullRuleSetURL() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("file://somepath/rulesets/java/unusedcode.xml");
        assertEquals(1, references.size());
        assertRuleSetReferenceId(true, "file://somepath/rulesets/java/unusedcode.xml", true, null,
                "file://somepath/rulesets/java/unusedcode.xml", references.get(0));
    }

    @Test
    public void testMultipleFullRuleSet() {
        List<RuleSetReferenceId> references = RuleSetReferenceId
                .parse("rulesets/java/unusedcode.xml,rulesets/java/basic.xml");
        assertEquals(2, references.size());
        assertRuleSetReferenceId(true, "rulesets/java/unusedcode.xml", true, null, "rulesets/java/unusedcode.xml",
                references.get(0));
        assertRuleSetReferenceId(true, "rulesets/java/basic.xml", true, null, "rulesets/java/basic.xml",
                references.get(1));
    }

    @Test
    public void testMixRuleSet() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("rulesets/java/unusedcode.xml,xml-basic");
        assertEquals(2, references.size());
        assertRuleSetReferenceId(true, "rulesets/java/unusedcode.xml", true, null, "rulesets/java/unusedcode.xml",
                references.get(0));
        assertRuleSetReferenceId(true, "rulesets/xml/basic.xml", true, null, "rulesets/xml/basic.xml",
                references.get(1));
    }

    @Test
    public void testUnknownRuleSet() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("nonexistant.xml");
        assertEquals(1, references.size());
        assertRuleSetReferenceId(true, "nonexistant.xml", true, null, "nonexistant.xml", references.get(0));
    }

    @Test
    public void testUnknownAndSimpleRuleSet() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("jsp-basic,nonexistant.xml");
        assertEquals(2, references.size());
        assertRuleSetReferenceId(true, "rulesets/jsp/basic.xml", true, null, "rulesets/jsp/basic.xml",
                references.get(0));
        assertRuleSetReferenceId(true, "nonexistant.xml", true, null, "nonexistant.xml", references.get(1));
    }

    @Test
    public void testSimpleRuleSetAndRule() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("java-basic/EmptyCatchBlock");
        assertEquals(1, references.size());
        assertRuleSetReferenceId(true, "rulesets/java/basic.xml", false, "EmptyCatchBlock",
                "rulesets/java/basic.xml/EmptyCatchBlock", references.get(0));
    }

    @Test
    public void testFullRuleSetAndRule() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("rulesets/java/basic.xml/EmptyCatchBlock");
        assertEquals(1, references.size());
        assertRuleSetReferenceId(true, "rulesets/java/basic.xml", false, "EmptyCatchBlock",
                "rulesets/java/basic.xml/EmptyCatchBlock", references.get(0));
    }

    @Test
    public void testFullRuleSetURLAndRule() {
        List<RuleSetReferenceId> references = RuleSetReferenceId
                .parse("file://somepath/rulesets/java/unusedcode.xml/EmptyCatchBlock");
        assertEquals(1, references.size());
        assertRuleSetReferenceId(true, "file://somepath/rulesets/java/unusedcode.xml", false, "EmptyCatchBlock",
                "file://somepath/rulesets/java/unusedcode.xml/EmptyCatchBlock", references.get(0));
    }

    @Test
    public void testInternalRuleSetAndRule() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("EmptyCatchBlock");
        assertEquals(1, references.size());
        assertRuleSetReferenceId(false, null, false, "EmptyCatchBlock", "EmptyCatchBlock", references.get(0));
    }

    @Test
    public void testRelativePathRuleSet() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("pmd/pmd-ruleset.xml");
        assertEquals(1, references.size());
        assertRuleSetReferenceId(true, "pmd/pmd-ruleset.xml", true, null, "pmd/pmd-ruleset.xml", references.get(0));
    }

    @Test
    public void testAbsolutePathRuleSet() {
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse("/home/foo/pmd/pmd-ruleset.xml");
        assertEquals(1, references.size());
        assertRuleSetReferenceId(true, "/home/foo/pmd/pmd-ruleset.xml", true, null, "/home/foo/pmd/pmd-ruleset.xml",
                references.get(0));
    }

    /** A ruleset file without an .xml extension, addressed by canonical path. */
    @Test
    public void testFooRules() throws Exception {
        String fooRulesFile = new File("./src/test/resources/net/sourceforge/pmd/rulesets/foo-project/foo-rules")
                .getCanonicalPath();
        List<RuleSetReferenceId> references = RuleSetReferenceId.parse(fooRulesFile);
        assertEquals(1, references.size());
        assertRuleSetReferenceId(true, fooRulesFile, true, null, fooRulesFile, references.get(0));
    }

    /** JUnit 3 adapter so this class can run inside legacy suites. */
    public static junit.framework.Test suite() {
        return new junit.framework.JUnit4TestAdapter(RuleSetReferenceIdTest.class);
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.intention.impl;

import com.intellij.codeInsight.PsiEquivalenceUtil;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.siyeh.ig.psiutils.CommentTracker;
import com.siyeh.ig.psiutils.ControlFlowUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.List;

import static com.intellij.util.ObjectUtils.tryCast;

/**
 * Helpers for the "split condition" family of intentions: locating a splittable
 * {@code &&}/{@code ||} polyadic condition around a caret token, slicing a polyadic
 * expression into left/right operand sub-expressions around a separator token, and
 * rebuilding an {@code if} statement from the extracted and remaining parts.
 */
public class SplitConditionUtil {
  public static PsiPolyadicExpression findCondition(PsiElement element) {
    return findCondition(element, true, true);
  }

  /**
   * Finds the outermost polyadic {@code &&} or {@code ||} expression that contains
   * {@code element} (expected to be the operator token the caret is on).
   *
   * @param element   the token under the caret; must be a {@link PsiJavaToken} whose
   *                  parent is a {@link PsiPolyadicExpression}, otherwise {@code null}
   * @param acceptAnd whether {@code &&} chains are accepted
   * @param acceptOr  whether {@code ||} chains are accepted
   * @return the outermost enclosing polyadic expression of the same operator kind,
   *         or {@code null} if the token is not inside an acceptable chain (e.g. the
   *         chain is wrapped in a polyadic expression of a different operator)
   */
  public static PsiPolyadicExpression findCondition(PsiElement element, boolean acceptAnd, boolean acceptOr) {
    if (!(element instanceof PsiJavaToken)) {
      return null;
    }
    PsiJavaToken token = (PsiJavaToken)element;
    if (!(token.getParent() instanceof PsiPolyadicExpression)) return null;
    PsiPolyadicExpression expression = (PsiPolyadicExpression)token.getParent();
    boolean isAndExpression = acceptAnd && expression.getOperationTokenType() == JavaTokenType.ANDAND;
    boolean isOrExpression = acceptOr && expression.getOperationTokenType() == JavaTokenType.OROR;
    if (!isAndExpression && !isOrExpression) return null;
    // Climb to the outermost polyadic expression; bail out if the operator changes on the way up.
    while (expression.getParent() instanceof PsiPolyadicExpression) {
      expression = (PsiPolyadicExpression)expression.getParent();
      if (isAndExpression && expression.getOperationTokenType() != JavaTokenType.ANDAND) return null;
      if (isOrExpression && expression.getOperationTokenType() != JavaTokenType.OROR) return null;
    }
    return expression;
  }

  public static PsiExpression getROperands(PsiPolyadicExpression expression, PsiJavaToken separator) {
    return getROperands(expression, separator, new CommentTracker());
  }

  /**
   * Builds a new expression from the operands to the right of {@code separator}.
   * The preserved range is registered with {@code ct} so comments survive the rewrite.
   *
   * @param expression the polyadic expression being split
   * @param separator  the operator token that marks the split point
   * @param ct         tracker used to mark the retained right-hand range as unchanged
   * @return a freshly created expression covering everything after {@code separator}
   */
  public static PsiExpression getROperands(PsiPolyadicExpression expression, PsiJavaToken separator, CommentTracker ct) {
    PsiElement next = PsiTreeUtil.skipWhitespacesAndCommentsForward(separator);
    final int offsetInParent;
    if (next == null) {
      // Nothing textually follows the separator: take the (empty) tail right after it.
      offsetInParent = separator.getStartOffsetInParent() + separator.getTextLength();
    }
    else {
      ct.markRangeUnchanged(next, expression.getLastChild());
      offsetInParent = next.getStartOffsetInParent();
    }
    PsiElementFactory factory = JavaPsiFacade.getInstance(expression.getProject()).getElementFactory();
    String rOperands = expression.getText().substring(offsetInParent);
    return factory.createExpressionFromText(rOperands, expression.getParent());
  }

  public static PsiExpression getLOperands(PsiPolyadicExpression expression, PsiJavaToken separator) {
    return getLOperands(expression, separator, new CommentTracker());
  }

  /**
   * Builds a new expression from the operands to the left of {@code separator}.
   *
   * @param expression the polyadic expression being split
   * @param separator  the operator token that marks the split point
   * @param ct         tracker used to mark the retained left-hand range as unchanged
   * @return a freshly created expression covering everything before {@code separator}
   */
  public static PsiExpression getLOperands(PsiPolyadicExpression expression, PsiJavaToken separator, CommentTracker ct) {
    PsiElement prev = separator;
    // Exclude a single leading whitespace so the cut ends at the last operand, not mid-gap.
    if (prev.getPrevSibling() instanceof PsiWhiteSpace) prev = prev.getPrevSibling();
    ct.markRangeUnchanged(expression.getFirstChild(), prev.getPrevSibling());
    PsiElementFactory factory = JavaPsiFacade.getInstance(expression.getProject()).getElementFactory();
    String rOperands = expression.getText().substring(0, prev.getStartOffsetInParent());
    return factory.createExpressionFromText(rOperands, expression.getParent());
  }

  /**
   * Creates the replacement {@code if} statement for a split of {@code ifStatement}'s
   * condition into {@code extract} (the hoisted part) and {@code leave} (the remainder).
   *
   * @return the new statement, or {@code null} when there is no then-branch or the
   *         operator is neither {@code &&} nor {@code ||}
   */
  @Nullable
  static PsiIfStatement create(@NotNull PsiElementFactory factory,
                               @NotNull PsiIfStatement ifStatement,
                               @NotNull PsiExpression extract,
                               @NotNull PsiExpression leave,
                               @NotNull IElementType operation,
                               CommentTracker tracker) {
    PsiStatement thenBranch = ifStatement.getThenBranch();
    if (thenBranch == null) {
      return null;
    }
    PsiStatement elseBranch = ifStatement.getElseBranch();
    if (operation == JavaTokenType.OROR) {
      return createOrOr(factory, thenBranch, elseBranch, extract, leave, tracker);
    }
    if (operation == JavaTokenType.ANDAND) {
      return createAndAnd(factory, thenBranch, elseBranch, extract, leave, tracker);
    }
    return null;
  }

  /**
   * Builds {@code if (extract) { if (leave) thenBranch else <chain> } else <tail>} for an
   * {@code &&} split, absorbing any {@code else if} chain whose conditions start with the
   * same {@code extract} prefix so the prefix is hoisted only once.
   */
  @NotNull
  private static PsiIfStatement createAndAnd(@NotNull PsiElementFactory factory,
                                             @NotNull PsiStatement thenBranch,
                                             @Nullable PsiStatement elseBranch,
                                             @NotNull PsiExpression extract,
                                             @NotNull PsiExpression leave,
                                             CommentTracker tracker) {
    List<String> elseChain = new ArrayList<>();
    boolean chainFinished = false;
    while (!chainFinished) {
      PsiIfStatement nextIf = tryCast(ControlFlowUtils.stripBraces(elseBranch), PsiIfStatement.class);
      if (nextIf == null) break;
      PsiExpression nextCondition = PsiUtil.skipParenthesizedExprDown(nextIf.getCondition());
      if (nextCondition == null) break;
      if (PsiEquivalenceUtil.areElementsEquivalent(extract, nextCondition) && nextIf.getThenBranch() != null) {
        // The whole extracted condition matches: the branch moves under the hoisted `if` as-is.
        elseChain.add(tracker.text(nextIf.getThenBranch()));
        chainFinished = true;
      }
      else {
        if (!(nextCondition instanceof PsiPolyadicExpression)) break;
        PsiPolyadicExpression nextPolyadic = (PsiPolyadicExpression)nextCondition;
        if (!nextPolyadic.getOperationTokenType().equals(JavaTokenType.ANDAND)) break;
        PsiExpression[] nextOperands = nextPolyadic.getOperands();
        PsiExpression[] operands;
        if (extract instanceof PsiPolyadicExpression &&
            ((PsiPolyadicExpression)extract).getOperationTokenType().equals(JavaTokenType.ANDAND)) {
          operands = ((PsiPolyadicExpression)extract).getOperands();
        }
        else {
          operands = new PsiExpression[]{extract};
        }
        if (nextOperands.length <= operands.length) break;
        // BUGFIX: previously a mismatch here only broke out of this for-loop and then
        // fell through to the extraction below, merging an `else if` whose condition
        // does NOT start with the hoisted prefix. Track the mismatch and abort the
        // chain-absorption loop instead.
        boolean prefixMatches = true;
        for (int i = 0; i < operands.length; i++) {
          if (!PsiEquivalenceUtil.areElementsEquivalent(nextOperands[i], operands[i])) {
            prefixMatches = false;
            break;
          }
        }
        if (!prefixMatches) break;
        PsiExpression nextExtracted =
          getROperands(nextPolyadic, nextPolyadic.getTokenBeforeOperand(nextOperands[operands.length]), tracker);
        elseChain.add(createIfString(nextExtracted, nextIf.getThenBranch(), (PsiStatement)null, tracker));
      }
      elseBranch = nextIf.getElseBranch();
    }
    if (!chainFinished && elseBranch != null) {
      // Unabsorbed tail: keep it as the trailing `else` of the inner chain.
      elseChain.add(elseBranch.getText());
    }
    String thenString;
    if (elseChain.isEmpty()) {
      thenString = createIfString(leave, thenBranch, (String)null, tracker);
    }
    else {
      thenString = "{" + createIfString(leave, thenBranch, String.join("\nelse ", elseChain), tracker) + "\n}";
    }
    String ifString = createIfString(extract, thenString, elseBranch, tracker);
    return (PsiIfStatement)factory.createStatementFromText(ifString, thenBranch);
  }

  /**
   * Builds {@code if (extract) thenBranch else if (leave) thenBranch else elseBranch}
   * for an {@code ||} split (the then-branch is duplicated by design).
   */
  @NotNull
  private static PsiIfStatement createOrOr(@NotNull PsiElementFactory factory,
                                           @NotNull PsiStatement thenBranch,
                                           @Nullable PsiStatement elseBranch,
                                           @NotNull PsiExpression extract,
                                           @NotNull PsiExpression leave,
                                           CommentTracker tracker) {
    return (PsiIfStatement)factory.createStatementFromText(
      createIfString(extract, thenBranch, createIfString(leave, thenBranch, elseBranch, tracker), tracker),
      thenBranch);
  }

  @NotNull
  private static String createIfString(@NotNull PsiExpression condition,
                                       @NotNull PsiStatement thenBranch,
                                       @Nullable PsiStatement elseBranch,
                                       CommentTracker tracker) {
    // Strip redundant parentheses around the condition before printing it.
    PsiExpression stripped = PsiUtil.skipParenthesizedExprDown(condition);
    return createIfString(tracker.text(stripped == null ? condition : stripped),
                          toThenBranchString(tracker.markUnchanged(thenBranch)),
                          toElseBranchString(elseBranch != null ? tracker.markUnchanged(elseBranch) : null, false));
  }

  @NotNull
  private static String createIfString(@NotNull PsiExpression condition,
                                       @NotNull PsiStatement thenBranch,
                                       @Nullable String elseBranch,
                                       CommentTracker tracker) {
    PsiExpression stripped = PsiUtil.skipParenthesizedExprDown(condition);
    return createIfString(tracker.text(stripped == null ? condition : stripped),
                          toThenBranchString(tracker.markUnchanged(thenBranch)),
                          elseBranch);
  }

  @NotNull
  private static String createIfString(@NotNull PsiExpression condition,
                                       @NotNull String thenBranch,
                                       @Nullable PsiStatement elseBranch,
                                       CommentTracker tracker) {
    PsiExpression stripped = PsiUtil.skipParenthesizedExprDown(condition);
    return createIfString(tracker.text(stripped == null ? condition : stripped),
                          thenBranch,
                          toElseBranchString(elseBranch != null ? tracker.markUnchanged(elseBranch) : null, true));
  }

  @NotNull
  private static String createIfString(@NotNull String condition,
                                       @NotNull String thenBranch,
                                       @Nullable String elseBranch) {
    final String elsePart = elseBranch != null ? "\n else " + elseBranch : "";
    return "if (" + condition + ")\n" + thenBranch + elsePart;
  }

  /** Wraps a non-block then-branch in braces so the generated text parses unambiguously. */
  @NotNull
  private static String toThenBranchString(@NotNull PsiStatement statement) {
    if (!(statement instanceof PsiBlockStatement)) {
      return "{ " + statement.getText() + "\n }";
    }
    return statement.getText();
  }

  /**
   * Wraps a non-block else-branch in braces, except that an `else if` is kept bare
   * when {@code skipElse} is set (preserving `else if` chains).
   */
  @Nullable
  private static String toElseBranchString(@Nullable PsiStatement statement, boolean skipElse) {
    if (statement == null) {
      return null;
    }
    if (statement instanceof PsiBlockStatement || skipElse && statement instanceof PsiIfStatement) {
      return statement.getText();
    }
    return "{ " + statement.getText() + "\n }";
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.end2end;

import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.sql.Array;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Properties;

import org.apache.phoenix.schema.types.PhoenixArray;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.SchemaUtil;
import org.apache.phoenix.util.StringUtil;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * End-to-end tests for Phoenix ARRAY support: array column DDL/metadata, indexed
 * access in SELECT/WHERE/GROUP BY, ARRAY_LENGTH/ARRAY_ELEM, array comparison
 * operators, arrays in primary keys, array bind parameters, and the ARRAY[...]
 * constructor. Table fixtures come from the ArrayIT base class helpers
 * (createTableWithArray / initTablesWithArrays).
 */
@Category(ParallelStatsDisabledTest.class)
public class Array2IT extends ArrayIT {

    // Bind-parameter query used by the "array ref to literal" tests: selects element 2
    // of a bound array against an arbitrary single-row source table.
    private static final String TEST_QUERY = "select ?[2] from \"SYSTEM\".\"CATALOG\" limit 1";

    // CHAR(5) ARRAY column: verifies COLUMN_SIZE metadata and indexed read-back.
    @Test
    public void testFixedWidthCharArray() throws Exception {
        Connection conn;
        PreparedStatement stmt;
        ResultSet rs;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        conn = DriverManager.getConnection(getUrl(), props);
        String table = generateUniqueName();
        conn.createStatement().execute("CREATE TABLE " + table + "  ( k VARCHAR PRIMARY KEY, a CHAR(5) ARRAY)");
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        rs = conn.getMetaData().getColumns(null, null, table, "A");
        assertTrue(rs.next());
        assertEquals(5, rs.getInt("COLUMN_SIZE"));
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        stmt = conn.prepareStatement("UPSERT INTO " + table + " VALUES(?,?)");
        stmt.setString(1, "a");
        String[] s = new String[] {"1","2"};
        Array array = conn.createArrayOf("CHAR", s);
        stmt.setArray(2, array);
        stmt.execute();
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        rs = conn.createStatement().executeQuery("SELECT k, a[2] FROM " + table);
        assertTrue(rs.next());
        assertEquals("a",rs.getString(1));
        assertEquals("2",rs.getString(2));
        conn.close();
    }

    // Whole-array equality against a CAST(ARRAY[...] AS DOUBLE ARRAY) literal.
    @Test
    public void testSelectArrayUsingUpsertLikeSyntax() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        String query = "SELECT a_double_array FROM  " + table + " WHERE a_double_array = CAST(ARRAY [ 25.343, 36.763, 37.56,386.63] AS DOUBLE ARRAY)";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            Double[] doubleArr = new Double[4];
            doubleArr[0] = 25.343;
            doubleArr[1] = 36.763;
            doubleArr[2] = 37.56;
            doubleArr[3] = 386.63;
            Array array = conn.createArrayOf("DOUBLE", doubleArr);
            PhoenixArray resultArray = (PhoenixArray) rs.getArray(1);
            assertEquals(resultArray, array);
            assertEquals("[25.343, 36.763, 37.56, 386.63]", rs.getString(1));
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // Indexed array element compared against a bind parameter in the WHERE clause.
    // NOTE(review): a_index=0 concatenated with "2" yields index "02" on purpose here.
    @Test
    public void testArrayIndexUsedInWhereClause() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        int a_index = 0;
        String query = "SELECT a_double_array[2] FROM " + table + " where a_double_array["+a_index+"2]<?";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            Double[] doubleArr = new Double[1];
            doubleArr[0] = 40.0;
            conn.createArrayOf("DOUBLE", doubleArr);
            statement.setDouble(1, 40.0d);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            // Need to support primitive
            doubleArr = new Double[1];
            doubleArr[0] = 36.763;
            Double result = rs.getDouble(1);
            assertEquals(doubleArr[0], result);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // Indexed array element used as a GROUP BY key.
    @Test
    public void testArrayIndexUsedInGroupByClause() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        String query = "SELECT a_double_array[2] FROM " + table + " GROUP BY a_double_array[2]";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            Double[] doubleArr = new Double[1];
            doubleArr[0] = 40.0;
            conn.createArrayOf("DOUBLE", doubleArr);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            doubleArr = new Double[1];
            doubleArr[0] = 36.763;
            Double result = rs.getDouble(1);
            assertEquals(doubleArr[0], result);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // When the fixture is initialized with nulls, an indexed read returns SQL NULL.
    @Test
    public void testVariableLengthArrayWithNullValue() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, true, getUrl());
        String query = "SELECT a_string_array[2] FROM " + table;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            String[] strArr = new String[1];
            strArr[0] = "XYZWER";
            String result = rs.getString(1);
            assertNull(result);
        } finally {
            conn.close();
        }
    }

    // Array element projected before a scalar column in the same SELECT.
    @Test
    public void testSelectSpecificIndexOfAVariableArrayAlongWithAnotherColumn1() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        String query = "SELECT a_string_array[3],A_INTEGER FROM " + table;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            String[] strArr = new String[1];
            strArr[0] = "XYZWER";
            String result = rs.getString(1);
            assertEquals(strArr[0], result);
            int a_integer = rs.getInt(2);
            assertEquals(1, a_integer);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // Same as above with the projection order reversed (scalar first).
    @Test
    public void testSelectSpecificIndexOfAVariableArrayAlongWithAnotherColumn2() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        String query = "SELECT A_INTEGER, a_string_array[3] FROM " + table;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            String[] strArr = new String[1];
            strArr[0] = "XYZWER";
            int a_integer = rs.getInt(1);
            assertEquals(1, a_integer);
            String result = rs.getString(2);
            assertEquals(strArr[0], result);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // Elements of two different array columns projected in one query.
    @Test
    public void testSelectMultipleArrayColumns() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        String query = "SELECT a_string_array[3], a_double_array[2] FROM " + table;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            String[] strArr = new String[1];
            strArr[0] = "XYZWER";
            Double[] doubleArr = new Double[1];
            doubleArr[0] = 36.763d;
            Double a_double = rs.getDouble(2);
            assertEquals(doubleArr[0], a_double);
            String result = rs.getString(1);
            assertEquals(strArr[0], result);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // The same array column referenced several times with distinct indices.
    @Test
    public void testSelectSameArrayColumnMultipleTimesWithDifferentIndices() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        String query = "SELECT a_string_array[1], a_string_array[2], " +
                "a_string_array[3], a_double_array[1], a_double_array[2], a_double_array[3] " +
                "FROM " + table;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            assertEquals("ABC", rs.getString(1));
            assertEquals("CEDF", rs.getString(2));
            assertEquals("XYZWER", rs.getString(3));
            assertEquals(25.343, rs.getDouble(4), 0.0);
            assertEquals(36.763, rs.getDouble(5), 0.0);
            assertEquals(37.56, rs.getDouble(6), 0.0);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // The same array element projected twice in one SELECT.
    @Test
    public void testSelectSameArrayColumnMultipleTimesWithSameIndices() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        String query = "SELECT a_string_array[3], a_string_array[3] FROM " + table;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            String[] strArr = new String[1];
            strArr[0] = "XYZWER";
            String result = rs.getString(1);
            assertEquals(strArr[0], result);
            result = rs.getString(2);
            assertEquals(strArr[0], result);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // Basic single-element projection of a variable-length array.
    @Test
    public void testSelectSpecificIndexOfAVariableArray() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        String query = "SELECT a_string_array[3] FROM " + table;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            String[] strArr = new String[1];
            strArr[0] = "XYZWER";
            String result = rs.getString(1);
            assertEquals(strArr[0], result);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // Out-of-bounds index yields SQL NULL rather than an error.
    @Test
    public void testWithOutOfRangeIndex() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        String query = "SELECT a_double_array[100] FROM " + table;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            PhoenixArray resultArray = (PhoenixArray) rs.getArray(1);
            assertNull(resultArray);
        } finally {
            conn.close();
        }
    }

    // ARRAY_LENGTH over a variable-length (VARCHAR) array.
    @Test
    public void testArrayLengthFunctionForVariableLength() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        String query = "SELECT ARRAY_LENGTH(a_string_array) FROM " + table;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            int result = rs.getInt(1);
            assertEquals(result, 4);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // ARRAY_LENGTH over a fixed-width (DOUBLE) array.
    @Test
    public void testArrayLengthFunctionForFixedLength() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        initTablesWithArrays(table, tenantId, null, false, getUrl());
        String query = "SELECT ARRAY_LENGTH(a_double_array) FROM " + table;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            int result = rs.getInt(1);
            assertEquals(result, 4);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // Declared ARRAY[n] sizes round-trip through DatabaseMetaData.getColumns
    // (ARRAY_SIZE column); an unsized array reports SQL NULL.
    @Test
    public void testArraySizeRoundtrip() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            ResultSet rs = conn.getMetaData().getColumns(null, null, StringUtil.escapeLike(table), StringUtil.escapeLike(SchemaUtil.normalizeIdentifier("x_long_array")));
            assertTrue(rs.next());
            assertEquals(5, rs.getInt("ARRAY_SIZE"));
            assertFalse(rs.next());
            rs = conn.getMetaData().getColumns(null, null, StringUtil.escapeLike(table), StringUtil.escapeLike(SchemaUtil.normalizeIdentifier("a_string_array")));
            assertTrue(rs.next());
            assertEquals(3, rs.getInt("ARRAY_SIZE"));
            assertFalse(rs.next());
            rs = conn.getMetaData().getColumns(null, null, StringUtil.escapeLike(table), StringUtil.escapeLike(SchemaUtil.normalizeIdentifier("a_double_array")));
            assertTrue(rs.next());
            assertEquals(0, rs.getInt("ARRAY_SIZE"));
            assertTrue(rs.wasNull());
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // '=' comparison between two array columns holding identical values.
    @Test
    public void testVarLengthArrComparisonInWhereClauseWithSameArrays() throws Exception {
        Connection conn;
        PreparedStatement stmt;
        ResultSet rs;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        conn = DriverManager.getConnection(getUrl(), props);
        String table = generateUniqueName();
        conn.createStatement()
                .execute(
                        "CREATE TABLE " + table + " ( k VARCHAR PRIMARY KEY, a_string_array VARCHAR(100) ARRAY[4], b_string_array VARCHAR(100) ARRAY[4])");
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        stmt = conn.prepareStatement("UPSERT INTO " + table + " VALUES(?,?,?)");
        stmt.setString(1, "a");
        String[] s = new String[] {"abc","def", "ghi","jkl"};
        Array array = conn.createArrayOf("VARCHAR", s);
        stmt.setArray(2, array);
        s = new String[] {"abc","def", "ghi","jkl"};
        array = conn.createArrayOf("VARCHAR", s);
        stmt.setArray(3, array);
        stmt.execute();
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        rs = conn.createStatement().executeQuery("SELECT k, a_string_array[2] FROM " + table + "  where a_string_array=b_string_array");
        assertTrue(rs.next());
        assertEquals("a",rs.getString(1));
        assertEquals("def",rs.getString(2));
        conn.close();
    }

    // '<' comparison where the arrays differ only in the last element.
    @Test
    public void testVarLengthArrComparisonInWhereClauseWithDiffSizeArrays() throws Exception {
        Connection conn;
        PreparedStatement stmt;
        ResultSet rs;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        conn = DriverManager.getConnection(getUrl(), props);
        String table = generateUniqueName();
        conn.createStatement()
                .execute(
                        "CREATE TABLE  " + table + " ( k VARCHAR PRIMARY KEY, a_string_array VARCHAR(100) ARRAY[4], b_string_array VARCHAR(100) ARRAY[4])");
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        stmt = conn.prepareStatement("UPSERT INTO  " + table + " VALUES(?,?,?)");
        stmt.setString(1, "a");
        String[] s = new String[] { "abc", "def", "ghi", "jkll" };
        Array array = conn.createArrayOf("VARCHAR", s);
        stmt.setArray(2, array);
        s = new String[] { "abc", "def", "ghi", "jklm" };
        array = conn.createArrayOf("VARCHAR", s);
        stmt.setArray(3, array);
        stmt.execute();
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        rs = conn.createStatement().executeQuery(
                "SELECT k, a_string_array[2] FROM  " + table + "  where a_string_array<b_string_array");
        assertTrue(rs.next());
        assertEquals("a", rs.getString(1));
        assertEquals("def", rs.getString(2));
        conn.close();
    }

    // '>' comparison between arrays that contain embedded nulls.
    @Test
    public void testVarLengthArrComparisonWithNulls() throws Exception {
        Connection conn;
        PreparedStatement stmt;
        ResultSet rs;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        conn = DriverManager.getConnection(getUrl(), props);
        String table = generateUniqueName();
        conn.createStatement()
                .execute(
                        "CREATE TABLE  " + table + " ( k VARCHAR PRIMARY KEY, a_string_array VARCHAR(100) ARRAY[4], b_string_array VARCHAR(100) ARRAY[4])");
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        stmt = conn.prepareStatement("UPSERT INTO  " + table + " VALUES(?,?,?)");
        stmt.setString(1, "a");
        String[] s = new String[] { "abc", "def", "ghi", "jkll", null, null, "xxx" };
        Array array = conn.createArrayOf("VARCHAR", s);
        stmt.setArray(2, array);
        s = new String[] { "abc", "def", "ghi", "jkll", null, null, null, "xxx" };
        array = conn.createArrayOf("VARCHAR", s);
        stmt.setArray(3, array);
        stmt.execute();
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        rs = conn.createStatement().executeQuery(
                "SELECT k, a_string_array[2] FROM  " + table + "  where a_string_array>b_string_array");
        assertTrue(rs.next());
        assertEquals("a", rs.getString(1));
        assertEquals("def", rs.getString(2));
        conn.close();
    }

    // UPSERT of a literal NULL array; ARRAY_ELEM on it reads back as 0.0 via getDouble.
    @Test
    public void testUpsertValuesWithNull() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        String query = "upsert into " + table + " (ORGANIZATION_ID,ENTITY_ID,a_double_array) values('" + tenantId + "','00A123122312312',null)";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        // at
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            int executeUpdate = statement.executeUpdate();
            assertEquals(1, executeUpdate);
            conn.commit();
            statement.close();
            conn.close();
            // create another connection
            props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
            conn = DriverManager.getConnection(getUrl(), props);
            query = "SELECT ARRAY_ELEM(a_double_array,2) FROM " + table;
            statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            // Need to support primitive
            Double[] doubleArr = new Double[1];
            doubleArr[0] = 0.0d;
            conn.createArrayOf("DOUBLE", doubleArr);
            Double result = rs.getDouble(1);
            assertEquals(doubleArr[0], result);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // Same as above but binding the NULL array via setNull(Types.ARRAY).
    @Test
    public void testUpsertValuesWithNullUsingPreparedStmt() throws Exception {
        String tenantId = getOrganizationId();
        String table = createTableWithArray(getUrl(), getDefaultSplits(tenantId), null);
        String query = "upsert into " + table + " (ORGANIZATION_ID,ENTITY_ID,a_string_array) values(?, ?, ?)";
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        // at
        Connection conn = DriverManager.getConnection(getUrl(), props);
        try {
            PreparedStatement statement = conn.prepareStatement(query);
            statement.setString(1, tenantId);
            statement.setString(2, "00A123122312312");
            statement.setNull(3, Types.ARRAY);
            int executeUpdate = statement.executeUpdate();
            assertEquals(1, executeUpdate);
            conn.commit();
            statement.close();
            conn.close();
            // create another connection
            props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
            conn = DriverManager.getConnection(getUrl(), props);
            query = "SELECT ARRAY_ELEM(a_string_array,1) FROM " + table;
            statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();
            assertTrue(rs.next());
            String[] strArr = new String[1];
            strArr[0] = null;
            conn.createArrayOf("VARCHAR", strArr);
            String result = rs.getString(1);
            assertEquals(strArr[0], result);
            assertFalse(rs.next());
        } finally {
            conn.close();
        }
    }

    // Array column allowed as the trailing primary-key component; indexed in WHERE.
    @Test
    public void testPKWithArray() throws Exception {
        Connection conn;
        PreparedStatement stmt;
        ResultSet rs;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        conn = DriverManager.getConnection(getUrl(), props);
        String table = generateUniqueName();
        conn.createStatement()
                .execute(
                        "CREATE TABLE  " + table + " ( k VARCHAR, a_string_array VARCHAR(100) ARRAY[4], b_string_array VARCHAR(100) ARRAY[4] \n"
                                + " CONSTRAINT pk PRIMARY KEY (k, b_string_array)) \n");
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        stmt = conn.prepareStatement("UPSERT INTO  " + table + " VALUES(?,?,?)");
        stmt.setString(1, "a");
        String[] s = new String[] { "abc", "def", "ghi", "jkll", null, null, "xxx" };
        Array array = conn.createArrayOf("VARCHAR", s);
        stmt.setArray(2, array);
        s = new String[] { "abc", "def", "ghi", "jkll", null, null, null, "xxx" };
        array = conn.createArrayOf("VARCHAR", s);
        stmt.setArray(3, array);
        stmt.execute();
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        rs = conn.createStatement().executeQuery(
                "SELECT k, a_string_array[2] FROM  " + table + "  where b_string_array[8]='xxx'");
        assertTrue(rs.next());
        assertEquals("a", rs.getString(1));
        assertEquals("def", rs.getString(2));
        conn.close();
    }

    // Array column in a non-trailing PK position must be rejected by DDL.
    @Test
    public void testPKWithArrayNotInEnd() throws Exception {
        Connection conn;
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        conn = DriverManager.getConnection(getUrl(), props);
        String table = generateUniqueName();
        try {
            conn.createStatement().execute(
                    "CREATE TABLE " + table + " ( a_string_array VARCHAR(100) ARRAY[4], b_string_array VARCHAR(100) ARRAY[4], k VARCHAR  \n"
                            + " CONSTRAINT pk PRIMARY KEY (b_string_array, k))");
            conn.close();
            fail();
        } catch (SQLException e) {
            // expected: array key must be the last PK column
        } finally {
            if (conn != null) {
                conn.close();
            }
        }
    }

    // Bound CHAR array literal where every element has the same length.
    @Test
    public void testArrayRefToLiteralCharArraySameLengths() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
            PreparedStatement stmt = conn.prepareStatement(TEST_QUERY);
            // Test with each element of the char array having same lengths
            Array array = conn.createArrayOf("CHAR", new String[] {"a","b","c"});
            stmt.setArray(1, array);
            ResultSet rs = stmt.executeQuery();
            assertTrue(rs.next());
            assertEquals("b", rs.getString(1));
            assertFalse(rs.next());
        }
    }

    // Bound CHAR array literal with elements of differing lengths.
    @Test
    public void testArrayRefToLiteralCharArrayDiffLengths() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
            PreparedStatement stmt = conn.prepareStatement(TEST_QUERY);
            // Test with each element of the char array having different lengths
            Array array = conn.createArrayOf("CHAR", new String[] {"a","bb","ccc"});
            stmt.setArray(1, array);
            ResultSet rs = stmt.executeQuery();
            assertTrue(rs.next());
            assertEquals("bb", rs.getString(1));
            assertFalse(rs.next());
        }
    }

    // Bound BINARY array literal; elements are right-padded to the longest length.
    @Test
    public void testArrayRefToLiteralBinaryArray() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
            PreparedStatement stmt = conn.prepareStatement(TEST_QUERY);
            // Test with each element of the binary array having different lengths
            byte[][] bytes = {{0,0,1}, {0,0,2,0}, {0,0,0,3,4}};
            Array array = conn.createArrayOf("BINARY", bytes);
            stmt.setArray(1, array);
            ResultSet rs = stmt.executeQuery();
            assertTrue(rs.next());
            // Note that all elements are padded to be of the same length
            // as the longest element of the byte array
            assertArrayEquals(new byte[] {0,0,2,0,0}, rs.getBytes(1));
            assertFalse(rs.next());
        }
    }

    // COUNT(DISTINCT ARRAY[a,b]) over rows with one duplicate constructed array.
    @Test
    public void testArrayConstructorWithMultipleRows1() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        String table = generateUniqueName();
        String ddl = "CREATE TABLE " + table + " (region_name VARCHAR PRIMARY KEY, a INTEGER, b INTEGER)";
        conn.createStatement().execute(ddl);
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        PreparedStatement stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('a', 6,3)");
        stmt.execute();
        stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('b', 2,4)");
        stmt.execute();
        stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('c', 6,3)");
        stmt.execute();
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        ResultSet rs;
        rs = conn.createStatement().executeQuery("SELECT COUNT(DISTINCT ARRAY[a,b]) from " + table);
        assertTrue(rs.next());
        assertEquals(2, rs.getInt(1));
    }

    // ARRAY[a,b] constructor projected per row over INTEGER columns.
    @Test
    public void testArrayConstructorWithMultipleRows2() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        String table = generateUniqueName();
        String ddl = "CREATE TABLE " + table + " (region_name VARCHAR PRIMARY KEY, a INTEGER, b INTEGER)";
        conn.createStatement().execute(ddl);
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        PreparedStatement stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('a', 6,3)");
        stmt.execute();
        stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('b', 2,4)");
        stmt.execute();
        stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('c', 6,3)");
        stmt.execute();
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        ResultSet rs;
        rs = conn.createStatement().executeQuery("SELECT ARRAY[a,b] from " + table + " ");
        assertTrue(rs.next());
        Array arr = conn.createArrayOf("INTEGER", new Object[]{6, 3});
        assertEquals(arr, rs.getArray(1));
        rs.next();
        arr = conn.createArrayOf("INTEGER", new Object[]{2, 4});
        assertEquals(arr, rs.getArray(1));
        rs.next();
        arr = conn.createArrayOf("INTEGER", new Object[]{6, 3});
        assertEquals(arr, rs.getArray(1));
        rs.next();
    }

    // ARRAY[a,b] constructor projected per row over VARCHAR columns.
    @Test
    public void testArrayConstructorWithMultipleRows3() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        String table = generateUniqueName();
        String ddl = "CREATE TABLE " + table + " (region_name VARCHAR PRIMARY KEY, a VARCHAR, b VARCHAR)";
        conn.createStatement().execute(ddl);
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        PreparedStatement stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('a', 'foo', 'abc')");
        stmt.execute();
        stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('b', 'abc', 'dfg')");
        stmt.execute();
        stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('c', 'foo', 'abc')");
        stmt.execute();
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        ResultSet rs;
        rs = conn.createStatement().executeQuery("SELECT ARRAY[a,b] from " + table + " ");
        assertTrue(rs.next());
        Array arr = conn.createArrayOf("VARCHAR", new Object[]{"foo", "abc"});
        assertEquals(arr, rs.getArray(1));
        rs.next();
        arr = conn.createArrayOf("VARCHAR", new Object[]{"abc", "dfg"});
        assertEquals(arr, rs.getArray(1));
        rs.next();
        arr = conn.createArrayOf("VARCHAR", new Object[]{"foo", "abc"});
        assertEquals(arr, rs.getArray(1));
        rs.next();
    }

    // COUNT(DISTINCT ARRAY[a,b]) over VARCHAR columns with one duplicate array.
    @Test
    public void testArrayConstructorWithMultipleRows4() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        Connection conn = DriverManager.getConnection(getUrl(), props);
        String table = generateUniqueName();
        String ddl = "CREATE TABLE " + table + " (region_name VARCHAR PRIMARY KEY, a VARCHAR, b VARCHAR)";
        conn.createStatement().execute(ddl);
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        PreparedStatement stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('a', 'foo', 'abc')");
        stmt.execute();
        stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('b', 'abc', 'dfg')");
        stmt.execute();
        stmt = conn.prepareStatement("UPSERT INTO " + table + " (region_name, a, b) VALUES('c', 'foo', 'abc')");
        stmt.execute();
        conn.commit();
        conn.close();
        conn = DriverManager.getConnection(getUrl(), props);
        ResultSet rs;
        rs = conn.createStatement().executeQuery("SELECT COUNT(DISTINCT ARRAY[a,b]) from " + table);
        assertTrue(rs.next());
        assertEquals(2, rs.getInt(1));
    }
}
/*
 * Copyright (c) 2008-2015 JPerf
 * All rights reserved.  http://www.jperf.net
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.jperf.chart;

import net.jperf.TimingStatistics;
import net.jperf.helpers.StatsValueRetriever;
import net.jperf.GroupedTimingStatistics;

import java.util.*;
import java.net.URLEncoder;
import java.io.UnsupportedEncodingException;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.text.DecimalFormatSymbols;

/**
 * This implementation of StatisticsChartGenerator creates a chart URL in the format expected by the Google Chart API.
 *
 * @see <a href="http://code.google.com/apis/chart/">Google Chart API</a>
 * @author Alex Devine
 */
public class GoogleChartGenerator implements StatisticsChartGenerator {
    /**
     * The DEFAULT_BASE_URL points to Google's charting server at chart.apis.google.com.
     */
    public static final String DEFAULT_BASE_URL = "http://chart.apis.google.com/chart?";

    /**
     * The maximum supported chart size is 300,000 pixels per the Google Chart API.
     */
    public static final int MAX_POSSIBLE_CHART_SIZE = 300000;

    /**
     * The default chart width is 750 pixels.
     */
    public static final int DEFAULT_CHART_WIDTH = 750;

    /**
     * The default chart height is 400 pixels.
     */
    public static final int DEFAULT_CHART_HEIGHT = 400;

    /**
     * The default hex color codes used for the individual data series displayed on the chart.
     */
    public static final String[] DEFAULT_SERIES_COLORS = {
            "ff0000", //red
            "00ff00", //green
            "0000ff", //blue
            "00ffff", //cyan
            "ff00ff", //magenta
            "ffff00", //yellow
            "000000", //black
            "d2b48c", //tan
            "ffa500", //orange
            "a020f0"  //purple
    };

    /** Extracts the value (mean, min, max, TPS, ...) to plot from each TimingStatistics. */
    private StatsValueRetriever valueRetriever;
    /** Base URL of the chart server; the generated parameters are appended to this. */
    private String baseUrl;
    /** Sliding window of the most recent data points, oldest first. */
    private LinkedList<GroupedTimingStatistics> data = new LinkedList<GroupedTimingStatistics>();
    private int width = DEFAULT_CHART_WIDTH;
    private int height = DEFAULT_CHART_HEIGHT;
    // DEFAULT_MAX_DATA_POINTS is inherited from the StatisticsChartGenerator interface.
    private int maxDataPoints = DEFAULT_MAX_DATA_POINTS;
    /** Tags to display; null means "display every tag found in the data". */
    private Set<String> enabledTags = null;

    // --- Constructors ---

    /**
     * Default constructor creates a chart that displays mean execution values and uses the default Google Chart URL.
     */
    public GoogleChartGenerator() {
        this(StatsValueRetriever.MEAN_VALUE_RETRIEVER, DEFAULT_BASE_URL);
    }

    /**
     * Creates a chart that uses the specified StatsValueRetriever to determine which values from the
     * TimingStatistic object to display. For example, a chart could be used to display mean values, transactions
     * per second, etc.
     *
     * @param statsValueRetriever The StatsPerTagDataValueExtractor that determines which value to display.
     */
    public GoogleChartGenerator(StatsValueRetriever statsValueRetriever) {
        this(statsValueRetriever, DEFAULT_BASE_URL);
    }

    /**
     * Creates a chart that uses the specified StatsValueRetriever to determine which values from the
     * StatsPerTag object to display, and also allows the base chart URL to be overridden from the Google default.
     *
     * @param valueRetriever Determines which value (such as mean/min/max/etc) from the TimingStatistic to display on
     *                       the chart
     * @param baseUrl        A value to override for the default base URL of "http://chart.apis.google.com/chart?"
     */
    public GoogleChartGenerator(StatsValueRetriever valueRetriever, String baseUrl) {
        this.valueRetriever = valueRetriever;
        this.baseUrl = baseUrl;
    }

    // --- Bean properties ---

    /**
     * Gets the width of the chart that will be displayed
     *
     * @return The width of the chart in pixels, defaults to 750.
     */
    public int getWidth() {
        return width;
    }

    /**
     * Sets the width of the chart in pixels. Note that the Google Charting API currently only supports a maximum
     * of 300,000 pixels for display, so width X height must be less than 300,000.
     *
     * @param width the width of the chart in pixels.
     */
    public void setWidth(int width) {
        this.width = width;
    }

    /**
     * Gets the height of the chart that will be displayed
     *
     * @return The height of the chart in pixels, defaults to 400.
     */
    public int getHeight() {
        return height;
    }

    /**
     * Sets the height of the chart in pixels. Note that the Google Charting API currently only supports a maximum
     * of 300,000 pixels for display, so width X height must be less than 300,000.
     *
     * @param height the height of the chart in pixels.
     */
    public void setHeight(int height) {
        this.height = height;
    }

    /**
     * Gets the set of tag names for which values will be displayed on the chart. Each tag is represented as a
     * separate series on the chart.
     *
     * @return The set of enabled tag names, or null if ALL tags found in the GroupedTimingStatistics data will be
     *         displayed.
     */
    public Set<String> getEnabledTags() {
        return enabledTags;
    }

    /**
     * Sets the set of tag names for which values will be displayed on the chart.
     *
     * @param enabledTags The set of enabled tag names. If this method is not called, or if enabledTags is null,
     *                    then ALL tags from the GroupedTimingStatistics data will be displayed on the chart.
     */
    public void setEnabledTags(Set<String> enabledTags) {
        this.enabledTags = enabledTags;
    }

    /**
     * Gets the maximum number of data points to display on a chart. If <tt>appendData</tt> is called more than
     * this number of times, then only the last maxDataPoints data items will be shown in any generated charts.
     *
     * @return the maximum number of data points that will be displayed
     */
    public int getMaxDataPoints() {
        return maxDataPoints;
    }

    /**
     * Sets the maximum number of data points to display on a chart.
     *
     * @param maxDataPoints The maximum number of data points.
     */
    public void setMaxDataPoints(int maxDataPoints) {
        this.maxDataPoints = maxDataPoints;
    }

    // --- Data methods ---

    /**
     * Returns a read-only view of the data window backing this generator.
     *
     * @return an unmodifiable view of the appended GroupedTimingStatistics, oldest first.
     */
    public List<GroupedTimingStatistics> getData() {
        return Collections.unmodifiableList(this.data);
    }

    /**
     * Appends a statistics window to the data set, evicting the oldest entry once
     * maxDataPoints entries are held.
     *
     * @param statistics the window of timing data to add.
     */
    public synchronized void appendData(GroupedTimingStatistics statistics) {
        if (this.data.size() >= this.maxDataPoints) {
            this.data.removeFirst();
        }
        this.data.add(statistics);
    }

    /**
     * Builds the full Google Chart URL for the currently held data.
     *
     * @return the chart URL.
     * @throws IllegalArgumentException if width * height is non-positive or exceeds
     *                                  MAX_POSSIBLE_CHART_SIZE.
     */
    public synchronized String getChartUrl() {
        if (width * height > MAX_POSSIBLE_CHART_SIZE || width * height <= 0) {
            throw new IllegalArgumentException("The chart size must be between 0 and " + MAX_POSSIBLE_CHART_SIZE
                                               + " pixels. Current size is " + width + " x " + height);
        }

        StringBuilder retVal = new StringBuilder(baseUrl);

        //we use an x/y chart
        retVal.append("cht=lxy");

        //set the size and title
        retVal.append("&chtt=").append(encodeUrl(valueRetriever.getValueName()));
        retVal.append("&chs=").append(width).append("x").append(height);

        //specify the axes that will have labels
        retVal.append("&chxt=x,x,y");

        //convert the data to google chart params
        retVal.append(generateGoogleChartParams());

        return retVal.toString();
    }

    // --- helper methods ---

    /**
     * Helper method takes the list of data values and converts them to a String suitable for appending to a Google
     * Chart URL.
     *
     * @return the chart parameters that encode all of the data necessary to display the chart.
     */
    @SuppressWarnings("unchecked")
    protected String generateGoogleChartParams() {
        long minTimeValue = Long.MAX_VALUE;
        long maxTimeValue = Long.MIN_VALUE;
        // FIX: was Double.MIN_VALUE, which is the smallest POSITIVE double and therefore
        // never updated correctly when every series value is <= 0. Negative infinity is
        // the correct identity for a running maximum.
        double maxDataValue = Double.NEGATIVE_INFINITY;
        //this map stores all the data series. The key is the tag name (each tag represents a single series) and the
        //value contains two lists of numbers - the first list contains the X values for each point (which is time in
        //milliseconds) and the second list contains the y values, which are the data values pulled from dataWindows.
        Map<String, List<Number>[]> tagsToXDataAndYData = new TreeMap<String, List<Number>[]>();

        for (GroupedTimingStatistics groupedTimingStatistics : data) {
            Map<String, TimingStatistics> statsByTag = groupedTimingStatistics.getStatisticsByTag();
            long windowStartTime = groupedTimingStatistics.getStartTime();
            long windowLength = groupedTimingStatistics.getWindowLength();

            //keep track of the min/max time value, this is needed for scaling the chart parameters
            minTimeValue = Math.min(minTimeValue, windowStartTime);
            maxTimeValue = Math.max(maxTimeValue, windowStartTime);

            for (Map.Entry<String, TimingStatistics> tagWithData : statsByTag.entrySet()) {
                String tag = tagWithData.getKey();
                if (this.enabledTags == null || this.enabledTags.contains(tag)) {
                    //get the corresponding value from tagsToXDataAndYData
                    List<Number>[] xAndYData = tagsToXDataAndYData.get(tagWithData.getKey());
                    if (xAndYData == null) {
                        tagsToXDataAndYData.put(tag,
                                                xAndYData = new List[]{new ArrayList<Number>(),
                                                                       new ArrayList<Number>()});
                    }

                    //the x data is the start time of the window, the y data is the value
                    Number yValue = this.valueRetriever.getStatsValue(tagWithData.getValue(), windowLength);
                    xAndYData[0].add(windowStartTime);
                    xAndYData[1].add(yValue);

                    //update the max data value, which is needed for scaling
                    maxDataValue = Math.max(maxDataValue, yValue.doubleValue());
                }
            }
        }

        //if it's empty, there's nothing to display
        if (tagsToXDataAndYData.isEmpty()) {
            return "";
        }

        //set up the axis labels - we use the US decimal format locale to ensure the decimal separator is . and not ,
        DecimalFormat decimalFormat = new DecimalFormat("##0.0", new DecimalFormatSymbols(Locale.US));
        SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
        dateFormat.setTimeZone(GroupedTimingStatistics.getTimeZone());

        //the y-axis label goes from 0 to the maximum data value
        String axisRangeParam = "&chxr=2,0," + decimalFormat.format(maxDataValue);

        //for the x-axis (time) labels, ideally we want one label for each data window, but support a maximum of 10
        //labels so the chart doesn't get too crowded
        int stepSize = this.data.size() / 10 + 1;
        StringBuilder timeAxisLabels = new StringBuilder("&chxl=0:");
        StringBuilder timeAxisLabelPositions = new StringBuilder("&chxp=0");
        long timeRange = maxTimeValue - minTimeValue;
        for (Iterator<GroupedTimingStatistics> iter = data.iterator(); iter.hasNext();) {
            GroupedTimingStatistics groupedTimingStatistics = iter.next();
            long windowStartTime = groupedTimingStatistics.getStartTime();
            String label = dateFormat.format(new Date(windowStartTime));
            // FIX: guard against a zero time range (a single data window) which previously
            // produced 0/0 = NaN in the label position.
            double position = timeRange == 0
                              ? 0.0
                              : 100.0 * (windowStartTime - minTimeValue) / timeRange;
            timeAxisLabels.append("|").append(label);
            timeAxisLabelPositions.append(",").append(decimalFormat.format(position));
            //skip over some windows if stepSize is greater than 1
            for (int i = 1; i < stepSize && iter.hasNext(); i++) {
                iter.next();
            }
        }

        //this next line appends a "Time" label in the middle of the bottom of the X axis
        timeAxisLabels.append("|1:|Time");
        timeAxisLabelPositions.append("|1,50");

        //display the gridlines
        double xAxisGridlineStepSize = this.data.size() > 2 ? 100.0 / (this.data.size() - 1) : 50.0;
        String gridlinesParam = "&chg=" + decimalFormat.format(xAxisGridlineStepSize) + ",10";

        //at this point we should be able to normalize the data to 0 - 100 as required by the google chart API
        StringBuilder chartDataParam = new StringBuilder("&chd=t:");
        StringBuilder chartColorsParam = new StringBuilder("&chco=");
        StringBuilder chartShapeMarkerParam = new StringBuilder("&chm=");
        StringBuilder chartLegendParam = new StringBuilder("&chdl=");

        //this loop is run once for each tag, i.e. each data series to be displayed on the chart
        int i = 0;
        for (Iterator<Map.Entry<String, List<Number>[]>> iter = tagsToXDataAndYData.entrySet().iterator();
             iter.hasNext();
             i++) {
            Map.Entry<String, List<Number>[]> tagWithXAndYData = iter.next();

            //data param
            List<Number> xValues = tagWithXAndYData.getValue()[0];
            chartDataParam.append(numberValuesToGoogleDataSeriesParam(xValues, minTimeValue, maxTimeValue));
            chartDataParam.append("|");
            List<Number> yValues = tagWithXAndYData.getValue()[1];
            chartDataParam.append(numberValuesToGoogleDataSeriesParam(yValues, 0, maxDataValue));

            //color param
            String color = DEFAULT_SERIES_COLORS[i % DEFAULT_SERIES_COLORS.length];
            chartColorsParam.append(color);

            //the shape marker param puts a diamond (the d) at each data point (the -1) of size 5 pixels.
            chartShapeMarkerParam.append("d,").append(color).append(",").append(i).append(",-1,5.0");

            //legend param
            chartLegendParam.append(tagWithXAndYData.getKey());

            if (iter.hasNext()) {
                chartDataParam.append("|");
                chartColorsParam.append(",");
                chartShapeMarkerParam.append("|");
                chartLegendParam.append("|");
            }
        }

        return chartDataParam.toString()
               + chartColorsParam
               + chartShapeMarkerParam
               + chartLegendParam
               + axisRangeParam
               + timeAxisLabels
               + timeAxisLabelPositions
               + gridlinesParam;
    }

    /**
     * This helper method is used to normalize a list of data values from 0 - 100 as required by the Google Chart
     * Data API, and from this data it constructs the series data URL param.
     *
     * @param values           the values to be normalized
     * @param minPossibleValue the minimum possible value for the values
     * @param maxPossibleValue the maximum possible value for the values
     * @return A Google Chart API data series using normal text encoding (see the Chart API docs)
     */
    protected String numberValuesToGoogleDataSeriesParam(List<Number> values,
                                                         double minPossibleValue,
                                                         double maxPossibleValue) {
        StringBuilder retVal = new StringBuilder();

        double valueRange = maxPossibleValue - minPossibleValue;
        DecimalFormat formatter = new DecimalFormat("##0.0", new DecimalFormatSymbols(Locale.US));

        for (Iterator<Number> iter = values.iterator(); iter.hasNext();) {
            Number value = iter.next();
            // FIX: a degenerate range (all values equal) previously divided by zero and
            // emitted "NaN"; pin such points to 0 instead.
            double normalizedNumber = valueRange == 0
                                      ? 0.0
                                      : 100.0 * (value.doubleValue() - minPossibleValue) / valueRange;
            retVal.append(formatter.format(normalizedNumber));
            if (iter.hasNext()) {
                retVal.append(",");
            }
        }

        return retVal.toString();
    }

    /**
     * Helper method encodes a string use as a URL parameter value.
     *
     * @param string the string to encode
     * @return the encoded string
     */
    protected String encodeUrl(String string) {
        try {
            return URLEncoder.encode(string, "UTF-8");
        } catch (UnsupportedEncodingException uee) {
            //can't happen; UTF-8 is a mandatory charset on every JVM
            return string;
        }
    }
}
/*
 * Copyright 2003 - 2018 The eFaps Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.efaps.esjp.admin.access;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;

import org.efaps.admin.access.AccessType;
import org.efaps.admin.access.AccessTypeEnums;
import org.efaps.admin.datamodel.Attribute;
import org.efaps.admin.datamodel.Type;
import org.efaps.admin.event.Parameter;
import org.efaps.admin.event.Parameter.ParameterValues;
import org.efaps.admin.program.esjp.EFapsApplication;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.db.Context;
import org.efaps.db.Instance;
import org.efaps.db.transaction.ConnectionResource;
import org.efaps.util.EFapsException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Access check that follows a link ("middle") type from the checked instance
 * to a target object and delegates the actual decision to
 * {@link SimpleAccessCheckOnType} once READ access on the linked object is
 * confirmed. The link type and its from/to attributes are configured through
 * the event properties {@code MiddleTypeUUID}, {@code FromAttribute} and
 * {@code ToAttribute}.
 *
 * @author The eFaps Team
 */
@EFapsUUID("ef7dcf84-99ec-4caf-8422-513bfb7eab39")
@EFapsApplication("eFaps-Kernel")
public abstract class Linked2ObjectAccessCheck_Base
    extends AbstractAccessCheck
{

    /**
     * Logging instance used in this class.
     */
    private static final Logger LOG = LoggerFactory.getLogger(Linked2ObjectAccessCheck.class);

    /**
     * {@inheritDoc}
     */
    @Override
    protected boolean checkAccess(final Parameter _parameter,
                                  final Instance _instance,
                                  final AccessType _accessType)
        throws EFapsException
    {
        boolean ret = false;
        // for create the accesscheck is send directly to SimpleAccessCheckOnType
        if (AccessTypeEnums.CREATE.getAccessType().equals(_accessType)) {
            final SimpleAccessCheckOnType accessCheck = new SimpleAccessCheckOnType();
            ret = accessCheck.checkAccess(_parameter, _instance, _accessType);
        } else {
            // resolve the configured middle type and its from/to attributes
            final Map<?, ?> properties = (Map<?, ?>) _parameter.get(ParameterValues.PROPERTIES);
            final String middleTypeUUID = (String) properties.get("MiddleTypeUUID");
            final String fromAttributeName = (String) properties.get("FromAttribute");
            final String toAttributeName = (String) properties.get("ToAttribute");
            final Type type = Type.get(UUID.fromString(middleTypeUUID));
            final Attribute fromAttribute = type.getAttribute(fromAttributeName);
            final Attribute toAttribute = type.getAttribute(toAttributeName);

            // select the id of the linked-to object for the checked instance;
            // ids are numeric (Instance.getId()/Type.getId()), so concatenation
            // here cannot inject arbitrary SQL
            final StringBuilder cmd = new StringBuilder();
            cmd.append("select ").append(toAttribute.getSqlColNames().get(0))
                .append(" from ").append(type.getMainTable().getSqlTable())
                .append(" where ").append(fromAttribute.getSqlColNames().get(0)).append("=").append(_instance.getId());
            if (type.getMainTable().getSqlColType() != null) {
                // the table stores several types: restrict to the middle type
                cmd.append(" and ").append(type.getMainTable().getSqlColType()).append("=").append(type.getId());
            }

            long id = 0;
            try {
                final ConnectionResource con = Context.getThreadContext().getConnectionResource();
                Linked2ObjectAccessCheck_Base.LOG.debug("Checking access with: {}", cmd);
                // try-with-resources replaces the former manual finally-close and
                // also closes the ResultSet on the exception path
                try (Statement stmt = con.createStatement();
                     ResultSet rs = stmt.executeQuery(cmd.toString())) {
                    if (rs.next()) {
                        id = rs.getLong(1);
                    }
                }
            } catch (final SQLException e) {
                // best effort: an unexecutable statement is logged and access stays denied
                Linked2ObjectAccessCheck_Base.LOG.error("sql statement '" + cmd.toString() + "' not executable!", e);
            }

            if (id > 0) {
                final Instance instance = Instance.get(toAttribute.getLink(), id);
                // only if the linked object itself is readable is the simple
                // type-based check applied to the original instance
                if (instance.getType().hasAccess(instance, AccessTypeEnums.READ.getAccessType())) {
                    final SimpleAccessCheckOnType accessCheck = new SimpleAccessCheckOnType();
                    ret = accessCheck.checkAccess(_parameter, _instance, _accessType);
                }
            }
        }
        return ret;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected Map<Instance, Boolean> checkAccess(final Parameter _parameter,
                                                 final Collection<Instance> _instances,
                                                 final AccessType _accessType)
        throws EFapsException
    {
        final Map<Instance, Boolean> ret = new HashMap<>();
        // resolve the configured middle type and its from/to attributes
        final Map<?, ?> properties = (Map<?, ?>) _parameter.get(ParameterValues.PROPERTIES);
        final String middleTypeUUID = (String) properties.get("MiddleTypeUUID");
        final String fromAttributeName = (String) properties.get("FromAttribute");
        final String toAttributeName = (String) properties.get("ToAttribute");
        final Type type = Type.get(UUID.fromString(middleTypeUUID));
        final Attribute fromAttribute = type.getAttribute(fromAttributeName);
        final Attribute toAttribute = type.getAttribute(toAttributeName);

        // one query for all instances: select (fromId, toId) pairs for every
        // checked instance id; the trailing "0" closes the IN list safely even
        // when _instances is empty
        final StringBuilder cmd = new StringBuilder();
        cmd.append("select ").append(fromAttribute.getSqlColNames().get(0))
            .append(", ").append(toAttribute.getSqlColNames().get(0))
            .append(" from ").append(type.getMainTable().getSqlTable())
            .append(" where ");
        if (type.getMainTable().getSqlColType() != null) {
            cmd.append(type.getMainTable().getSqlColType()).append("=").append(type.getId()).append(" and ");
        }
        cmd.append(fromAttribute.getSqlColNames().get(0)).append(" in (");
        for (final Object instObj : _instances) {
            cmd.append(((Instance) instObj).getId()).append(",");
        }
        cmd.append("0)");

        // linked-to id -> set of from-ids that point at it
        final Map<Long, Set<Long>> relMap = new HashMap<>();
        try {
            final ConnectionResource con = Context.getThreadContext().getConnectionResource();
            Linked2ObjectAccessCheck_Base.LOG.debug("Checking access with: {}", cmd);
            // try-with-resources replaces the former manual finally-close and
            // also closes the ResultSet on the exception path
            try (Statement stmt = con.createStatement();
                 ResultSet rs = stmt.executeQuery(cmd.toString())) {
                while (rs.next()) {
                    final long fromId = rs.getLong(1);
                    final long toId = rs.getLong(2);
                    final Set<Long> froms;
                    if (relMap.containsKey(toId)) {
                        froms = relMap.get(toId);
                    } else {
                        froms = new HashSet<>();
                        relMap.put(toId, froms);
                    }
                    froms.add(fromId);
                }
            }
        } catch (final SQLException e) {
            // best effort: an unexecutable statement is logged; unreached
            // instances simply stay absent from the result map
            Linked2ObjectAccessCheck_Base.LOG.error("sql statement '" + cmd.toString() + "' not executable!", e);
        }

        final SimpleAccessCheckOnType accessCheck = new SimpleAccessCheckOnType();
        for (final Entry<Long, Set<Long>> entry : relMap.entrySet()) {
            final Instance instance = Instance.get(toAttribute.getLink(), entry.getKey());
            // collect the checked instances that link to this target object
            final List<Instance> tmpInsts = new ArrayList<>();
            for (final Object instObj : _instances) {
                if (entry.getValue().contains(((Instance) instObj).getId())) {
                    tmpInsts.add((Instance) instObj);
                }
            }
            if (instance.getType().hasAccess(instance, AccessTypeEnums.READ.getAccessType())) {
                // readable target: delegate the per-instance decision
                ret.putAll(accessCheck.checkAccess(_parameter, tmpInsts, _accessType));
            } else {
                // unreadable target: deny every instance linked to it
                for (final Instance instTmp : tmpInsts) {
                    ret.put(instTmp, false);
                }
            }
        }
        return ret;
    }
}
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.project;

import java.io.File;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import azkaban.flow.Flow;
import azkaban.project.ProjectLogEvent.EventType;
import azkaban.user.Permission;
import azkaban.user.User;
import azkaban.utils.Props;
import azkaban.utils.Triple;

/**
 * Data-store abstraction for persisting and retrieving Azkaban projects,
 * including their permissions, uploaded files, flows, properties and event
 * logs.
 */
public interface ProjectLoader {

  /**
   * Returns all projects which are active
   *
   * @return the list of active projects
   * @throws ProjectManagerException if the projects cannot be loaded
   */
  public List<Project> fetchAllActiveProjects() throws ProjectManagerException;

  /**
   * Loads whole project, including permissions, by the project id.
   *
   * @param id the project id
   * @return the project with the given id
   * @throws ProjectManagerException if the project cannot be loaded
   */
  public Project fetchProjectById(int id) throws ProjectManagerException;

  /**
   * Should create an empty project with the given name and user and adds it to
   * the data store. It will auto assign a unique id for this project if
   * successful.
   *
   * If an active project of the same name exists, it will throw an exception.
   * If the name and description of the project exceeds the store's constraints,
   * it will throw an exception.
   *
   * @param name the name of the new project
   * @return the newly created project with its assigned id
   * @throws ProjectManagerException if an active project of the same name
   *           exists.
   */
  public Project createNewProject(String name, String description, User creator)
      throws ProjectManagerException;

  /**
   * Removes the project by marking it inactive.
   *
   * @param project the project to remove
   * @throws ProjectManagerException if the removal fails
   */
  public void removeProject(Project project, String user)
      throws ProjectManagerException;

  /**
   * Adds and updates the user permissions. Does not check if the user is valid.
   * If the permission doesn't exist, it adds. If the permission exists, it
   * updates.
   *
   * @param project the project whose permissions are modified
   * @param name the user or group name
   * @param perm the permission to set
   * @param isGroup whether name refers to a group rather than a user
   * @throws ProjectManagerException if the update fails
   */
  public void updatePermission(Project project, String name, Permission perm,
      boolean isGroup) throws ProjectManagerException;

  public void removePermission(Project project, String name, boolean isGroup)
      throws ProjectManagerException;

  /**
   * Modifies and commits the project description.
   *
   * @param project the project to update
   * @param description the new description
   * @throws ProjectManagerException if the update fails
   */
  public void updateDescription(Project project, String description, String user)
      throws ProjectManagerException;

  /**
   * Stores logs for a particular project. Will soft fail rather than throw
   * exception.
   *
   * @param project the project the event belongs to
   * @param type the event type
   * @param message the log message
   * @return true if the posting was a success.
   */
  public boolean postEvent(Project project, EventType type, String user,
      String message);

  /**
   * Returns all the events for a project sorted
   *
   * @param project the project whose events are fetched
   * @return the events, at most num entries starting after skip
   */
  public List<ProjectLogEvent> getProjectEvents(Project project, int num,
      int skip) throws ProjectManagerException;

  /**
   * Will upload the files for the given project version.
   * (NOTE(review): the historical javadoc claimed this returns the uploaded
   * version number, but the method is void — the version is passed in.)
   */
  public void uploadProjectFile(Project project, int version, String filetype,
      String filename, File localFile, String user)
      throws ProjectManagerException;

  /**
   * Get file that's uploaded.
   *
   * @return the handler for the uploaded project file
   */
  public ProjectFileHandler getUploadedFile(Project project, int version)
      throws ProjectManagerException;

  /**
   * Get file that's uploaded.
   *
   * @return the handler for the uploaded project file
   */
  public ProjectFileHandler getUploadedFile(int projectId, int version)
      throws ProjectManagerException;

  /**
   * Changes and commits different project version.
   *
   * @param project the project to update
   * @param version the version to switch to
   * @throws ProjectManagerException if the change fails
   */
  public void changeProjectVersion(Project project, int version, String user)
      throws ProjectManagerException;

  public void updateFlow(Project project, int version, Flow flow)
      throws ProjectManagerException;

  /**
   * Uploads all computed flows
   *
   * @param project the project the flows belong to
   * @param version the project version
   * @param flows the flows to upload
   * @throws ProjectManagerException if the upload fails
   */
  public void uploadFlows(Project project, int version, Collection<Flow> flows)
      throws ProjectManagerException;

  /**
   * Upload just one flow.
   *
   * @param project the project the flow belongs to
   * @param version the project version
   * @param flow the flow to upload
   * @throws ProjectManagerException if the upload fails
   */
  public void uploadFlow(Project project, int version, Flow flow)
      throws ProjectManagerException;

  /**
   * Fetches one particular flow.
   *
   * @param project the project the flow belongs to
   * @param flowId the id of the flow to fetch
   * @throws ProjectManagerException if the fetch fails
   */
  public Flow fetchFlow(Project project, String flowId)
      throws ProjectManagerException;

  /**
   * Fetches all flows.
   *
   * @param project the project whose flows are fetched
   * @throws ProjectManagerException if the fetch fails
   */
  public List<Flow> fetchAllProjectFlows(Project project)
      throws ProjectManagerException;

  /**
   * Gets the latest upload version.
   */
  public int getLatestProjectVersion(Project project)
      throws ProjectManagerException;

  /**
   * Upload Project properties
   *
   * @param project the project the properties belong to
   * @param props the properties to upload
   * @throws ProjectManagerException if the upload fails
   */
  public void uploadProjectProperty(Project project, Props props)
      throws ProjectManagerException;

  /**
   * Upload Project properties. Map contains key value of path and properties
   *
   * @param project the project the properties belong to
   * @param properties the list of properties to upload
   * @throws ProjectManagerException if the upload fails
   */
  public void uploadProjectProperties(Project project, List<Props> properties)
      throws ProjectManagerException;

  /**
   * Fetch project properties
   *
   * @param project the project whose property is fetched
   * @param propsName the name of the property set
   * @return the properties, as stored
   * @throws ProjectManagerException if the fetch fails
   */
  public Props fetchProjectProperty(Project project, String propsName)
      throws ProjectManagerException;

  /**
   * Fetch all project properties
   *
   * @param projectId the id of the project
   * @return map from property path/name to the stored properties
   * @throws ProjectManagerException if the fetch fails
   */
  public Map<String, Props> fetchProjectProperties(int projectId, int version)
      throws ProjectManagerException;

  /**
   * Cleans all project versions less than the given version.
   *
   * @param projectId the id of the project to clean
   * @param version versions strictly below this one are removed
   * @throws ProjectManagerException if the cleanup fails
   */
  public void cleanOlderProjectVersion(int projectId, int version)
      throws ProjectManagerException;

  public void updateProjectProperty(Project project, Props props)
      throws ProjectManagerException;

  Props fetchProjectProperty(int projectId, int projectVer, String propsName)
      throws ProjectManagerException;

  List<Triple<String, Boolean, Permission>> getProjectPermissions(int projectId)
      throws ProjectManagerException;

  void updateProjectSettings(Project project) throws ProjectManagerException;
}
package uk.ac.susx.tag.classificationframework.clusters.clusteranalysis; import it.unimi.dsi.fastutil.ints.Int2IntMap; import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap; import it.unimi.dsi.fastutil.ints.IntArrayList; import it.unimi.dsi.fastutil.ints.IntList; import it.unimi.dsi.fastutil.ints.IntSet; import uk.ac.susx.tag.classificationframework.clusters.ClusteredProcessedInstance; import uk.ac.susx.tag.classificationframework.datastructures.Instance; import uk.ac.susx.tag.classificationframework.featureextraction.inference.FeatureInferrer; import uk.ac.susx.tag.classificationframework.featureextraction.pipelines.FeatureExtractionPipeline; import java.io.Serializable; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.stream.Collectors; /** * Class for gathering statistics about features in clustered documents. * * User: Andrew D. Robertson * Date: 19/10/2015 * Time: 12:11 */ public abstract class FeatureClusterJointCounter implements Serializable { private static final long serialVersionUID = 0L; protected double featureSmoothingAlpha = 0.1; public void count(Collection<ClusteredProcessedInstance> documents, ClusterMembershipTest t, FeatureExtractionPipeline pipeline){ count(documents, t, pipeline, true); } public void count(Collection<ClusteredProcessedInstance> documents, Iterable<Instance> backgroundDocuments, ClusterMembershipTest t, FeatureExtractionPipeline pipeline){ count(documents, backgroundDocuments, t, pipeline, true); } public abstract void count(Collection<ClusteredProcessedInstance> documents, ClusterMembershipTest t, FeatureExtractionPipeline pipeline, boolean reInitialise); public abstract void count(Collection<ClusteredProcessedInstance> documents, Iterable<Instance> backgroundDocuments, ClusterMembershipTest t, FeatureExtractionPipeline pipeline, boolean reInitialise); // P(feature) public double featurePrior(int feature) { return featurePrior(feature, FeatureType.WORD); } public abstract double 
featurePrior(int feature, FeatureType t); // P(feature | cluster) public double likelihoodFeatureGivenCluster(int feature, int cluster) { return likelihoodFeatureGivenCluster(feature, cluster, FeatureType.WORD); } public abstract double likelihoodFeatureGivenCluster(int feature, int cluster, FeatureType t); // P(feature | !cluster) public double likelihoodFeatureGivenNotCluster(int feature, int cluster) { return likelihoodFeatureGivenNotCluster(feature, cluster, FeatureType.WORD); } public abstract double likelihoodFeatureGivenNotCluster(int feature, int cluster, FeatureType t); public IntSet getFeatures(){ return getFeatures(FeatureType.WORD); } public abstract IntSet getFeatures(FeatureType t); public IntSet getFeaturesInCluster(int clusterIndex){ return getFeaturesInCluster(clusterIndex, FeatureType.WORD); } public abstract IntSet getFeaturesInCluster(int clusterIndex, FeatureType t); public int getFeatureCount(int feature) { return getFeatureCount(feature, FeatureType.WORD); } public abstract int getFeatureCount(int feature, FeatureType t); public int getJointCount(int feature, int cluster) { return getJointCount(feature, cluster, FeatureType.WORD);} public abstract int getJointCount(int feature, int cluster, FeatureType t); public abstract void pruneFeaturesWithCountLessThan(int n); public abstract void pruneOnlyBackgroundFeaturesWithCountLessThan(int n); public abstract void pruneOnlyClusterFeaturesWithCountLessThan(int n); public double getFeatureSmoothingAlpha() { return featureSmoothingAlpha; } public void setFeatureSmoothingAlpha(double featureSmoothingAlpha) { this.featureSmoothingAlpha = featureSmoothingAlpha; } // /** // * A count of 1 for a feature means that the feature occurred at least once in exactly 1 document. // * // * A joint count of 1 for a feature in a cluster means that the feature occurred at least once // * in exactly 1 document in the given cluster. 
    /**
     * A count of N for a feature means that the feature occurred exactly N times in the corpus,
     * the occurrences were in 1 or more documents.
     *
     * Features are partitioned by surface form into words, hashtags ("#...") and account tags
     * ("@..."), each with its own background and per-cluster count tables.
     */
    public static class FeatureBasedCounts extends FeatureClusterJointCounter {

        public int numClusters;
        public int totalFeatureCount;        // total word occurrences counted as background
        public int totalHashTagCount;        // total hashtag occurrences counted as background
        public int totalAccountTagCount;     // total account-tag occurrences counted as background
        public Int2IntOpenHashMap featureCounts;      // word feature -> background count
        public Int2IntOpenHashMap hashTagCounts;      // hashtag feature -> background count
        public Int2IntOpenHashMap accountTagCounts;   // account-tag feature -> background count
        public Int2IntOpenHashMap[] jointCounts;          // per cluster: word feature -> count
        public Int2IntOpenHashMap[] hashTagJointCounts;   // per cluster: hashtag -> count
        public Int2IntOpenHashMap[] accountTagJointCounts;// per cluster: account tag -> count
        private int[] totalFeatureCountPerCluster;
        private int[] totalHashTagCountPerCluster;
        private int[] totalAccountTagCountPerCluster;

        public FeatureBasedCounts() {
            this(1);
        }

        public FeatureBasedCounts(int numClusters){
            initialise(numClusters);
        }

        /** Reset all totals and (re)allocate one count table per cluster. */
        public void initialise(int numClusters){
            this.numClusters = numClusters;
            totalFeatureCount = 0;
            totalHashTagCount = 0;
            totalAccountTagCount = 0;
            totalFeatureCountPerCluster = new int[numClusters];
            totalHashTagCountPerCluster = new int[numClusters];
            totalAccountTagCountPerCluster = new int[numClusters];
            featureCounts = new Int2IntOpenHashMap();
            hashTagCounts = new Int2IntOpenHashMap();
            accountTagCounts = new Int2IntOpenHashMap();
            jointCounts = new Int2IntOpenHashMap[numClusters];
            hashTagJointCounts = new Int2IntOpenHashMap[numClusters];
            accountTagJointCounts = new Int2IntOpenHashMap[numClusters];
            for (int i = 0; i < jointCounts.length; i++) {
                jointCounts[i] = new Int2IntOpenHashMap();
                hashTagJointCounts[i] = new Int2IntOpenHashMap();
                accountTagJointCounts[i] = new Int2IntOpenHashMap();
            }
        }

        @Override
        public void count(Collection<ClusteredProcessedInstance> documents, ClusterMembershipTest t, FeatureExtractionPipeline pipeline, boolean reInitialise) {
            if (reInitialise) {
                int numClusters = documents.iterator().next().getClusterVector().length;
                initialise(numClusters);
            }
            // Obtain feature counts, and joint counts of features per cluster
            for (ClusteredProcessedInstance instance : documents) {
                t.setup(instance);
                int[] features = instance.getDocument().features;
                // Partition features by surface form (looked up via the pipeline's index).
                IntList words = new IntArrayList();
                IntList hashTags = new IntArrayList();
                IntList accountTags = new IntArrayList();
                for (int feature : features) {
                    String f = pipeline.featureString(feature, "**UNKNOWN**");
                    if (f.startsWith("#")){
                        hashTags.add(feature);
                    } else if (f.startsWith("@")){
                        accountTags.add(feature);
                    } else {
                        words.add(feature);
                    }
                }
                // NOTE(review): totalFeatureCount is incremented by words.size() only, but
                // featureCounts and jointCounts below are updated with ALL features (including
                // hashtags/account tags), and the per-cluster total uses features.length. This is
                // inconsistent with the background-document overload of count() below, which
                // tracks words separately — looks suspicious; confirm intended semantics.
                totalFeatureCount += words.size();
                for (int feature : features)
                    featureCounts.addTo(feature, 1);
                totalHashTagCount += hashTags.size();
                for (int hashTag : hashTags)
                    hashTagCounts.addTo(hashTag, 1);
                totalAccountTagCount += accountTags.size();
                for (int accountTag : accountTags)
                    accountTagCounts.addTo(accountTag, 1);
                for (int clusterIndex=0; clusterIndex < numClusters; clusterIndex++){
                    if (t.isDocumentInCluster(instance, clusterIndex)){
                        totalFeatureCountPerCluster[clusterIndex] += features.length;
                        for (int feature : features) {
                            jointCounts[clusterIndex].addTo(feature, 1);
                        }
                    }
                }
            }
        }

        @Override
        public void count(Collection<ClusteredProcessedInstance> documents, Iterable<Instance> backgroundDocuments, ClusterMembershipTest t, FeatureExtractionPipeline pipeline, boolean reInitialise) {
            //TODO: inspect the hashtag issue, why do we count separately? Why do all clustered document counts go together?
            // Initialise the counting data structures
            if (reInitialise) {
                int numClusters = documents.iterator().next().getClusterVector().length;
                initialise(numClusters);
            }
            // joint counts of features per cluster
            for (ClusteredProcessedInstance instance : documents) {
                t.setup(instance);
                int[] features = instance.getDocument().features;
                // Partition features by surface form (looked up via the pipeline's index).
                IntList words = new IntArrayList();
                IntList hashTags = new IntArrayList();
                IntList accountTags = new IntArrayList();
                for (int feature : features) {
                    String f = pipeline.featureString(feature, "**UNKNOWN**");
                    if (f.startsWith("#")){
                        hashTags.add(feature);
                    } else if (f.startsWith("@")){
                        accountTags.add(feature);
                    } else {
                        words.add(feature);
                    }
                }
                for (int clusterIndex=0; clusterIndex < numClusters; clusterIndex++){
                    if (t.isDocumentInCluster(instance, clusterIndex)){
                        // Add joint counts and cluster totals for the words
                        totalFeatureCountPerCluster[clusterIndex] += words.size();
                        for (int word : words) {
                            jointCounts[clusterIndex].addTo(word, 1);
                        }
                        // Add joint counts, cluster totals, and background counts for hashtags/accounttags
                        totalAccountTagCountPerCluster[clusterIndex] += accountTags.size();
                        for(int accountTag : accountTags){
                            accountTagJointCounts[clusterIndex].addTo(accountTag, 1);
                        }
                        totalHashTagCountPerCluster[clusterIndex] += hashTags.size();
                        for (int hashTag : hashTags){
                            hashTagJointCounts[clusterIndex].addTo(hashTag, 1);
                        }
                    }
                }
                // Use all clusters as background data for hash and account tags
                totalHashTagCount += hashTags.size();
                for (int hashTag : hashTags){
                    hashTagCounts.addTo(hashTag, 1);
                }
                totalAccountTagCount += accountTags.size();
                for (int accountTag : accountTags){
                    accountTagCounts.addTo(accountTag, 1);
                }
            }
            // Background word statistics come solely from the (unclustered) background documents.
            for (Instance instance : backgroundDocuments) {
                List<String> words = pipeline.extractUnindexedFeatures(instance).stream()
                        .map(FeatureInferrer.Feature::value)
                        .filter(f -> !f.startsWith("#") && !f.startsWith("@"))
                        .collect(Collectors.toList());
                totalFeatureCount += words.size();
                for (String word : words)
                    featureCounts.addTo(pipeline.featureIndex(word), 1);
            }
        }

        /**
         * P(feature). Word priors are smoothed with featureSmoothingAlpha; hashtag and
         * account-tag priors are unsmoothed relative frequencies.
         * NOTE(review): for HASH_TAG/ACCOUNT_TAG this divides by the corresponding total,
         * which is 0 before any counting — confirm callers only query after count().
         */
        @Override
        public double featurePrior(int feature, FeatureType t){
            switch (t) {
                case WORD: return (featureCounts.get(feature) + getFeatureSmoothingAlpha()) / ((double)totalFeatureCount + getFeatureSmoothingAlpha()*featureCounts.size());
                case HASH_TAG: return hashTagCounts.get(feature) / (double) totalHashTagCount;
                case ACCOUNT_TAG: return accountTagCounts.get(feature) / (double) totalAccountTagCount;
                default: throw new RuntimeException("Invalid feature type");
            }
        }

        /**
         * NOTE(review): despite the name, this returns the raw summed joint COUNT of the
         * feature across all clusters other than {@code cluster} — it is never divided by a
         * total, so it is not a probability. Confirm whether callers expect a count here.
         */
        public double likelihoodFeatureGivenNotCluster(int feature, int cluster, FeatureType t) {
            Int2IntOpenHashMap[] counts;
            switch(t){
                case WORD: counts = jointCounts; break;
                case HASH_TAG: counts = hashTagJointCounts; break;
                case ACCOUNT_TAG: counts = accountTagJointCounts; break;
                default: throw new RuntimeException("Invalid feature type");
            }
            int count = 0;
            for (int otherCluster = 0; otherCluster < counts.length; otherCluster++){
                if (cluster != otherCluster){
                    count += counts[otherCluster].get(feature);
                }
            }
            return count;
        }

        /** P(feature | cluster): joint count divided by the cluster's total for that feature type. */
        @Override
        public double likelihoodFeatureGivenCluster(int feature, int cluster, FeatureType t) {
            switch (t){
                case WORD: return jointCounts[cluster].get(feature) / (double)totalFeatureCountPerCluster[cluster];
                case HASH_TAG: return hashTagJointCounts[cluster].get(feature) / (double)totalHashTagCountPerCluster[cluster];
                case ACCOUNT_TAG: return accountTagJointCounts[cluster].get(feature) / (double)totalAccountTagCountPerCluster[cluster];
                default: throw new RuntimeException("Invalid feature type.");
            }
        }

        @Override
        public IntSet getFeatures(FeatureType t) {
            switch (t){
                case WORD: return featureCounts.keySet();
                case HASH_TAG: return hashTagCounts.keySet();
                case ACCOUNT_TAG: return accountTagCounts.keySet();
                default: throw new RuntimeException("Invalid enum value for feature type.");
            }
        }

        @Override
        public IntSet getFeaturesInCluster(int clusterIndex, FeatureType t) {
            switch (t){
                case WORD: return jointCounts[clusterIndex].keySet();
                case HASH_TAG: return hashTagJointCounts[clusterIndex].keySet();
                case ACCOUNT_TAG: return accountTagJointCounts[clusterIndex].keySet();
                default: throw new RuntimeException("Invalid enum value for feature type.");
            }
        }

        @Override
        public int getFeatureCount(int feature, FeatureType t) {
            switch (t){
                case WORD: return featureCounts.get(feature);
                case HASH_TAG: return hashTagCounts.get(feature);
                case ACCOUNT_TAG: return accountTagCounts.get(feature);
                default: throw new RuntimeException("Invalid feature type.");
            }
        }

        @Override
        public int getJointCount(int feature, int cluster, FeatureType t) {
            switch (t) {
                case WORD: return jointCounts[cluster].get(feature);
                case HASH_TAG: return hashTagJointCounts[cluster].get(feature);
                case ACCOUNT_TAG: return accountTagJointCounts[cluster].get(feature);
                default: throw new RuntimeException("Invalid feature type");
            }
        }

        /**
         * Remove word features with background count < n from both the background table and
         * every cluster's joint table, keeping the running totals consistent.
         */
        @Override
        public void pruneFeaturesWithCountLessThan(int n) {
            Iterator<Int2IntMap.Entry> iter = featureCounts.int2IntEntrySet().fastIterator();
            while (iter.hasNext()){
                Int2IntMap.Entry e = iter.next();
                int feature = e.getIntKey();
                int count = e.getIntValue();
                if (count < n) {
                    iter.remove();
                    totalFeatureCount -= count;
                    for (int i = 0; i < jointCounts.length; i++) {
                        totalFeatureCountPerCluster[i] -= jointCounts[i].get(feature);
                        jointCounts[i].remove(feature);
                    }
                }
            }
        }

        /** Remove low-count word features from the background table only; joint counts are untouched. */
        @Override
        public void pruneOnlyBackgroundFeaturesWithCountLessThan(int n) {
            Iterator<Int2IntMap.Entry> iter = featureCounts.int2IntEntrySet().fastIterator();
            while (iter.hasNext()){
                Int2IntMap.Entry e = iter.next();
                int count = e.getIntValue();
                if (count < n) {
                    totalFeatureCount -= count;
                    iter.remove();
                }
            }
        }

        /** Remove low-count word features from each cluster's joint table; background counts are untouched. */
        @Override
        public void pruneOnlyClusterFeaturesWithCountLessThan(int n) {
            for (int c = 0; c < jointCounts.length; c++){
                Iterator<Int2IntMap.Entry> iter = jointCounts[c].int2IntEntrySet().fastIterator();
                while(iter.hasNext()){
                    Int2IntMap.Entry e = iter.next();
                    int count = e.getIntValue();
                    if (count < n) {
                        totalFeatureCountPerCluster[c] -= count;
                        iter.remove();
                    }
                }
            }
        }
    }

    /**
     * Cluster membership testing.
     *
     * In order to produce the counts of how many times a feature occurs within a particular cluster,
     * we must be able to decide whether or not a document is in a cluster.
     *
     * A class implementing the ClusterMembershipTest gets to decide whether a document is considered
     * part of a cluster.
     *
     * For example, the HighestProbabilityOnly implementation allows the document to only be part of the
     * cluster with which it has the highest probability of membership (clusterVector being treated as
     * vector of membership probabilities).
     */
    public interface ClusterMembershipTest {

        /**
         * Called once per document. Perform any setup required before the cluster membership
         * testing happens for each cluster.
         */
        void setup(ClusteredProcessedInstance instance);

        /**
         * Called N times per document, where N is the number of clusters.
         * Must return true if the document is considered to belong to the specified cluster.
         */
        boolean isDocumentInCluster(ClusteredProcessedInstance instance, int clusterIndex);
    }

    /**
     * Allows the document to only be part of the cluster with which it has the highest
     * probability of membership (clusterVector being treated as vector of membership probabilities).
*/ public static class HighestProbabilityOnly implements ClusterMembershipTest{ private int highestClusterIndex = 0; public void setup(ClusteredProcessedInstance instance){ double[] clusterVector = instance.getClusterVector(); highestClusterIndex = 0; double currentMax = clusterVector[0]; for (int i = 0; i < clusterVector.length; i++) { if (clusterVector[i] > currentMax) { highestClusterIndex = i; currentMax = clusterVector[i]; } } } public boolean isDocumentInCluster(ClusteredProcessedInstance instance, int clusterIndex) { return clusterIndex == highestClusterIndex; } } /** * Allows the document to be part of any cluster for which the document's membership * probability is equal to or higher than some threshold. */ public static class ProbabilityAboveThreshold implements ClusterMembershipTest{ private final double threshold; public ProbabilityAboveThreshold(double threshold){ this.threshold = threshold; } public void setup(ClusteredProcessedInstance instance){ // None required } public boolean isDocumentInCluster(ClusteredProcessedInstance instance, int clusterIndex) { return instance.getClusterVector()[clusterIndex] >= threshold; } } public static class ProbabilityAboveUniform extends ProbabilityAboveThreshold { public ProbabilityAboveUniform(int numClusters) { super(1 / numClusters); } } }
/* Copyright (c) 2013 OpenPlans. All rights reserved.
 * This code is licensed under the BSD New License, available at the root
 * application directory.
 */
package org.geogit.geotools.cli.porcelain;

import static org.mockito.Matchers.anyMapOf;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.io.Serializable;

import org.geotools.data.AbstractDataStoreFactory;
import org.geotools.data.memory.MemoryDataStore;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.referencing.CRS;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;

import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.Point;

/**
 * Test utility producing mocked {@link AbstractDataStoreFactory} instances backed by in-memory
 * data stores, in various healthy and failing configurations, for exercising the geotools
 * import/export CLI commands.
 */
public class TestHelper {

    /** Utility class with only static factories; not meant to be instantiated. */
    private TestHelper() {
    }

    /**
     * A mocked factory whose store contains populated tables "table1"/"table2"/"table3",
     * shapefile-like and geojson-like tables, plus an empty schema "table4".
     *
     * @throws Exception if a CRS cannot be decoded or the mock store cannot be populated
     */
    public static AbstractDataStoreFactory createTestFactory() throws Exception {

        SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
        builder.setCRS(CRS.decode("EPSG:4326"));
        builder.add("geom", Point.class);
        builder.add("label", String.class);
        builder.setName("table1");
        SimpleFeatureType type = builder.buildFeatureType();

        SimpleFeatureTypeBuilder builder2 = new SimpleFeatureTypeBuilder();
        builder2.setCRS(CRS.decode("EPSG:4326"));
        builder2.add("geom", Point.class);
        builder2.add("name", String.class);
        builder2.setName("table2");
        SimpleFeatureType type2 = builder2.buildFeatureType();

        SimpleFeatureTypeBuilder builder3 = new SimpleFeatureTypeBuilder();
        builder3.setCRS(CRS.decode("EPSG:4326"));
        builder3.add("geom", Point.class);
        builder3.add("name", String.class);
        builder3.add("number", Long.class);
        builder3.setName("table3");

        SimpleFeatureTypeBuilder builder4 = new SimpleFeatureTypeBuilder();
        builder4.setCRS(CRS.decode("EPSG:4326"));
        builder4.add("geom", Point.class);
        builder4.add("number", Double.class);
        builder4.setName("table4");

        // A table with a shp-like structure
        SimpleFeatureTypeBuilder builderShp = new SimpleFeatureTypeBuilder();
        builderShp.setCRS(CRS.decode("EPSG:4326"));
        builderShp.add("the_geom", Point.class);
        builderShp.add("number", Double.class);
        builderShp.add("number2", Double.class);
        builderShp.setName("shpLikeTable");

        SimpleFeatureTypeBuilder builderShp2 = new SimpleFeatureTypeBuilder();
        builderShp2.setCRS(CRS.decode("EPSG:4326"));
        builderShp2.add("the_geom", Point.class);
        builderShp2.add("number", Double.class);
        builderShp2.add("number2", Integer.class);
        builderShp2.setName("shpLikeTable2");

        // A table with a geojson-like structure
        SimpleFeatureTypeBuilder builderGeoJson = new SimpleFeatureTypeBuilder();
        builderGeoJson.setCRS(CRS.decode("EPSG:4326"));
        builderGeoJson.add("number", Double.class);
        builderGeoJson.add("number2", Double.class);
        builderGeoJson.add("geom", Point.class);
        builderGeoJson.setName("GeoJsonLikeTable");

        // Same structure but a different (projected) CRS.
        SimpleFeatureTypeBuilder builderGeoJson2 = new SimpleFeatureTypeBuilder();
        builderGeoJson2.setCRS(CRS.decode("EPSG:23030"));
        builderGeoJson2.add("number", Double.class);
        builderGeoJson2.add("number2", Double.class);
        builderGeoJson2.add("geom", Point.class);
        builderGeoJson2.setName("GeoJsonLikeTable2");

        SimpleFeatureType type3 = builder3.buildFeatureType();
        SimpleFeatureType typeShp = builderShp.buildFeatureType();
        SimpleFeatureType typeShp2 = builderShp2.buildFeatureType();
        SimpleFeatureType typeGeoJson = builderGeoJson.buildFeatureType();
        SimpleFeatureType typeGeoJson2 = builderGeoJson2.buildFeatureType();

        GeometryFactory gf = new GeometryFactory();

        SimpleFeature f1 = SimpleFeatureBuilder.build(type,
                new Object[] { gf.createPoint(new Coordinate(5, 8)), "feature1" }, "table1.feature1");
        SimpleFeature f2 = SimpleFeatureBuilder.build(type,
                new Object[] { gf.createPoint(new Coordinate(5, 4)), "feature2" }, "table1.feature2");
        SimpleFeature f3 = SimpleFeatureBuilder.build(type2,
                new Object[] { gf.createPoint(new Coordinate(3, 2)), "feature3" }, "table2.feature3");
        SimpleFeature f4 = SimpleFeatureBuilder.build(type3,
                new Object[] { gf.createPoint(new Coordinate(0, 5)), "feature4", 1000 }, "table2.feature4");
        // NOTE(review): several attribute values below don't match the declared binding exactly
        // (e.g. 1000 for a Double attribute, 1100.0 for an Integer one); SimpleFeatureBuilder
        // appears to rely on geotools' value conversion here — confirm that is intentional.
        SimpleFeature f5 = SimpleFeatureBuilder.build(typeShp,
                new Object[] { gf.createPoint(new Coordinate(0, 6)), 2.2, 1000 }, "feature1");
        SimpleFeature f6 = SimpleFeatureBuilder.build(typeShp2,
                new Object[] { gf.createPoint(new Coordinate(0, 7)), 3.2, 1100.0 }, "feature1");
        SimpleFeature f7 = SimpleFeatureBuilder.build(typeGeoJson,
                new Object[] { 4.2, 1200, gf.createPoint(new Coordinate(0, 8)) }, "feature1");
        SimpleFeature f8 = SimpleFeatureBuilder.build(typeGeoJson2,
                new Object[] { 4.2, 1200, gf.createPoint(new Coordinate(0, 9)) }, "feature1");

        MemoryDataStore testDataStore = new MemoryDataStore();
        testDataStore.addFeature(f1);
        testDataStore.addFeature(f2);
        testDataStore.addFeature(f3);
        testDataStore.addFeature(f4);
        testDataStore.addFeature(f5);
        testDataStore.addFeature(f6);
        testDataStore.addFeature(f7);
        testDataStore.addFeature(f8);
        // "table4" exists as a schema only, with no features.
        testDataStore.createSchema(builder4.buildFeatureType());

        final AbstractDataStoreFactory factory = mock(AbstractDataStoreFactory.class);
        when(factory.createDataStore(anyMapOf(String.class, Serializable.class))).thenReturn(
                testDataStore);
        when(factory.canProcess(anyMapOf(String.class, Serializable.class))).thenReturn(true);

        return factory;
    }

    /** A mocked factory whose data store exists but contains no schemas or features. */
    public static AbstractDataStoreFactory createEmptyTestFactory() throws Exception {

        MemoryDataStore testDataStore = new MemoryDataStore();

        final AbstractDataStoreFactory factory = mock(AbstractDataStoreFactory.class);
        when(factory.createDataStore(anyMapOf(String.class, Serializable.class))).thenReturn(
                testDataStore);
        when(factory.canProcess(anyMapOf(String.class, Serializable.class))).thenReturn(true);

        return factory;
    }

    /** A mocked factory whose createDataStore() returns null (simulates connection failure). */
    public static AbstractDataStoreFactory createNullTestFactory() throws Exception {

        final AbstractDataStoreFactory factory = mock(AbstractDataStoreFactory.class);
        when(factory.createDataStore(anyMapOf(String.class, Serializable.class))).thenReturn(null);
        when(factory.canProcess(anyMapOf(String.class, Serializable.class))).thenReturn(true);

        return factory;
    }

    /**
     * A mocked factory whose data store throws from getNames()/getSchema() (IOException) and
     * getTypeNames() (RuntimeException), for exercising error handling.
     */
    public static AbstractDataStoreFactory createFactoryWithGetNamesException() throws Exception {

        MemoryDataStore testDataStore = mock(MemoryDataStore.class);
        when(testDataStore.getNames()).thenThrow(new IOException());
        when(testDataStore.getTypeNames()).thenThrow(new RuntimeException());
        when(testDataStore.getSchema(anyString())).thenThrow(new IOException());

        final AbstractDataStoreFactory factory = mock(AbstractDataStoreFactory.class);
        when(factory.createDataStore(anyMapOf(String.class, Serializable.class))).thenReturn(
                testDataStore);
        when(factory.canProcess(anyMapOf(String.class, Serializable.class))).thenReturn(true);

        return factory;
    }

    /**
     * A mocked factory backed by a populated store that throws IOException when
     * getFeatureSource("table1") is requested.
     */
    public static AbstractDataStoreFactory createFactoryWithGetFeatureSourceException()
            throws Exception {

        SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
        builder.setCRS(CRS.decode("EPSG:4326"));
        builder.add("geom", Point.class);
        builder.add("label", String.class);
        builder.setName("table1");
        SimpleFeatureType type = builder.buildFeatureType();

        SimpleFeatureTypeBuilder builder2 = new SimpleFeatureTypeBuilder();
        builder2.setCRS(CRS.decode("EPSG:4326"));
        builder2.add("geom", Point.class);
        builder2.add("name", String.class);
        builder2.setName("table2");
        SimpleFeatureType type2 = builder2.buildFeatureType();

        GeometryFactory gf = new GeometryFactory();

        SimpleFeature f1 = SimpleFeatureBuilder.build(type,
                new Object[] { gf.createPoint(new Coordinate(5, 8)), "feature1" }, null);
        SimpleFeature f2 = SimpleFeatureBuilder.build(type,
                new Object[] { gf.createPoint(new Coordinate(5, 4)), "feature2" }, null);
        SimpleFeature f3 = SimpleFeatureBuilder.build(type2,
                new Object[] { gf.createPoint(new Coordinate(3, 2)), "feature3" }, null);

        MemoryDataStore testDataStore = new MemoryDataStore();
        testDataStore.addFeature(f1);
        testDataStore.addFeature(f2);
        testDataStore.addFeature(f3);

        MemoryDataStore spyDataStore = spy(testDataStore);
        // NOTE(review): when(spy.method()) invokes the real method once while stubbing;
        // Mockito recommends doThrow(..).when(spy).getFeatureSource("table1") for spies.
        // Left as-is since the real call is harmless here — confirm before changing.
        when(spyDataStore.getFeatureSource("table1")).thenThrow(new IOException("Exception"));

        final AbstractDataStoreFactory factory = mock(AbstractDataStoreFactory.class);
        when(factory.createDataStore(anyMapOf(String.class, Serializable.class))).thenReturn(
                spyDataStore);
        when(factory.canProcess(anyMapOf(String.class, Serializable.class))).thenReturn(true);

        return factory;
    }
}
/* * Copyright (c) 2014 Spotify AB. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.spotify.helios.common.descriptors; import com.google.common.base.Optional; import com.google.common.collect.Maps; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; import org.jetbrains.annotations.Nullable; import java.util.Map; import static com.google.common.base.Preconditions.checkNotNull; import static java.util.Collections.emptyMap; /** * The state of the deployed job (aka a task). * * A typical JSON representation might be: * <pre> * { * "containerId" : "e890d827e802934a29c97d7e9e3c96a55ca049e519ab0c28be8020621a0a3750", * "env" : { * "SYSLOG_HOST_PORT" : "10.99.0.1:514" * }, * "goal" : "START", * "job" : { #... see the definition of Job }, * "ports" : { * "http" : { * "externalPort" : 8080, * "internalPort" : 8080, * "protocol" : "tcp" * }, * "http-admin" : { * "externalPort" : 8081, * "internalPort" : 8081, * "protocol" : "tcp" * } * }, * "state" : "RUNNING", * "throttled" : "NO", * "containerError": "Something broke starting the container!" 
 * },
 * </pre>
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class TaskStatus extends Descriptor {

  // Shared immutable default used when no port mappings were supplied.
  private static final Map<String, PortMapping> EMPTY_PORTS = emptyMap();

  /** Lifecycle states a deployed task (container) can be in. */
  public enum State {
    PULLING_IMAGE,
    CREATING,
    STARTING,
    HEALTHCHECKING,
    RUNNING,
    EXITED,
    STOPPING,
    STOPPED,
    FAILED,
    UNKNOWN
  }

  private final Job job;
  private final Goal goal;
  private final State state;
  private final String containerId;
  private final ThrottleState throttled;
  private final Map<String, PortMapping> ports;
  private final Map<String, String> env;
  private final String containerError;

  /**
   * Jackson deserialization constructor. Null optional arguments are replaced with
   * benign defaults (NO throttle, empty ports/env, empty error string).
   *
   * @param job The job the task is running.
   * @param goal The desired state of the task.
   * @param state The state of the task.
   * @param containerId The containerId, if the task has one (yet).
   * @param throttled The throttle state of the task.
   * @param ports The ports actually assigned to the task.
   * @param env The environment passed to the container.
   * @param containerError The last Docker error encountered while starting the container.
   */
  public TaskStatus(@JsonProperty("job") final Job job,
                    @Nullable @JsonProperty("goal") final Goal goal,
                    @JsonProperty("state") final State state,
                    @Nullable @JsonProperty("containerId") final String containerId,
                    @JsonProperty("throttled") final ThrottleState throttled,
                    @JsonProperty("ports") final Map<String, PortMapping> ports,
                    @Nullable @JsonProperty("env") final Map<String, String> env,
                    @Nullable @JsonProperty("containerError") final String containerError) {
    this.job = checkNotNull(job, "job");
    this.goal = goal; // TODO (dano): add null check when all masters are upgraded
    this.state = checkNotNull(state, "state");

    // Optional
    this.containerId = containerId;
    this.throttled = Optional.fromNullable(throttled).or(ThrottleState.NO);
    this.ports = Optional.fromNullable(ports).or(EMPTY_PORTS);
    this.env = Optional.fromNullable(env).or(Maps.<String, String>newHashMap());
    this.containerError = Optional.fromNullable(containerError).or("");
  }

  /** Returns a builder pre-populated with this instance's values. */
  public Builder asBuilder() {
    return newBuilder()
        .setJob(job)
        .setGoal(goal)
        .setState(state)
        .setContainerId(containerId)
        .setThrottled(throttled)
        .setPorts(ports)
        .setEnv(env)
        .setContainerError(containerError);
  }

  // Builder path: unlike the Jackson constructor, goal is mandatory here.
  private TaskStatus(final Builder builder) {
    this.job = checkNotNull(builder.job, "job");
    this.goal = checkNotNull(builder.goal, "goal");
    this.state = checkNotNull(builder.state, "state");

    // Optional
    this.containerId = builder.containerId;
    this.throttled = Optional.fromNullable(builder.throttled).or(ThrottleState.NO);
    this.ports = Optional.fromNullable(builder.ports).or(EMPTY_PORTS);
    this.env = Optional.fromNullable(builder.env).or(Maps.<String, String>newHashMap());
    this.containerError = Optional.fromNullable(builder.containerError).or("");
  }

  public ThrottleState getThrottled() {
    return throttled;
  }

  @Nullable
  public String getContainerId() {
    return containerId;
  }

  public Goal getGoal() {
    return goal;
  }

  public State getState() {
    return state;
  }

  public Job getJob() {
    return job;
  }

  public Map<String, PortMapping> getPorts() {
    return ports;
  }

  public Map<String, String> getEnv() {
    return env;
  }

  public String getContainerError() {
    return containerError;
  }

  @Override
  public String toString() {
    return "TaskStatus{" +
           "job=" + job +
           ", goal=" + goal +
           ", state=" + state +
           ", containerId='" + containerId + '\'' +
           ", throttled=" + throttled +
           ", ports=" + ports +
           ", env=" + env +
           ", containerError='" + containerError + '\'' +
           "} " + super.toString();
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    final TaskStatus that = (TaskStatus) o;

    if (containerId != null ? !containerId.equals(that.containerId) : that.containerId != null) {
      return false;
    }
    if (env != null ? !env.equals(that.env) : that.env != null) {
      return false;
    }
    if (goal != that.goal) {
      return false;
    }
    if (job != null ? !job.equals(that.job) : that.job != null) {
      return false;
    }
    if (ports != null ? !ports.equals(that.ports) : that.ports != null) {
      return false;
    }
    if (state != that.state) {
      return false;
    }
    if (throttled != that.throttled) {
      return false;
    }
    if (containerError != null
        ? !containerError.equals(that.containerError) : that.containerError != null) {
      return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    int result = job != null ? job.hashCode() : 0;
    result = 31 * result + (goal != null ? goal.hashCode() : 0);
    result = 31 * result + (state != null ? state.hashCode() : 0);
    result = 31 * result + (containerId != null ? containerId.hashCode() : 0);
    result = 31 * result + (throttled != null ? throttled.hashCode() : 0);
    result = 31 * result + (ports != null ? ports.hashCode() : 0);
    result = 31 * result + (env != null ? env.hashCode() : 0);
    result = 31 * result + (containerError != null ? containerError.hashCode() : 0);
    return result;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  /** Mutable builder for {@link TaskStatus}; see {@link #newBuilder()} and {@link #asBuilder()}. */
  public static class Builder {

    Builder() {}

    private Job job;
    private Goal goal;
    private State state;
    private String containerId;
    private Map<String, PortMapping> ports;
    private ThrottleState throttled;
    private Map<String, String> env;
    private String containerError;

    public Builder setJob(final Job job) {
      this.job = job;
      return this;
    }

    public Builder setGoal(Goal goal) {
      this.goal = goal;
      return this;
    }

    public Builder setState(final State state) {
      this.state = state;
      return this;
    }

    public Builder setContainerId(final String containerId) {
      this.containerId = containerId;
      return this;
    }

    public Builder setPorts(final Map<String, PortMapping> ports) {
      this.ports = ports;
      return this;
    }

    public Builder setThrottled(final ThrottleState throttled) {
      this.throttled = throttled;
      return this;
    }

    public Builder setEnv(final Map<String, String> env) {
      this.env = env;
      return this;
    }

    public Builder setContainerError(final String containerError) {
      this.containerError = containerError;
      return this;
    }

    public TaskStatus build() {
      return new TaskStatus(this);
    }
  }
}
package org.jgroups.tests;

import org.jgroups.Global;
import org.jgroups.util.FastArray;
import org.testng.annotations.Test;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * Tests {@link org.jgroups.util.FastArray}
 * @author Bela Ban
 * @since 5.2
 */
@Test(groups=Global.FUNCTIONAL)
public class FastArrayTest {

    public void testCreation() {
        FastArray<Integer> fa=new FastArray<>(1);
        assert fa.isEmpty();
        assert fa.capacity() == 1;
    }

    public void testAdd() {
        FastArray<Integer> fa=new FastArray<>(5);
        int added=fa.add(1);
        assert added == 1;
        assert fa.capacity() == 5;
        assert fa.size() == 1;
        assert !fa.isEmpty();
        added=fa.add(2, 3); // varargs add of two elements
        assert added == 2;
        assert fa.capacity() == 5;
        assert fa.size() == 3;
        assert !fa.isEmpty();
    }

    /** add(el,false) must drop elements instead of growing once capacity is full. */
    public void testAddNoResize() {
        FastArray<Integer> fa=new FastArray<>(5);
        List<Integer> list=Arrays.asList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);
        for(int i: list)
            fa.add(i, false);
        assert fa.size() == 5;
        assert fa.capacity() == 5;
    }

    public void testAddList() {
        FastArray<Integer> fa=create(3);
        List<Integer> list=Arrays.asList(3, 4, 5, 6, 7, 8, 9);
        assert fa.size() == 3;
        int added=fa.add(list);
        assert added == list.size();
        assert fa.size() == 10;
        // growing past capacity adds the configured increment on top
        assert fa.capacity() == 10 + fa.increment();
        fa=new FastArray<>(10);
        added=fa.add(Arrays.asList(0,1,2));
        assert added == 3;
        added=fa.add(list);
        assert added == list.size();
        assert fa.size() == 10;
        assert fa.capacity() == 10;
        added=fa.add(11);
        assert added == 1;
        assert fa.size() == 11;
        assert fa.capacity() == 10 + fa.increment();
        list=new ArrayList<>(); // adding an empty list is a no-op
        added=fa.add(list);
        assert added == 0;
        assert fa.size() == 11;
        assert fa.capacity() == 10 + fa.increment();
    }

    public void testAddFastArray() {
        FastArray<Integer> fa=create(10);
        FastArray<Integer> fa2=new FastArray<>(5);
        int added=fa2.add(Arrays.asList(10,11,12,13,14));
        assert added == 5;
        added=fa.add(fa2);
        assert added == fa2.size();
        assert fa.size() == 15;
        assert fa.capacity() == fa.size() + fa.increment();
        // add(fa2,false): only fills up to the current capacity, no resizing
        fa=new FastArray<>(15);
        added=fa.add(Arrays.asList(0,1,2,3,4,5,6,7,8,9));
        assert added == 10;
        fa2=new FastArray<>(10);
        added=fa2.add(Arrays.asList(10,11,12,13,14,15,16,17,18,19));
        assert added == 10;
        added=fa.add(fa2, false);
        assert added == 5;
        assert fa.size() == 15;
        assert fa.capacity() == fa.size();
        fa=new FastArray<>(20);
        added=fa.add(Arrays.asList(0,1,2,3,4,5,6,7,8,9));
        assert added == 10;
        fa2=new FastArray<>(10);
        added=fa2.add(Arrays.asList(10,11,12,13,14,15,16,17,18,19));
        assert added == 10;
        added=fa.add(fa2, false);
        assert added == 10;
        assert fa.size() == 20;
        assert fa.capacity() == fa.size();
    }

    public void testAddArray() {
        FastArray<Integer> fa=new FastArray<>(5);
        Integer[] arr=createArray(5);
        int added=fa.add(arr, 10); // length arg larger than the array: clamped to arr.length
        assert added == arr.length;
        assert fa.size() == 5;
        assert fa.index() == 5;
        fa=new FastArray<>(5);
        arr=createArray(10);
        added=fa.add(arr, 8); // only the first 8 elements are taken
        assert added == 8;
        assert fa.size() == 8;
        assert fa.index() == 8;
        // add sparse array: nulls are skipped for size, but the index still advances
        fa=new FastArray<>(5);
        arr[0]=arr[3]=arr[9]=null;
        added=fa.add(arr, arr.length);
        assert added == 7;
        assert fa.size() == 7;
        assert fa.index() == 10;
    }

    /** transferFrom(other,true) moves all elements and clears the source. */
    public void testTransfer() {
        FastArray<Integer> other=new FastArray<>(3);
        List<Integer> l=Arrays.asList(0,1,2,3,4,5,6,7,8,9);
        other.add(l);
        int other_size=other.size();
        FastArray<Integer> fa=new FastArray<>(5);
        int num=fa.transferFrom(other, true);
        assert num == other_size;
        assert fa.size() == other_size;
        assert other.isEmpty();
    }

    /** transferFrom(other,false) copies (source untouched), also from a sparse source. */
    public void testTransfer2() {
        FastArray<Integer> other=new FastArray<>(10);
        List<Integer> l=Arrays.asList(0,1,2,3,4,5,6,7,8,9);
        other.add(l);
        other.remove(0).remove(3).remove(4).remove(9);
        int other_size=other.size();
        FastArray<Integer> fa=new FastArray<>(5);
        int num=fa.transferFrom(other, false);
        assert num == other_size;
        assert fa.size() == other_size;
        assert !other.isEmpty();
        assertSameElements(fa, other);
        fa=new FastArray<>(15); // destination larger than needed
        num=fa.transferFrom(other, false);
        assert num == other_size;
        assert fa.size() == other_size;
        assert !other.isEmpty();
        assertSameElements(fa, other);
        fa=create(15); // destination with pre-existing elements gets overwritten
        num=fa.transferFrom(other, false);
        assert num == other_size;
        assert fa.size() == other_size;
        assert !other.isEmpty();
        assertSameElements(fa, other);
    }

    /** Transfer into a destination that already holds more elements than the source. */
    public void testTransfer3() {
        FastArray<Integer> other=new FastArray<>(3);
        List<Integer> l=Arrays.asList(0,1,2,3,4,5,6,7,8,9);
        other.add(l);
        int other_size=other.size();
        FastArray<Integer> fa=new FastArray<>(5);
        l.forEach(fa::add);
        l.forEach(fa::add);
        System.out.println("fa = " + fa);
        int num=fa.transferFrom(other, true);
        assert num == other_size;
        assert fa.size() == other_size;
        assert other.isEmpty();
    }

    /** Transferring from an empty source must not touch the destination's capacity. */
    public void testTransfer4() {
        FastArray<Integer> other=new FastArray<>(30);
        FastArray<Integer> fa=new FastArray<>(10);
        int num=fa.transferFrom(other, true);
        assert num == 0;
        assert fa.capacity() == 10;
    }

    /** set(index,null) decrements size; re-setting a value restores it; idempotent either way. */
    public void testSet() {
        FastArray<Integer> fa=create(10);
        Integer[] arr={1,3,5,7,9,10};
        fa.set(arr);
        assert fa.size() == 6;
        fa.add(11);
        assert fa.size() == 7;
        Integer el=fa.get(2);
        assert el == 5;
        assert fa.size() == 7;
        fa.set(2, null);
        assert fa.size() == 6;
        assert fa.get(2) == null;
        fa.set(2, null);
        assert fa.size() == 6;
        assert fa.get(2) == null;
        fa.set(2, 5);
        assert fa.size() == 7;
        assert fa.get(2) == 5;
        fa.set(2, 5);
        assert fa.size() == 7;
        assert fa.get(2) == 5;
    }

    public void testSet2() {
        FastArray<Integer> fa=create(10);
        fa.set(9, 90).set(8,80); // replacing non-null elements keeps the size
        assert fa.size() == 10;
        assert fa.get(9) == 90;
        assert fa.get(8) == 80;
    }

    public void testSet3() {
        FastArray<Integer> fa=create(10);
        for(int i=0; i <= 8; i++) {
            if(i != 5)
                fa.remove(i);
        }
        assert fa.size() == 2; // only indices 5 and 9 remain
        fa.set(9, null);
        assert fa.size() == 1;
        fa.set(9, 90);
        assert fa.size() == 2;
    }

    public void testRemove() {
        FastArray<Integer> fa=create(10);
        fa.remove(5).remove(0).remove(9);
        assert fa.size() == 7;
        assert fa.count() == 7; // count() re-scans; must agree with the cached size
    }

    /** Removing from an empty array is a no-op. */
    public void testRemove2() {
        FastArray<Integer> fa=new FastArray<>(10);
        fa.remove(9);
        assert fa.isEmpty();
    }

    /** A null filter matches nothing, so nothing is replaced. */
    public void testReplaceIf() {
        FastArray<Integer> fa=create(10);
        fa.replaceIf(null, 1, true);
        assert fa.size() == 10;
        int i=0;
        for(Integer n: fa)
            assert n == i++;
    }

    /** Replaces all elements */
    public void testReplaceIfReplaceAll() {
        FastArray<Integer> fa=create(10);
        fa.replaceIf(el -> true, 1, true);
        assert fa.size() == 10;
        for(Integer n: fa)
            assert n == 1;
    }

    /** Replaces no elements (always-false filter leaves the array unchanged) */
    public void testReplaceIfReplaceNone() {
        FastArray<Integer> fa=create(10);
        fa.replaceIf(el -> false, 1, true);
        assert fa.size() == 10;
        int i=0;
        for(Integer n: fa)
            assert n == i++;
    }

    /** Replacing matches with null effectively removes them. */
    public void testReplaceIfRemoveEvenNumbers() {
        FastArray<Integer> fa=create(10);
        fa.replaceIf(el -> el %2 == 0, null, true);
        assert fa.size() == 5;
        for(Integer n: fa)
            assert n % 2 != 0;
    }

    public void testReplaceIfRemoveEvenNumbers2() {
        FastArray<Integer> fa=create(10).remove(0).remove(1).remove(5).remove(6).remove(7).remove(8).remove(9);
        assert fa.size() == 3; // 2, 3, 4 remain
        fa.replaceIf(el -> el %2 == 0, null, true);
        assert fa.size() == 1; // only 3 remains
        for(Integer n: fa)
            assert n % 2 != 0;
    }

    public void testGet() {
        FastArray<Integer> fa=create(10);
        fa.remove(5).remove(0).remove(9);
        Integer num=fa.get(3);
        assert num == 3;
        num=fa.get(9); // removed slot reads back as null
        assert num == null;
    }

    public void testResize() {
        FastArray<Integer> fa=create(2);
        int old_cap=fa.capacity();
        assert fa.capacity() == old_cap;
        fa.add(3); // overflows the initial capacity and triggers a resize
        assert fa.capacity() == old_cap + fa.increment();
    }

    public void testSimpleIteration() {
        FastArray<Integer> fa=create(10);
        List<Integer> l=new ArrayList<>(10);
        for(Iterator<Integer> it=fa.iterator(); it.hasNext();) {
            Integer el=it.next();
            l.add(el);
        }
        assert l.size() == fa.size();
        List<Integer> l2=fa.stream().collect(Collectors.toList());
        assert l.equals(l2);
    }

    public void testIteration() {
        FastArray<Integer> fa=create(10);
        int i=0;
        for(Iterator<Integer> it=fa.iterator(); it.hasNext();) {
            Integer el=it.next();
            assert el == i++;
        }
    }

    /** Iterator.remove() must be supported. */
    public void testIteration2() {
        FastArray<Integer> fa=create(10);
        for(Iterator<Integer> it=fa.iterator(); it.hasNext();) {
            Integer el=it.next();
            if(el % 2 == 0)
                it.remove();
        }
        assert fa.size() == 5;
    }

    public void testIteration3() {
        FastArray<Integer> fa=create(10);
        for(Iterator<Integer> it=fa.iterator(); it.hasNext();) {
            it.next();
            it.remove();
        }
        assert fa.isEmpty();
    }

    /** The iterator skips removed (null) slots and visits only live elements. */
    public void testIteration4() {
        FastArray<Integer> fa=create(10);
        for(int i=0; i < 4; i++)
            fa.remove(i);
        assert fa.size() == 6;
        for(int i=6; i < 10; i++)
            fa.remove(i);
        assert fa.size() == 2;
        // iterator should stop after encountering 5
        for(Iterator<Integer> it=fa.iterator(); it.hasNext();) {
            Integer el=it.next();
            assert el == 4;
            el=it.next();
            assert el == 5;
        }
    }

    public void testIterationWithFilter() {
        FastArray<Integer> fa=create(10);
        List<Integer> l=new ArrayList<>(5);
        for(Iterator<Integer> it=fa.iteratorWithFilter(i -> i %2 == 0); it.hasNext();) {
            Integer el=it.next();
            l.add(el);
        }
        assert l.size() == 5;
        for(int i: l)
            assert i % 2 == 0;
    }

    /** Not really supported, but should nevertheless work... */
    public void testIterationWithConcurrentAddition() {
        FastArray<Integer> fa=create(10);
        List<Integer> l=new ArrayList<>();
        for(Iterator<Integer> it=fa.iterator(); it.hasNext();) {
            Integer el=it.next();
            l.add(el);
            if(el == 8)
                fa.add(10,11); // appended mid-iteration; the iterator picks them up
        }
        assert l.size() == 12;
        List<Integer> l2=IntStream.rangeClosed(0, 11).boxed().collect(Collectors.toList());
        assert l.equals(l2);
    }

    /** Iteration stops when N non-null elements have been reached where N == size */
    public void testStopIterationWhenSizeReached() {
        FastArray<Integer> fa=create(10);
        IntStream.rangeClosed(0,9).filter(i -> i != 4 && i != 5).forEach(fa::remove);
        assert fa.size() == 2;
        int count=0;
        FastArray<Integer>.FastIterator it=fa.iterator();
        while(it.hasNext()) {
            Integer el=it.next();
            System.out.println("el = " + el);
            count++;
        }
        assert count == 2;
        // index 5 holds the last live element, so the scan ends there
        assert it.currentIndex() == 5;
        assert it.hitCount() == 2;
    }

    public void testStopIterationWhenSizeReachedUsingFilter() {
        FastArray<Integer> fa=create(10);
        IntStream.rangeClosed(0,9).filter(i -> i != 3 && i != 4 && i != 5).forEach(fa::remove);
        assert fa.size() == 3;
        int count=0;
        // filter rejects all three live elements (3,4,5): nothing is returned,
        // but hitCount() still counts the non-null slots that were scanned
        FastArray<Integer>.FastIterator it=fa.iteratorWithFilter(el -> el < 3 || el > 5);
        while(it.hasNext()) {
            Integer el=it.next();
            System.out.println("el = " + el);
            count++;
        }
        assert count == 0;
        assert it.currentIndex() == 5;
        assert it.hitCount() == 3;
        fa=create(10);
        IntStream.rangeClosed(0,9).filter(i -> i != 3 && i != 4 && i != 5).forEach(fa::remove);
        assert fa.size() == 3;
        count=0;
        it=fa.iteratorWithFilter(el -> el != 4); // passes 3 and 5, rejects 4
        while(it.hasNext()) {
            Integer el=it.next();
            System.out.println("el = " + el);
            count++;
        }
        assert count == 2;
        assert it.currentIndex() == 5;
        assert it.hitCount() == 3;
    }

    public void testStream() {
        FastArray<Integer> fa=create(10);
        testStream(fa);
    }

    public void testStream2() {
        FastArray<Integer> fa=create(10);
        testStream2(fa);
    }

    public void testClear() {
        FastArray<Integer> fa=create(10);
        assert fa.size() == 10;
        // remove(10) is past the populated range — expected no-op (cf. testRemove2)
        fa.remove(0).remove(1).remove(9).remove(10);
        assert fa.size() == 7;
        fa.clear(true);
        assert fa.isEmpty();
    }

    public void testAnyMatch() {
        FastArray<Integer> fa=create(10);
        boolean match=fa.anyMatch(num -> num > 3);
        assert match;
    }

    /** Asserts both arrays yield the same elements in the same order via their streams. */
    protected static void assertSameElements(FastArray<Integer> fa, FastArray<Integer> fa2) {
        assert fa.size() == fa2.size();
        List<Integer> l=fa.stream().collect(Collectors.toList()),
          l2=fa2.stream().collect(Collectors.toList());
        assert l.equals(l2);
    }

    protected static void testStream(FastArray<Integer> fa) {
        List<Integer> list=fa.stream().collect(Collectors.toList());
        System.out.println("list = " + list);
        for(int i=0; i < fa.size(); i++)
            assert i == list.get(i);
    }

    protected static void testStream2(FastArray<Integer> fa) {
        int i=0;
        for(Integer num: fa) {
            assert num == i++;
        }
    }

    /** Creates a FastArray of capacity {@code num}, filled with 0..num-1. */
    protected static FastArray<Integer> create(int num) {
        FastArray<Integer> fa=new FastArray<>(num);
        for(int i=0; i < num; i++)
            fa.add(i);
        return fa;
    }

    /** Returns a boxed Integer[] containing 0..num-1. */
    protected static Integer[] createArray(int num) {
        return IntStream.range(0, num).boxed().toArray(Integer[]::new);
    }
}
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.android; import com.facebook.buck.io.MorePaths; import com.facebook.buck.io.ProjectFilesystem; import com.facebook.buck.step.ExecutionContext; import com.facebook.buck.step.Step; import com.facebook.buck.util.XmlDomParser; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.Maps; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import java.io.IOException; import java.nio.file.Path; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * This {@link Step} takes a list of string resource files (strings.xml), groups them by locales, * and for each locale generates a file with all the string resources for that locale. * Strings.xml files without a resource qualifier are mapped to the "en" locale. 
* * <p>A typical strings.xml file looks like: * <pre> * {@code * <?xml version="1.0" encoding="utf-8"?> * <resources> * <string name="resource_name1">I am a string.</string> * <string name="resource_name2">I am another string.</string> * <plurals name="time_hours_ago"> * <item quantity="one">1 minute ago</item> * <item quantity="other">%d minutes ago</item> * </plurals> * <string-array name="logging_levels"> * <item>Default</item> * <item>Verbose</item> * <item>Debug</item> * </string-array> * </resources> * } * </pre> * * <p>For more information on the xml file format, refer to: * <a href="http://developer.android.com/guide/topics/resources/string-resource.html"> * String Resources - Android Developers * </a></p> * * <p>So for each supported locale in a project, this step goes through all such xml files for that * locale, and builds a map of resource name to resource value, where resource value is either: * <ol> * <li> a string </li> * <li> a map of plurals </li> * <li> a list of strings </li> * </ol> * and dumps this map into the output file. See {@link StringResources} for the file format. 
*/ public class CompileStringsStep implements Step { private static final String ENGLISH_STRING_PATH_SUFFIX = "res/values/strings.xml"; private static final String ENGLISH_LOCALE = "en"; @VisibleForTesting static final Pattern NON_ENGLISH_STRING_FILE_PATTERN = Pattern.compile( ".*res/values-([a-z]{2})(?:-r([A-Z]{2}))*/strings.xml"); @VisibleForTesting static final Pattern R_DOT_TXT_STRING_RESOURCE_PATTERN = Pattern.compile( "^int (string|plurals|array) (\\w+) 0x([0-9a-f]+)$"); private final ImmutableList<Path> stringFiles; private final Path rDotTxtDir; private final Map<String, String> regionSpecificToBaseLocaleMap; private final Map<String, Integer> resourceNameToIdMap; private final Function<String, Path> pathBuilder; /** * Note: The ordering of files in the input list determines which resource value ends up in the * output .fbstr file, in the event of multiple xml files of a locale sharing the same string * resource name - file that appears first in the list wins. * * @param stringFiles Set containing paths to strings.xml files matching * {@link GetStringsFilesStep#STRINGS_FILE_PATH} * @param rDotTxtDir Path to the directory where aapt generates R.txt file along with the * final R.java files per package. * @param pathBuilder Builds a path to store a .fbstr file at. 
*/ public CompileStringsStep( ImmutableList<Path> stringFiles, Path rDotTxtDir, Function<String, Path> pathBuilder) { this.stringFiles = stringFiles; this.rDotTxtDir = rDotTxtDir; this.pathBuilder = pathBuilder; this.regionSpecificToBaseLocaleMap = Maps.newHashMap(); this.resourceNameToIdMap = Maps.newHashMap(); } @Override public int execute(ExecutionContext context) { ProjectFilesystem filesystem = context.getProjectFilesystem(); try { buildResourceNameToIdMap(filesystem, rDotTxtDir.resolve("R.txt"), resourceNameToIdMap); } catch (IOException e) { context.logError(e, "Failure parsing R.txt file."); return 1; } ImmutableMultimap<String, Path> filesByLocale = groupFilesByLocale(stringFiles); Map<String, StringResources> resourcesByLocale = Maps.newHashMap(); for (String locale : filesByLocale.keySet()) { try { resourcesByLocale.put(locale, compileStringFiles(filesystem, filesByLocale.get(locale))); } catch (IOException | SAXException e) { context.logError(e, "Error parsing string file for locale: %s", locale); return 1; } } // Merge region specific locale resources with the corresponding base locale resources. // // For example, if there are separate string resources in an android project for locale // "es" and "es_US", when an application running on a device with locale set to "Spanish // (United States)" requests for a string, the Android runtime first looks for the string in // "es_US" set of resources, and if not found, returns the resource from the "es" set. // We merge these because we want the individual .fbstr files to be self contained for // simplicity. 
for (String regionSpecificLocale : regionSpecificToBaseLocaleMap.keySet()) { String baseLocale = regionSpecificToBaseLocaleMap.get(regionSpecificLocale); if (!resourcesByLocale.containsKey(baseLocale)) { continue; } resourcesByLocale.put(regionSpecificLocale, resourcesByLocale.get(regionSpecificLocale) .getMergedResources(resourcesByLocale.get(baseLocale))); } for (String locale : filesByLocale.keySet()) { try { filesystem.writeBytesToPath( Preconditions.checkNotNull(resourcesByLocale.get(locale)).getBinaryFileContent(), pathBuilder.apply(locale)); } catch (IOException e) { context.logError(e, "Error creating binary file for locale: %s", locale); return 1; } } return 0; } /** * Groups a list of strings.xml files by locale. * String files with no resource qualifier (eg. values/strings.xml) are mapped to the "en" locale * * eg. given the following list: * * ImmutableList.of( * Paths.get("one/res/values-es/strings.xml"), * Paths.get("two/res/values-es/strings.xml"), * Paths.get("three/res/values-pt-rBR/strings.xml"), * Paths.get("four/res/values-pt-rPT/strings.xml"), * Paths.get("five/res/values/strings.xml")); * * returns: * * ImmutableMap.of( * "es", ImmutableList.of(Paths.get("one/res/values-es/strings.xml"), * Paths.get("two/res/values-es/strings.xml")), * "pt_BR", ImmutableList.of(Paths.get("three/res/values-pt-rBR/strings.xml'), * "pt_PT", ImmutableList.of(Paths.get("four/res/values-pt-rPT/strings.xml"), * "en", ImmutableList.of(Paths.get("five/res/values/strings.xml"))); */ @VisibleForTesting ImmutableMultimap<String, Path> groupFilesByLocale(ImmutableList<Path> files) { ImmutableMultimap.Builder<String, Path> localeToFiles = ImmutableMultimap.builder(); for (Path filepath : files) { String path = MorePaths.pathWithUnixSeparators(filepath); Matcher matcher = NON_ENGLISH_STRING_FILE_PATTERN.matcher(path); if (matcher.matches()) { String baseLocale = matcher.group(1); String country = matcher.group(2); String locale = country == null ? 
baseLocale : baseLocale + "_" + country; if (country != null && !regionSpecificToBaseLocaleMap.containsKey(locale)) { regionSpecificToBaseLocaleMap.put(locale, baseLocale); } localeToFiles.put(locale, filepath); } else { Preconditions.checkState( path.endsWith(ENGLISH_STRING_PATH_SUFFIX), "Invalid path passed to compile strings: " + path); localeToFiles.put(ENGLISH_LOCALE, filepath); } } return localeToFiles.build(); } /** * Parses the R.txt file generated by aapt, looks for resources of type {@code string}, * {@code plurals} and {@code array}, and builds a map of resource names to their corresponding * ids. */ public static void buildResourceNameToIdMap( ProjectFilesystem filesystem, Path pathToRDotTxtFile, Map<String, Integer> resultMap ) throws IOException { List<String> fileLines = filesystem.readLines(pathToRDotTxtFile); for (String line : fileLines) { Matcher matcher = R_DOT_TXT_STRING_RESOURCE_PATTERN.matcher(line); if (!matcher.matches()) { continue; } resultMap.put(matcher.group(2), Integer.parseInt(matcher.group(3), 16)); } } private StringResources compileStringFiles( ProjectFilesystem filesystem, Collection<Path> filepaths) throws IOException, SAXException { TreeMap<Integer, String> stringsMap = Maps.newTreeMap(); TreeMap<Integer, ImmutableMap<String, String>> pluralsMap = Maps.newTreeMap(); TreeMap<Integer, ImmutableList<String>> arraysMap = Maps.newTreeMap(); for (Path stringFilePath : filepaths) { Document dom = XmlDomParser.parse(filesystem.getPathForRelativePath(stringFilePath)); NodeList stringNodes = dom.getElementsByTagName("string"); scrapeStringNodes(stringNodes, stringsMap); NodeList pluralNodes = dom.getElementsByTagName("plurals"); scrapePluralsNodes(pluralNodes, pluralsMap); NodeList arrayNodes = dom.getElementsByTagName("string-array"); scrapeStringArrayNodes(arrayNodes, arraysMap); } return new StringResources(stringsMap, pluralsMap, arraysMap); } /** * Scrapes string resource names and values from the list of xml nodes passed and 
populates * {@code stringsMap}, ignoring resource names that are already present in the map. * * @param stringNodes A list of {@code <string></string>} nodes. * @param stringsMap Map from string resource name to its value. */ @VisibleForTesting void scrapeStringNodes(NodeList stringNodes, Map<Integer, String> stringsMap) { for (int i = 0; i < stringNodes.getLength(); ++i) { Node node = stringNodes.item(i); String resourceName = node.getAttributes().getNamedItem("name").getNodeValue(); if (!resourceNameToIdMap.containsKey(resourceName)) { continue; } int resourceId = Preconditions.checkNotNull(resourceNameToIdMap.get(resourceName)); // Ignore a resource if it has already been found. if (!stringsMap.containsKey(resourceId)) { stringsMap.put(resourceId, node.getTextContent()); } } } /** * Similar to {@code scrapeStringNodes}, but for plurals nodes. */ @VisibleForTesting void scrapePluralsNodes( NodeList pluralNodes, Map<Integer, ImmutableMap<String, String>> pluralsMap) { for (int i = 0; i < pluralNodes.getLength(); ++i) { Node node = pluralNodes.item(i); String resourceName = node.getAttributes().getNamedItem("name").getNodeValue(); if (!resourceNameToIdMap.containsKey(resourceName)) { continue; } int resourceId = Preconditions.checkNotNull(resourceNameToIdMap.get(resourceName)); // Ignore a resource if it has already been found. if (pluralsMap.containsKey(resourceId)) { continue; } ImmutableMap.Builder<String, String> quantityToStringBuilder = ImmutableMap.builder(); NodeList itemNodes = ((Element) node).getElementsByTagName("item"); for (int j = 0; j < itemNodes.getLength(); ++j) { Node itemNode = itemNodes.item(j); String quantity = itemNode.getAttributes().getNamedItem("quantity").getNodeValue(); quantityToStringBuilder.put(quantity, itemNode.getTextContent()); } pluralsMap.put(resourceId, quantityToStringBuilder.build()); } } /** * Similar to {@code scrapeStringNodes}, but for string array nodes. 
*/ @VisibleForTesting void scrapeStringArrayNodes(NodeList arrayNodes, Map<Integer, ImmutableList<String>> arraysMap) { for (int i = 0; i < arrayNodes.getLength(); ++i) { Node node = arrayNodes.item(i); String resourceName = node.getAttributes().getNamedItem("name").getNodeValue(); // Ignore a resource if R.txt does not contain an entry for it. if (!resourceNameToIdMap.containsKey(resourceName)) { continue; } int resourceId = Preconditions.checkNotNull(resourceNameToIdMap.get(resourceName)); // Ignore a resource if it has already been found. if (arraysMap.containsKey(resourceId)) { continue; } ImmutableList.Builder<String> arrayValues = ImmutableList.builder(); NodeList itemNodes = ((Element) node).getElementsByTagName("item"); if (itemNodes.getLength() == 0) { continue; } for (int j = 0; j < itemNodes.getLength(); ++j) { arrayValues.add(itemNodes.item(j).getTextContent()); } arraysMap.put(resourceId, arrayValues.build()); } } /** * Used in unit tests to inject the resource name to id map. */ @VisibleForTesting void addResourceNameToIdMap(Map<String, Integer> nameToIdMap) { resourceNameToIdMap.putAll(nameToIdMap); } @Override public String getShortName() { return "compile_strings"; } @Override public String getDescription(ExecutionContext context) { return "Combine, parse string resource xml files into one binary file per locale."; } }
package org.oors; import java.util.Collection; import java.util.LinkedList; import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.Id; import javax.persistence.IdClass; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.NoResultException; import javax.persistence.Query; import javax.persistence.Table; import javax.persistence.Transient; @Entity @IdClass(ProjectBranchElementId.class) @Table(name = "OBJS") public class Obj extends Base { @Id @Column(name = "PROJECT_BRANCH_ID",insertable=false,updatable=false) protected long projectBranchId; @Id @Column(name = "ID",insertable=false,updatable=false) protected long id; @Column(name="TEXT") protected String text; @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "PROJECT_BRANCH_ID",insertable=false,updatable=false) ProjectBranch projectBranch; @Column(name="DOCUMENT_ID") protected long documentId; @Transient Document document; @Column(name="OBJ_ORDER") protected int order; @Transient protected List<Obj> children; @Column(name="PARENT_OBJ_ID") protected long parentObjId; @Transient protected Obj parent; @Transient protected List<Link> linksTo; @Transient protected List<Link> linksFrom; protected Obj() {} Obj(ProjectBranch projectBranch, Document d ) { this.projectBranchId = projectBranch.id; this.projectBranch = projectBranch; Query q = DataSource.getInstance() .entityManager.createQuery ("SELECT MAX(o.id) FROM Obj o"); try { Number result = (Number) q.getSingleResult (); if ( result==null ) this.id = 1; else this.id = result.longValue() + 1; } catch ( NoResultException nrex ) { this.id = 1; } text = ""; documentId = d.id; order = -1; parentObjId = -1; children = new LinkedList<Obj>(); linksTo = new LinkedList<Link>(); linksFrom = new LinkedList<Link>(); } public String toString() { return 
"Obj[projectBranchId="+this.projectBranchId+",id="+this.id+",document="+this.documentId+",parent="+this.parentObjId+",order="+this.order+",text="+this.text+"]"; } public String getText() { return text; } public void setText( String text ) { this.text = text; } protected int getOrder() { return order; } protected void setOrder( int order ) { this.order = order; } public ProjectBranch getProjectBranch() { return projectBranch; } protected void setProjectBranch( ProjectBranch projectBranch ) { this.projectBranch = projectBranch; } public Document getDocument() { if ( document == null ) { Collection<Document> ds = DataSource.getInstance() .entityManager.createQuery("FROM Document d WHERE d.id = "+this.documentId+" AND d.projectBranchId = "+this.projectBranchId, Document.class) .getResultList(); if ( !ds.isEmpty() ) document = ds.iterator().next(); } return document; } protected void setDocument( Document document ) { this.document = document; } public Obj getParent() { if ( parent == null ) { Collection<Obj> os = DataSource.getInstance() .entityManager.createQuery("FROM Obj o WHERE o.id = "+this.parentObjId+" AND o.projectBranchId = "+this.projectBranchId, Obj.class) .getResultList(); if ( !os.isEmpty() ) parent = os.iterator().next(); } return parent; } protected void setParent( Obj parent ) { this.parent = parent; } public List<Obj> getChildren() { if ( children == null ) { children = DataSource.getInstance() .entityManager.createQuery("FROM Obj o WHERE DOCUMENT_ID = "+this.documentId+" AND PROJECT_BRANCH_ID = "+this.projectBranchId+" AND PARENT_OBJ_ID = "+this.id+" ORDER BY OBJ_ORDER", Obj.class) .getResultList(); } return children; } public Obj next() { if ( this.getParent() != null ) { parent.getChildren(); if ( this.order+1 == parent.children.size() ) return null; return parent.children.get(this.order+1); } else { document.getObjs(); if ( this.order+1 == document.objs.size() ) return null; return document.objs.get(this.order+1); } } public Obj previous() { if ( 
this.order==0 ) return null; if ( this.getParent() != null ) { parent.getChildren(); return parent.children.get(this.order-1); } else { document.getObjs(); return document.objs.get(this.order-1); } } public Obj createBefore() { return createSiblingAt(this.order); } public Obj createAfter() { return createSiblingAt(this.order + 1); } public Obj appendChild() { Obj o = new Obj(this.getProjectBranch(),this.getDocument()); o.order = getChildren().size(); o.parentObjId = this.id; DataSource.getInstance().persist(o); children.add(o); fireCreatedEvent(o); return o; } private Obj createSiblingAt( int index ) { Obj o = new Obj(this.getProjectBranch(),this.getDocument()); o.order = index; o.parentObjId = this.id; List<Obj> os; if ( this.getParent()!=null ) { parent.getChildren(); os = parent.children; } else { this.getDocument().getObjs(); os = document.objs; } os.add(index,o); DataSource.getInstance().persist(o); for ( int i=index+1 ; i<os.size() ; i++ ) { Obj osi = os.get(i); osi.setOrder( osi.getOrder()+1); } fireCreatedEvent(o); return o; } public List<Link> getLinksTo() { if ( linksTo == null ) { linksTo = DataSource.getInstance() .entityManager.createQuery("FROM Link l WHERE DEST_OBJ_ID = "+this.id+" AND PROJECT_BRANCH_ID = "+this.projectBranchId, Link.class) .getResultList(); } return linksTo; } public List<Link> getLinksFrom() { if ( linksFrom == null ) { linksFrom = DataSource.getInstance() .entityManager.createQuery("FROM Link l WHERE SOURCE_OBJ_ID = "+this.id+" AND PROJECT_BRANCH_ID = "+this.projectBranchId, Link.class) .getResultList(); } return linksFrom; } public Link linkTo( Obj destination ) { Link link = new Link(this.projectBranch,this,destination); DataSource.getInstance().persist(link); this.linksFrom.add(link); fireCreatedEvent(link); return link; } public Link linkFrom( Obj source ) { Link link = new Link(this.projectBranch,source,this); DataSource.getInstance().persist(link); this.linksTo.add(link); fireCreatedEvent(link); return link; } @Override long 
getId() { return this.id; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.markup.resolver;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.apache.wicket.Component;
import org.apache.wicket.MarkupContainer;
import org.apache.wicket.Page;
import org.apache.wicket.application.IClassResolver;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.IMarkupFragment;
import org.apache.wicket.markup.MarkupStream;
import org.apache.wicket.markup.html.TransparentWebMarkupContainer;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.link.BookmarkablePageLink;
import org.apache.wicket.markup.html.link.DisabledAttributeLinkBehavior;
import org.apache.wicket.protocol.http.RequestUtils;
import org.apache.wicket.request.handler.resource.ResourceReferenceRequestHandler;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.apache.wicket.request.resource.PackageResource;
import org.apache.wicket.request.resource.PackageResourceReference;
import org.apache.wicket.request.resource.ResourceReference;
import org.apache.wicket.util.lang.Packages;
import org.apache.wicket.util.string.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The AutoLinkResolver is responsible to handle automatic link resolution. Tags are marked
 * "autolink" by the MarkupParser for all tags with href attribute, such as anchor and link tags
 * with no explicit wicket id. E.g. &lt;a href="Home.html"&gt;
 * <p>
 * If href points to a *.html file, a BookmarkablePageLink will automatically be created, except
 * for absolute paths, where an ExternalLink is created.
 * <p>
 * If href points to a *.html file, it resolves the given URL by searching for a page class, either
 * relative or absolute, specified by the href attribute of the tag. If relative the href URL must
 * be relative to the package containing the associated page. If no Page class can be found, a
 * warning is logged and the tag falls back to a transparent container that leaves the markup
 * unchanged.
 * <p>
 * If href is no *.html file a static reference to the resource is created.
 *
 * @see org.apache.wicket.markup.parser.filter.WicketLinkTagHandler
 *
 * @author Juergen Donnerstag
 * @author Eelco Hillenius
 */
public final class AutoLinkResolver implements IComponentResolver
{
	/**
	 * Abstract implementation that has a helper method for creating a resource reference.
	 */
	public static abstract class AbstractAutolinkResolverDelegate
		implements
			IAutolinkResolverDelegate
	{
		/**
		 * Creates a new auto component that references a package resource.
		 *
		 * @param autoId
		 *            the automatically generated id for the auto component
		 * @param pathInfo
		 *            the path info object that contains information about the link reference
		 * @param attribute
		 *            the attribute to replace the value of
		 * @return a new auto component or null if the path was absolute
		 */
		protected final Component newPackageResourceReferenceAutoComponent(final String autoId,
			final PathInfo pathInfo, final String attribute)
		{
			final MarkupContainer container = pathInfo.getContainer();

			if (!pathInfo.absolute && (pathInfo.path != null) && (pathInfo.path.length() > 0))
			{
				// Href is relative. Create a resource reference pointing at this file

				// <wicket:head> components are handled differently. We can
				// not use the container, because it is the container the
				// header has been added to (e.g. the Page). What we need
				// however, is the component (e.g. a Panel) which
				// contributed it.
				MarkupStream markupStream = pathInfo.getMarkupStream();
				Class<? extends Component> clazz = markupStream.getContainerClass();

				// However if the markup stream is a merged markup stream (inheritance), than we
				// need the class of the markup file which contained the tag.
				if ((markupStream.get() instanceof ComponentTag) &&
					(markupStream.getTag().getMarkupClass() != null))
				{
					clazz = markupStream.getTag().getMarkupClass();
				}

				// Create the component implementing the link
				ResourceReferenceAutolink autoLink = new ResourceReferenceAutolink(autoId, clazz,
					pathInfo.reference, attribute, container);
				if (autoLink.resourceReference != null)
				{
					// if the resource reference is null, it means that it the
					// reference was not found as a package resource
					return autoLink;
				}
			}
			// else we can't have absolute resource references, at least not at
			// this time

			// fall back on default processing
			return null;
		}
	}

	/**
	 * Autolink components delegate component resolution to their parent components. Reason:
	 * autolink tags don't have wicket:id and users wouldn't know where to add the component to.
	 *
	 * @author Juergen Donnerstag
	 * @param <T>
	 *            type of model object
	 */
	public final static class AutolinkBookmarkablePageLink<T> extends BookmarkablePageLink<T>
		implements
			IComponentResolver
	{
		private static final long serialVersionUID = 1L;

		/** Optional anchor fragment (e.g. "#top") appended to the generated URL. */
		private final String anchor;

		/**
		 * When using &lt;wicket:link&gt; to let Wicket lookup for pages and create the related
		 * links, it's not possible to change the "setAutoEnable" property, which defaults to true.
		 * This affects the prototype because, sometimes designers _want_ links to be enabled.
		 */
		public static boolean autoEnable = true;

		/**
		 * Construct
		 *
		 * @param <C>
		 *
		 * @see BookmarkablePageLink#BookmarkablePageLink(String, Class, PageParameters)
		 *
		 * @param id
		 * @param pageClass
		 * @param parameters
		 * @param anchor
		 */
		public <C extends Page> AutolinkBookmarkablePageLink(final String id,
			final Class<C> pageClass, final PageParameters parameters, final String anchor)
		{
			super(id, pageClass, parameters);
			this.anchor = anchor;
			setAutoEnable(autoEnable);
			add(new DisabledAttributeLinkBehavior());
		}

		/**
		 * Appends the anchor fragment, if any, to the bookmarkable URL.
		 *
		 * @see org.apache.wicket.markup.html.link.BookmarkablePageLink#getURL()
		 */
		@Override
		protected CharSequence getURL()
		{
			CharSequence url = super.getURL();
			if (anchor != null)
			{
				url = url + anchor;
			}

			return url;
		}

		/**
		 * Delegates resolution of child components to the parent container, since autolink tags
		 * have no wicket:id of their own.
		 *
		 * @see org.apache.wicket.markup.resolver.IComponentResolver#resolve(org.apache.wicket.MarkupContainer,
		 *      org.apache.wicket.markup.MarkupStream, org.apache.wicket.markup.ComponentTag)
		 */
		@Override
		public Component resolve(final MarkupContainer container, final MarkupStream markupStream,
			ComponentTag tag)
		{
			return getParent().get(tag.getId());
		}
	}

	/**
	 * Interface to delegate the actual resolving of auto components to.
	 */
	public interface IAutolinkResolverDelegate
	{
		/**
		 * Returns a new auto component based on the pathInfo object. The auto component must have
		 * the autoId assigned as it's id. Should return null in case the component could not be
		 * created as expected and the default resolving should take place.
		 *
		 * @param autoId
		 *            the automatically generated id for the auto component
		 * @param pathInfo
		 *            the path info object that contains information about the link reference
		 * @return a new auto component or null in case this method couldn't resolve to a proper
		 *         auto component
		 */
		Component newAutoComponent(final String autoId, final PathInfo pathInfo);
	}

	/**
	 * Encapsulates different aspects of a path. For instance, the path
	 * <code>org.apache.wicket.markup.html.tree.Tree/tree.css</code> has extension <code>css</code>,
	 * is relative (absolute == false) and has no page parameters.
	 */
	public static final class PathInfo
	{
		/** whether the reference is absolute (starts with '/' or '\'). */
		private final boolean absolute;

		/** An optional anchor like #top */
		private final String anchor;

		/** The extension if any. */
		private final String extension;

		/** The optional page parameters. */
		private final PageParameters pageParameters;

		/** The path excluding any parameters. */
		private final String path;

		/** The original reference (e.g the full value of a href attribute). */
		private final String reference;

		/** The container for this path */
		private final MarkupContainer container;

		/** Parent markup stream */
		private final MarkupStream markupStream;

		/**
		 * Construct. Parses the raw reference into path, extension, anchor and query-string
		 * page parameters.
		 *
		 * @param reference
		 *            the original reference (e.g the full value of a href attribute)
		 */
		public PathInfo(final String reference, MarkupContainer container,
			MarkupStream markupStream)
		{
			this.reference = reference;
			this.container = container;
			this.markupStream = markupStream;

			// If href contains URL query parameters ..
			String infoPath;

			// get the query string
			int queryStringPos = reference.indexOf("?");
			if (queryStringPos != -1)
			{
				final String queryString = reference.substring(queryStringPos + 1);
				pageParameters = new PageParameters();
				RequestUtils.decodeParameters(queryString, pageParameters);
				infoPath = reference.substring(0, queryStringPos);
			}
			else
			{
				pageParameters = null;
				infoPath = reference;
			}

			absolute = (infoPath.startsWith("/") || infoPath.startsWith("\\"));

			// remove file extension, but remember it
			String extension = null;
			int pos = infoPath.lastIndexOf(".");
			if (pos != -1)
			{
				extension = infoPath.substring(pos + 1);
				infoPath = infoPath.substring(0, pos);
			}

			// a '#' after the extension ("Page.html#top") splits anchor from extension
			String anchor = null;
			if (extension != null)
			{
				pos = extension.indexOf('#');
				if (pos != -1)
				{
					anchor = extension.substring(pos);
					extension = extension.substring(0, pos);
				}
			}

			// Anchors without path, e.g. "#link"
			if (anchor == null)
			{
				pos = infoPath.indexOf("#");
				if (pos != -1)
				{
					anchor = infoPath.substring(pos);
					infoPath = infoPath.substring(0, pos);
				}
			}

			path = infoPath;
			this.extension = extension;
			this.anchor = anchor;
		}

		/**
		 * Gets the anchor (e.g. #top)
		 *
		 * @return anchor
		 */
		public final String getAnchor()
		{
			return anchor;
		}

		/**
		 * Gets extension.
		 *
		 * @return extension
		 */
		public final String getExtension()
		{
			return extension;
		}

		/**
		 * Gets pageParameters.
		 *
		 * @return pageParameters
		 */
		public final PageParameters getPageParameters()
		{
			return pageParameters;
		}

		/**
		 * Gets path.
		 *
		 * @return path
		 */
		public final String getPath()
		{
			return path;
		}

		/**
		 * Gets reference.
		 *
		 * @return reference
		 */
		public final String getReference()
		{
			return reference;
		}

		/**
		 * Gets absolute.
		 *
		 * @return absolute
		 */
		public final boolean isAbsolute()
		{
			return absolute;
		}

		/**
		 * Gets container.
		 *
		 * @return container
		 */
		public MarkupContainer getContainer()
		{
			return container;
		}

		/**
		 * Gets markup stream
		 *
		 * @return markup stream
		 */
		public MarkupStream getMarkupStream()
		{
			return markupStream;
		}
	}

	/**
	 * Resolves to anchor/ link components.
	 */
	private static final class AnchorResolverDelegate extends AbstractAutolinkResolverDelegate
	{
		/** the attribute to fetch. */
		private static final String attribute = "href";

		/**
		 * Set of supported extensions for creating bookmarkable page links. Anything that is not
		 * in this list will be handled as a resource reference.
		 */
		private final Set<String> supportedPageExtensions = new HashSet<>(4);

		/**
		 * Construct.
		 */
		public AnchorResolverDelegate()
		{
			// Initialize supported list of file name extension which'll create
			// bookmarkable pages
			supportedPageExtensions.add("html");
			supportedPageExtensions.add("xml");
			supportedPageExtensions.add("wml");
			supportedPageExtensions.add("svg");
		}

		/**
		 * Creates a bookmarkable page link for supported page extensions, otherwise a package
		 * resource reference. If the page class cannot be resolved relative to the current page,
		 * retries relative to the markup class of a merged (inherited) markup container.
		 *
		 * @see org.apache.wicket.markup.resolver.AutoLinkResolver.IAutolinkResolverDelegate#newAutoComponent(java.lang.String,
		 *      org.apache.wicket.markup.resolver.AutoLinkResolver.PathInfo)
		 */
		@Override
		@SuppressWarnings("unchecked")
		public Component newAutoComponent(final String autoId, PathInfo pathInfo)
		{
			final MarkupContainer container = pathInfo.getContainer();

			if ((pathInfo.extension != null) &&
				supportedPageExtensions.contains(pathInfo.extension))
			{
				// Obviously a href like href="myPkg.MyLabel.html" will do as
				// well. Wicket will not throw an exception. It accepts it.

				Page page = container.getPage();
				final IClassResolver defaultClassResolver = page.getApplication()
					.getApplicationSettings()
					.getClassResolver();
				String className = Packages.absolutePath(page.getClass(), pathInfo.path);
				className = Strings.replaceAll(className, "/", ".").toString();
				if (className.startsWith("."))
				{
					className = className.substring(1);
				}

				try
				{
					final Class<? extends Page> clazz = (Class<? extends Page>)defaultClassResolver.resolveClass(className);
					return new AutolinkBookmarkablePageLink<Void>(autoId, clazz,
						pathInfo.pageParameters, pathInfo.anchor);
				}
				catch (ClassNotFoundException ex)
				{
					log.warn("Did not find corresponding java class: " + className);
					// fall through
				}

				// Make sure base markup pages (inheritance) are handled correct
				MarkupContainer parentWithContainer = container;
				if (container.getParent() != null)
				{
					parentWithContainer = container.findParentWithAssociatedMarkup();
				}
				if ((parentWithContainer instanceof Page) && !pathInfo.path.startsWith("/") &&
					new MarkupStream(page.getMarkup()).isMergedMarkup())
				{
					IMarkupFragment containerMarkup = container.getMarkup();
					MarkupStream containerMarkupStream = new MarkupStream(containerMarkup);
					if (containerMarkupStream.atTag())
					{
						ComponentTag tag = containerMarkupStream.getTag();
						Class<? extends Page> clazz = (Class<? extends Page>)tag.getMarkupClass();
						if (clazz != null)
						{
							// Href is relative. Resolve the url given relative to
							// the current page
							className = Packages.absolutePath(clazz, pathInfo.path);
							className = Strings.replaceAll(className, "/", ".").toString();
							if (className.startsWith("."))
							{
								className = className.substring(1);
							}

							try
							{
								clazz = (Class<? extends Page>)defaultClassResolver.resolveClass(className);
								return new AutolinkBookmarkablePageLink<Void>(autoId, clazz,
									pathInfo.getPageParameters(), pathInfo.anchor);
							}
							catch (ClassNotFoundException ex)
							{
								log.warn("Did not find corresponding java class: " + className);
								// fall through
							}
						}
					}
				}
			}
			else
			{
				// not a registered type for bookmarkable pages; create a link
				// to a resource instead
				return newPackageResourceReferenceAutoComponent(autoId, pathInfo, attribute);
			}

			// fallthrough
			return null;
		}
	}

	/**
	 * Resolver that returns the proper attribute value from a component tag reflecting a URL
	 * reference such as src or href.
	 */
	private interface ITagReferenceResolver
	{
		/**
		 * Gets the reference attribute value of the tag depending on the type of the tag. For
		 * instance, anchors use the <code>href</code> attribute but script and image references
		 * use the <code>src</code> attribute.
		 *
		 * @param tag
		 *            The component tag. Not for modification.
		 * @return the tag value that constitutes the reference
		 */
		String getReference(final ComponentTag tag);
	}

	/**
	 * Autolink component that points to a {@link ResourceReference}. Autolink component delegate
	 * component resolution to their parent components. Reason: autolink tags don't have wicket:id
	 * and users wouldn't know where to add the component to.
	 */
	private final static class ResourceReferenceAutolink extends WebMarkupContainer
		implements
			IComponentResolver
	{
		private static final long serialVersionUID = 1L;

		/** The tag attribute (href or src) whose value is rewritten at render time. */
		private final String attribute;

		/** Resource reference; null when the package resource does not exist. */
		private final ResourceReference resourceReference;

		/** Parent container, consulted for the style/variation lookup. */
		private final MarkupContainer parent;

		/**
		 * @param id
		 * @param clazz
		 * @param href
		 * @param attribute
		 * @param parent
		 */
		public ResourceReferenceAutolink(final String id, final Class<?> clazz, final String href,
			final String attribute, MarkupContainer parent)
		{
			super(id);

			this.parent = parent;
			this.attribute = attribute;

			// Check whether it is a valid resource reference
			if (PackageResource.exists(clazz, href, getLocale(), getStyle(), getVariation()))
			{
				// Create the component implementing the link
				resourceReference = new PackageResourceReference(clazz, href, null, null, null);
			}
			else
			{
				// The resource does not exist. Set to null and ignore when
				// rendering.
				resourceReference = null;
			}
		}

		/**
		 * Delegates to the parent's variation when a parent is available.
		 *
		 * @see org.apache.wicket.Component#getVariation()
		 */
		@Override
		public String getVariation()
		{
			if (parent != null)
			{
				return parent.getVariation();
			}

			return super.getVariation();
		}

		/**
		 * Handles this link's tag.
		 *
		 * @param tag
		 *            the component tag
		 * @see org.apache.wicket.Component#onComponentTag(ComponentTag)
		 */
		@Override
		protected final void onComponentTag(final ComponentTag tag)
		{
			// Default handling for tag
			super.onComponentTag(tag);

			// only set the href attribute when the resource exists
			if (resourceReference != null)
			{
				// Set href to link to this link's linkClicked method
				ResourceReferenceRequestHandler handler = new ResourceReferenceRequestHandler(
					resourceReference);
				CharSequence url = getRequestCycle().urlFor(handler);

				// generate the href attribute
				tag.put(attribute, url);
			}
		}

		/**
		 * Delegates resolution of child components to the parent container, since autolink tags
		 * have no wicket:id of their own.
		 *
		 * @see org.apache.wicket.markup.resolver.IComponentResolver#resolve(org.apache.wicket.MarkupContainer,
		 *      org.apache.wicket.markup.MarkupStream, org.apache.wicket.markup.ComponentTag)
		 */
		@Override
		public Component resolve(MarkupContainer container, MarkupStream markupStream,
			ComponentTag tag)
		{
			return getParent().get(tag.getId());
		}
	}

	/**
	 * Resolves to {@link ResourceReference} link components. Typically used for header
	 * contributions like javascript and css files.
	 */
	private static final class ResourceReferenceResolverDelegate extends
		AbstractAutolinkResolverDelegate
	{
		/** The tag attribute (href or src) this delegate rewrites. */
		private final String attribute;

		/**
		 * Construct.
		 *
		 * @param attribute
		 */
		public ResourceReferenceResolverDelegate(final String attribute)
		{
			this.attribute = attribute;
		}

		/**
		 * @see org.apache.wicket.markup.resolver.AutoLinkResolver.IAutolinkResolverDelegate#newAutoComponent(java.lang.String,
		 *      org.apache.wicket.markup.resolver.AutoLinkResolver.PathInfo)
		 */
		@Override
		public Component newAutoComponent(final String autoId, final PathInfo pathInfo)
		{
			return newPackageResourceReferenceAutoComponent(autoId, pathInfo, attribute);
		}
	}

	/**
	 * Resolver object that returns the proper attribute value from component tags.
	 */
	private static final class TagReferenceResolver implements ITagReferenceResolver
	{
		/** the attribute to fetch. */
		private final String attribute;

		/**
		 * Construct.
		 *
		 * @param attribute
		 *            the attribute to fetch
		 */
		public TagReferenceResolver(final String attribute)
		{
			this.attribute = attribute;
		}

		/**
		 * Gets the reference attribute value of the tag depending on the type of the tag. For
		 * instance, anchors use the <code>href</code> attribute but script and image references
		 * use the <code>src</code> attribute.
		 *
		 * @param tag
		 *            The component tag. Not for modification.
		 * @return the tag value that constitutes the reference
		 */
		@Override
		public String getReference(final ComponentTag tag)
		{
			return tag.getAttributes().getString(attribute);
		}
	}

	/**
	 * If no specific resolver is found, always use the href attribute for references.
	 */
	private static final TagReferenceResolver DEFAULT_ATTRIBUTE_RESOLVER = new TagReferenceResolver(
		"href");

	/** Logging */
	private static final Logger log = LoggerFactory.getLogger(AutoLinkResolver.class);

	private static final long serialVersionUID = 1L;

	/**
	 * Autolink resolver delegates for constructing new autolinks reference keyed on tag name (such
	 * as &lt;script&gt; or &lt;a&gt;.
	 */
	private final Map<String, IAutolinkResolverDelegate> tagNameToAutolinkResolverDelegates = new HashMap<>();

	/**
	 * Resolver objects that know what attribute to read for getting the reference keyed on tag
	 * name (such as &lt;script&gt; or &lt;a&gt;.
	 */
	private final Map<String, ITagReferenceResolver> tagNameToTagReferenceResolvers = new HashMap<>();

	/**
	 * Construct. Registers the default attribute resolvers and autolink delegates for the common
	 * href-bearing (a, link) and src-bearing (script, img, input, embed) tags.
	 */
	public AutoLinkResolver()
	{
		// register tag reference resolvers
		TagReferenceResolver hrefTagReferenceResolver = new TagReferenceResolver("href");
		TagReferenceResolver srcTagReferenceResolver = new TagReferenceResolver("src");
		tagNameToTagReferenceResolvers.put("a", hrefTagReferenceResolver);
		tagNameToTagReferenceResolvers.put("link", hrefTagReferenceResolver);
		tagNameToTagReferenceResolvers.put("script", srcTagReferenceResolver);
		tagNameToTagReferenceResolvers.put("img", srcTagReferenceResolver);
		tagNameToTagReferenceResolvers.put("input", srcTagReferenceResolver);
		tagNameToTagReferenceResolvers.put("embed", srcTagReferenceResolver);

		// register autolink resolver delegates
		tagNameToAutolinkResolverDelegates.put("a", new AnchorResolverDelegate());
		tagNameToAutolinkResolverDelegates.put("link",
			new ResourceReferenceResolverDelegate("href"));
		ResourceReferenceResolverDelegate srcResRefResolver = new ResourceReferenceResolverDelegate(
			"src");
		tagNameToAutolinkResolverDelegates.put("script", srcResRefResolver);
		tagNameToAutolinkResolverDelegates.put("img", srcResRefResolver);
		tagNameToAutolinkResolverDelegates.put("input", srcResRefResolver);
		tagNameToAutolinkResolverDelegates.put("embed", srcResRefResolver);
	}

	/**
	 * Register (add or replace) a new resolver with the tagName and attributeName. The resolver
	 * will be invoked each time an appropriate tag and attribute is found.
	 *
	 * @param tagName
	 *            The tag name
	 * @param attributeName
	 *            The attribute name
	 * @param resolver
	 *            Implements what to do based on the tag and the attribute
	 */
	public final void addTagReferenceResolver(final String tagName, final String attributeName,
		final IAutolinkResolverDelegate resolver)
	{
		TagReferenceResolver tagReferenceResolver = new TagReferenceResolver(attributeName);
		tagNameToTagReferenceResolvers.put(tagName, tagReferenceResolver);

		tagNameToAutolinkResolverDelegates.put(tagName, resolver);
	}

	/**
	 * Get the resolver registered for 'tagName'
	 *
	 * @param tagName
	 *            The tag's name
	 * @return The resolver found. Null, if none registered
	 */
	public final IAutolinkResolverDelegate getAutolinkResolverDelegate(final String tagName)
	{
		return tagNameToAutolinkResolverDelegates.get(tagName);
	}

	/**
	 * @see org.apache.wicket.markup.resolver.IComponentResolver#resolve(org.apache.wicket.MarkupContainer,
	 *      org.apache.wicket.markup.MarkupStream, org.apache.wicket.markup.ComponentTag)
	 */
	@Override
	public final Component resolve(final MarkupContainer container,
		final MarkupStream markupStream, final ComponentTag tag)
	{
		// Must be marked as autolink tag
		if (tag.isAutolinkEnabled())
		{
			// get the reference resolver
			ITagReferenceResolver referenceResolver = tagNameToTagReferenceResolvers.get(tag.getName());
			if (referenceResolver == null)
			{
				// fallback on default
				referenceResolver = DEFAULT_ATTRIBUTE_RESOLVER;
			}

			// get the reference, which is typically the value of e.g. a href or src
			// attribute
			String reference = referenceResolver.getReference(tag);

			// create the path info object
			PathInfo pathInfo = new PathInfo(reference, container, markupStream);

			// Try to find the Page matching the href
			// Note: do not use tag.getId() because it will be modified while
			// resolving the link and hence the 2nd render will fail.
			Component link = resolveAutomaticLink(pathInfo, tag);

			if (log.isDebugEnabled())
			{
				log.debug("Added autolink " + link);
			}

			// Tell the container, we resolved the id
			return link;
		}

		// We were not able to resolve the id
		return null;
	}

	/**
	 * Resolves the given tag's page class and page parameters by parsing the tag component name
	 * and then searching for a page class at the absolute or relative URL specified by the href
	 * attribute of the tag.
	 * <p>
	 * Non-html references are treated similar.
	 *
	 * @param pathInfo
	 *            The container where the link is
	 * @param tag
	 *            the component tag
	 * @return A BookmarkablePageLink&lt;?&gt; to handle the href
	 */
	private Component resolveAutomaticLink(final PathInfo pathInfo, final ComponentTag tag)
	{
		final String componentId = tag.getId();

		// get the tag name, which is something like 'a' or 'script'
		final String tagName = tag.getName();

		// By setting the component name, the tag becomes a Wicket component
		// tag, which must have a associated Component.
		if (tag.getId() == null)
		{
			tag.setAutoComponentTag(true);
		}

		// now get the resolver delegate
		IAutolinkResolverDelegate autolinkResolverDelegate = tagNameToAutolinkResolverDelegates.get(tagName);
		Component autoComponent = null;
		if (autolinkResolverDelegate != null)
		{
			autoComponent = autolinkResolverDelegate.newAutoComponent(componentId, pathInfo);
		}

		if (autoComponent == null)
		{
			// resolving didn't have the desired result or there was no delegate
			// found; fallback on the default resolving which is a simple
			// component that leaves the tag unchanged
			autoComponent = new TransparentWebMarkupContainer(componentId);
		}

		return autoComponent;
	}
}
package com.palantir.atlasdb.performance.schema.generated; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.SortedMap; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.function.Supplier; import java.util.stream.Stream; import javax.annotation.Generated; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.MoreObjects; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Collections2; import com.google.common.collect.ComparisonChain; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import com.google.common.collect.Sets; import com.google.common.hash.Hashing; import com.google.common.primitives.Bytes; import com.google.common.primitives.UnsignedBytes; import com.google.protobuf.InvalidProtocolBufferException; import com.palantir.atlasdb.compress.CompressionUtils; import com.palantir.atlasdb.encoding.PtBytes; import com.palantir.atlasdb.keyvalue.api.BatchColumnRangeSelection; import com.palantir.atlasdb.keyvalue.api.Cell; import com.palantir.atlasdb.keyvalue.api.ColumnRangeSelection; import com.palantir.atlasdb.keyvalue.api.ColumnRangeSelections; import 
com.palantir.atlasdb.keyvalue.api.ColumnSelection; import com.palantir.atlasdb.keyvalue.api.Namespace; import com.palantir.atlasdb.keyvalue.api.Prefix; import com.palantir.atlasdb.keyvalue.api.RangeRequest; import com.palantir.atlasdb.keyvalue.api.RowResult; import com.palantir.atlasdb.keyvalue.api.TableReference; import com.palantir.atlasdb.keyvalue.impl.Cells; import com.palantir.atlasdb.ptobject.EncodingUtils; import com.palantir.atlasdb.table.api.AtlasDbDynamicMutablePersistentTable; import com.palantir.atlasdb.table.api.AtlasDbMutablePersistentTable; import com.palantir.atlasdb.table.api.AtlasDbNamedMutableTable; import com.palantir.atlasdb.table.api.AtlasDbNamedPersistentSet; import com.palantir.atlasdb.table.api.ColumnValue; import com.palantir.atlasdb.table.api.TypedRowResult; import com.palantir.atlasdb.table.description.ColumnValueDescription.Compression; import com.palantir.atlasdb.table.description.ValueType; import com.palantir.atlasdb.table.generation.ColumnValues; import com.palantir.atlasdb.table.generation.Descending; import com.palantir.atlasdb.table.generation.NamedColumnValue; import com.palantir.atlasdb.transaction.api.AtlasDbConstraintCheckingMode; import com.palantir.atlasdb.transaction.api.ConstraintCheckingTransaction; import com.palantir.atlasdb.transaction.api.ImmutableGetRangesQuery; import com.palantir.atlasdb.transaction.api.Transaction; import com.palantir.common.base.AbortingVisitor; import com.palantir.common.base.AbortingVisitors; import com.palantir.common.base.BatchingVisitable; import com.palantir.common.base.BatchingVisitableView; import com.palantir.common.base.BatchingVisitables; import com.palantir.common.base.Throwables; import com.palantir.common.collect.IterableView; import com.palantir.common.persist.Persistable; import com.palantir.common.persist.Persistable.Hydrator; import com.palantir.common.persist.Persistables; import com.palantir.util.AssertUtils; import com.palantir.util.crypto.Sha256Hash; 
// GENERATED CODE — produced by the AtlasDB TableRenderer from the table schema.
// Do NOT edit by hand: regenerate from the schema definition instead. The
// __CLASS_HASH field at the bottom ties this source to the schema revision it
// was rendered from.
@Generated("com.palantir.atlasdb.table.description.render.TableRenderer")
@SuppressWarnings("all")
public final class ValueStreamIdxTable implements
        AtlasDbDynamicMutablePersistentTable<ValueStreamIdxTable.ValueStreamIdxRow,
                                                ValueStreamIdxTable.ValueStreamIdxColumn,
                                                ValueStreamIdxTable.ValueStreamIdxColumnValue,
                                                ValueStreamIdxTable.ValueStreamIdxRowResult> {
    // Transaction through which every read/write on this table is performed.
    private final Transaction t;
    // Callbacks invoked after each put (see put(Multimap)).
    private final List<ValueStreamIdxTrigger> triggers;
    private final static String rawTableName = "blob_stream_idx";
    private final TableReference tableRef;
    private final static ColumnSelection allColumns = ColumnSelection.all();

    // Static factories: table handle bound to a transaction and namespace.
    static ValueStreamIdxTable of(Transaction t, Namespace namespace) {
        return new ValueStreamIdxTable(t, namespace, ImmutableList.<ValueStreamIdxTrigger>of());
    }

    static ValueStreamIdxTable of(Transaction t, Namespace namespace, ValueStreamIdxTrigger trigger, ValueStreamIdxTrigger... triggers) {
        return new ValueStreamIdxTable(t, namespace, ImmutableList.<ValueStreamIdxTrigger>builder().add(trigger).add(triggers).build());
    }

    static ValueStreamIdxTable of(Transaction t, Namespace namespace, List<ValueStreamIdxTrigger> triggers) {
        return new ValueStreamIdxTable(t, namespace, triggers);
    }

    private ValueStreamIdxTable(Transaction t, Namespace namespace, List<ValueStreamIdxTrigger> triggers) {
        this.t = t;
        this.tableRef = TableReference.create(namespace, rawTableName);
        this.triggers = triggers;
    }

    public static String getRawTableName() {
        return rawTableName;
    }

    public TableReference getTableRef() {
        return tableRef;
    }

    public String getTableName() {
        return tableRef.getQualifiedName();
    }

    public Namespace getNamespace() {
        return tableRef.getNamespace();
    }

    /**
     * Row key: a single unsigned-var-long-encoded stream id.
     * <pre>
     * ValueStreamIdxRow {
     *   {@literal Long id};
     * }
     * </pre>
     */
    public static final class ValueStreamIdxRow implements Persistable, Comparable<ValueStreamIdxRow> {
        private final long id;

        public static ValueStreamIdxRow of(long id) {
            return new ValueStreamIdxRow(id);
        }

        private ValueStreamIdxRow(long id) {
            this.id = id;
        }

        public long getId() {
            return id;
        }

        public static Function<ValueStreamIdxRow, Long> getIdFun() {
            return new Function<ValueStreamIdxRow, Long>() {
                @Override
                public Long apply(ValueStreamIdxRow row) {
                    return row.id;
                }
            };
        }

        public static Function<Long, ValueStreamIdxRow> fromIdFun() {
            return new Function<Long, ValueStreamIdxRow>() {
                @Override
                public ValueStreamIdxRow apply(Long row) {
                    return ValueStreamIdxRow.of(row);
                }
            };
        }

        // Serializes the row key as an unsigned var-long (inverse of BYTES_HYDRATOR).
        @Override
        public byte[] persistToBytes() {
            byte[] idBytes = EncodingUtils.encodeUnsignedVarLong(id);
            return EncodingUtils.add(idBytes);
        }

        public static final Hydrator<ValueStreamIdxRow> BYTES_HYDRATOR = new Hydrator<ValueStreamIdxRow>() {
            @Override
            public ValueStreamIdxRow hydrateFromBytes(byte[] __input) {
                int __index = 0;
                Long id = EncodingUtils.decodeUnsignedVarLong(__input, __index);
                __index += EncodingUtils.sizeOfUnsignedVarLong(id);
                return new ValueStreamIdxRow(id);
            }
        };

        @Override
        public String toString() {
            return MoreObjects.toStringHelper(getClass().getSimpleName())
                .add("id", id)
                .toString();
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            ValueStreamIdxRow other = (ValueStreamIdxRow) obj;
            return Objects.equals(id, other.id);
        }

        @SuppressWarnings("ArrayHashCode")
        @Override
        public int hashCode() {
            return Objects.hashCode(id);
        }

        @Override
        public int compareTo(ValueStreamIdxRow o) {
            return ComparisonChain.start()
                .compare(this.id, o.id)
                .result();
        }
    }

    /**
     * Dynamic column key: the raw reference bytes of the entry pointing at the stream.
     * <pre>
     * ValueStreamIdxColumn {
     *   {@literal byte[] reference};
     * }
     * </pre>
     */
    public static final class ValueStreamIdxColumn implements Persistable, Comparable<ValueStreamIdxColumn> {
        private final byte[] reference;

        public static ValueStreamIdxColumn of(byte[] reference) {
            return new ValueStreamIdxColumn(reference);
        }

        private ValueStreamIdxColumn(byte[] reference) {
            this.reference = reference;
        }

        public byte[] getReference() {
            return reference;
        }

        public static Function<ValueStreamIdxColumn, byte[]> getReferenceFun() {
            return new Function<ValueStreamIdxColumn, byte[]>() {
                @Override
                public byte[] apply(ValueStreamIdxColumn row) {
                    return row.reference;
                }
            };
        }

        public static Function<byte[], ValueStreamIdxColumn> fromReferenceFun() {
            return new Function<byte[], ValueStreamIdxColumn>() {
                @Override
                public ValueStreamIdxColumn apply(byte[] row) {
                    return ValueStreamIdxColumn.of(row);
                }
            };
        }

        // Serializes the column key as size-prefixed bytes (inverse of BYTES_HYDRATOR).
        @Override
        public byte[] persistToBytes() {
            byte[] referenceBytes = EncodingUtils.encodeSizedBytes(reference);
            return EncodingUtils.add(referenceBytes);
        }

        public static final Hydrator<ValueStreamIdxColumn> BYTES_HYDRATOR = new Hydrator<ValueStreamIdxColumn>() {
            @Override
            public ValueStreamIdxColumn hydrateFromBytes(byte[] __input) {
                int __index = 0;
                byte[] reference = EncodingUtils.decodeSizedBytes(__input, __index);
                __index += EncodingUtils.sizeOfSizedBytes(reference);
                return new ValueStreamIdxColumn(reference);
            }
        };

        @Override
        public String toString() {
            return MoreObjects.toStringHelper(getClass().getSimpleName())
                .add("reference", reference)
                .toString();
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            ValueStreamIdxColumn other = (ValueStreamIdxColumn) obj;
            return Arrays.equals(reference, other.reference);
        }

        @SuppressWarnings("ArrayHashCode")
        @Override
        public int hashCode() {
            return Objects.hashCode(reference);
        }

        @Override
        public int compareTo(ValueStreamIdxColumn o) {
            return ComparisonChain.start()
                .compare(this.reference, o.reference, UnsignedBytes.lexicographicalComparator())
                .result();
        }
    }

    // Hook notified after every successful put on this table.
    public interface ValueStreamIdxTrigger {
        public void putValueStreamIdx(Multimap<ValueStreamIdxRow, ? extends ValueStreamIdxColumnValue> newRows);
    }

    /**
     * Pairs a dynamic column key with its Long value (stored as an uncompressed
     * unsigned var-long).
     * <pre>
     * Column name description {
     *   {@literal byte[] reference};
     * }
     * Column value description {
     *   type: Long;
     * }
     * </pre>
     */
    public static final class ValueStreamIdxColumnValue implements ColumnValue<Long> {
        private final ValueStreamIdxColumn columnName;
        private final Long value;

        public static ValueStreamIdxColumnValue of(ValueStreamIdxColumn columnName, Long value) {
            return new ValueStreamIdxColumnValue(columnName, value);
        }

        private ValueStreamIdxColumnValue(ValueStreamIdxColumn columnName, Long value) {
            this.columnName = columnName;
            this.value = value;
        }

        public ValueStreamIdxColumn getColumnName() {
            return columnName;
        }

        @Override
        public Long getValue() {
            return value;
        }

        @Override
        public byte[] persistColumnName() {
            return columnName.persistToBytes();
        }

        // Compression.NONE: value bytes are stored as-is.
        @Override
        public byte[] persistValue() {
            byte[] bytes = EncodingUtils.encodeUnsignedVarLong(value);
            return CompressionUtils.compress(bytes, Compression.NONE);
        }

        public static Long hydrateValue(byte[] bytes) {
            bytes = CompressionUtils.decompress(bytes, Compression.NONE);
            return EncodingUtils.decodeUnsignedVarLong(bytes, 0);
        }

        public static Function<ValueStreamIdxColumnValue, ValueStreamIdxColumn> getColumnNameFun() {
            return new Function<ValueStreamIdxColumnValue, ValueStreamIdxColumn>() {
                @Override
                public ValueStreamIdxColumn apply(ValueStreamIdxColumnValue columnValue) {
                    return columnValue.getColumnName();
                }
            };
        }

        public static Function<ValueStreamIdxColumnValue, Long> getValueFun() {
            return new Function<ValueStreamIdxColumnValue, Long>() {
                @Override
                public Long apply(ValueStreamIdxColumnValue columnValue) {
                    return columnValue.getValue();
                }
            };
        }

        @Override
        public String toString() {
            return MoreObjects.toStringHelper(getClass().getSimpleName())
                .add("ColumnName", this.columnName)
                .add("Value", this.value)
                .toString();
        }
    }

    /** One fully hydrated row: its key plus the set of column/value pairs read for it. */
    public static final class ValueStreamIdxRowResult implements TypedRowResult {
        private final ValueStreamIdxRow rowName;
        private final ImmutableSet<ValueStreamIdxColumnValue> columnValues;

        public static ValueStreamIdxRowResult of(RowResult<byte[]> rowResult) {
            ValueStreamIdxRow rowName = ValueStreamIdxRow.BYTES_HYDRATOR.hydrateFromBytes(rowResult.getRowName());
            Set<ValueStreamIdxColumnValue> columnValues = Sets.newHashSetWithExpectedSize(rowResult.getColumns().size());
            for (Entry<byte[], byte[]> e : rowResult.getColumns().entrySet()) {
                ValueStreamIdxColumn col = ValueStreamIdxColumn.BYTES_HYDRATOR.hydrateFromBytes(e.getKey());
                Long value = ValueStreamIdxColumnValue.hydrateValue(e.getValue());
                columnValues.add(ValueStreamIdxColumnValue.of(col, value));
            }
            return new ValueStreamIdxRowResult(rowName, ImmutableSet.copyOf(columnValues));
        }

        private ValueStreamIdxRowResult(ValueStreamIdxRow rowName, ImmutableSet<ValueStreamIdxColumnValue> columnValues) {
            this.rowName = rowName;
            this.columnValues = columnValues;
        }

        @Override
        public ValueStreamIdxRow getRowName() {
            return rowName;
        }

        public Set<ValueStreamIdxColumnValue> getColumnValues() {
            return columnValues;
        }

        public static Function<ValueStreamIdxRowResult, ValueStreamIdxRow> getRowNameFun() {
            return new Function<ValueStreamIdxRowResult, ValueStreamIdxRow>() {
                @Override
                public ValueStreamIdxRow apply(ValueStreamIdxRowResult rowResult) {
                    return rowResult.rowName;
                }
            };
        }

        public static Function<ValueStreamIdxRowResult, ImmutableSet<ValueStreamIdxColumnValue>> getColumnValuesFun() {
            return new Function<ValueStreamIdxRowResult, ImmutableSet<ValueStreamIdxColumnValue>>() {
                @Override
                public ImmutableSet<ValueStreamIdxColumnValue> apply(ValueStreamIdxRowResult rowResult) {
                    return rowResult.columnValues;
                }
            };
        }

        @Override
        public String toString() {
            return MoreObjects.toStringHelper(getClass().getSimpleName())
                .add("RowName", getRowName())
                .add("ColumnValues", getColumnValues())
                .toString();
        }
    }

    @Override
    public void delete(ValueStreamIdxRow row, ValueStreamIdxColumn column) {
        delete(ImmutableMultimap.of(row, column));
    }

    // Deletes every existing cell of the given rows; reads the rows first to
    // discover which columns are present.
    @Override
    public void delete(Iterable<ValueStreamIdxRow> rows) {
        Multimap<ValueStreamIdxRow, ValueStreamIdxColumn> toRemove = HashMultimap.create();
        Multimap<ValueStreamIdxRow, ValueStreamIdxColumnValue> result = getRowsMultimap(rows);
        for (Entry<ValueStreamIdxRow, ValueStreamIdxColumnValue> e : result.entries()) {
            toRemove.put(e.getKey(), e.getValue().getColumnName());
        }
        delete(toRemove);
    }

    @Override
    public void delete(Multimap<ValueStreamIdxRow, ValueStreamIdxColumn> values) {
        t.delete(tableRef, ColumnValues.toCells(values));
    }

    @Override
    public void put(ValueStreamIdxRow rowName, Iterable<ValueStreamIdxColumnValue> values) {
        put(ImmutableMultimap.<ValueStreamIdxRow, ValueStreamIdxColumnValue>builder().putAll(rowName, values).build());
    }

    @Override
    public void put(ValueStreamIdxRow rowName, ValueStreamIdxColumnValue... values) {
        put(ImmutableMultimap.<ValueStreamIdxRow, ValueStreamIdxColumnValue>builder().putAll(rowName, values).build());
    }

    // Writes the cells in the transaction, then notifies all registered triggers.
    @Override
    public void put(Multimap<ValueStreamIdxRow, ? extends ValueStreamIdxColumnValue> values) {
        t.useTable(tableRef, this);
        t.put(tableRef, ColumnValues.toCellValues(values));
        for (ValueStreamIdxTrigger trigger : triggers) {
            trigger.putValueStreamIdx(values);
        }
    }

    // Re-writes existing cells unchanged (to take write locks / bump them in the
    // transaction) and deletes the requested cells that did not exist.
    @Override
    public void touch(Multimap<ValueStreamIdxRow, ValueStreamIdxColumn> values) {
        Multimap<ValueStreamIdxRow, ValueStreamIdxColumnValue> currentValues = get(values);
        put(currentValues);
        Multimap<ValueStreamIdxRow, ValueStreamIdxColumn> toDelete = HashMultimap.create(values);
        for (Map.Entry<ValueStreamIdxRow, ValueStreamIdxColumnValue> e : currentValues.entries()) {
            toDelete.remove(e.getKey(), e.getValue().getColumnName());
        }
        delete(toDelete);
    }

    public static ColumnSelection getColumnSelection(Collection<ValueStreamIdxColumn> cols) {
        return ColumnSelection.create(Collections2.transform(cols, Persistables.persistToBytesFunction()));
    }

    public static ColumnSelection getColumnSelection(ValueStreamIdxColumn... cols) {
        return getColumnSelection(Arrays.asList(cols));
    }

    // Fetches specific cells; zero-length values are treated as absent and skipped.
    @Override
    public Multimap<ValueStreamIdxRow, ValueStreamIdxColumnValue> get(Multimap<ValueStreamIdxRow, ValueStreamIdxColumn> cells) {
        Set<Cell> rawCells = ColumnValues.toCells(cells);
        Map<Cell, byte[]> rawResults = t.get(tableRef, rawCells);
        Multimap<ValueStreamIdxRow, ValueStreamIdxColumnValue> rowMap = HashMultimap.create();
        for (Entry<Cell, byte[]> e : rawResults.entrySet()) {
            if (e.getValue().length > 0) {
                ValueStreamIdxRow row = ValueStreamIdxRow.BYTES_HYDRATOR.hydrateFromBytes(e.getKey().getRowName());
                ValueStreamIdxColumn col = ValueStreamIdxColumn.BYTES_HYDRATOR.hydrateFromBytes(e.getKey().getColumnName());
                Long val = ValueStreamIdxColumnValue.hydrateValue(e.getValue());
                rowMap.put(row, ValueStreamIdxColumnValue.of(col, val));
            }
        }
        return rowMap;
    }

    @Override
    public List<ValueStreamIdxColumnValue> getRowColumns(ValueStreamIdxRow row) {
        return getRowColumns(row, allColumns);
    }

    @Override
    public List<ValueStreamIdxColumnValue> getRowColumns(ValueStreamIdxRow row, ColumnSelection columns) {
        byte[] bytes = row.persistToBytes();
        RowResult<byte[]> rowResult = t.getRows(tableRef, ImmutableSet.of(bytes), columns).get(bytes);
        if (rowResult == null) {
            return ImmutableList.of();
        } else {
            List<ValueStreamIdxColumnValue> ret = Lists.newArrayListWithCapacity(rowResult.getColumns().size());
            for (Entry<byte[], byte[]> e : rowResult.getColumns().entrySet()) {
                ValueStreamIdxColumn col = ValueStreamIdxColumn.BYTES_HYDRATOR.hydrateFromBytes(e.getKey());
                Long val = ValueStreamIdxColumnValue.hydrateValue(e.getValue());
                ret.add(ValueStreamIdxColumnValue.of(col, val));
            }
            return ret;
        }
    }

    @Override
    public Multimap<ValueStreamIdxRow, ValueStreamIdxColumnValue> getRowsMultimap(Iterable<ValueStreamIdxRow> rows) {
        return getRowsMultimapInternal(rows, allColumns);
    }

    @Override
    public Multimap<ValueStreamIdxRow, ValueStreamIdxColumnValue> getRowsMultimap(Iterable<ValueStreamIdxRow> rows, ColumnSelection columns) {
        return getRowsMultimapInternal(rows, columns);
    }

    private Multimap<ValueStreamIdxRow, ValueStreamIdxColumnValue> getRowsMultimapInternal(Iterable<ValueStreamIdxRow> rows, ColumnSelection columns) {
        SortedMap<byte[], RowResult<byte[]>> results = t.getRows(tableRef, Persistables.persistAll(rows), columns);
        return getRowMapFromRowResults(results.values());
    }

    private static Multimap<ValueStreamIdxRow, ValueStreamIdxColumnValue> getRowMapFromRowResults(Collection<RowResult<byte[]>> rowResults) {
        Multimap<ValueStreamIdxRow, ValueStreamIdxColumnValue> rowMap = HashMultimap.create();
        for (RowResult<byte[]> result : rowResults) {
            ValueStreamIdxRow row = ValueStreamIdxRow.BYTES_HYDRATOR.hydrateFromBytes(result.getRowName());
            for (Entry<byte[], byte[]> e : result.getColumns().entrySet()) {
                ValueStreamIdxColumn col = ValueStreamIdxColumn.BYTES_HYDRATOR.hydrateFromBytes(e.getKey());
                Long val = ValueStreamIdxColumnValue.hydrateValue(e.getValue());
                rowMap.put(row, ValueStreamIdxColumnValue.of(col, val));
            }
        }
        return rowMap;
    }

    // Lazy per-row column-range reads: hydration happens as the visitable is consumed.
    @Override
    public Map<ValueStreamIdxRow, BatchingVisitable<ValueStreamIdxColumnValue>> getRowsColumnRange(Iterable<ValueStreamIdxRow> rows, BatchColumnRangeSelection columnRangeSelection) {
        Map<byte[], BatchingVisitable<Map.Entry<Cell, byte[]>>> results = t.getRowsColumnRange(tableRef, Persistables.persistAll(rows), columnRangeSelection);
        Map<ValueStreamIdxRow, BatchingVisitable<ValueStreamIdxColumnValue>> transformed = Maps.newHashMapWithExpectedSize(results.size());
        for (Entry<byte[], BatchingVisitable<Map.Entry<Cell, byte[]>>> e : results.entrySet()) {
            ValueStreamIdxRow row = ValueStreamIdxRow.BYTES_HYDRATOR.hydrateFromBytes(e.getKey());
            BatchingVisitable<ValueStreamIdxColumnValue> bv = BatchingVisitables.transform(e.getValue(), result -> {
                ValueStreamIdxColumn col = ValueStreamIdxColumn.BYTES_HYDRATOR.hydrateFromBytes(result.getKey().getColumnName());
                Long val = ValueStreamIdxColumnValue.hydrateValue(result.getValue());
                return ValueStreamIdxColumnValue.of(col, val);
            });
            transformed.put(row, bv);
        }
        return transformed;
    }

    @Override
    public Iterator<Map.Entry<ValueStreamIdxRow, ValueStreamIdxColumnValue>> getRowsColumnRange(Iterable<ValueStreamIdxRow> rows, ColumnRangeSelection columnRangeSelection, int batchHint) {
        Iterator<Map.Entry<Cell, byte[]>> results = t.getRowsColumnRange(getTableRef(), Persistables.persistAll(rows), columnRangeSelection, batchHint);
        return Iterators.transform(results, e -> {
            ValueStreamIdxRow row = ValueStreamIdxRow.BYTES_HYDRATOR.hydrateFromBytes(e.getKey().getRowName());
            ValueStreamIdxColumn col = ValueStreamIdxColumn.BYTES_HYDRATOR.hydrateFromBytes(e.getKey().getColumnName());
            Long val = ValueStreamIdxColumnValue.hydrateValue(e.getValue());
            ValueStreamIdxColumnValue colValue = ValueStreamIdxColumnValue.of(col, val);
            return Maps.immutableEntry(row, colValue);
        });
    }

    @Override
    public Map<ValueStreamIdxRow, Iterator<ValueStreamIdxColumnValue>> getRowsColumnRangeIterator(Iterable<ValueStreamIdxRow> rows, BatchColumnRangeSelection columnRangeSelection) {
        Map<byte[], Iterator<Map.Entry<Cell, byte[]>>> results = t.getRowsColumnRangeIterator(tableRef, Persistables.persistAll(rows), columnRangeSelection);
        Map<ValueStreamIdxRow, Iterator<ValueStreamIdxColumnValue>> transformed = Maps.newHashMapWithExpectedSize(results.size());
        for (Entry<byte[], Iterator<Map.Entry<Cell, byte[]>>> e : results.entrySet()) {
            ValueStreamIdxRow row = ValueStreamIdxRow.BYTES_HYDRATOR.hydrateFromBytes(e.getKey());
            Iterator<ValueStreamIdxColumnValue> bv = Iterators.transform(e.getValue(), result -> {
                ValueStreamIdxColumn col = ValueStreamIdxColumn.BYTES_HYDRATOR.hydrateFromBytes(result.getKey().getColumnName());
                Long val = ValueStreamIdxColumnValue.hydrateValue(result.getValue());
                return ValueStreamIdxColumnValue.of(col, val);
            });
            transformed.put(row, bv);
        }
        return transformed;
    }

    // Canonicalizes "select everything" to the shared allColumns instance.
    private ColumnSelection optimizeColumnSelection(ColumnSelection columns) {
        if (columns.allColumnsSelected()) {
            return allColumns;
        }
        return columns;
    }

    public BatchingVisitableView<ValueStreamIdxRowResult> getAllRowsUnordered() {
        return getAllRowsUnordered(allColumns);
    }

    public BatchingVisitableView<ValueStreamIdxRowResult> getAllRowsUnordered(ColumnSelection columns) {
        return BatchingVisitables.transform(t.getRange(tableRef, RangeRequest.builder()
                .retainColumns(optimizeColumnSelection(columns)).build()),
                new Function<RowResult<byte[]>, ValueStreamIdxRowResult>() {
            @Override
            public ValueStreamIdxRowResult apply(RowResult<byte[]> input) {
                return ValueStreamIdxRowResult.of(input);
            }
        });
    }

    // No constraints are defined on this table; both checks are intentionally no-ops.
    @Override
    public List<String> findConstraintFailures(Map<Cell, byte[]> writes,
                                               ConstraintCheckingTransaction transaction,
                                               AtlasDbConstraintCheckingMode constraintCheckingMode) {
        return ImmutableList.of();
    }

    @Override
    public List<String> findConstraintFailuresNoRead(Map<Cell, byte[]> writes,
                                                     AtlasDbConstraintCheckingMode constraintCheckingMode) {
        return ImmutableList.of();
    }

    /**
     * This exists to avoid unused import warnings
     * {@link AbortingVisitor} {@link AbortingVisitors} {@link ArrayListMultimap}
     * {@link Arrays} {@link AssertUtils} {@link AtlasDbConstraintCheckingMode}
     * {@link AtlasDbDynamicMutablePersistentTable} {@link AtlasDbMutablePersistentTable}
     * {@link AtlasDbNamedMutableTable} {@link AtlasDbNamedPersistentSet}
     * {@link BatchColumnRangeSelection} {@link BatchingVisitable}
     * {@link BatchingVisitableView} {@link BatchingVisitables} {@link BiFunction}
     * {@link Bytes} {@link Callable} {@link Cell} {@link Cells} {@link Collection}
     * {@link Collections2} {@link ColumnRangeSelection} {@link ColumnRangeSelections}
     * {@link ColumnSelection} {@link ColumnValue} {@link ColumnValues}
     * {@link ComparisonChain} {@link Compression} {@link CompressionUtils}
     * {@link ConstraintCheckingTransaction} {@link Descending} {@link EncodingUtils}
     * {@link Entry} {@link EnumSet} {@link Function} {@link Generated}
     * {@link HashMultimap} {@link HashSet} {@link Hashing} {@link Hydrator}
     * {@link ImmutableGetRangesQuery} {@link ImmutableList} {@link ImmutableMap}
     * {@link ImmutableMultimap} {@link ImmutableSet} {@link InvalidProtocolBufferException}
     * {@link IterableView} {@link Iterables} {@link Iterator} {@link Iterators}
     * {@link Joiner} {@link List} {@link Lists} {@link Map} {@link Maps}
     * {@link MoreObjects} {@link Multimap} {@link Multimaps} {@link NamedColumnValue}
     * {@link Namespace} {@link Objects} {@link Optional} {@link Persistable}
     * {@link Persistables} {@link Prefix} {@link PtBytes} {@link RangeRequest}
     * {@link RowResult} {@link Set} {@link Sets} {@link Sha256Hash} {@link SortedMap}
     * {@link Stream} {@link Supplier} {@link TableReference} {@link Throwables}
     * {@link TimeUnit} {@link Transaction} {@link TypedRowResult} {@link UUID}
     * {@link UnsignedBytes} {@link ValueType}
     */
    // Schema checksum the generator uses to detect stale renders; must not be edited.
    static String __CLASS_HASH = "k443j7b83kpeasfvaZjY9Q==";
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.oodt.cas.filemgr.catalog.solr; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.HttpMethod; import org.apache.commons.httpclient.HttpStatus; import org.apache.commons.httpclient.NameValuePair; import org.apache.commons.httpclient.methods.GetMethod; import org.apache.commons.httpclient.methods.PostMethod; import org.apache.commons.httpclient.methods.StringRequestEntity; import org.apache.oodt.cas.filemgr.structs.ProductType; import org.apache.oodt.cas.filemgr.structs.exceptions.CatalogException; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.concurrent.ConcurrentHashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; /** * Class containing client-side functionality for interacting with a Solr server. * This class uses an {@link HttpClient} for all HTTP communication. 
* * @author Luca Cinquini * */ public class SolrClient { // base URL of Solr server private String solrUrl; private final Logger LOG = Logger.getLogger(this.getClass().getName()); /** * Constructor initializes the Solr URL * @param url */ public SolrClient(final String url) { solrUrl = url; } /** * Method to send one or more documents to be indexed to Solr. * * @param docs * @param commit * @param mimeType : the mime-type format of the documents * @return * @throws CatalogException */ public String index(List<String> docs, boolean commit, String mimeType) throws CatalogException { try { final String url = this.buildUpdateUrl(); // build message StringBuilder message = new StringBuilder("<add>"); for (String doc : docs) { message.append(doc); } message.append("</add>"); // send POST request LOG.info("Posting message:"+message+" to URL:"+url); String response = doPost(url, message.toString(), mimeType); LOG.info(response); // commit changes ? if (commit) { this.commit(); } LOG.info(response); return response; } catch(Exception e) { LOG.log(Level.SEVERE, e.getMessage()); throw new CatalogException(e.getMessage()); } } /** * Method to send a message containing a 'delete' instruction to Solr. * @param id * @param commit * @return * @throws CatalogException */ public String delete(String id, boolean commit) throws CatalogException { try { // build POST request String url = this.buildUpdateUrl(); if (commit) { url += "?commit=true"; } String message = "<delete><query>id:"+id+"</query></delete>"; // send POST request LOG.info("Posting message:"+message+" to URL:"+url); return doPost(url, message, Parameters.MIME_TYPE_XML); } catch(Exception e) { LOG.log(Level.SEVERE, e.getMessage()); throw new CatalogException(e.getMessage()); } } /** * Method to query the Solr index for a product with the specified id. 
* @param id * @return */ public String queryProductById(String id, String mimeType) throws CatalogException { ConcurrentHashMap<String, String[]> params = new ConcurrentHashMap<String, String[]>(); params.put("q", new String[]{Parameters.PRODUCT_ID+":"+id} ); return query(params, mimeType); } /** * Method to query the Solr index for a product with the specified name. * @param name * @param mimeType * @return */ public String queryProductByName(String name, String mimeType) throws CatalogException { ConcurrentHashMap<String, String[]> params = new ConcurrentHashMap<String, String[]>(); params.put("q", new String[]{Parameters.PRODUCT_NAME+":"+name} ); return query(params, mimeType); } /** * Method to query Solr for the most recent 'n' products. * @param n * @return * @throws CatalogException */ public String queryProductsByDate(int n, String mimeType) throws CatalogException { ConcurrentHashMap<String, String[]> params = new ConcurrentHashMap<String, String[]>(); params.put("q", new String[]{ "*:*"} ); params.put("rows", new String[]{ ""+n} ); params.put("sort", new String[]{ Parameters.PRODUCT_RECEIVED_TIME+" desc"} ); return query(params, mimeType); } /** * Method to query Solr for the most recent 'n' products of a specified type. * @param n * @return * @throws CatalogException */ public String queryProductsByDateAndType(int n, ProductType type, String mimeType) throws CatalogException { ConcurrentHashMap<String, String[]> params = new ConcurrentHashMap<String, String[]>(); params.put("q", new String[]{ Parameters.PRODUCT_TYPE_NAME+type.getName() } ); params.put("rows", new String[]{ ""+n} ); params.put("sort", new String[]{ Parameters.PRODUCT_RECEIVED_TIME+" desc"} ); return query(params, mimeType); } /** * Method to commit the current changes to the Solr index. 
* @throws Exception */ public void commit() throws IOException, CatalogException { String message = "<commit waitSearcher=\"true\"/>"; String url = this.buildUpdateUrl(); doPost(url, message, Parameters.MIME_TYPE_XML); } /** * Method to send a generic query to the Solr server. * * @param parameters * @param mimeType : the desired mime type for the results (XML, JSON, ...) * @return */ public String query(Map<String, String[]> parameters, String mimeType) throws CatalogException { try { // build HTTP request String url = this.buildSelectUrl(); // execute request return this.doGet(url, parameters, mimeType); } catch(Exception e) { LOG.log(Level.SEVERE, e.getMessage()); throw new CatalogException(e.getMessage()); } } /** * Method to execute a GET request to the given URL with the given parameters. * @param url * @param parameters * @return */ private String doGet(String url, Map<String, String[]> parameters, String mimeType) throws IOException, CatalogException { // build HTTP/GET request GetMethod method = new GetMethod(url); List<NameValuePair> nvps = new ArrayList<NameValuePair>(); for (Map.Entry<String, String[]> key : parameters.entrySet()) { for (String value : key.getValue()) { nvps.add(new NameValuePair(key.getKey(), value)); } } // request results in JSON format if (mimeType.equals(Parameters.MIME_TYPE_JSON)) { nvps.add(new NameValuePair("wt", "json")); } method.setQueryString( nvps.toArray( new NameValuePair[nvps.size()] ) ); LOG.info("GET url: "+url+" query string: "+method.getQueryString()); // send HTTP/GET request, return response return doHttp(method); } /** * Method to execute a POST request to the given URL. 
* @param url * @param document * @return */ private String doPost(String url, String document, String mimeType) throws IOException, CatalogException { // build HTTP/POST request PostMethod method = new PostMethod(url); StringRequestEntity requestEntity = new StringRequestEntity(document, mimeType, "UTF-8"); method.setRequestEntity(requestEntity); // send HTTP/POST request, return response return doHttp(method); } /** * Common functionality for HTTP GET and POST requests. * @param method * @return * @throws Exception */ private String doHttp(HttpMethod method) throws IOException, CatalogException { StringBuilder response = new StringBuilder(); BufferedReader br = null; try { // send request HttpClient httpClient = new HttpClient(); // OODT-719 Prevent httpclient from spawning closewait tcp connections method.setRequestHeader("Connection", "close"); int statusCode = httpClient.executeMethod(method); // read response if (statusCode != HttpStatus.SC_OK) { // still consume the response method.getResponseBodyAsString(); throw new CatalogException("HTTP method failed: " + method.getStatusLine()); } else { // read the response body. br = new BufferedReader(new InputStreamReader(method.getResponseBodyAsStream())); String readLine; while(((readLine = br.readLine()) != null)) { response.append(readLine); } } } finally { // must release the connection even if an exception occurred method.releaseConnection(); if (br!=null) { try { br.close(); } catch (Exception ignored) { } } } return response.toString(); } /** * Builds the URL used to update the Solr index. * * Example: http://localhost:8983/solr/update? * @return */ private String buildUpdateUrl() { return solrUrl + (solrUrl.endsWith("/") ? "" : "/") + "update"; } /** * Builds the URL used to query the Solr index. * * Example: http://localhost:8983/solr/select? * @return */ private String buildSelectUrl() { return solrUrl + (solrUrl.endsWith("/") ? "" : "/") + "select"; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.state; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.state.State; import org.apache.flink.api.common.state.StateDescriptor; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.core.fs.CloseableRegistry; import org.apache.flink.runtime.checkpoint.CheckpointOptions; import org.apache.flink.runtime.query.TaskKvStateRegistry; import org.apache.flink.runtime.state.internal.InternalKvState; import org.apache.flink.runtime.state.ttl.TtlStateFactory; import org.apache.flink.runtime.state.ttl.TtlTimeProvider; import org.apache.flink.util.IOUtils; import org.apache.flink.util.Preconditions; import java.io.Closeable; import java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.stream.Stream; import static org.apache.flink.util.Preconditions.checkNotNull; /** * Base implementation of KeyedStateBackend. The state can be checkpointed * to streams using {@link #snapshot(long, long, CheckpointStreamFactory, CheckpointOptions)}. * * @param <K> Type of the key by which state is keyed. 
*/
public abstract class AbstractKeyedStateBackend<K> implements
	KeyedStateBackend<K>,
	Snapshotable<SnapshotResult<KeyedStateHandle>, Collection<KeyedStateHandle>>,
	Closeable,
	CheckpointListener {

	/** {@link TypeSerializer} for our key. */
	protected final TypeSerializer<K> keySerializer;

	/** The currently active key. */
	private K currentKey;

	/** The key group of the currently active key. */
	private int currentKeyGroup;

	/** So that we can give out state when the user uses the same key. */
	private final HashMap<String, InternalKvState<K, ?, ?>> keyValueStatesByName;

	/** Name of the most recently requested state; used together with {@link #lastState}
	 * as a one-entry cache so repeated lookups of the same state skip the map. */
	private String lastName;

	/** Most recently requested state (raw type because the namespace type varies per lookup). */
	@SuppressWarnings("rawtypes")
	private InternalKvState lastState;

	/** The number of key-groups aka max parallelism. */
	protected final int numberOfKeyGroups;

	/** Range of key-groups for which this backend is responsible. */
	protected final KeyGroupRange keyGroupRange;

	/** KvStateRegistry helper for this task; may be null when queryable state is disabled. */
	protected final TaskKvStateRegistry kvStateRegistry;

	/** Registry for all opened streams, so they can be closed if the task using this backend is closed. */
	protected CloseableRegistry cancelStreamRegistry;

	/** Class loader for user code; used when deserializing user types. */
	protected final ClassLoader userCodeClassLoader;

	/** Execution config of the job; may be null (see {@link #determineStreamCompression}). */
	private final ExecutionConfig executionConfig;

	/** Time provider used for state TTL expiration checks. */
	private final TtlTimeProvider ttlTimeProvider;

	/** Decorates the input and output streams to write key-groups compressed. */
	protected final StreamCompressionDecorator keyGroupCompressionDecorator;

	/**
	 * Creates a keyed state backend responsible for the given key-group range.
	 *
	 * @param kvStateRegistry registry for queryable state, may be null
	 * @param keySerializer serializer for the key type, must not be null
	 * @param userCodeClassLoader class loader for user types, must not be null
	 * @param numberOfKeyGroups max parallelism; must be >= 1 and cover {@code keyGroupRange}
	 * @param keyGroupRange the key groups assigned to this backend, must not be null
	 * @param executionConfig job execution config (drives snapshot compression), may be null
	 * @param ttlTimeProvider provider of current time for TTL state, must not be null
	 */
	public AbstractKeyedStateBackend(
		TaskKvStateRegistry kvStateRegistry,
		TypeSerializer<K> keySerializer,
		ClassLoader userCodeClassLoader,
		int numberOfKeyGroups,
		KeyGroupRange keyGroupRange,
		ExecutionConfig executionConfig,
		TtlTimeProvider ttlTimeProvider) {

		Preconditions.checkArgument(numberOfKeyGroups >= 1, "NumberOfKeyGroups must be a positive number");
		// The assigned range can never exceed the job's total number of key groups.
		Preconditions.checkArgument(numberOfKeyGroups >= keyGroupRange.getNumberOfKeyGroups(), "The total number of key groups must be at least the number in the key group range assigned to this backend");

		this.kvStateRegistry = kvStateRegistry;
		this.keySerializer = Preconditions.checkNotNull(keySerializer);
		this.numberOfKeyGroups = numberOfKeyGroups;
		this.userCodeClassLoader = Preconditions.checkNotNull(userCodeClassLoader);
		this.keyGroupRange = Preconditions.checkNotNull(keyGroupRange);
		this.cancelStreamRegistry = new CloseableRegistry();
		this.keyValueStatesByName = new HashMap<>();
		this.executionConfig = executionConfig;
		this.keyGroupCompressionDecorator = determineStreamCompression(executionConfig);
		this.ttlTimeProvider = Preconditions.checkNotNull(ttlTimeProvider);
	}

	/**
	 * Picks the key-group stream compression: Snappy when snapshot compression is
	 * enabled in the execution config, otherwise uncompressed. A null config means
	 * no compression.
	 */
	private StreamCompressionDecorator determineStreamCompression(ExecutionConfig executionConfig) {
		if (executionConfig != null && executionConfig.isUseSnapshotCompression()) {
			return SnappyStreamCompressionDecorator.INSTANCE;
		} else {
			return UncompressedStreamCompressionDecorator.INSTANCE;
		}
	}

	/**
	 * Closes the state backend, releasing all internal resources, but does not delete any persistent
	 * checkpoint data.
	 */
	@Override
	public void dispose() {

		IOUtils.closeQuietly(cancelStreamRegistry);

		if (kvStateRegistry != null) {
			kvStateRegistry.unregisterAll();
		}

		// Drop the state cache and the state map; persistent data is left untouched.
		lastName = null;
		lastState = null;
		keyValueStatesByName.clear();
	}

	/**
	 * Sets the active key and derives its key group from the configured max parallelism.
	 *
	 * @see KeyedStateBackend
	 */
	@Override
	public void setCurrentKey(K newKey) {
		this.currentKey = newKey;
		this.currentKeyGroup = KeyGroupRangeAssignment.assignToKeyGroup(newKey, numberOfKeyGroups);
	}

	/**
	 * @see KeyedStateBackend
	 */
	@Override
	public TypeSerializer<K> getKeySerializer() {
		return keySerializer;
	}

	/**
	 * @see KeyedStateBackend
	 */
	@Override
	public K getCurrentKey() {
		return currentKey;
	}

	/**
	 * @see KeyedStateBackend
	 */
	@Override
	public int getCurrentKeyGroupIndex() {
		return currentKeyGroup;
	}

	/**
	 * @see KeyedStateBackend
	 */
	@Override
	public int getNumberOfKeyGroups() {
		return numberOfKeyGroups;
	}

	/**
	 * @see KeyedStateBackend
	 */
	@Override
	public KeyGroupRange getKeyGroupRange() {
		return keyGroupRange;
	}

	/**
	 * Applies {@code function} to the given state for every key that has a value in the
	 * given namespace. Mutates the current key as a side effect of iteration.
	 *
	 * @see KeyedStateBackend
	 */
	@Override
	public <N, S extends State, T> void applyToAllKeys(
		final N namespace,
		final TypeSerializer<N> namespaceSerializer,
		final StateDescriptor<S, T> stateDescriptor,
		final KeyedStateFunction<K, S> function) throws Exception {

		try (Stream<K> keyStream = getKeys(stateDescriptor.getName(), namespace)) {

			final S state = getPartitionedState(
				namespace,
				namespaceSerializer,
				stateDescriptor);

			keyStream.forEach((K key) -> {
				setCurrentKey(key);
				try {
					function.process(key, state);
				} catch (Throwable e) {
					// we wrap the checked exception in an unchecked
					// one and catch it (and re-throw it) later.
					throw new RuntimeException(e);
				}
			});
		}
	}

	/**
	 * Returns the state registered under the descriptor's name, creating (and, if
	 * enabled, TTL-wrapping and publishing as queryable) it on first request.
	 *
	 * @see KeyedStateBackend
	 */
	@Override
	@SuppressWarnings("unchecked")
	public <N, S extends State, V> S getOrCreateKeyedState(
		final TypeSerializer<N> namespaceSerializer,
		StateDescriptor<S, V> stateDescriptor) throws Exception {
		checkNotNull(namespaceSerializer, "Namespace serializer");
		checkNotNull(keySerializer, "State key serializer has not been configured in the config. " +
			"This operation cannot use partitioned state.");

		InternalKvState<K, ?, ?> kvState = keyValueStatesByName.get(stateDescriptor.getName());
		if (kvState == null) {
			if (!stateDescriptor.isSerializerInitialized()) {
				// Lazily initialize the descriptor's serializer from the execution config.
				stateDescriptor.initializeSerializerUnlessSet(executionConfig);
			}
			kvState = TtlStateFactory.createStateAndWrapWithTtlIfEnabled(
				namespaceSerializer, stateDescriptor, this, ttlTimeProvider);
			keyValueStatesByName.put(stateDescriptor.getName(), kvState);
			publishQueryableStateIfEnabled(stateDescriptor, kvState);
		}
		return (S) kvState;
	}

	/**
	 * Registers the state with the task's KvState registry when the descriptor is
	 * marked queryable. Fails if queryable state was requested without a registry.
	 */
	private void publishQueryableStateIfEnabled(
		StateDescriptor<?, ?> stateDescriptor,
		InternalKvState<?, ?, ?> kvState) {
		if (stateDescriptor.isQueryable()) {
			if (kvStateRegistry == null) {
				throw new IllegalStateException("State backend has not been initialized for job.");
			}
			String name = stateDescriptor.getQueryableStateName();
			kvStateRegistry.registerKvState(keyGroupRange, name, kvState);
		}
	}

	/**
	 * TODO: NOTE: This method does a lot of work caching / retrieving states just to update the namespace.
	 * This method should be removed for the sake of namespaces being lazily fetched from the keyed
	 * state backend, or being set on the state directly.
	 *
	 * @see KeyedStateBackend
	 */
	@SuppressWarnings("unchecked")
	@Override
	public <N, S extends State> S getPartitionedState(
		final N namespace,
		final TypeSerializer<N> namespaceSerializer,
		final StateDescriptor<S, ?> stateDescriptor) throws Exception {

		checkNotNull(namespace, "Namespace");

		// Fast path: the one-entry cache hit — only the namespace needs updating.
		if (lastName != null && lastName.equals(stateDescriptor.getName())) {
			lastState.setCurrentNamespace(namespace);
			return (S) lastState;
		}

		// Second chance: state exists in the map; refresh the cache and return it.
		InternalKvState<K, ?, ?> previous = keyValueStatesByName.get(stateDescriptor.getName());
		if (previous != null) {
			lastState = previous;
			lastState.setCurrentNamespace(namespace);
			lastName = stateDescriptor.getName();
			return (S) previous;
		}

		// Slow path: create the state, then prime the cache.
		final S state = getOrCreateKeyedState(namespaceSerializer, stateDescriptor);
		final InternalKvState<K, N, ?> kvState = (InternalKvState<K, N, ?>) state;

		lastName = stateDescriptor.getName();
		lastState = kvState;
		kvState.setCurrentNamespace(namespace);

		return state;
	}

	@Override
	public void close() throws IOException {
		cancelStreamRegistry.close();
	}

	@VisibleForTesting
	public boolean supportsAsynchronousSnapshots() {
		return false;
	}

	@VisibleForTesting
	StreamCompressionDecorator getKeyGroupCompressionDecorator() {
		return keyGroupCompressionDecorator;
	}

	/**
	 * Returns the total number of state entries across all keys/namespaces.
	 */
	@VisibleForTesting
	public abstract int numKeyValueStateEntries();

	// TODO remove this once heap-based timers are working with RocksDB incremental snapshots!
	public boolean requiresLegacySynchronousTimerSnapshots() {
		return false;
	}
}
/*******************************************************************************
 * Copyright (c) 2005, 2008 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *     Anton Leherbauer (Wind River Systems)
 *******************************************************************************/
/*
 * Created on Jun 17, 2003
 * by bnicolle
 */
package org.eclipse.cdt.core.model.tests;

import java.util.ArrayList;
import java.util.List;

import junit.framework.Test;
import junit.framework.TestSuite;

import org.eclipse.cdt.core.model.CModelException;
import org.eclipse.cdt.core.model.ICElement;
import org.eclipse.cdt.core.model.IStructure;
import org.eclipse.cdt.core.model.ITemplate;
import org.eclipse.cdt.core.model.ITranslationUnit;
import org.eclipse.cdt.core.model.IVariable;

/**
 * Class for testing the {@link ITemplate} interface against the elements
 * parsed from {@code ITemplate.cpp}.
 *
 * @author bnicolle
 */
public class ITemplateTests extends IntegratedCModelTest {

	/**
	 * @param name the test method to run
	 */
	public ITemplateTests(String name) {
		super(name);
	}

	/* (non-Javadoc)
	 * @see org.eclipse.cdt.core.model.tests.IntegratedCModelTest#getSourcefileSubdir()
	 */
	@Override
	public String getSourcefileSubdir() {
		return "resources/cmodel/";
	}

	/* (non-Javadoc)
	 * @see org.eclipse.cdt.core.model.tests.IntegratedCModelTest#getSourcefileResource()
	 */
	@Override
	public String getSourcefileResource() {
		return "ITemplate.cpp";
	}

	/**
	 * @return a test suite named after this class
	 *         containing all its public members named "test*"
	 */
	public static Test suite() {
		// Bug fix: the suite was previously named after IStructureTests.
		TestSuite suite = new TestSuite(ITemplateTests.class.getName());

		// Interface tests:
		suite.addTest(new ITemplateTests("testGetChildrenOfTypeTemplate"));
		suite.addTest(new ITemplateTests("testGetNumberOfTemplateParameters"));
		suite.addTest(new ITemplateTests("testGetTemplateParameterTypes"));
		suite.addTest(new ITemplateTests("testGetTemplateSignature"));

		// Language Specification tests:
		// TBD.

		return suite;
	}

	/**
	 * Collects the template method declarations and definitions of the
	 * {@code TemplateContainer} structure from the given translation unit.
	 *
	 * @param tu the translation unit under test
	 * @return the combined (mutable) list of template method elements
	 * @throws CModelException propagated from the model lookups
	 */
	public List getTemplateMethods(ITranslationUnit tu) throws CModelException {
		IStructure myElem = null;
		try {
			myElem = (IStructure) tu.getElement("TemplateContainer");
		} catch (CModelException c) {
			assertNotNull(c);
		}
		assertNotNull(myElem);
		List list = myElem.getChildrenOfType(ICElement.C_TEMPLATE_METHOD_DECLARATION);
		list.addAll(myElem.getChildrenOfType(ICElement.C_TEMPLATE_METHOD));
		return list;
	}

	/**
	 * Verifies that each template element kind yields the expected children,
	 * by name, from the translation unit.
	 */
	public void testGetChildrenOfTypeTemplate() throws CModelException {
		ITranslationUnit tu = getTU();
		{
			List arrayElements = tu.getChildrenOfType(ICElement.C_TEMPLATE_STRUCT);
			String[] myExpectedValues = { "Map" };
			assertEquals(myExpectedValues.length, arrayElements.size());
			for (int i = 0; i < myExpectedValues.length; i++) {
				ICElement celement = (ICElement) arrayElements.get(i);
				ITemplate myITemplate = (ITemplate) celement;
				assertNotNull("Failed on " + i, myITemplate);
				assertEquals("Failed on " + i, myExpectedValues[i], celement.getElementName());
			}
		}
		{
			List arrayElements = tu.getChildrenOfType(ICElement.C_TEMPLATE_CLASS);
			String[] myExpectedValues = { "nonVector" };
			assertEquals(myExpectedValues.length, arrayElements.size());
			for (int i = 0; i < myExpectedValues.length; i++) {
				ICElement celement = (ICElement) arrayElements.get(i);
				ITemplate myITemplate = (ITemplate) celement;
				assertNotNull("Failed on " + i, myITemplate);
				assertEquals("Failed on " + i, myExpectedValues[i], celement.getElementName());
			}
		}
		{
			List arrayElements = tu.getChildrenOfType(ICElement.C_TEMPLATE_UNION);
			String[] myExpectedValues = { "ArrayOverlay" };
			assertEquals(myExpectedValues.length, arrayElements.size());
			for (int i = 0; i < myExpectedValues.length; i++) {
				ICElement celement = (ICElement) arrayElements.get(i);
				ITemplate myITemplate = (ITemplate) celement;
				assertNotNull("Failed on " + i, myITemplate);
				assertEquals("Failed on " + i, myExpectedValues[i], celement.getElementName());
			}
		}
		{
			// Methods from the TemplateContainer.
			List arrayElements = getTemplateMethods(tu);
			String[] myExpectedValues = {
				"fum",
				"scrum",
			};
			assertEquals(myExpectedValues.length, arrayElements.size());
			// NOTE: this check assumes a fixed element order, which is not
			// actually guaranteed for this case.
			for (int i = 0; i < myExpectedValues.length; i++) {
				ICElement celement = (ICElement) arrayElements.get(i);
				ITemplate myITemplate = (ITemplate) celement;
				assertNotNull("Failed on " + i, myITemplate);
				assertEquals("Failed on " + i, myExpectedValues[i], celement.getElementName());
			}
		}
		{
			// Check the template functions.
			List arrayElements = tu.getChildrenOfType(ICElement.C_TEMPLATE_FUNCTION);
			// Actually, none of the two candidates ("nonVector<T>::first" and
			// "Foo::fum") are function templates — they are method templates,
			// so the expected set is empty.
			String[] myExpectedValues = {
			};
			assertEquals(myExpectedValues.length, arrayElements.size());
			// NOTE: this check assumes a fixed element order, which is not
			// actually guaranteed for this case.
			for (int i = 0; i < myExpectedValues.length; i++) {
				ICElement celement = (ICElement) arrayElements.get(i);
				ITemplate myITemplate = (ITemplate) celement;
				assertNotNull("Failed on " + i, myITemplate);
				assertEquals("Failed on " + i, myExpectedValues[i], celement.getElementName());
			}
		}
		{
			// Check the template methods.
			List arrayElements = tu.getChildrenOfType(ICElement.C_TEMPLATE_METHOD);
			String[] myExpectedValues = {
				"nonVector<T>::first",
				"Foo::fum",
			};
			assertEquals(myExpectedValues.length, arrayElements.size());
			// NOTE: this check assumes a fixed element order, which is not
			// actually guaranteed for this case.
			for (int i = 0; i < myExpectedValues.length; i++) {
				ICElement celement = (ICElement) arrayElements.get(i);
				ITemplate myITemplate = (ITemplate) celement;
				assertNotNull("Failed on " + i, myITemplate);
				assertEquals("Failed on " + i, myExpectedValues[i], celement.getElementName());
			}
		}
		{
			// Template function declarations.
			List arrayElements = tu.getChildrenOfType(ICElement.C_TEMPLATE_FUNCTION_DECLARATION);
			String[] myExpectedValues = { "IsGreaterThan" };
			assertEquals(myExpectedValues.length, arrayElements.size());
			// NOTE: this check assumes a fixed element order, which is not
			// actually guaranteed for this case.
			for (int i = 0; i < myExpectedValues.length; i++) {
				ICElement celement = (ICElement) arrayElements.get(i);
				ITemplate myITemplate = (ITemplate) celement;
				assertNotNull("Failed on " + i, myITemplate);
				assertEquals("Failed on " + i, myExpectedValues[i], celement.getElementName());
			}
		}
		// A combined methods-and-functions check used to live here (function
		// declarations in Quick Parse mode are considered method declarations
		// in Structural parse mode); it was disabled because element order is
		// not guaranteed.
		{
			List arrayElements = tu.getChildrenOfType(ICElement.C_TEMPLATE_VARIABLE);
			String[] myExpectedValues = {
				"default_alloc_template<threads,inst>::S_start_free"
			};
			assertEquals(myExpectedValues.length, arrayElements.size());
			for (int i = 0; i < myExpectedValues.length; i++) {
				IVariable myITemplate = (IVariable) arrayElements.get(i);
				assertNotNull("Failed on " + i, myITemplate);
				assertEquals("Failed on " + i, myExpectedValues[i], myITemplate.getElementName());
			}
		}
	}

	/**
	 * Verifies {@link ITemplate#getNumberOfTemplateParameters()} for every
	 * template element kind, in the order the kinds are collected below.
	 */
	public void testGetNumberOfTemplateParameters() throws CModelException {
		ITranslationUnit tu = getTU();
		ArrayList arrayElements = new ArrayList();
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_STRUCT));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_CLASS));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_UNION));
		arrayElements.addAll(getTemplateMethods(tu));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_FUNCTION));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_METHOD));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_FUNCTION_DECLARATION));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_VARIABLE));

		int[] myExpectedNumbers = { 3, 1, 3, 1, 1, 1, 1, 1, 2 };
		assertEquals(myExpectedNumbers.length, arrayElements.size());
		for (int i = 0; i < myExpectedNumbers.length; i++) {
			ITemplate myTemplate = (ITemplate) arrayElements.get(i);
			assertNotNull("Failed on " + i, myTemplate);
			assertEquals("Failed on " + i, myExpectedNumbers[i],
					myTemplate.getNumberOfTemplateParameters());
		}
	}

	/**
	 * Verifies {@link ITemplate#getTemplateParameterTypes()} for every
	 * template element kind, in the order the kinds are collected below.
	 */
	public void testGetTemplateParameterTypes() throws CModelException {
		ITranslationUnit tu = getTU();
		ArrayList arrayElements = new ArrayList();
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_STRUCT));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_CLASS));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_UNION));
		arrayElements.addAll(getTemplateMethods(tu));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_METHOD));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_FUNCTION));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_FUNCTION_DECLARATION));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_VARIABLE));

		String[][] myExpectedValues = {
			// "Map"
			{ "Key", "Value", "SortAlgorithm" },
			// "nonVector"
			{ "T" },
			// "ArrayOverlay"
			{ "X", "Y", "int=16" },
			// "TemplateContainer::fum"
			{ "Bar" },
			// "TemplateParameter::scrum"
			{ "int" },
			// "nonVector::first"
			{ "T" },
			// "Foo::fum"
			{ "Bar" },
			// "IsGreaterThan"
			{ "X" },
			// "default_alloc_template::S_start_free"
			{ "bool", "int" },
		};
		assertEquals(myExpectedValues.length, arrayElements.size());
		for (int i = 0; i < myExpectedValues.length; i++) {
			ITemplate myTemplate = (ITemplate) arrayElements.get(i);
			assertNotNull("Failed on " + i, myTemplate);
			String[] myExpectedParams = myExpectedValues[i];
			String[] myParams = myTemplate.getTemplateParameterTypes();
			assertEquals("Failed on " + i, myExpectedParams.length, myParams.length);
			for (int j = 0; j < myExpectedParams.length; j++) {
				assertEquals("Failed on " + i + "," + j, myExpectedParams[j], myParams[j]);
			}
		}
	}

	/**
	 * Verifies {@link ITemplate#getTemplateSignature()} for every template
	 * element kind, in the order the kinds are collected below.
	 */
	public void testGetTemplateSignature() throws CModelException {
		ITranslationUnit tu = getTU();
		ArrayList arrayElements = new ArrayList();
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_STRUCT));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_CLASS));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_UNION));
		arrayElements.addAll(getTemplateMethods(tu));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_METHOD));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_FUNCTION));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_FUNCTION_DECLARATION));
		arrayElements.addAll(tu.getChildrenOfType(ICElement.C_TEMPLATE_VARIABLE));

		String[] myExpectedValues = {
			"Map<Key, Value, SortAlgorithm>",
			"nonVector<T>",
			"ArrayOverlay<X, Y, int=16>",
			"fum<Bar>(int) : void",
			"scrum<int>(void) : void", // TODO: deduce the rules of () versus (void), compare below.
			"nonVector<T>::first<T>() const : const T&", // TODO: where should <T> be?
			"Foo::fum<Bar>(int) : void", // TODO: shouldn't signature indicate const function as well?
			"IsGreaterThan<X>(X, X) : bool",
			"default_alloc_template<threads,inst>::S_start_free<bool, int> : char*",
		};
		assertEquals(myExpectedValues.length, arrayElements.size());
		for (int i = 0; i < myExpectedValues.length; i++) {
			ITemplate myTemplate = (ITemplate) arrayElements.get(i);
			assertNotNull("Failed on " + i, myTemplate);
			assertEquals("Failed on " + i, myExpectedValues[i], myTemplate.getTemplateSignature());
		}
	}
}
package com.wrapper.spotify.model_objects.specification;

import java.util.Arrays;

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.wrapper.spotify.enums.ModelObjectType;
import com.wrapper.spotify.model_objects.AbstractModelObject;
import com.wrapper.spotify.requests.data.personalization.interfaces.IArtistTrackModelObject;
import com.wrapper.spotify.requests.data.search.interfaces.ISearchModelObject;

/**
 * Retrieve information about <a href="https://developer.spotify.com/web-api/object-model/#artist-object-full">
 * Artist objects</a> by building instances from this class.
 */
@JsonDeserialize(builder = Artist.Builder.class)
public class Artist extends AbstractModelObject implements IArtistTrackModelObject, ISearchModelObject {
  private final ExternalUrl externalUrls;
  private final Followers followers;
  private final String[] genres;
  private final String href;
  private final String id;
  private final Image[] images;
  private final String name;
  private final Integer popularity;
  private final ModelObjectType type;
  private final String uri;

  private Artist(final Builder builder) {
    super(builder);

    this.externalUrls = builder.externalUrls;
    this.followers = builder.followers;
    this.genres = builder.genres;
    this.href = builder.href;
    this.id = builder.id;
    this.images = builder.images;
    this.name = builder.name;
    this.popularity = builder.popularity;
    this.type = builder.type;
    this.uri = builder.uri;
  }

  /**
   * Get the external URLs of the artist. <br>
   * Example: <a href="https://developer.spotify.com/web-api/user-guide/#spotify-uris-and-ids">Spotify-URL</a>
   *
   * @return An {@link ExternalUrl} object.
   */
  public ExternalUrl getExternalUrls() {
    return externalUrls;
  }

  /**
   * Get information about the followers of the artist. <br>
   * Example: Follower count.
   *
   * @return A {@link Followers} object.
   */
  public Followers getFollowers() {
    return followers;
  }

  /**
   * Get a list of all genres of the artist. <br>
   * A great amount of artists may contain no information about their genres.
   *
   * @return An array of genre names.
   */
  public String[] getGenres() {
    return genres;
  }

  /**
   * Get the full Spotify Web API endpoint URL of the artist.
   *
   * @return A Spotify Web API endpoint URL.
   */
  public String getHref() {
    return href;
  }

  /**
   * Get the Spotify ID of the artist.
   *
   * @return A <a href="https://developer.spotify.com/web-api/user-guide/#spotify-uris-and-ids">Spotify artist ID</a>.
   */
  public String getId() {
    return id;
  }

  /**
   * Get all images of the artist (like header image) in different sizes.
   *
   * @return An array of {@link Image} objects.
   */
  public Image[] getImages() {
    return images;
  }

  /**
   * Get the name of the artist.
   *
   * @return Artist name.
   */
  public String getName() {
    return name;
  }

  /**
   * Get the popularity of the artist in a range between 0 and 100. (higher = more popular)<br>
   * The popularity of the artist is based on the popularity of its tracks.
   *
   * @return The popularity of the artist.
   */
  public Integer getPopularity() {
    return popularity;
  }

  /**
   * Get the model object type. In this case "artist".
   *
   * @return A {@link ModelObjectType}.
   */
  public ModelObjectType getType() {
    return type;
  }

  /**
   * Get the Spotify URI of the artist.
   *
   * @return <a href="https://developer.spotify.com/web-api/user-guide/#spotify-uris-and-ids">Spotify artist URI</a>.
   */
  public String getUri() {
    return uri;
  }

  @Override
  public String toString() {
    return "Artist(name=" + name + ", externalUrls=" + externalUrls + ", followers=" + followers + ", genres="
        + Arrays.toString(genres) + ", href=" + href + ", id=" + id + ", images=" + Arrays.toString(images)
        + ", popularity=" + popularity + ", type=" + type + ", uri=" + uri + ")";
  }

  @Override
  public Builder builder() {
    return new Builder();
  }

  /**
   * Builder class for building {@link Artist} instances.
   */
  public static final class Builder extends AbstractModelObject.Builder {
    private ExternalUrl externalUrls;
    private Followers followers;
    private String[] genres;
    private String href;
    private String id;
    private Image[] images;
    private String name;
    private Integer popularity;
    private ModelObjectType type;
    private String uri;

    /**
     * Set external URLs of the artist to be built.
     *
     * @param externalUrls {@link ExternalUrl} object.
     * @return A {@link Artist.Builder}.
     */
    public Builder setExternalUrls(ExternalUrl externalUrls) {
      this.externalUrls = externalUrls;
      return this;
    }

    /**
     * Set the followers object of the artist to be built.
     *
     * @param followers A {@link Followers} object.
     * @return A {@link Artist.Builder}.
     */
    public Builder setFollowers(Followers followers) {
      this.followers = followers;
      return this;
    }

    /**
     * Set the genres of the artist to be built.
     *
     * @param genres Genre names.
     * @return A {@link Artist.Builder}.
     */
    public Builder setGenres(String... genres) {
      this.genres = genres;
      return this;
    }

    /**
     * Set href of Spotify Web API endpoint of the artist to be built.
     *
     * @param href Spotify Web API endpoint URL.
     * @return A {@link Artist.Builder}.
     */
    public Builder setHref(String href) {
      this.href = href;
      return this;
    }

    /**
     * Set artist ID of the artist to be built.
     *
     * @param id <a href="https://developer.spotify.com/web-api/user-guide/#spotify-uris-and-ids">Spotify artist ID</a>.
     * @return A {@link Artist.Builder}.
     */
    public Builder setId(String id) {
      this.id = id;
      return this;
    }

    /**
     * Set the images of the artist to be built, like the header image.
     *
     * @param images {@link Image} objects.
     * @return A {@link Artist.Builder}.
     */
    public Builder setImages(Image... images) {
      this.images = images;
      return this;
    }

    /**
     * Set the name of the artist to be built.
     *
     * @param name The artist name.
     * @return A {@link Artist.Builder}.
     */
    public Builder setName(String name) {
      this.name = name;
      return this;
    }

    /**
     * Set the popularity of the artist to be built.
     *
     * @param popularity The popularity of the artist between 0 and 100.
     * @return A {@link Artist.Builder}.
     */
    public Builder setPopularity(Integer popularity) {
      this.popularity = popularity;
      return this;
    }

    /**
     * Set the type of the model object. In this case "artist".
     *
     * @param type The {@link ModelObjectType}.
     * @return A {@link Artist.Builder}.
     */
    public Builder setType(ModelObjectType type) {
      this.type = type;
      return this;
    }

    /**
     * Set the Spotify artist URI of the artist to be built.
     *
     * @param uri <a href="https://developer.spotify.com/web-api/user-guide/#spotify-uris-and-ids">
     *            Spotify artist URI</a>.
     * @return A {@link Artist.Builder}.
     */
    public Builder setUri(String uri) {
      this.uri = uri;
      return this;
    }

    @Override
    public Artist build() {
      return new Artist(this);
    }
  }

  /**
   * JsonUtil class for building {@link Artist} instances.
   */
  public static final class JsonUtil extends AbstractModelObject.JsonUtil<Artist> {
    // Idiom fix: added the missing @Override (this implements the abstract
    // factory method of AbstractModelObject.JsonUtil, as builder() above does
    // for its parent). Returns null for absent/null JSON, matching callers'
    // expectations elsewhere in this file.
    @Override
    public Artist createModelObject(JsonObject jsonObject) {
      if (jsonObject == null || jsonObject.isJsonNull()) {
        return null;
      }

      return new Artist.Builder()
          .setExternalUrls(
              hasAndNotNull(jsonObject, "external_urls")
                  ? new ExternalUrl.JsonUtil().createModelObject(
                      jsonObject.getAsJsonObject("external_urls"))
                  : null)
          .setFollowers(
              hasAndNotNull(jsonObject, "followers")
                  ? new Followers.JsonUtil().createModelObject(
                      jsonObject.getAsJsonObject("followers"))
                  : null)
          .setGenres(
              hasAndNotNull(jsonObject, "genres")
                  ? new Gson().fromJson(
                      jsonObject.getAsJsonArray("genres"), String[].class)
                  : null)
          .setHref(
              hasAndNotNull(jsonObject, "href")
                  ? jsonObject.get("href").getAsString()
                  : null)
          .setId(
              hasAndNotNull(jsonObject, "id")
                  ? jsonObject.get("id").getAsString()
                  : null)
          .setImages(
              hasAndNotNull(jsonObject, "images")
                  ? new Image.JsonUtil().createModelObjectArray(
                      jsonObject.getAsJsonArray("images"))
                  : null)
          .setName(
              hasAndNotNull(jsonObject, "name")
                  ? jsonObject.get("name").getAsString()
                  : null)
          .setPopularity(
              hasAndNotNull(jsonObject, "popularity")
                  ? jsonObject.get("popularity").getAsInt()
                  : null)
          .setType(
              hasAndNotNull(jsonObject, "type")
                  ? ModelObjectType.keyOf(
                      jsonObject.get("type").getAsString().toLowerCase())
                  : null)
          .setUri(
              hasAndNotNull(jsonObject, "uri")
                  ? jsonObject.get("uri").getAsString()
                  : null)
          .build();
    }
  }
}
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *******************************************************************************/
package org.ofbiz.minilang.method.callops;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.ofbiz.base.location.FlexibleLocation;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.base.util.UtilXml;
import org.ofbiz.base.util.collections.FlexibleMapAccessor;
import org.ofbiz.minilang.MiniLangException;
import org.ofbiz.minilang.MiniLangRuntimeException;
import org.ofbiz.minilang.MiniLangValidate;
import org.ofbiz.minilang.SimpleMethod;
import org.ofbiz.minilang.ValidationException;
import org.ofbiz.minilang.artifact.ArtifactInfoContext;
import org.ofbiz.minilang.method.MethodContext;
import org.ofbiz.minilang.method.MethodOperation;
import org.w3c.dom.Element;

/**
 * Implements the &lt;call-simple-method&gt; element.
 *
 * @see <a href="https://cwiki.apache.org/OFBADMIN/mini-language-reference.html#Mini-languageReference-{{%3Ccallsimplemethod%3E}}">Mini-language Reference</a>
 */
public final class CallSimpleMethod extends MethodOperation {

    public static final String module = CallSimpleMethod.class.getName();

    // Name of the <simple-method> to invoke.
    private final String methodName;
    // Location of the XML document containing the method; defaults to the
    // calling method's own location when the attribute is absent.
    private final String xmlResource;
    // Resolved URL of xmlResource; null when resolution failed at parse time.
    private final URL xmlURL;
    // "function" runs the callee in a copied environment; any other value
    // (including empty) runs it inline in the caller's environment.
    private final String scope;
    // <result-to-field> mappings (function scope only); null when none declared.
    private final List<ResultToField> resultToFieldList;

    /**
     * Parses a &lt;call-simple-method&gt; element: validates attributes/children,
     * resolves the target XML resource, and collects &lt;result-to-field&gt; children.
     *
     * @throws MiniLangException on parse/validation failure
     */
    public CallSimpleMethod(Element element, SimpleMethod simpleMethod) throws MiniLangException {
        super(element, simpleMethod);
        if (MiniLangValidate.validationOn()) {
            MiniLangValidate.attributeNames(simpleMethod, element, "method-name", "xml-resource", "scope");
            MiniLangValidate.requiredAttributes(simpleMethod, element, "method-name");
            MiniLangValidate.constantAttributes(simpleMethod, element, "method-name", "xml-resource", "scope");
            MiniLangValidate.childElements(simpleMethod, element, "result-to-field");
        }
        this.methodName = element.getAttribute("method-name");
        String xmlResourceAttribute = element.getAttribute("xml-resource");
        if (xmlResourceAttribute.isEmpty()) {
            // No resource given: the target method lives in the same document.
            xmlResourceAttribute = simpleMethod.getFromLocation();
        }
        this.xmlResource = xmlResourceAttribute;
        URL xmlURL = null;
        try {
            xmlURL = FlexibleLocation.resolveLocation(this.xmlResource);
        } catch (MalformedURLException e) {
            // Reported through the validation channel; xmlURL stays null and
            // exec() will fail with "Could not find <simple-method ...>".
            MiniLangValidate.handleError("Could not find SimpleMethod XML document in resource: " + this.xmlResource + "; error was: " + e.toString(), simpleMethod, element);
        }
        this.xmlURL = xmlURL;
        this.scope = element.getAttribute("scope");
        List<? extends Element> resultToFieldElements = UtilXml.childElementList(element, "result-to-field");
        if (UtilValidate.isNotEmpty(resultToFieldElements)) {
            if (!"function".equals(this.scope)) {
                // <result-to-field> only makes sense with scope="function".
                MiniLangValidate.handleError("Inline scope cannot include <result-to-field> elements.", simpleMethod, element);
            }
            List<ResultToField> resultToFieldList = new ArrayList<ResultToField>(resultToFieldElements.size());
            for (Element resultToFieldElement : resultToFieldElements) {
                resultToFieldList.add(new ResultToField(resultToFieldElement, simpleMethod));
            }
            this.resultToFieldList = resultToFieldList;
        } else {
            this.resultToFieldList = null;
        }
    }

    /**
     * Executes the target simple-method, either inline (sharing the caller's
     * environment) or in "function" scope (a copied environment with the
     * response fields removed). Returns false when the callee reported an
     * error, true otherwise.
     *
     * @throws MiniLangException when the method name is empty or the target
     *         method cannot be found
     */
    @Override
    public boolean exec(MethodContext methodContext) throws MiniLangException {
        if (UtilValidate.isEmpty(this.methodName)) {
            throw new MiniLangRuntimeException("method-name attribute is empty", this);
        }
        SimpleMethod simpleMethodToCall = SimpleMethod.getSimpleMethod(this.xmlURL, this.methodName);
        if (simpleMethodToCall == null) {
            throw new MiniLangRuntimeException("Could not find <simple-method name=\"" + this.methodName + "\"> in XML document " + this.xmlResource, this);
        }
        MethodContext localContext = methodContext;
        if ("function".equals(this.scope)) {
            // Function scope: copy the environment and strip the caller's
            // response fields so the callee starts with a clean response state.
            Map<String, Object> localEnv = new HashMap<String, Object>();
            localEnv.putAll(methodContext.getEnvMap());
            localEnv.remove(this.simpleMethod.getEventResponseCodeName());
            localEnv.remove(this.simpleMethod.getServiceResponseMessageName());
            localContext = new MethodContext(localEnv, methodContext.getLoader(), methodContext.getMethodType());
        }
        String returnVal = simpleMethodToCall.exec(localContext);
        if (Debug.verboseOn()) Debug.logVerbose("Called simple-method named [" + this.methodName + "] in resource [" + this.xmlResource + "], returnVal is [" + returnVal + "]", module);
        if (simpleMethodToCall.getDefaultErrorCode().equals(returnVal)) {
            // Callee returned its default error code: propagate an error
            // response into the caller's environment and stop execution.
            if (methodContext.getMethodType() == MethodContext.EVENT) {
                methodContext.putEnv(simpleMethod.getEventResponseCodeName(), simpleMethod.getDefaultErrorCode());
            } else if (methodContext.getMethodType() == MethodContext.SERVICE) {
                methodContext.putEnv(simpleMethod.getServiceResponseMessageName(), simpleMethod.getDefaultErrorCode());
            }
            return false;
        }
        if (methodContext.getMethodType() == MethodContext.EVENT) {
            // FIXME: This doesn't make sense. We are comparing the called method's response code with this method's
            // response code. Since response codes are configurable per method, this code will fail.
            String responseCode = (String) localContext.getEnv(this.simpleMethod.getEventResponseCodeName());
            if (this.simpleMethod.getDefaultErrorCode().equals(responseCode)) {
                Debug.logWarning("Got error [" + responseCode + "] calling inline simple-method named [" + this.methodName + "] in resource [" + this.xmlResource + "], message is " + methodContext.getEnv(this.simpleMethod.getEventErrorMessageName()), module);
                return false;
            }
        } else if (methodContext.getMethodType() == MethodContext.SERVICE) {
            // FIXME: This doesn't make sense. We are comparing the called method's response message with this method's
            // response message. Since response messages are configurable per method, this code will fail.
            String responseMessage = (String) localContext.getEnv(this.simpleMethod.getServiceResponseMessageName());
            if (this.simpleMethod.getDefaultErrorCode().equals(responseMessage)) {
                Debug.logWarning("Got error [" + responseMessage + "] calling inline simple-method named [" + this.methodName + "] in resource [" + this.xmlResource + "], message is " + methodContext.getEnv(this.simpleMethod.getServiceErrorMessageName()) + ", and the error message list is: " + methodContext.getEnv(this.simpleMethod.getServiceErrorMessageListName()), module);
                return false;
            }
        }
        if ("function".equals(this.scope) && this.resultToFieldList != null) {
            // Copy selected results from the callee's (local) context back into
            // the caller's environment.
            Map<String, Object> results = localContext.getResults();
            if (results != null) {
                for (ResultToField resultToField : this.resultToFieldList) {
                    resultToField.exec(methodContext.getEnvMap(), results);
                }
            }
        }
        return true;
    }

    /**
     * Records the called simple-method (and, recursively, its artifacts) in the
     * artifact info context; cycles are avoided via {@code aic.hasVisited}.
     */
    @Override
    public void gatherArtifactInfo(ArtifactInfoContext aic) {
        SimpleMethod simpleMethodToCall;
        try {
            simpleMethodToCall = SimpleMethod.getSimpleMethod(this.xmlURL, this.methodName);
            if (simpleMethodToCall != null) {
                if (!aic.hasVisited(simpleMethodToCall)) {
                    aic.addSimpleMethod(simpleMethodToCall);
                    simpleMethodToCall.gatherArtifactInfo(aic);
                }
            }
        } catch (MiniLangException e) {
            // Best effort: artifact gathering must not fail the caller.
            Debug.logWarning("Could not find <simple-method name=\"" + this.methodName + "\"> in XML document " + this.xmlResource + ": " + e.toString(), module);
        }
    }

    public String getMethodName() {
        return this.methodName;
    }

    /**
     * Resolves the target simple-method using the given class loader.
     * NOTE(review): unlike exec(), this resolves via the resource string rather
     * than the pre-resolved URL — presumably for callers with a different
     * loader; confirm before unifying.
     */
    public SimpleMethod getSimpleMethodToCall(ClassLoader loader) throws MiniLangException {
        return SimpleMethod.getSimpleMethod(xmlResource, methodName, loader);
    }

    public String getXmlResource() {
        return this.xmlResource;
    }

    /** Reconstructs the XML form of this element, omitting empty attributes. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("<call-simple-method ");
        if (this.methodName.length() > 0) {
            sb.append("method-name=\"").append(this.methodName).append("\" ");
        }
        if (this.xmlResource.length() > 0) {
            sb.append("xml-resource=\"").append(this.xmlResource).append("\" ");
        }
        if (this.scope.length() > 0) {
            sb.append("scope=\"").append(this.scope).append("\" ");
        }
        sb.append("/>");
        return sb.toString();
    }

    /**
     * A factory for the &lt;call-simple-method&gt; element.
     */
    public static final class CallSimpleMethodFactory implements Factory<CallSimpleMethod> {
        @Override
        public CallSimpleMethod createMethodOperation(Element element, SimpleMethod simpleMethod) throws MiniLangException {
            return new CallSimpleMethod(element, simpleMethod);
        }

        @Override
        public String getName() {
            return "call-simple-method";
        }
    }

    /**
     * Models one &lt;result-to-field&gt; child: copies a named entry from the
     * callee's results map into a field of the caller's environment.
     */
    private final class ResultToField {

        // Destination field; defaults to the result name when "field" is absent.
        private final FlexibleMapAccessor<Object> fieldFma;
        // Name of the entry to read from the callee's results map.
        private final FlexibleMapAccessor<Object> resultNameFma;

        private ResultToField(Element element, SimpleMethod simpleMethod) throws ValidationException {
            if (MiniLangValidate.validationOn()) {
                MiniLangValidate.attributeNames(simpleMethod, element, "result-name", "field");
                MiniLangValidate.requiredAttributes(simpleMethod, element, "result-name");
                MiniLangValidate.expressionAttributes(simpleMethod, element, "result-name", "field");
                MiniLangValidate.noChildElements(simpleMethod, element);
            }
            this.resultNameFma = FlexibleMapAccessor.getInstance(element.getAttribute("result-name"));
            String fieldAttribute = element.getAttribute("field");
            if (fieldAttribute.length() == 0) {
                this.fieldFma = this.resultNameFma;
            } else {
                this.fieldFma = FlexibleMapAccessor.getInstance(fieldAttribute);
            }
        }

        // Null results are skipped: the destination field is left untouched.
        private void exec(Map<String, Object> context, Map<String, Object> results) throws MiniLangException {
            Object value = this.resultNameFma.get(results);
            if (value != null) {
                this.fieldFma.put(context, value);
            }
        }
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.impl.dmn.entity.repository;

import static org.camunda.bpm.engine.impl.util.EnsureUtil.ensureNotNull;
import static org.camunda.bpm.engine.impl.util.EnsureUtil.ensurePositive;

import java.util.List;

import org.camunda.bpm.engine.exception.NotValidException;
import org.camunda.bpm.engine.impl.AbstractQuery;
import org.camunda.bpm.engine.impl.Page;
import org.camunda.bpm.engine.impl.interceptor.CommandContext;
import org.camunda.bpm.engine.impl.interceptor.CommandExecutor;
import org.camunda.bpm.engine.repository.DecisionDefinition;
import org.camunda.bpm.engine.repository.DecisionDefinitionQuery;

/**
 * Fluent query implementation for {@link DecisionDefinition}s.
 *
 * <p>Each filter method validates its argument (throwing {@link NotValidException} on
 * {@code null}), stores it in a protected field, and returns {@code this} for chaining.
 * The stored criteria are exposed through the getters at the bottom, which the
 * persistence layer reads when {@link #executeCount} / {@link #executeList} run.
 */
public class DecisionDefinitionQueryImpl extends AbstractQuery<DecisionDefinitionQuery, DecisionDefinition> implements DecisionDefinitionQuery {

  private static final long serialVersionUID = 1L;

  protected String id;
  protected String[] ids;
  protected String category;
  protected String categoryLike;
  protected String name;
  protected String nameLike;
  protected String deploymentId;
  protected String key;
  protected String keyLike;
  protected String resourceName;
  protected String resourceNameLike;
  protected Integer version;
  // when true, only the highest version per definition key is returned
  protected boolean latest = false;
  protected String decisionRequirementsDefinitionId;
  protected String decisionRequirementsDefinitionKey;
  protected boolean withoutDecisionRequirementsDefinition = false;
  // isTenantIdSet distinguishes "no tenant filter" from "filter for the null tenant"
  protected boolean isTenantIdSet = false;
  protected String[] tenantIds;
  protected boolean includeDefinitionsWithoutTenantId = false;
  protected String versionTag;
  protected String versionTagLike;

  public DecisionDefinitionQueryImpl() {
  }

  public DecisionDefinitionQueryImpl(CommandExecutor commandExecutor) {
    super(commandExecutor);
  }

  // Query parameter //////////////////////////////////////////////////////////////

  /** Restricts to the definition with the given id. Must not be {@code null}. */
  public DecisionDefinitionQuery decisionDefinitionId(String decisionDefinitionId) {
    ensureNotNull(NotValidException.class, "decisionDefinitionId", decisionDefinitionId);
    this.id = decisionDefinitionId;
    return this;
  }

  /** Restricts to definitions with any of the given ids (no null-check by design). */
  public DecisionDefinitionQuery decisionDefinitionIdIn(String... ids) {
    this.ids = ids;
    return this;
  }

  public DecisionDefinitionQuery decisionDefinitionCategory(String decisionDefinitionCategory) {
    ensureNotNull(NotValidException.class, "category", decisionDefinitionCategory);
    this.category = decisionDefinitionCategory;
    return this;
  }

  public DecisionDefinitionQuery decisionDefinitionCategoryLike(String decisionDefinitionCategoryLike) {
    ensureNotNull(NotValidException.class, "categoryLike", decisionDefinitionCategoryLike);
    this.categoryLike = decisionDefinitionCategoryLike;
    return this;
  }

  public DecisionDefinitionQuery decisionDefinitionName(String decisionDefinitionName) {
    ensureNotNull(NotValidException.class, "name", decisionDefinitionName);
    this.name = decisionDefinitionName;
    return this;
  }

  public DecisionDefinitionQuery decisionDefinitionNameLike(String decisionDefinitionNameLike) {
    ensureNotNull(NotValidException.class, "nameLike", decisionDefinitionNameLike);
    this.nameLike = decisionDefinitionNameLike;
    return this;
  }

  public DecisionDefinitionQuery decisionDefinitionKey(String decisionDefinitionKey) {
    ensureNotNull(NotValidException.class, "key", decisionDefinitionKey);
    this.key = decisionDefinitionKey;
    return this;
  }

  public DecisionDefinitionQuery decisionDefinitionKeyLike(String decisionDefinitionKeyLike) {
    ensureNotNull(NotValidException.class, "keyLike", decisionDefinitionKeyLike);
    this.keyLike = decisionDefinitionKeyLike;
    return this;
  }

  public DecisionDefinitionQuery deploymentId(String deploymentId) {
    ensureNotNull(NotValidException.class, "deploymentId", deploymentId);
    this.deploymentId = deploymentId;
    return this;
  }

  /** Restricts to the given version; must be non-null and strictly positive. */
  public DecisionDefinitionQuery decisionDefinitionVersion(Integer decisionDefinitionVersion) {
    ensureNotNull(NotValidException.class, "version", decisionDefinitionVersion);
    ensurePositive(NotValidException.class, "version", decisionDefinitionVersion.longValue());
    this.version = decisionDefinitionVersion;
    return this;
  }

  /**
   * Restricts to the latest version per key. Only valid together with key filters —
   * see {@link #checkQueryOk()}.
   */
  public DecisionDefinitionQuery latestVersion() {
    this.latest = true;
    return this;
  }

  public DecisionDefinitionQuery decisionDefinitionResourceName(String resourceName) {
    ensureNotNull(NotValidException.class, "resourceName", resourceName);
    this.resourceName = resourceName;
    return this;
  }

  public DecisionDefinitionQuery decisionDefinitionResourceNameLike(String resourceNameLike) {
    ensureNotNull(NotValidException.class, "resourceNameLike", resourceNameLike);
    this.resourceNameLike = resourceNameLike;
    return this;
  }

  public DecisionDefinitionQuery decisionRequirementsDefinitionId(String decisionRequirementsDefinitionId) {
    ensureNotNull(NotValidException.class, "decisionRequirementsDefinitionId", decisionRequirementsDefinitionId);
    this.decisionRequirementsDefinitionId = decisionRequirementsDefinitionId;
    return this;
  }

  public DecisionDefinitionQuery decisionRequirementsDefinitionKey(String decisionRequirementsDefinitionKey) {
    ensureNotNull(NotValidException.class, "decisionRequirementsDefinitionKey", decisionRequirementsDefinitionKey);
    this.decisionRequirementsDefinitionKey = decisionRequirementsDefinitionKey;
    return this;
  }

  @Override
  public DecisionDefinitionQuery versionTag(String versionTag) {
    ensureNotNull(NotValidException.class, "versionTag", versionTag);
    this.versionTag = versionTag;
    return this;
  }

  @Override
  public DecisionDefinitionQuery versionTagLike(String versionTagLike) {
    ensureNotNull(NotValidException.class, "versionTagLike", versionTagLike);
    this.versionTagLike = versionTagLike;
    return this;
  }

  public DecisionDefinitionQuery withoutDecisionRequirementsDefinition() {
    withoutDecisionRequirementsDefinition = true;
    return this;
  }

  public DecisionDefinitionQuery tenantIdIn(String... tenantIds) {
    ensureNotNull("tenantIds", (Object[]) tenantIds);
    this.tenantIds = tenantIds;
    isTenantIdSet = true;
    return this;
  }

  /** Selects only definitions that have no tenant (tenantIds stays null on purpose). */
  public DecisionDefinitionQuery withoutTenantId() {
    isTenantIdSet = true;
    this.tenantIds = null;
    return this;
  }

  public DecisionDefinitionQuery includeDecisionDefinitionsWithoutTenantId() {
    this.includeDefinitionsWithoutTenantId = true;
    return this;
  }

  // ordering — all delegate to AbstractQuery.orderBy and return the query for chaining

  public DecisionDefinitionQuery orderByDecisionDefinitionCategory() {
    return orderBy(DecisionDefinitionQueryProperty.DECISION_DEFINITION_CATEGORY);
  }

  public DecisionDefinitionQuery orderByDecisionDefinitionKey() {
    return orderBy(DecisionDefinitionQueryProperty.DECISION_DEFINITION_KEY);
  }

  public DecisionDefinitionQuery orderByDecisionDefinitionId() {
    return orderBy(DecisionDefinitionQueryProperty.DECISION_DEFINITION_ID);
  }

  public DecisionDefinitionQuery orderByDecisionDefinitionVersion() {
    return orderBy(DecisionDefinitionQueryProperty.DECISION_DEFINITION_VERSION);
  }

  public DecisionDefinitionQuery orderByDecisionDefinitionName() {
    return orderBy(DecisionDefinitionQueryProperty.DECISION_DEFINITION_NAME);
  }

  public DecisionDefinitionQuery orderByDeploymentId() {
    return orderBy(DecisionDefinitionQueryProperty.DEPLOYMENT_ID);
  }

  public DecisionDefinitionQuery orderByTenantId() {
    return orderBy(DecisionDefinitionQueryProperty.TENANT_ID);
  }

  @Override
  public DecisionDefinitionQuery orderByVersionTag() {
    return orderBy(DecisionDefinitionQueryProperty.VERSION_TAG);
  }

  // results ////////////////////////////////////////////

  @Override
  public long executeCount(CommandContext commandContext) {
    checkQueryOk();
    return commandContext
      .getDecisionDefinitionManager()
      .findDecisionDefinitionCountByQueryCriteria(this);
  }

  @Override
  public List<DecisionDefinition> executeList(CommandContext commandContext, Page page) {
    checkQueryOk();
    return commandContext
      .getDecisionDefinitionManager()
      .findDecisionDefinitionsByQueryCriteria(this, page);
  }

  /**
   * Additionally rejects {@code latestVersion()} combined with criteria that would make
   * "latest" ambiguous or redundant (id, name, nameLike, version, deploymentId).
   */
  @Override
  public void checkQueryOk() {
    super.checkQueryOk();

    // latest() makes only sense when used with key() or keyLike()
    if (latest && ((id != null) || (name != null) || (nameLike != null) || (version != null) || (deploymentId != null))) {
      throw new NotValidException("Calling latest() can only be used in combination with key(String) and keyLike(String)");
    }
  }

  // getters ////////////////////////////////////////////

  public String getId() {
    return id;
  }

  public String[] getIds() {
    return ids;
  }

  public String getCategory() {
    return category;
  }

  public String getCategoryLike() {
    return categoryLike;
  }

  public String getName() {
    return name;
  }

  public String getNameLike() {
    return nameLike;
  }

  public String getDeploymentId() {
    return deploymentId;
  }

  public String getKey() {
    return key;
  }

  public String getKeyLike() {
    return keyLike;
  }

  public String getResourceName() {
    return resourceName;
  }

  public String getResourceNameLike() {
    return resourceNameLike;
  }

  public Integer getVersion() {
    return version;
  }

  public String getVersionTag() {
    return versionTag;
  }

  public String getVersionTagLike() {
    return versionTagLike;
  }

  public boolean isLatest() {
    return latest;
  }

}
package com.cloudbees.jenkins;

import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Util;
import hudson.model.AbstractProject;
import hudson.model.Result;
import hudson.model.Run;
import hudson.model.TaskListener;
import hudson.tasks.BuildStepDescriptor;
import hudson.tasks.BuildStepMonitor;
import hudson.tasks.Notifier;
import hudson.tasks.Publisher;
import hudson.util.ListBoxModel;
import jenkins.tasks.SimpleBuildStep;
import org.eclipse.jgit.lib.ObjectId;
import org.jenkinsci.plugins.github.common.ExpandableMessage;
import org.jenkinsci.plugins.github.util.BuildDataHelper;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.github.GHCommitState;
import org.kohsuke.github.GHRepository;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.DataBoundSetter;

import javax.annotation.Nonnull;
import java.io.IOException;

import static com.cloudbees.jenkins.Messages.GitHubCommitNotifier_DisplayName;
import static com.cloudbees.jenkins.Messages.GitHubCommitNotifier_SettingCommitStatus;
import static com.coravy.hudson.plugins.github.GithubProjectProperty.displayNameFor;
import static com.google.common.base.Objects.firstNonNull;
import static hudson.model.Result.FAILURE;
import static hudson.model.Result.SUCCESS;
import static hudson.model.Result.UNSTABLE;
import static java.lang.String.format;
import static org.apache.commons.lang3.StringUtils.defaultIfEmpty;
import static org.apache.commons.lang3.StringUtils.trimToEmpty;

/**
 * Create commit status notifications on the commits based on the outcome of the build.
 *
 * @author <a href="mailto:nicolas.deloof@gmail.com">Nicolas De Loof</a>
 */
public class GitHubCommitNotifier extends Notifier implements SimpleBuildStep {
    private static final ExpandableMessage DEFAULT_MESSAGE = new ExpandableMessage("");

    private ExpandableMessage statusMessage = DEFAULT_MESSAGE;

    // name of the Result the build should be set to when the notifier itself fails
    private final String resultOnFailure;
    private static final Result[] SUPPORTED_RESULTS = {FAILURE, UNSTABLE, SUCCESS};

    @Restricted(NoExternalUse.class)
    public GitHubCommitNotifier() {
        this(getDefaultResultOnFailure().toString());
    }

    /**
     * @since 1.10
     */
    @DataBoundConstructor
    public GitHubCommitNotifier(String resultOnFailure) {
        this.resultOnFailure = resultOnFailure;
    }

    /**
     * @since 1.14.1
     */
    public ExpandableMessage getStatusMessage() {
        return statusMessage;
    }

    /**
     * @since 1.14.1
     */
    @DataBoundSetter
    public void setStatusMessage(ExpandableMessage statusMessage) {
        this.statusMessage = statusMessage;
    }

    /**
     * @since 1.10
     */
    @Nonnull
    public String getResultOnFailure() {
        return resultOnFailure != null ? resultOnFailure : getDefaultResultOnFailure().toString();
    }

    @Nonnull
    public static Result getDefaultResultOnFailure() {
        return FAILURE;
    }

    @Nonnull
    /*package*/ Result getEffectiveResultOnFailure() {
        // trimToEmpty guards against null; Result.fromString resolves the configured name
        return Result.fromString(trimToEmpty(resultOnFailure));
    }

    public BuildStepMonitor getRequiredMonitorService() {
        return BuildStepMonitor.NONE;
    }

    /**
     * Publishes the commit status; on IOException either rethrows (when configured
     * result-on-failure is FAILURE) or downgrades the build result and logs.
     */
    @Override
    public void perform(Run<?, ?> build, FilePath ws, Launcher launcher, TaskListener listener)
            throws InterruptedException, IOException {
        try {
            updateCommitStatus(build, listener);
        } catch (IOException error) {
            final Result buildResult = getEffectiveResultOnFailure();
            if (buildResult.equals(FAILURE)) {
                throw error;
            } else {
                listener.error(format("[GitHub Commit Notifier] - %s", error.getMessage()));
                listener.getLogger().println(
                        format("[GitHub Commit Notifier] - Build result will be set to %s", buildResult)
                );
                build.setResult(buildResult);
            }
        }
    }

    /**
     * Resolves the commit SHA from the build data, expands the (optional) custom status
     * message, and creates a commit status on every GitHub repository associated with
     * the job.
     */
    private void updateCommitStatus(@Nonnull Run<?, ?> build, @Nonnull TaskListener listener)
            throws InterruptedException, IOException {
        final String sha1 = ObjectId.toString(BuildDataHelper.getCommitSHA1(build));

        StatusResult status = statusFrom(build);
        // custom message (if non-empty after expansion) wins over the generated one
        String message = defaultIfEmpty(
                firstNonNull(statusMessage, DEFAULT_MESSAGE).expandAll(build, listener),
                status.getMsg());
        String contextName = displayNameFor(build.getParent());

        for (GitHubRepositoryName name : GitHubRepositoryNameContributor.parseAssociatedNames(build.getParent())) {
            for (GHRepository repository : name.resolve()) {
                listener.getLogger().println(
                        GitHubCommitNotifier_SettingCommitStatus(repository.getHtmlUrl() + "/commit/" + sha1)
                );
                repository.createCommitStatus(
                        sha1, status.getState(), build.getAbsoluteUrl(),
                        message,
                        contextName
                );
            }
        }
    }

    /** Maps the build result to a GitHub commit state plus a human-readable message. */
    private static StatusResult statusFrom(@Nonnull Run<?, ?> build) {
        Result result = build.getResult();

        // We do not use `build.getDurationString()` because it appends 'and counting' (build is still running)
        String duration = Util.getTimeSpanString(System.currentTimeMillis() - build.getTimeInMillis());

        if (result == null) { // Build is ongoing
            return new StatusResult(
                    GHCommitState.PENDING,
                    Messages.CommitNotifier_Pending(build.getDisplayName())
            );
        } else if (result.isBetterOrEqualTo(SUCCESS)) {
            return new StatusResult(
                    GHCommitState.SUCCESS,
                    Messages.CommitNotifier_Success(build.getDisplayName(), duration)
            );
        } else if (result.isBetterOrEqualTo(UNSTABLE)) {
            // UNSTABLE is reported as FAILURE — GHCommitState has no "unstable" value
            return new StatusResult(
                    GHCommitState.FAILURE,
                    Messages.CommitNotifier_Unstable(build.getDisplayName(), duration)
            );
        } else {
            return new StatusResult(
                    GHCommitState.ERROR,
                    Messages.CommitNotifier_Failed(build.getDisplayName(), duration)
            );
        }
    }

    /** Immutable pair of a GitHub commit state and its accompanying message. */
    private static class StatusResult {
        private final GHCommitState state;
        private final String msg;

        public StatusResult(GHCommitState state, String msg) {
            this.state = state;
            this.msg = msg;
        }

        public GHCommitState getState() {
            return state;
        }

        public String getMsg() {
            return msg;
        }
    }

    @Extension
    public static class DescriptorImpl extends BuildStepDescriptor<Publisher> {
        public boolean isApplicable(Class<? extends AbstractProject> aClass) {
            return true;
        }

        public String getDisplayName() {
            return GitHubCommitNotifier_DisplayName();
        }

        /** Populates the result-on-failure dropdown with the supported Result names. */
        public ListBoxModel doFillResultOnFailureItems() {
            ListBoxModel items = new ListBoxModel();
            for (Result result : SUPPORTED_RESULTS) {
                items.add(result.toString());
            }
            return items;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.weather;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Scanner;

import org.apache.camel.component.weather.geolocation.FreeGeoIpGeoLocationProvider;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriParams;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.ObjectHelper;
import org.apache.commons.httpclient.HttpConnectionManager;

import static org.apache.camel.component.weather.WeatherLanguage.en;
import static org.apache.camel.component.weather.WeatherMode.JSON;
import static org.apache.camel.util.ObjectHelper.notNull;

/**
 * Endpoint configuration for the Camel weather component (Open Weather Map).
 * Fields annotated with {@code @UriParam}/{@code @UriPath} are bound from the
 * endpoint URI by Camel; this class only stores them and builds the query via
 * {@link WeatherQuery}.
 */
@UriParams
public class WeatherConfiguration {

    private final WeatherComponent component;
    private final WeatherQuery weatherQuery;

    @UriPath(description = "The name value is not used.") @Metadata(required = true)
    private String name;
    @UriParam @Metadata(required = true)
    private String appid;
    @UriParam
    private WeatherApi weatherApi;
    @UriParam(label = "filter")
    private String location = "";
    @UriParam(label = "filter")
    private String lat;
    @UriParam(label = "filter")
    private String lon;
    @UriParam(label = "filter")
    private String rightLon;
    @UriParam(label = "filter")
    private String topLat;
    @UriParam(label = "filter")
    private Integer zoom;
    @UriParam
    private String period = "";
    @UriParam(defaultValue = "JSON")
    private WeatherMode mode = JSON;
    @UriParam
    private WeatherUnits units;
    @UriParam(defaultValue = "en")
    private WeatherLanguage language = en;
    @UriParam
    private String headerName;
    @UriParam(label = "filter")
    private String zip;
    @UriParam(label = "filter", javaType = "java.lang.String")
    private List<String> ids;
    @UriParam(label = "filter")
    private Integer cnt;
    @UriParam(label = "proxy")
    private String proxyHost;
    @UriParam(label = "proxy")
    private Integer proxyPort;
    @UriParam(label = "proxy")
    private String proxyAuthMethod;
    @UriParam(label = "proxy", secret = true)
    private String proxyAuthUsername;
    @UriParam(label = "proxy", secret = true)
    private String proxyAuthPassword;
    @UriParam(label = "proxy")
    private String proxyAuthDomain;
    @UriParam(label = "proxy")
    private String proxyAuthHost;
    @UriParam(label = "advanced")
    private HttpConnectionManager httpConnectionManager;
    @UriParam(label = "security") @Metadata(required = true)
    private String geolocationAccessKey;
    @UriParam(label = "security") @Metadata(required = true)
    private String geolocationRequestHostIP;

    public WeatherConfiguration(WeatherComponent component) {
        this.component = notNull(component, "component");
        weatherQuery = new WeatherQuery(this);
        // NOTE(review): geolocationAccessKey is still null at this point — it is only
        // assigned later via setGeolocationAccessKey() after URI-parameter binding.
        // Presumably FreeGeoIpGeoLocationProvider reads the key lazily from this
        // configuration; verify, otherwise the provider is built with a null key.
        FreeGeoIpGeoLocationProvider geoLocationProvider = new FreeGeoIpGeoLocationProvider(component, geolocationAccessKey);
        weatherQuery.setGeoLocationProvider(geoLocationProvider);
    }

    public String getPeriod() {
        return period;
    }

    /**
     * If null, the current weather will be returned, else use values of 5, 7, 14 days.
     * Only the numeric value for the forecast period is actually parsed, so spelling
     * and capitalisation of the time period is up to you (it's ignored).
     */
    public void setPeriod(String period) {
        notNull(period, "period");
        int result = 0;
        try {
            // extract the first run of digits, e.g. "7 days" -> 7
            result = new Scanner(period).useDelimiter("\\D+").nextInt();
        } catch (Exception e) {
            // ignore and fallback the period to be an empty string
        }
        if (result != 0) {
            this.period = "" + result;
        }
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public WeatherMode getMode() {
        return mode;
    }

    /**
     * The output format of the weather data.
     */
    public void setMode(WeatherMode mode) {
        this.mode = notNull(mode, "mode");
    }

    public WeatherUnits getUnits() {
        return units;
    }

    /**
     * The units for temperature measurement.
     */
    public void setUnits(WeatherUnits units) {
        this.units = notNull(units, "units");
    }

    public String getLocation() {
        return location;
    }

    /**
     * If null Camel will try and determine your current location using the
     * geolocation of your ip address, else specify the city,country. For well
     * known city names, Open Weather Map will determine the best fit, but
     * multiple results may be returned. Hence specifying the country as well
     * will return more accurate data. If you specify "current" as the location
     * then the component will try to get the current latitude and longitude and
     * use that to get the weather details. You can use lat and lon options
     * instead of location.
     */
    public void setLocation(String location) {
        this.location = location;
    }

    public String getHeaderName() {
        return headerName;
    }

    /**
     * To store the weather result in this header instead of the message body.
     * This is usable if you want to keep the current message body as-is.
     */
    public void setHeaderName(String headerName) {
        this.headerName = headerName;
    }

    public String getLat() {
        return lat;
    }

    /**
     * Latitude of location. You can use lat and lon options instead of
     * location. For boxed queries this is the bottom latitude.
     */
    public void setLat(String lat) {
        this.lat = lat;
    }

    public String getLon() {
        return lon;
    }

    /**
     * Longitude of location. You can use lat and lon options instead of
     * location. For boxed queries this is the left longitude.
     */
    public void setLon(String lon) {
        this.lon = lon;
    }

    /**
     * APPID used to authenticate the user connected to the API Server
     */
    public void setAppid(String appid) {
        this.appid = appid;
    }

    public String getAppid() {
        return appid;
    }

    // builds the Open Weather Map query URL from the configured options
    String getQuery() throws Exception {
        return weatherQuery.getQuery();
    }

    String getQuery(String location) throws Exception {
        return weatherQuery.getQuery(location);
    }

    public WeatherLanguage getLanguage() {
        return language;
    }

    /**
     * Language of the response.
     */
    public void setLanguage(WeatherLanguage language) {
        this.language = language;
    }

    public String getRightLon() {
        return rightLon;
    }

    /**
     * For boxed queries this is the right longitude. Needs to be used in
     * combination with topLat and zoom.
     */
    public void setRightLon(String rightLon) {
        this.rightLon = rightLon;
    }

    public String getTopLat() {
        return topLat;
    }

    /**
     * For boxed queries this is the top latitude. Needs to be used in
     * combination with rightLon and zoom.
     */
    public void setTopLat(String topLat) {
        this.topLat = topLat;
    }

    public Integer getZoom() {
        return zoom;
    }

    /**
     * For boxed queries this is the zoom. Needs to be used in combination with
     * rightLon and topLat.
     */
    public void setZoom(Integer zoom) {
        this.zoom = zoom;
    }

    public HttpConnectionManager getHttpConnectionManager() {
        return httpConnectionManager;
    }

    /**
     * To use a custom HttpConnectionManager to manage connections
     */
    public void setHttpConnectionManager(HttpConnectionManager httpConnectionManager) {
        this.httpConnectionManager = httpConnectionManager;
    }

    public String getProxyHost() {
        return proxyHost;
    }

    /**
     * The proxy host name
     */
    public void setProxyHost(String proxyHost) {
        this.proxyHost = proxyHost;
    }

    public Integer getProxyPort() {
        return proxyPort;
    }

    /**
     * The proxy port number
     */
    public void setProxyPort(Integer proxyPort) {
        this.proxyPort = proxyPort;
    }

    public String getProxyAuthMethod() {
        return proxyAuthMethod;
    }

    /**
     * Authentication method for proxy, either as Basic, Digest or NTLM.
     */
    public void setProxyAuthMethod(String proxyAuthMethod) {
        this.proxyAuthMethod = proxyAuthMethod;
    }

    public String getProxyAuthUsername() {
        return proxyAuthUsername;
    }

    /**
     * Username for proxy authentication
     */
    public void setProxyAuthUsername(String proxyAuthUsername) {
        this.proxyAuthUsername = proxyAuthUsername;
    }

    public String getProxyAuthPassword() {
        return proxyAuthPassword;
    }

    /**
     * Password for proxy authentication
     */
    public void setProxyAuthPassword(String proxyAuthPassword) {
        this.proxyAuthPassword = proxyAuthPassword;
    }

    public String getProxyAuthDomain() {
        return proxyAuthDomain;
    }

    /**
     * Domain for proxy NTLM authentication
     */
    public void setProxyAuthDomain(String proxyAuthDomain) {
        this.proxyAuthDomain = proxyAuthDomain;
    }

    public String getProxyAuthHost() {
        return proxyAuthHost;
    }

    /**
     * Optional host for proxy NTLM authentication
     */
    public void setProxyAuthHost(String proxyAuthHost) {
        this.proxyAuthHost = proxyAuthHost;
    }

    public String getZip() {
        return zip;
    }

    /**
     * Zip-code, e.g. 94040,us
     */
    public void setZip(String zip) {
        this.zip = zip;
    }

    public List<String> getIds() {
        return ids;
    }

    /**
     * List of ids of city/stations. You can separate multiple ids by comma.
     * Note: repeated calls append to the existing list rather than replacing it.
     */
    public void setIds(String id) {
        if (ids == null) {
            ids = new ArrayList<>();
        }
        // ObjectHelper.createIterator splits comma-separated values
        Iterator<?> it = ObjectHelper.createIterator(id);
        while (it.hasNext()) {
            String myId = (String)it.next();
            ids.add(myId);
        }
    }

    public void setIds(List<String> ids) {
        this.ids = ids;
    }

    public Integer getCnt() {
        return cnt;
    }

    /**
     * Number of results to be found
     */
    public void setCnt(Integer cnt) {
        this.cnt = cnt;
    }

    public WeatherApi getWeatherApi() {
        return weatherApi;
    }

    /**
     * The API to be used (current, forecast/3 hour, forecast daily, station)
     */
    public void setWeatherApi(WeatherApi weatherApi) {
        this.weatherApi = weatherApi;
    }

    public String getGeolocationAccessKey() {
        return geolocationAccessKey;
    }

    /**
     * The geolocation service now needs an accessKey to be used
     */
    public void setGeolocationAccessKey(String geolocationAccessKey) {
        this.geolocationAccessKey = geolocationAccessKey;
    }

    public String getGeolocationRequestHostIP() {
        return geolocationRequestHostIP;
    }

    /**
     * The geolocation service now needs to specify the IP associated to the
     * accessKey you're using
     */
    public void setGeolocationRequestHostIP(String geolocationRequestHostIP) {
        this.geolocationRequestHostIP = geolocationRequestHostIP;
    }
}
/** * Copyright (C) 2006 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.inject; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.inject.internal.Annotations; import com.google.inject.internal.MoreTypes; import java.lang.annotation.Annotation; import java.lang.reflect.Type; /** * Binding key consisting of an injection type and an optional annotation. * Matches the type and annotation at a point of injection. * * <p>For example, {@code Key.get(Service.class, Transactional.class)} will * match: * * <pre> * {@literal @}Inject * public void setService({@literal @}Transactional Service service) { * ... * } * </pre> * * <p>{@code Key} supports generic types via subclassing just like {@link * TypeLiteral}. * * <p>Keys do not differentiate between primitive types (int, char, etc.) and * their correpsonding wrapper types (Integer, Character, etc.). Primitive * types will be replaced with their wrapper types when keys are created. * * @author crazybob@google.com (Bob Lee) */ public class Key<T> { private final AnnotationStrategy annotationStrategy; private final TypeLiteral<T> typeLiteral; private final int hashCode; /** * Constructs a new key. Derives the type from this class's type parameter. * * <p>Clients create an empty anonymous subclass. 
Doing so embeds the type * parameter in the anonymous class's type hierarchy so we can reconstitute it * at runtime despite erasure. * * <p>Example usage for a binding of type {@code Foo} annotated with * {@code @Bar}: * * <p>{@code new Key<Foo>(Bar.class) {}}. */ @SuppressWarnings("unchecked") protected Key(Class<? extends Annotation> annotationType) { this.annotationStrategy = strategyFor(annotationType); this.typeLiteral = (TypeLiteral<T>) TypeLiteral.fromSuperclassTypeParameter(getClass()); this.hashCode = computeHashCode(); } /** * Constructs a new key. Derives the type from this class's type parameter. * * <p>Clients create an empty anonymous subclass. Doing so embeds the type * parameter in the anonymous class's type hierarchy so we can reconstitute it * at runtime despite erasure. * * <p>Example usage for a binding of type {@code Foo} annotated with * {@code @Bar}: * * <p>{@code new Key<Foo>(new Bar()) {}}. */ @SuppressWarnings("unchecked") protected Key(Annotation annotation) { // no usages, not test-covered this.annotationStrategy = strategyFor(annotation); this.typeLiteral = (TypeLiteral<T>) TypeLiteral.fromSuperclassTypeParameter(getClass()); this.hashCode = computeHashCode(); } /** * Constructs a new key. Derives the type from this class's type parameter. * * <p>Clients create an empty anonymous subclass. Doing so embeds the type * parameter in the anonymous class's type hierarchy so we can reconstitute it * at runtime despite erasure. * * <p>Example usage for a binding of type {@code Foo}: * * <p>{@code new Key<Foo>() {}}. */ @SuppressWarnings("unchecked") protected Key() { this.annotationStrategy = NullAnnotationStrategy.INSTANCE; this.typeLiteral = (TypeLiteral<T>) TypeLiteral.fromSuperclassTypeParameter(getClass()); this.hashCode = computeHashCode(); } /** * Unsafe. Constructs a key from a manually specified type. 
*/ @SuppressWarnings("unchecked") private Key(Type type, AnnotationStrategy annotationStrategy) { this.annotationStrategy = annotationStrategy; this.typeLiteral = MoreTypes.canonicalizeForKey((TypeLiteral<T>) TypeLiteral.get(type)); this.hashCode = computeHashCode(); } /** Constructs a key from a manually specified type. */ private Key(TypeLiteral<T> typeLiteral, AnnotationStrategy annotationStrategy) { this.annotationStrategy = annotationStrategy; this.typeLiteral = MoreTypes.canonicalizeForKey(typeLiteral); this.hashCode = computeHashCode(); } private int computeHashCode() { return typeLiteral.hashCode() * 31 + annotationStrategy.hashCode(); } /** * Gets the key type. */ public final TypeLiteral<T> getTypeLiteral() { return typeLiteral; } /** * Gets the annotation type. */ public final Class<? extends Annotation> getAnnotationType() { return annotationStrategy.getAnnotationType(); } /** * Gets the annotation. */ public final Annotation getAnnotation() { return annotationStrategy.getAnnotation(); } boolean hasAnnotationType() { return annotationStrategy.getAnnotationType() != null; } String getAnnotationName() { Annotation annotation = annotationStrategy.getAnnotation(); if (annotation != null) { return annotation.toString(); } // not test-covered return annotationStrategy.getAnnotationType().toString(); } Class<? super T> getRawType() { return typeLiteral.getRawType(); } /** * Gets the key of this key's provider. 
*/ Key<Provider<T>> providerKey() { return ofType(typeLiteral.providerType()); } @Override public final boolean equals(Object o) { if (o == this) { return true; } if (!(o instanceof Key<?>)) { return false; } Key<?> other = (Key<?>) o; return annotationStrategy.equals(other.annotationStrategy) && typeLiteral.equals(other.typeLiteral); } @Override public final int hashCode() { return this.hashCode; } @Override public final String toString() { return "Key[type=" + typeLiteral + ", annotation=" + annotationStrategy + "]"; } /** * Gets a key for an injection type and an annotation strategy. */ static <T> Key<T> get(Class<T> type, AnnotationStrategy annotationStrategy) { return new Key<T>(type, annotationStrategy); } /** * Gets a key for an injection type. */ public static <T> Key<T> get(Class<T> type) { return new Key<T>(type, NullAnnotationStrategy.INSTANCE); } /** * Gets a key for an injection type and an annotation type. */ public static <T> Key<T> get(Class<T> type, Class<? extends Annotation> annotationType) { return new Key<T>(type, strategyFor(annotationType)); } /** * Gets a key for an injection type and an annotation. */ public static <T> Key<T> get(Class<T> type, Annotation annotation) { return new Key<T>(type, strategyFor(annotation)); } /** * Gets a key for an injection type. */ public static Key<?> get(Type type) { return new Key<Object>(type, NullAnnotationStrategy.INSTANCE); } /** * Gets a key for an injection type and an annotation type. */ public static Key<?> get(Type type, Class<? extends Annotation> annotationType) { return new Key<Object>(type, strategyFor(annotationType)); } /** * Gets a key for an injection type and an annotation. */ public static Key<?> get(Type type, Annotation annotation) { return new Key<Object>(type, strategyFor(annotation)); } /** * Gets a key for an injection type. 
*/ public static <T> Key<T> get(TypeLiteral<T> typeLiteral) { return new Key<T>(typeLiteral, NullAnnotationStrategy.INSTANCE); } /** * Gets a key for an injection type and an annotation type. */ public static <T> Key<T> get(TypeLiteral<T> typeLiteral, Class<? extends Annotation> annotationType) { return new Key<T>(typeLiteral, strategyFor(annotationType)); } /** * Gets a key for an injection type and an annotation. */ public static <T> Key<T> get(TypeLiteral<T> typeLiteral, Annotation annotation) { return new Key<T>(typeLiteral, strategyFor(annotation)); } /** * Returns a new key of the specified type with the same annotation as this * key. * * @since 3.0 */ public <T> Key<T> ofType(Class<T> type) { return new Key<T>(type, annotationStrategy); } /** * Returns a new key of the specified type with the same annotation as this * key. * * @since 3.0 */ public Key<?> ofType(Type type) { return new Key<Object>(type, annotationStrategy); } /** * Returns a new key of the specified type with the same annotation as this * key. * * @since 3.0 */ public <T> Key<T> ofType(TypeLiteral<T> type) { return new Key<T>(type, annotationStrategy); } /** * Returns true if this key has annotation attributes. * * @since 3.0 */ public boolean hasAttributes() { return annotationStrategy.hasAttributes(); } /** * Returns this key without annotation attributes, i.e. with only the * annotation type. * * @since 3.0 */ public Key<T> withoutAttributes() { return new Key<T>(typeLiteral, annotationStrategy.withoutAttributes()); } interface AnnotationStrategy { Annotation getAnnotation(); Class<? extends Annotation> getAnnotationType(); boolean hasAttributes(); AnnotationStrategy withoutAttributes(); } /** * Gets the strategy for an annotation. */ static AnnotationStrategy strategyFor(Annotation annotation) { checkNotNull(annotation, "annotation"); Class<? 
extends Annotation> annotationType = annotation.annotationType(); ensureRetainedAtRuntime(annotationType); ensureIsBindingAnnotation(annotationType); if (Annotations.isMarker(annotationType)) { return new AnnotationTypeStrategy(annotationType, annotation); } return new AnnotationInstanceStrategy(Annotations.canonicalizeIfNamed(annotation)); } /** * Gets the strategy for an annotation type. */ static AnnotationStrategy strategyFor(Class<? extends Annotation> annotationType) { checkNotNull(annotationType, "annotation type"); ensureRetainedAtRuntime(annotationType); ensureIsBindingAnnotation(annotationType); return new AnnotationTypeStrategy(Annotations.canonicalizeIfNamed(annotationType), null); } private static void ensureRetainedAtRuntime( Class<? extends Annotation> annotationType) { checkArgument(Annotations.isRetainedAtRuntime(annotationType), "%s is not retained at runtime. Please annotate it with @Retention(RUNTIME).", annotationType.getName()); } private static void ensureIsBindingAnnotation(Class<? extends Annotation> annotationType) { checkArgument(Annotations.isBindingAnnotation(annotationType), "%s is not a binding annotation. Please annotate it with @BindingAnnotation.", annotationType.getName()); } static enum NullAnnotationStrategy implements AnnotationStrategy { INSTANCE; public boolean hasAttributes() { return false; } public AnnotationStrategy withoutAttributes() { throw new UnsupportedOperationException("Key already has no attributes."); } public Annotation getAnnotation() { return null; } public Class<? 
extends Annotation> getAnnotationType() { return null; } @Override public String toString() { return "[none]"; } } // this class not test-covered static class AnnotationInstanceStrategy implements AnnotationStrategy { final Annotation annotation; AnnotationInstanceStrategy(Annotation annotation) { this.annotation = checkNotNull(annotation, "annotation"); } public boolean hasAttributes() { return true; } public AnnotationStrategy withoutAttributes() { return new AnnotationTypeStrategy(getAnnotationType(), annotation); } public Annotation getAnnotation() { return annotation; } public Class<? extends Annotation> getAnnotationType() { return annotation.annotationType(); } @Override public boolean equals(Object o) { if (!(o instanceof AnnotationInstanceStrategy)) { return false; } AnnotationInstanceStrategy other = (AnnotationInstanceStrategy) o; return annotation.equals(other.annotation); } @Override public int hashCode() { return annotation.hashCode(); } @Override public String toString() { return annotation.toString(); } } static class AnnotationTypeStrategy implements AnnotationStrategy { final Class<? extends Annotation> annotationType; // Keep the instance around if we have it so the client can request it. final Annotation annotation; AnnotationTypeStrategy(Class<? extends Annotation> annotationType, Annotation annotation) { this.annotationType = checkNotNull(annotationType, "annotation type"); this.annotation = annotation; } public boolean hasAttributes() { return false; } public AnnotationStrategy withoutAttributes() { throw new UnsupportedOperationException("Key already has no attributes."); } public Annotation getAnnotation() { return annotation; } public Class<? 
extends Annotation> getAnnotationType() { return annotationType; } @Override public boolean equals(Object o) { if (!(o instanceof AnnotationTypeStrategy)) { return false; } AnnotationTypeStrategy other = (AnnotationTypeStrategy) o; return annotationType.equals(other.annotationType); } @Override public int hashCode() { return annotationType.hashCode(); } @Override public String toString() { return "@" + annotationType.getName(); } } }
// Copyright (c) 2014, Facebook, Inc.  All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.

package org.rocksdb;

import java.util.Properties;

/**
 * ColumnFamilyOptions to control the behavior of a database.  It will be used
 * during the creation of a {@link org.rocksdb.RocksDB} (i.e., RocksDB.open()).
 *
 * <p>This class is a thin JNI wrapper: every setter/getter delegates to a
 * {@code private native} method operating on the C++ object referenced by
 * {@code nativeHandle_} (inherited from {@link RocksObject}). Setters return
 * {@code this} so calls can be chained fluently.</p>
 *
 * If {@link #dispose()} function is not called, then it will be GC'd
 * automatically and native resources will be released as part of the process.
 */
public class ColumnFamilyOptions extends RocksObject
    implements ColumnFamilyOptionsInterface {

  // The native library must be loaded before any native method can be bound.
  static {
    RocksDB.loadLibrary();
  }

  /**
   * Construct ColumnFamilyOptions.
   *
   * This constructor will create (by allocating a block of memory)
   * an {@code rocksdb::DBOptions} in the c++ side.
   */
  public ColumnFamilyOptions() {
    super();
    newColumnFamilyOptions();
  }

  /**
   * <p>Method to get a options instance by using pre-configured
   * property values. If one or many values are undefined in
   * the context of RocksDB the method will return a null
   * value.</p>
   *
   * <p><strong>Note</strong>: Property keys can be derived from
   * getter methods within the options class. Example: the method
   * {@code writeBufferSize()} has a property key:
   * {@code write_buffer_size}.</p>
   *
   * @param properties {@link java.util.Properties} instance.
   *
   * @return {@link org.rocksdb.ColumnFamilyOptions instance}
   *     or null.
   *
   * @throws java.lang.IllegalArgumentException if null or empty
   *     {@link Properties} instance is passed to the method call.
   */
  public static ColumnFamilyOptions getColumnFamilyOptionsFromProps(
      final Properties properties) {
    if (properties == null || properties.size() == 0) {
      throw new IllegalArgumentException(
          "Properties value must contain at least one value.");
    }
    ColumnFamilyOptions columnFamilyOptions = null;
    // Flatten the properties into the "key=value;key=value;" option-string
    // form that the native parser expects.
    StringBuilder stringBuilder = new StringBuilder();
    for (final String name : properties.stringPropertyNames()) {
      stringBuilder.append(name);
      stringBuilder.append("=");
      stringBuilder.append(properties.getProperty(name));
      stringBuilder.append(";");
    }
    // A handle of 0 means the native side could not parse the option string;
    // in that case null is returned rather than an exception being thrown.
    long handle = getColumnFamilyOptionsFromProps(
        stringBuilder.toString());
    if (handle != 0) {
      columnFamilyOptions = new ColumnFamilyOptions(handle);
    }
    return columnFamilyOptions;
  }

  @Override
  public ColumnFamilyOptions optimizeForPointLookup(
      final long blockCacheSizeMb) {
    optimizeForPointLookup(nativeHandle_, blockCacheSizeMb);
    return this;
  }

  @Override
  public ColumnFamilyOptions optimizeLevelStyleCompaction() {
    optimizeLevelStyleCompaction(nativeHandle_,
        DEFAULT_COMPACTION_MEMTABLE_MEMORY_BUDGET);
    return this;
  }

  @Override
  public ColumnFamilyOptions optimizeLevelStyleCompaction(
      final long memtableMemoryBudget) {
    optimizeLevelStyleCompaction(nativeHandle_, memtableMemoryBudget);
    return this;
  }

  @Override
  public ColumnFamilyOptions optimizeUniversalStyleCompaction() {
    optimizeUniversalStyleCompaction(nativeHandle_,
        DEFAULT_COMPACTION_MEMTABLE_MEMORY_BUDGET);
    return this;
  }

  @Override
  public ColumnFamilyOptions optimizeUniversalStyleCompaction(
      final long memtableMemoryBudget) {
    optimizeUniversalStyleCompaction(nativeHandle_, memtableMemoryBudget);
    return this;
  }

  @Override
  public ColumnFamilyOptions setComparator(final BuiltinComparator builtinComparator) {
    assert(isInitialized());
    // Built-in comparators are identified on the native side by ordinal.
    setComparatorHandle(nativeHandle_, builtinComparator.ordinal());
    return this;
  }

  @Override
  public ColumnFamilyOptions setComparator(
      final AbstractComparator<? extends AbstractSlice<?>> comparator) {
    assert (isInitialized());
    setComparatorHandle(nativeHandle_, comparator.nativeHandle_);
    // Keep a Java-side reference; the native side only stores the raw handle.
    comparator_ = comparator;
    return this;
  }

  @Override
  public ColumnFamilyOptions setMergeOperatorName(final String name) {
    assert (isInitialized());
    if (name == null) {
      throw new IllegalArgumentException(
          "Merge operator name must not be null.");
    }
    setMergeOperatorName(nativeHandle_, name);
    return this;
  }

  @Override
  public ColumnFamilyOptions setMergeOperator(final MergeOperator mergeOperator) {
    setMergeOperator(nativeHandle_, mergeOperator.newMergeOperatorHandle());
    return this;
  }

  @Override
  public ColumnFamilyOptions setWriteBufferSize(final long writeBufferSize) {
    assert(isInitialized());
    setWriteBufferSize(nativeHandle_, writeBufferSize);
    return this;
  }

  @Override
  public long writeBufferSize() {
    assert(isInitialized());
    return writeBufferSize(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMaxWriteBufferNumber(
      final int maxWriteBufferNumber) {
    assert(isInitialized());
    setMaxWriteBufferNumber(nativeHandle_, maxWriteBufferNumber);
    return this;
  }

  @Override
  public int maxWriteBufferNumber() {
    assert(isInitialized());
    return maxWriteBufferNumber(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMinWriteBufferNumberToMerge(
      final int minWriteBufferNumberToMerge) {
    setMinWriteBufferNumberToMerge(nativeHandle_, minWriteBufferNumberToMerge);
    return this;
  }

  @Override
  public int minWriteBufferNumberToMerge() {
    return minWriteBufferNumberToMerge(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions useFixedLengthPrefixExtractor(final int n) {
    assert(isInitialized());
    useFixedLengthPrefixExtractor(nativeHandle_, n);
    return this;
  }

  @Override
  public ColumnFamilyOptions setCompressionType(final CompressionType compressionType) {
    setCompressionType(nativeHandle_, compressionType.getValue());
    return this;
  }

  @Override
  public CompressionType compressionType() {
    // The native byte value is used to index the Java enum's values() array;
    // this relies on the two sides agreeing on the numbering.
    return CompressionType.values()[compressionType(nativeHandle_)];
  }

  @Override
  public ColumnFamilyOptions setNumLevels(final int numLevels) {
    setNumLevels(nativeHandle_, numLevels);
    return this;
  }

  @Override
  public int numLevels() {
    return numLevels(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setLevelZeroFileNumCompactionTrigger(
      final int numFiles) {
    setLevelZeroFileNumCompactionTrigger(
        nativeHandle_, numFiles);
    return this;
  }

  @Override
  public int levelZeroFileNumCompactionTrigger() {
    return levelZeroFileNumCompactionTrigger(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setLevelZeroSlowdownWritesTrigger(
      final int numFiles) {
    setLevelZeroSlowdownWritesTrigger(nativeHandle_, numFiles);
    return this;
  }

  @Override
  public int levelZeroSlowdownWritesTrigger() {
    return levelZeroSlowdownWritesTrigger(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setLevelZeroStopWritesTrigger(final int numFiles) {
    setLevelZeroStopWritesTrigger(nativeHandle_, numFiles);
    return this;
  }

  @Override
  public int levelZeroStopWritesTrigger() {
    return levelZeroStopWritesTrigger(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMaxMemCompactionLevel(
      final int maxMemCompactionLevel) {
    setMaxMemCompactionLevel(nativeHandle_, maxMemCompactionLevel);
    return this;
  }

  @Override
  public int maxMemCompactionLevel() {
    return maxMemCompactionLevel(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setTargetFileSizeBase(
      final long targetFileSizeBase) {
    setTargetFileSizeBase(nativeHandle_, targetFileSizeBase);
    return this;
  }

  @Override
  public long targetFileSizeBase() {
    return targetFileSizeBase(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setTargetFileSizeMultiplier(
      final int multiplier) {
    setTargetFileSizeMultiplier(nativeHandle_, multiplier);
    return this;
  }

  @Override
  public int targetFileSizeMultiplier() {
    return targetFileSizeMultiplier(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMaxBytesForLevelBase(
      final long maxBytesForLevelBase) {
    setMaxBytesForLevelBase(nativeHandle_, maxBytesForLevelBase);
    return this;
  }

  @Override
  public long maxBytesForLevelBase() {
    return maxBytesForLevelBase(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setLevelCompactionDynamicLevelBytes(
      final boolean enableLevelCompactionDynamicLevelBytes) {
    setLevelCompactionDynamicLevelBytes(nativeHandle_,
        enableLevelCompactionDynamicLevelBytes);
    return this;
  }

  @Override
  public boolean levelCompactionDynamicLevelBytes() {
    return levelCompactionDynamicLevelBytes(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMaxBytesForLevelMultiplier(
      final int multiplier) {
    setMaxBytesForLevelMultiplier(nativeHandle_, multiplier);
    return this;
  }

  @Override
  public int maxBytesForLevelMultiplier() {
    return maxBytesForLevelMultiplier(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setExpandedCompactionFactor(
      final int expandedCompactionFactor) {
    setExpandedCompactionFactor(nativeHandle_, expandedCompactionFactor);
    return this;
  }

  @Override
  public int expandedCompactionFactor() {
    return expandedCompactionFactor(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setSourceCompactionFactor(
      final int sourceCompactionFactor) {
    setSourceCompactionFactor(nativeHandle_, sourceCompactionFactor);
    return this;
  }

  @Override
  public int sourceCompactionFactor() {
    return sourceCompactionFactor(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMaxGrandparentOverlapFactor(
      final int maxGrandparentOverlapFactor) {
    setMaxGrandparentOverlapFactor(nativeHandle_, maxGrandparentOverlapFactor);
    return this;
  }

  @Override
  public int maxGrandparentOverlapFactor() {
    return maxGrandparentOverlapFactor(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setSoftRateLimit(
      final double softRateLimit) {
    setSoftRateLimit(nativeHandle_, softRateLimit);
    return this;
  }

  @Override
  public double softRateLimit() {
    return softRateLimit(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setHardRateLimit(
      final double hardRateLimit) {
    setHardRateLimit(nativeHandle_, hardRateLimit);
    return this;
  }

  @Override
  public double hardRateLimit() {
    return hardRateLimit(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setRateLimitDelayMaxMilliseconds(
      final int rateLimitDelayMaxMilliseconds) {
    setRateLimitDelayMaxMilliseconds(
        nativeHandle_, rateLimitDelayMaxMilliseconds);
    return this;
  }

  @Override
  public int rateLimitDelayMaxMilliseconds() {
    return rateLimitDelayMaxMilliseconds(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setArenaBlockSize(
      final long arenaBlockSize) {
    setArenaBlockSize(nativeHandle_, arenaBlockSize);
    return this;
  }

  @Override
  public long arenaBlockSize() {
    return arenaBlockSize(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setDisableAutoCompactions(
      final boolean disableAutoCompactions) {
    setDisableAutoCompactions(nativeHandle_, disableAutoCompactions);
    return this;
  }

  @Override
  public boolean disableAutoCompactions() {
    return disableAutoCompactions(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setPurgeRedundantKvsWhileFlush(
      final boolean purgeRedundantKvsWhileFlush) {
    setPurgeRedundantKvsWhileFlush(
        nativeHandle_, purgeRedundantKvsWhileFlush);
    return this;
  }

  @Override
  public boolean purgeRedundantKvsWhileFlush() {
    return purgeRedundantKvsWhileFlush(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setCompactionStyle(
      final CompactionStyle compactionStyle) {
    setCompactionStyle(nativeHandle_, compactionStyle.getValue());
    return this;
  }

  @Override
  public CompactionStyle compactionStyle() {
    // Same byte-to-enum-index mapping as compressionType() above.
    return CompactionStyle.values()[compactionStyle(nativeHandle_)];
  }

  @Override
  public ColumnFamilyOptions setVerifyChecksumsInCompaction(
      final boolean verifyChecksumsInCompaction) {
    setVerifyChecksumsInCompaction(
        nativeHandle_, verifyChecksumsInCompaction);
    return this;
  }

  @Override
  public boolean verifyChecksumsInCompaction() {
    return verifyChecksumsInCompaction(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setFilterDeletes(
      final boolean filterDeletes) {
    setFilterDeletes(nativeHandle_, filterDeletes);
    return this;
  }

  @Override
  public boolean filterDeletes() {
    return filterDeletes(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMaxSequentialSkipInIterations(
      final long maxSequentialSkipInIterations) {
    setMaxSequentialSkipInIterations(nativeHandle_, maxSequentialSkipInIterations);
    return this;
  }

  @Override
  public long maxSequentialSkipInIterations() {
    return maxSequentialSkipInIterations(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMemTableConfig(
      final MemTableConfig config) {
    // Keep a Java-side reference to the config object as well as installing
    // the factory handle on the native side.
    memTableConfig_ = config;
    setMemTableFactory(nativeHandle_, config.newMemTableFactoryHandle());
    return this;
  }

  @Override
  public String memTableFactoryName() {
    assert(isInitialized());
    return memTableFactoryName(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setTableFormatConfig(
      final TableFormatConfig config) {
    // Same pattern as setMemTableConfig: Java reference + native handle.
    tableFormatConfig_ = config;
    setTableFactory(nativeHandle_, config.newTableFactoryHandle());
    return this;
  }

  @Override
  public String tableFactoryName() {
    assert(isInitialized());
    return tableFactoryName(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setInplaceUpdateSupport(
      final boolean inplaceUpdateSupport) {
    setInplaceUpdateSupport(nativeHandle_, inplaceUpdateSupport);
    return this;
  }

  @Override
  public boolean inplaceUpdateSupport() {
    return inplaceUpdateSupport(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setInplaceUpdateNumLocks(
      final long inplaceUpdateNumLocks) {
    setInplaceUpdateNumLocks(nativeHandle_, inplaceUpdateNumLocks);
    return this;
  }

  @Override
  public long inplaceUpdateNumLocks() {
    return inplaceUpdateNumLocks(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMemtablePrefixBloomBits(
      final int memtablePrefixBloomBits) {
    setMemtablePrefixBloomBits(nativeHandle_, memtablePrefixBloomBits);
    return this;
  }

  @Override
  public int memtablePrefixBloomBits() {
    return memtablePrefixBloomBits(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMemtablePrefixBloomProbes(
      final int memtablePrefixBloomProbes) {
    setMemtablePrefixBloomProbes(nativeHandle_, memtablePrefixBloomProbes);
    return this;
  }

  @Override
  public int memtablePrefixBloomProbes() {
    return memtablePrefixBloomProbes(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setBloomLocality(int bloomLocality) {
    setBloomLocality(nativeHandle_, bloomLocality);
    return this;
  }

  @Override
  public int bloomLocality() {
    return bloomLocality(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMaxSuccessiveMerges(
      final long maxSuccessiveMerges) {
    setMaxSuccessiveMerges(nativeHandle_, maxSuccessiveMerges);
    return this;
  }

  @Override
  public long maxSuccessiveMerges() {
    return maxSuccessiveMerges(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setMinPartialMergeOperands(
      final int minPartialMergeOperands) {
    setMinPartialMergeOperands(nativeHandle_, minPartialMergeOperands);
    return this;
  }

  @Override
  public int minPartialMergeOperands() {
    return minPartialMergeOperands(nativeHandle_);
  }

  @Override
  public ColumnFamilyOptions setOptimizeFiltersForHits(
      final boolean optimizeFiltersForHits) {
    setOptimizeFiltersForHits(nativeHandle_, optimizeFiltersForHits);
    return this;
  }

  @Override
  public boolean optimizeFiltersForHits() {
    return optimizeFiltersForHits(nativeHandle_);
  }

  /**
   * Release the memory allocated for the current instance
   * in the c++ side.
   */
  @Override protected void disposeInternal() {
    assert(isInitialized());
    disposeInternal(nativeHandle_);
  }

  /**
   * <p>Private constructor to be used by
   * {@link #getColumnFamilyOptionsFromProps(java.util.Properties)}</p>
   *
   * <p>Adopts an already-created native object instead of allocating one.</p>
   *
   * @param handle native handle to ColumnFamilyOptions instance.
   */
  private ColumnFamilyOptions(final long handle) {
    super();
    nativeHandle_ = handle;
  }

  // --- Native method declarations. Signatures must match the JNI layer
  // --- exactly; each mirrors the public method of the same name above.

  private static native long getColumnFamilyOptionsFromProps(
      String optString);

  private native void newColumnFamilyOptions();
  private native void disposeInternal(long handle);

  private native void optimizeForPointLookup(long handle,
      long blockCacheSizeMb);
  private native void optimizeLevelStyleCompaction(long handle,
      long memtableMemoryBudget);
  private native void optimizeUniversalStyleCompaction(long handle,
      long memtableMemoryBudget);
  private native void setComparatorHandle(long handle, int builtinComparator);
  private native void setComparatorHandle(long optHandle, long comparatorHandle);
  private native void setMergeOperatorName(
      long handle, String name);
  private native void setMergeOperator(
      long handle, long mergeOperatorHandle);
  private native void setWriteBufferSize(long handle, long writeBufferSize)
      throws IllegalArgumentException;
  private native long writeBufferSize(long handle);
  private native void setMaxWriteBufferNumber(
      long handle, int maxWriteBufferNumber);
  private native int maxWriteBufferNumber(long handle);
  private native void setMinWriteBufferNumberToMerge(
      long handle, int minWriteBufferNumberToMerge);
  private native int minWriteBufferNumberToMerge(long handle);
  private native void setCompressionType(long handle, byte compressionType);
  private native byte compressionType(long handle);
  private native void useFixedLengthPrefixExtractor(
      long handle, int prefixLength);
  private native void setNumLevels(
      long handle, int numLevels);
  private native int numLevels(long handle);
  private native void setLevelZeroFileNumCompactionTrigger(
      long handle, int numFiles);
  private native int levelZeroFileNumCompactionTrigger(long handle);
  private native void setLevelZeroSlowdownWritesTrigger(
      long handle, int numFiles);
  private native int levelZeroSlowdownWritesTrigger(long handle);
  private native void setLevelZeroStopWritesTrigger(
      long handle, int numFiles);
  private native int levelZeroStopWritesTrigger(long handle);
  private native void setMaxMemCompactionLevel(
      long handle, int maxMemCompactionLevel);
  private native int maxMemCompactionLevel(long handle);
  private native void setTargetFileSizeBase(
      long handle, long targetFileSizeBase);
  private native long targetFileSizeBase(long handle);
  private native void setTargetFileSizeMultiplier(
      long handle, int multiplier);
  private native int targetFileSizeMultiplier(long handle);
  private native void setMaxBytesForLevelBase(
      long handle, long maxBytesForLevelBase);
  private native long maxBytesForLevelBase(long handle);
  private native void setLevelCompactionDynamicLevelBytes(
      long handle, boolean enableLevelCompactionDynamicLevelBytes);
  private native boolean levelCompactionDynamicLevelBytes(
      long handle);
  private native void setMaxBytesForLevelMultiplier(
      long handle, int multiplier);
  private native int maxBytesForLevelMultiplier(long handle);
  private native void setExpandedCompactionFactor(
      long handle, int expandedCompactionFactor);
  private native int expandedCompactionFactor(long handle);
  private native void setSourceCompactionFactor(
      long handle, int sourceCompactionFactor);
  private native int sourceCompactionFactor(long handle);
  private native void setMaxGrandparentOverlapFactor(
      long handle, int maxGrandparentOverlapFactor);
  private native int maxGrandparentOverlapFactor(long handle);
  private native void setSoftRateLimit(
      long handle, double softRateLimit);
  private native double softRateLimit(long handle);
  private native void setHardRateLimit(
      long handle, double hardRateLimit);
  private native double hardRateLimit(long handle);
  private native void setRateLimitDelayMaxMilliseconds(
      long handle, int rateLimitDelayMaxMilliseconds);
  private native int rateLimitDelayMaxMilliseconds(long handle);
  private native void setArenaBlockSize(
      long handle, long arenaBlockSize)
      throws IllegalArgumentException;
  private native long arenaBlockSize(long handle);
  private native void setDisableAutoCompactions(
      long handle, boolean disableAutoCompactions);
  private native boolean disableAutoCompactions(long handle);
  private native void setCompactionStyle(long handle, byte compactionStyle);
  private native byte compactionStyle(long handle);
  private native void setPurgeRedundantKvsWhileFlush(
      long handle, boolean purgeRedundantKvsWhileFlush);
  private native boolean purgeRedundantKvsWhileFlush(long handle);
  private native void setVerifyChecksumsInCompaction(
      long handle, boolean verifyChecksumsInCompaction);
  private native boolean verifyChecksumsInCompaction(long handle);
  private native void setFilterDeletes(
      long handle, boolean filterDeletes);
  private native boolean filterDeletes(long handle);
  private native void setMaxSequentialSkipInIterations(
      long handle, long maxSequentialSkipInIterations);
  private native long maxSequentialSkipInIterations(long handle);
  private native void setMemTableFactory(long handle, long factoryHandle);
  private native String memTableFactoryName(long handle);
  private native void setTableFactory(long handle, long factoryHandle);
  private native String tableFactoryName(long handle);
  private native void setInplaceUpdateSupport(
      long handle, boolean inplaceUpdateSupport);
  private native boolean inplaceUpdateSupport(long handle);
  private native void setInplaceUpdateNumLocks(
      long handle, long inplaceUpdateNumLocks)
      throws IllegalArgumentException;
  private native long inplaceUpdateNumLocks(long handle);
  private native void setMemtablePrefixBloomBits(
      long handle, int memtablePrefixBloomBits);
  private native int memtablePrefixBloomBits(long handle);
  private native void setMemtablePrefixBloomProbes(
      long handle, int memtablePrefixBloomProbes);
  private native int memtablePrefixBloomProbes(long handle);
  private native void setBloomLocality(
      long handle, int bloomLocality);
  private native int bloomLocality(long handle);
  private native void setMaxSuccessiveMerges(
      long handle, long maxSuccessiveMerges)
      throws IllegalArgumentException;
  private native long maxSuccessiveMerges(long handle);
  private native void setMinPartialMergeOperands(
      long handle, int minPartialMergeOperands);
  private native int minPartialMergeOperands(long handle);
  private native void setOptimizeFiltersForHits(long handle,
      boolean optimizeFiltersForHits);
  private native boolean optimizeFiltersForHits(long handle);

  // Java-side references retained alongside the native handles installed via
  // the corresponding setters (see setMemTableConfig / setTableFormatConfig /
  // setComparator).
  MemTableConfig memTableConfig_;
  TableFormatConfig tableFormatConfig_;
  AbstractComparator<? extends AbstractSlice<?>> comparator_;
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.dataformat.bindy.fixed.marshall.simple;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.camel.CamelExecutionException;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.dataformat.bindy.annotation.DataField;
import org.apache.camel.dataformat.bindy.annotation.FixedLengthRecord;
import org.apache.camel.dataformat.bindy.fixed.BindyFixedLengthDataFormat;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.Test;

import static org.apache.camel.test.junit5.TestSupport.assertIsInstanceOf;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;

/**
 * Verifies that marshalling a fixed-length Bindy record fails when a field
 * value is longer than its declared {@code length} and {@code clip} is NOT
 * enabled for that field: the firstName value is 13 characters but the field
 * only allows 9, so marshalling must raise an IllegalArgumentException rather
 * than silently truncating.
 */
public class BindySimpleFixedLengthMarshallWithNoClipTest extends CamelTestSupport {

    // Accumulates the model maps fed into the route; populated by generateModel().
    private List<Map<String, Object>> models = new ArrayList<>();

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // Marshal Order objects using the fixed-length Bindy format.
                BindyFixedLengthDataFormat bindy = new BindyFixedLengthDataFormat(Order.class);

                from("direct:start")
                    .marshal(bindy)
                    .to("mock:result");
            }
        };
    }

    @Test
    public void testMarshallMessage() throws Exception {
        List<Map<String, Object>> model = generateModel();

        // Sending the over-long firstName must fail; the Camel wrapper
        // exception carries the Bindy IllegalArgumentException as its cause.
        Exception ex = assertThrows(CamelExecutionException.class,
            () -> template.sendBody("direct:start", model));
        IllegalArgumentException cause = assertIsInstanceOf(IllegalArgumentException.class, ex.getCause());
        assertEquals("Length for the firstName must not be larger than allowed, was: 13, allowed: 9",
            cause.getMessage());
    }

    /**
     * Builds the single-Order model that the route marshals. The firstName
     * value "Madame de Sol" (13 chars) deliberately exceeds the 9-character
     * limit declared on the field.
     */
    public List<Map<String, Object>> generateModel() {
        Map<String, Object> modelObjects = new HashMap<>();

        Order order = new Order();
        order.setOrderNr(10);
        order.setOrderType("BUY");
        order.setClientNr("A98");
        order.setFirstName("Madame de Sol");
        order.setLastName("M");
        order.setAmount(new BigDecimal("2500.45"));
        order.setInstrumentCode("ISIN");
        order.setInstrumentNumber("XD12345678");
        order.setInstrumentType("Share");
        order.setCurrency("USD");
        Calendar calendar = new GregorianCalendar();
        calendar.set(2009, 7, 1);
        order.setOrderDate(calendar.getTime());
        // Bindy expects the model keyed by the record class's fully-qualified name.
        modelObjects.put(order.getClass().getName(), order);
        models.add(modelObjects);
        return models;
    }

    /**
     * Fixed-length record under test. Note that clientNr uses {@code clip = true}
     * while firstName does NOT — firstName is the field expected to fail.
     */
    @FixedLengthRecord(length = 65, paddingChar = ' ')
    public static class Order {

        @DataField(pos = 1, length = 2)
        private int orderNr;

        @DataField(pos = 3, length = 2, clip = true)
        private String clientNr;

        @DataField(pos = 5, length = 9)
        private String firstName;

        @DataField(pos = 14, length = 5, align = "L")
        private String lastName;

        @DataField(pos = 19, length = 4)
        private String instrumentCode;

        @DataField(pos = 23, length = 10)
        private String instrumentNumber;

        @DataField(pos = 33, length = 3)
        private String orderType;

        @DataField(pos = 36, length = 5)
        private String instrumentType;

        @DataField(pos = 41, precision = 2, length = 12, paddingChar = '0')
        private BigDecimal amount;

        @DataField(pos = 53, length = 3)
        private String currency;

        @DataField(pos = 56, length = 10, pattern = "dd-MM-yyyy")
        private Date orderDate;

        public int getOrderNr() {
            return orderNr;
        }

        public void setOrderNr(int orderNr) {
            this.orderNr = orderNr;
        }

        public String getClientNr() {
            return clientNr;
        }

        public void setClientNr(String clientNr) {
            this.clientNr = clientNr;
        }

        public String getFirstName() {
            return firstName;
        }

        public void setFirstName(String firstName) {
            this.firstName = firstName;
        }

        public String getLastName() {
            return lastName;
        }

        public void setLastName(String lastName) {
            this.lastName = lastName;
        }

        public String getInstrumentCode() {
            return instrumentCode;
        }

        public void setInstrumentCode(String instrumentCode) {
            this.instrumentCode = instrumentCode;
        }

        public String getInstrumentNumber() {
            return instrumentNumber;
        }

        public void setInstrumentNumber(String instrumentNumber) {
            this.instrumentNumber = instrumentNumber;
        }

        public String getOrderType() {
            return orderType;
        }

        public void setOrderType(String orderType) {
            this.orderType = orderType;
        }

        public String getInstrumentType() {
            return instrumentType;
        }

        public void setInstrumentType(String instrumentType) {
            this.instrumentType = instrumentType;
        }

        public BigDecimal getAmount() {
            return amount;
        }

        public void setAmount(BigDecimal amount) {
            this.amount = amount;
        }

        public String getCurrency() {
            return currency;
        }

        public void setCurrency(String currency) {
            this.currency = currency;
        }

        public Date getOrderDate() {
            return orderDate;
        }

        public void setOrderDate(Date orderDate) {
            this.orderDate = orderDate;
        }

        @Override
        public String toString() {
            return "Model : " + Order.class.getName() + " : " + this.orderNr + ", " + this.orderType + ", "
                   + String.valueOf(this.amount) + ", " + this.instrumentCode + ", " + this.instrumentNumber + ", "
                   + this.instrumentType + ", " + this.currency + ", " + this.clientNr + ", " + this.firstName + ", "
                   + this.lastName + ", " + String.valueOf(this.orderDate);
        }
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v10/errors/media_file_error.proto package com.google.ads.googleads.v10.errors; /** * <pre> * Container for enum describing possible media file errors. * </pre> * * Protobuf type {@code google.ads.googleads.v10.errors.MediaFileErrorEnum} */ public final class MediaFileErrorEnum extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v10.errors.MediaFileErrorEnum) MediaFileErrorEnumOrBuilder { private static final long serialVersionUID = 0L; // Use MediaFileErrorEnum.newBuilder() to construct. private MediaFileErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MediaFileErrorEnum() { } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new MediaFileErrorEnum(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MediaFileErrorEnum( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); 
makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v10.errors.MediaFileErrorProto.internal_static_google_ads_googleads_v10_errors_MediaFileErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v10.errors.MediaFileErrorProto.internal_static_google_ads_googleads_v10_errors_MediaFileErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v10.errors.MediaFileErrorEnum.class, com.google.ads.googleads.v10.errors.MediaFileErrorEnum.Builder.class); } /** * <pre> * Enum describing possible media file errors. * </pre> * * Protobuf enum {@code google.ads.googleads.v10.errors.MediaFileErrorEnum.MediaFileError} */ public enum MediaFileError implements com.google.protobuf.ProtocolMessageEnum { /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ UNSPECIFIED(0), /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ UNKNOWN(1), /** * <pre> * Cannot create a standard icon type. * </pre> * * <code>CANNOT_CREATE_STANDARD_ICON = 2;</code> */ CANNOT_CREATE_STANDARD_ICON(2), /** * <pre> * May only select Standard Icons alone. * </pre> * * <code>CANNOT_SELECT_STANDARD_ICON_WITH_OTHER_TYPES = 3;</code> */ CANNOT_SELECT_STANDARD_ICON_WITH_OTHER_TYPES(3), /** * <pre> * Image contains both a media file ID and data. * </pre> * * <code>CANNOT_SPECIFY_MEDIA_FILE_ID_AND_DATA = 4;</code> */ CANNOT_SPECIFY_MEDIA_FILE_ID_AND_DATA(4), /** * <pre> * A media file with given type and reference ID already exists. * </pre> * * <code>DUPLICATE_MEDIA = 5;</code> */ DUPLICATE_MEDIA(5), /** * <pre> * A required field was not specified or is an empty string. * </pre> * * <code>EMPTY_FIELD = 6;</code> */ EMPTY_FIELD(6), /** * <pre> * A media file may only be modified once per call. 
* </pre> * * <code>RESOURCE_REFERENCED_IN_MULTIPLE_OPS = 7;</code> */ RESOURCE_REFERENCED_IN_MULTIPLE_OPS(7), /** * <pre> * Field is not supported for the media sub type. * </pre> * * <code>FIELD_NOT_SUPPORTED_FOR_MEDIA_SUB_TYPE = 8;</code> */ FIELD_NOT_SUPPORTED_FOR_MEDIA_SUB_TYPE(8), /** * <pre> * The media file ID is invalid. * </pre> * * <code>INVALID_MEDIA_FILE_ID = 9;</code> */ INVALID_MEDIA_FILE_ID(9), /** * <pre> * The media subtype is invalid. * </pre> * * <code>INVALID_MEDIA_SUB_TYPE = 10;</code> */ INVALID_MEDIA_SUB_TYPE(10), /** * <pre> * The media file type is invalid. * </pre> * * <code>INVALID_MEDIA_FILE_TYPE = 11;</code> */ INVALID_MEDIA_FILE_TYPE(11), /** * <pre> * The mimetype is invalid. * </pre> * * <code>INVALID_MIME_TYPE = 12;</code> */ INVALID_MIME_TYPE(12), /** * <pre> * The media reference ID is invalid. * </pre> * * <code>INVALID_REFERENCE_ID = 13;</code> */ INVALID_REFERENCE_ID(13), /** * <pre> * The YouTube video ID is invalid. * </pre> * * <code>INVALID_YOU_TUBE_ID = 14;</code> */ INVALID_YOU_TUBE_ID(14), /** * <pre> * Media file has failed transcoding * </pre> * * <code>MEDIA_FILE_FAILED_TRANSCODING = 15;</code> */ MEDIA_FILE_FAILED_TRANSCODING(15), /** * <pre> * Media file has not been transcoded. * </pre> * * <code>MEDIA_NOT_TRANSCODED = 16;</code> */ MEDIA_NOT_TRANSCODED(16), /** * <pre> * The media type does not match the actual media file's type. * </pre> * * <code>MEDIA_TYPE_DOES_NOT_MATCH_MEDIA_FILE_TYPE = 17;</code> */ MEDIA_TYPE_DOES_NOT_MATCH_MEDIA_FILE_TYPE(17), /** * <pre> * None of the fields have been specified. * </pre> * * <code>NO_FIELDS_SPECIFIED = 18;</code> */ NO_FIELDS_SPECIFIED(18), /** * <pre> * One of reference ID or media file ID must be specified. * </pre> * * <code>NULL_REFERENCE_ID_AND_MEDIA_ID = 19;</code> */ NULL_REFERENCE_ID_AND_MEDIA_ID(19), /** * <pre> * The string has too many characters. * </pre> * * <code>TOO_LONG = 20;</code> */ TOO_LONG(20), /** * <pre> * The specified type is not supported. 
* </pre> * * <code>UNSUPPORTED_TYPE = 21;</code> */ UNSUPPORTED_TYPE(21), /** * <pre> * YouTube is unavailable for requesting video data. * </pre> * * <code>YOU_TUBE_SERVICE_UNAVAILABLE = 22;</code> */ YOU_TUBE_SERVICE_UNAVAILABLE(22), /** * <pre> * The YouTube video has a non positive duration. * </pre> * * <code>YOU_TUBE_VIDEO_HAS_NON_POSITIVE_DURATION = 23;</code> */ YOU_TUBE_VIDEO_HAS_NON_POSITIVE_DURATION(23), /** * <pre> * The YouTube video ID is syntactically valid but the video was not found. * </pre> * * <code>YOU_TUBE_VIDEO_NOT_FOUND = 24;</code> */ YOU_TUBE_VIDEO_NOT_FOUND(24), UNRECOGNIZED(-1), ; /** * <pre> * Enum unspecified. * </pre> * * <code>UNSPECIFIED = 0;</code> */ public static final int UNSPECIFIED_VALUE = 0; /** * <pre> * The received error code is not known in this version. * </pre> * * <code>UNKNOWN = 1;</code> */ public static final int UNKNOWN_VALUE = 1; /** * <pre> * Cannot create a standard icon type. * </pre> * * <code>CANNOT_CREATE_STANDARD_ICON = 2;</code> */ public static final int CANNOT_CREATE_STANDARD_ICON_VALUE = 2; /** * <pre> * May only select Standard Icons alone. * </pre> * * <code>CANNOT_SELECT_STANDARD_ICON_WITH_OTHER_TYPES = 3;</code> */ public static final int CANNOT_SELECT_STANDARD_ICON_WITH_OTHER_TYPES_VALUE = 3; /** * <pre> * Image contains both a media file ID and data. * </pre> * * <code>CANNOT_SPECIFY_MEDIA_FILE_ID_AND_DATA = 4;</code> */ public static final int CANNOT_SPECIFY_MEDIA_FILE_ID_AND_DATA_VALUE = 4; /** * <pre> * A media file with given type and reference ID already exists. * </pre> * * <code>DUPLICATE_MEDIA = 5;</code> */ public static final int DUPLICATE_MEDIA_VALUE = 5; /** * <pre> * A required field was not specified or is an empty string. * </pre> * * <code>EMPTY_FIELD = 6;</code> */ public static final int EMPTY_FIELD_VALUE = 6; /** * <pre> * A media file may only be modified once per call. 
* </pre> * * <code>RESOURCE_REFERENCED_IN_MULTIPLE_OPS = 7;</code> */ public static final int RESOURCE_REFERENCED_IN_MULTIPLE_OPS_VALUE = 7; /** * <pre> * Field is not supported for the media sub type. * </pre> * * <code>FIELD_NOT_SUPPORTED_FOR_MEDIA_SUB_TYPE = 8;</code> */ public static final int FIELD_NOT_SUPPORTED_FOR_MEDIA_SUB_TYPE_VALUE = 8; /** * <pre> * The media file ID is invalid. * </pre> * * <code>INVALID_MEDIA_FILE_ID = 9;</code> */ public static final int INVALID_MEDIA_FILE_ID_VALUE = 9; /** * <pre> * The media subtype is invalid. * </pre> * * <code>INVALID_MEDIA_SUB_TYPE = 10;</code> */ public static final int INVALID_MEDIA_SUB_TYPE_VALUE = 10; /** * <pre> * The media file type is invalid. * </pre> * * <code>INVALID_MEDIA_FILE_TYPE = 11;</code> */ public static final int INVALID_MEDIA_FILE_TYPE_VALUE = 11; /** * <pre> * The mimetype is invalid. * </pre> * * <code>INVALID_MIME_TYPE = 12;</code> */ public static final int INVALID_MIME_TYPE_VALUE = 12; /** * <pre> * The media reference ID is invalid. * </pre> * * <code>INVALID_REFERENCE_ID = 13;</code> */ public static final int INVALID_REFERENCE_ID_VALUE = 13; /** * <pre> * The YouTube video ID is invalid. * </pre> * * <code>INVALID_YOU_TUBE_ID = 14;</code> */ public static final int INVALID_YOU_TUBE_ID_VALUE = 14; /** * <pre> * Media file has failed transcoding * </pre> * * <code>MEDIA_FILE_FAILED_TRANSCODING = 15;</code> */ public static final int MEDIA_FILE_FAILED_TRANSCODING_VALUE = 15; /** * <pre> * Media file has not been transcoded. * </pre> * * <code>MEDIA_NOT_TRANSCODED = 16;</code> */ public static final int MEDIA_NOT_TRANSCODED_VALUE = 16; /** * <pre> * The media type does not match the actual media file's type. * </pre> * * <code>MEDIA_TYPE_DOES_NOT_MATCH_MEDIA_FILE_TYPE = 17;</code> */ public static final int MEDIA_TYPE_DOES_NOT_MATCH_MEDIA_FILE_TYPE_VALUE = 17; /** * <pre> * None of the fields have been specified. 
* </pre> * * <code>NO_FIELDS_SPECIFIED = 18;</code> */ public static final int NO_FIELDS_SPECIFIED_VALUE = 18; /** * <pre> * One of reference ID or media file ID must be specified. * </pre> * * <code>NULL_REFERENCE_ID_AND_MEDIA_ID = 19;</code> */ public static final int NULL_REFERENCE_ID_AND_MEDIA_ID_VALUE = 19; /** * <pre> * The string has too many characters. * </pre> * * <code>TOO_LONG = 20;</code> */ public static final int TOO_LONG_VALUE = 20; /** * <pre> * The specified type is not supported. * </pre> * * <code>UNSUPPORTED_TYPE = 21;</code> */ public static final int UNSUPPORTED_TYPE_VALUE = 21; /** * <pre> * YouTube is unavailable for requesting video data. * </pre> * * <code>YOU_TUBE_SERVICE_UNAVAILABLE = 22;</code> */ public static final int YOU_TUBE_SERVICE_UNAVAILABLE_VALUE = 22; /** * <pre> * The YouTube video has a non positive duration. * </pre> * * <code>YOU_TUBE_VIDEO_HAS_NON_POSITIVE_DURATION = 23;</code> */ public static final int YOU_TUBE_VIDEO_HAS_NON_POSITIVE_DURATION_VALUE = 23; /** * <pre> * The YouTube video ID is syntactically valid but the video was not found. * </pre> * * <code>YOU_TUBE_VIDEO_NOT_FOUND = 24;</code> */ public static final int YOU_TUBE_VIDEO_NOT_FOUND_VALUE = 24; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static MediaFileError valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static MediaFileError forNumber(int value) { switch (value) { case 0: return UNSPECIFIED; case 1: return UNKNOWN; case 2: return CANNOT_CREATE_STANDARD_ICON; case 3: return CANNOT_SELECT_STANDARD_ICON_WITH_OTHER_TYPES; case 4: return CANNOT_SPECIFY_MEDIA_FILE_ID_AND_DATA; case 5: return DUPLICATE_MEDIA; case 6: return EMPTY_FIELD; case 7: return RESOURCE_REFERENCED_IN_MULTIPLE_OPS; case 8: return FIELD_NOT_SUPPORTED_FOR_MEDIA_SUB_TYPE; case 9: return INVALID_MEDIA_FILE_ID; case 10: return INVALID_MEDIA_SUB_TYPE; case 11: return INVALID_MEDIA_FILE_TYPE; case 12: return INVALID_MIME_TYPE; case 13: return INVALID_REFERENCE_ID; case 14: return INVALID_YOU_TUBE_ID; case 15: return MEDIA_FILE_FAILED_TRANSCODING; case 16: return MEDIA_NOT_TRANSCODED; case 17: return MEDIA_TYPE_DOES_NOT_MATCH_MEDIA_FILE_TYPE; case 18: return NO_FIELDS_SPECIFIED; case 19: return NULL_REFERENCE_ID_AND_MEDIA_ID; case 20: return TOO_LONG; case 21: return UNSUPPORTED_TYPE; case 22: return YOU_TUBE_SERVICE_UNAVAILABLE; case 23: return YOU_TUBE_VIDEO_HAS_NON_POSITIVE_DURATION; case 24: return YOU_TUBE_VIDEO_NOT_FOUND; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<MediaFileError> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap< MediaFileError> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<MediaFileError>() { public MediaFileError findValueByNumber(int number) { return MediaFileError.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { 
return com.google.ads.googleads.v10.errors.MediaFileErrorEnum.getDescriptor().getEnumTypes().get(0); } private static final MediaFileError[] VALUES = values(); public static MediaFileError valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException( "EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private MediaFileError(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.ads.googleads.v10.errors.MediaFileErrorEnum.MediaFileError) } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v10.errors.MediaFileErrorEnum)) { return super.equals(obj); } com.google.ads.googleads.v10.errors.MediaFileErrorEnum other = (com.google.ads.googleads.v10.errors.MediaFileErrorEnum) obj; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static 
com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static 
com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v10.errors.MediaFileErrorEnum parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v10.errors.MediaFileErrorEnum prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Container for enum describing possible media file errors. 
* </pre> * * Protobuf type {@code google.ads.googleads.v10.errors.MediaFileErrorEnum} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.errors.MediaFileErrorEnum) com.google.ads.googleads.v10.errors.MediaFileErrorEnumOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v10.errors.MediaFileErrorProto.internal_static_google_ads_googleads_v10_errors_MediaFileErrorEnum_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v10.errors.MediaFileErrorProto.internal_static_google_ads_googleads_v10_errors_MediaFileErrorEnum_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v10.errors.MediaFileErrorEnum.class, com.google.ads.googleads.v10.errors.MediaFileErrorEnum.Builder.class); } // Construct using com.google.ads.googleads.v10.errors.MediaFileErrorEnum.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @java.lang.Override public Builder clear() { super.clear(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.ads.googleads.v10.errors.MediaFileErrorProto.internal_static_google_ads_googleads_v10_errors_MediaFileErrorEnum_descriptor; } @java.lang.Override public com.google.ads.googleads.v10.errors.MediaFileErrorEnum getDefaultInstanceForType() { return com.google.ads.googleads.v10.errors.MediaFileErrorEnum.getDefaultInstance(); } @java.lang.Override public 
com.google.ads.googleads.v10.errors.MediaFileErrorEnum build() { com.google.ads.googleads.v10.errors.MediaFileErrorEnum result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v10.errors.MediaFileErrorEnum buildPartial() { com.google.ads.googleads.v10.errors.MediaFileErrorEnum result = new com.google.ads.googleads.v10.errors.MediaFileErrorEnum(this); onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v10.errors.MediaFileErrorEnum) { return mergeFrom((com.google.ads.googleads.v10.errors.MediaFileErrorEnum)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v10.errors.MediaFileErrorEnum other) { if (other == com.google.ads.googleads.v10.errors.MediaFileErrorEnum.getDefaultInstance()) return this; this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean 
isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.ads.googleads.v10.errors.MediaFileErrorEnum parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.ads.googleads.v10.errors.MediaFileErrorEnum) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.errors.MediaFileErrorEnum) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v10.errors.MediaFileErrorEnum) private static final com.google.ads.googleads.v10.errors.MediaFileErrorEnum DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v10.errors.MediaFileErrorEnum(); } public static com.google.ads.googleads.v10.errors.MediaFileErrorEnum getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<MediaFileErrorEnum> PARSER = new com.google.protobuf.AbstractParser<MediaFileErrorEnum>() { @java.lang.Override public MediaFileErrorEnum parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new MediaFileErrorEnum(input, extensionRegistry); } }; public static com.google.protobuf.Parser<MediaFileErrorEnum> parser() { return PARSER; } 
@java.lang.Override public com.google.protobuf.Parser<MediaFileErrorEnum> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v10.errors.MediaFileErrorEnum getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): JUnit 4 tests for tat.filter.Condition — each test checks one of the
// isDiffSet / isRangeSet / isDurationSet / isRankSet predicates: false on a fresh Condition,
// still false after attaching an "empty" criterion object, true once the criterion is
// actually configured. The fixture is 33 daily OHLC bars for symbol "AQC".
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package tat.filter; import java.util.Calendar; import java.util.Collections; import java.util.GregorianCalendar; import java.util.concurrent.ConcurrentSkipListSet; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import static org.junit.Assert.*; import tat.data.ChartType; import tat.data.OHLC; import tat.indicator.CalculationBase; import tat.indicator.Diff; import tat.indicator.Duration; import tat.indicator.Logic; import tat.indicator.LogicType; import tat.indicator.Order; import tat.indicator.Range; import tat.indicator.Rank; import tat.indicator.SMA; /** * * @author Thomas */
// NOTE(review): `rawData` is never read by any visible test and is built with the raw type
// `new ConcurrentSkipListSet()` — it should be diamond-typed or removed. The day1..day33
// calendars model consecutive trading days (weekends/holidays skipped); GregorianCalendar
// months are 0-based, so 11 = December 2009, 0 = January 2010, 1 = February 2010.
public class ConditionTest { private ConcurrentSkipListSet<OHLC> rawData = new ConcurrentSkipListSet(); private ConcurrentSkipListSet<OHLC> aList; private ObservableList<OHLC> bList = FXCollections.observableArrayList(); private Calendar day1 = new GregorianCalendar(2009, 11, 14, 0, 0, 0); private Calendar day2 = new GregorianCalendar(2009, 11, 15, 0, 0, 0); private Calendar day3 = new GregorianCalendar(2009, 11, 16, 0, 0, 0); private Calendar day4 = new GregorianCalendar(2009, 11, 17, 0, 0, 0); private Calendar day5 = new GregorianCalendar(2009, 11, 18, 0, 0, 0); private Calendar day6 = new GregorianCalendar(2009, 11, 21, 0, 0, 0); private Calendar day7 = new GregorianCalendar(2009, 11, 22, 0, 0, 0); private Calendar day8 = new GregorianCalendar(2009, 11, 23, 0, 0, 0); private Calendar day9 = new GregorianCalendar(2009, 11, 24, 0, 0, 0); private Calendar day10 = new GregorianCalendar(2009, 11, 28, 0, 0, 0); private Calendar day11 = new GregorianCalendar(2009, 11, 29, 0, 0, 0); private Calendar day12 = new GregorianCalendar(2009, 11, 30, 0, 0, 0);
private Calendar day13 = new GregorianCalendar(2009, 11, 31, 0, 0, 0); private Calendar day14 = new GregorianCalendar(2010, 0, 4, 0, 0, 0); private Calendar day15 = new GregorianCalendar(2010, 0, 5, 0, 0, 0); private Calendar day16 = new GregorianCalendar(2010, 0, 6, 0, 0, 0); private Calendar day17 = new GregorianCalendar(2010, 0, 7, 0, 0, 0); private Calendar day18 = new GregorianCalendar(2010, 0, 8, 0, 0, 0); private Calendar day19 = new GregorianCalendar(2010, 0, 11, 0, 0, 0); private Calendar day20 = new GregorianCalendar(2010, 0, 12, 0, 0, 0); private Calendar day21 = new GregorianCalendar(2010, 0, 13, 0, 0, 0); private Calendar day22 = new GregorianCalendar(2010, 0, 14, 0, 0, 0); private Calendar day23 = new GregorianCalendar(2010, 0, 15, 0, 0, 0); private Calendar day24 = new GregorianCalendar(2010, 0, 19, 0, 0, 0); private Calendar day25 = new GregorianCalendar(2010, 0, 20, 0, 0, 0); private Calendar day26 = new GregorianCalendar(2010, 0, 21, 0, 0, 0); private Calendar day27 = new GregorianCalendar(2010, 0, 22, 0, 0, 0); private Calendar day28 = new GregorianCalendar(2010, 0, 25, 0, 0, 0); private Calendar day29 = new GregorianCalendar(2010, 0, 26, 0, 0, 0); private Calendar day30 = new GregorianCalendar(2010, 0, 27, 0, 0, 0); private Calendar day31 = new GregorianCalendar(2010, 0, 28, 0, 0, 0); private Calendar day32 = new GregorianCalendar(2010, 0, 29, 0, 0, 0); private Calendar day33 = new GregorianCalendar(2010, 1, 1, 0, 0, 0); public ConditionTest() { } @BeforeClass public static void setUpClass() { } @AfterClass public static void tearDownClass() { }
// Populates bList with 33 daily OHLC bars (open/high/low fixed at 1f; close and volume vary),
// sorts them by date, then snapshots them into the concurrent set `aList` that the indicator
// calculations consume. Runs before every test, so each test gets a fresh fixture.
@Before public void setUp() { //Setup testing data
OHLC aqcOhlc1 = new OHLC("AQC", day1, 1f, 1f, 1f, 1.075f, 100000); OHLC aqcOhlc2 = new OHLC("AQC", day2, 1f, 1f, 1f, 1.08f, 200000); OHLC aqcOhlc3 = new OHLC("AQC", day3, 1f, 1f, 1f, 1.07f, 300000); OHLC aqcOhlc4 = new OHLC("AQC", day4, 1f, 1f, 1f, 1.05f, 400000); OHLC aqcOhlc5 = new OHLC("AQC", day5, 1f, 1f, 1f, 1.045f, 500000); OHLC aqcOhlc6 = new
OHLC("AQC", day6, 1f, 1f, 1f, 1.035f, 600000); OHLC aqcOhlc7 = new OHLC("AQC", day7, 1f, 1f, 1f, 1.025f, 700000); OHLC aqcOhlc8 = new OHLC("AQC", day8, 1f, 1f, 1f, 1.02f, 800000); OHLC aqcOhlc9 = new OHLC("AQC", day9, 1f, 1f, 1f, 1.025f, 900000); OHLC aqcOhlc10 = new OHLC("AQC", day10, 1f, 1f, 1f, 1.015f, 1000000); OHLC aqcOhlc11 = new OHLC("AQC", day11, 1f, 1f, 1f, 1.00f, 1100000); OHLC aqcOhlc12 = new OHLC("AQC", day12, 1f, 1f, 1f, 1.02f, 1200000); OHLC aqcOhlc13 = new OHLC("AQC", day13, 1f, 1f, 1f, 1.055f, 100000); OHLC aqcOhlc14 = new OHLC("AQC", day14, 1f, 1f, 1f, 1.035f, 100000); OHLC aqcOhlc15 = new OHLC("AQC", day15, 1f, 1f, 1f, 1.048f, 100000); OHLC aqcOhlc16 = new OHLC("AQC", day16, 1f, 1f, 1f, 1.025f, 100000); OHLC aqcOhlc17 = new OHLC("AQC", day17, 1f, 1f, 1f, 1.015f, 100000); OHLC aqcOhlc18 = new OHLC("AQC", day18, 1f, 1f, 1f, 1.01f, 100000); OHLC aqcOhlc19 = new OHLC("AQC", day19, 1f, 1f, 1f, 1.01f, 100000); OHLC aqcOhlc20 = new OHLC("AQC", day20, 1f, 1f, 1f, 2.00f, 100000); OHLC aqcOhlc21 = new OHLC("AQC", day21, 1f, 1f, 1f, 0.98f, 100000); OHLC aqcOhlc22 = new OHLC("AQC", day22, 1f, 1f, 1f, 0.995f, 100000); OHLC aqcOhlc23 = new OHLC("AQC", day23, 1f, 1f, 1f, 0.98f, 100000); OHLC aqcOhlc24 = new OHLC("AQC", day24, 1f, 1f, 1f, 0.975f, 100000); OHLC aqcOhlc25 = new OHLC("AQC", day25, 1f, 1f, 1f, 0.95f, 100000); OHLC aqcOhlc26 = new OHLC("AQC", day26, 1f, 1f, 1f, 0.95f, 100000); OHLC aqcOhlc27 = new OHLC("AQC", day27, 1f, 1f, 1f, 1.01f, 100000); OHLC aqcOhlc28 = new OHLC("AQC", day28, 1f, 1f, 1f, 1.005f, 100000); OHLC aqcOhlc29 = new OHLC("AQC", day29, 1f, 1f, 1f, 1.025f, 100000); OHLC aqcOhlc30 = new OHLC("AQC", day30, 1f, 1f, 1f, 1.065f, 100000); OHLC aqcOhlc31 = new OHLC("AQC", day31, 1f, 1f, 1f, 1.075f, 100000); OHLC aqcOhlc32 = new OHLC("AQC", day32, 1f, 1f, 1f, 1.085f, 900000); OHLC aqcOhlc33 = new OHLC("AQC", day33, 1f, 1f, 1f, 1.105f, 200000); bList.add(aqcOhlc1); bList.add(aqcOhlc2); bList.add(aqcOhlc3); bList.add(aqcOhlc4);
// (remaining bars added in order, then sorted by date and copied into `aList`)
bList.add(aqcOhlc5); bList.add(aqcOhlc6); bList.add(aqcOhlc7); bList.add(aqcOhlc8); bList.add(aqcOhlc9); bList.add(aqcOhlc10); bList.add(aqcOhlc11); bList.add(aqcOhlc12); bList.add(aqcOhlc13); bList.add(aqcOhlc14); bList.add(aqcOhlc15); bList.add(aqcOhlc16); bList.add(aqcOhlc17); bList.add(aqcOhlc18); bList.add(aqcOhlc19); bList.add(aqcOhlc20); bList.add(aqcOhlc21); bList.add(aqcOhlc22); bList.add(aqcOhlc23); bList.add(aqcOhlc24); bList.add(aqcOhlc25); bList.add(aqcOhlc26); bList.add(aqcOhlc27); bList.add(aqcOhlc28); bList.add(aqcOhlc29); bList.add(aqcOhlc30); bList.add(aqcOhlc31); bList.add(aqcOhlc32); bList.add(aqcOhlc33); Collections.sort(bList, OHLC.dateComparator); aList = new ConcurrentSkipListSet<>(bList); } @After public void tearDown() { }
// isDiffSet: false on a fresh Condition; true once a Diff with both SMA sides calculated
// over the fixture is attached via diffProperty.
/** * Test of isDiffSet method, of class Condition. */ @Test public void testIsDiffSet() { System.out.println("isDiffSet"); Condition instance = new Condition(); boolean expResult = false; boolean result = instance.isDiffSet(); assertEquals(expResult, result); Diff diff = new Diff("UUID_xxx"); SMA smaL = new SMA(10, CalculationBase.CLOSE, "U1"); SMA smaR = new SMA(20, CalculationBase.CLOSE, "U2"); smaL.doCalculation(aList); smaR.doCalculation(aList); diff.setLHS(smaL); diff.setRHS(smaR); instance.diffProperty.setValue(diff); assertTrue(instance.isDiffSet()); }
// isRangeSet: attaching an empty Range is not enough — it only counts once a bound
// (GREATER_THAN 100) has been added.
/** * Test of isRangeSet method, of class Condition. */ @Test public void testIsRangeSet() { System.out.println("isRangeSet"); Condition instance = new Condition(); boolean result = instance.isRangeSet(); assertEquals(false, result); Range range = new Range("UUID"); instance.rangeProperty.setValue(range); assertEquals(false, instance.isRangeSet()); range.addRange(new Logic(LogicType.GREATER_THAN), 100); assertEquals(true, instance.isRangeSet()); } /** * Test of isDurationSet method, of class Condition.
*/ @Test public void testIsDurationSet() { System.out.println("isDurationSet"); Condition instance = new Condition(); boolean result = instance.isDurationSet(); assertEquals(false, result); Duration dur = new Duration(); instance.durationProperty.setValue(dur); assertEquals(false, instance.isDurationSet()); Duration duration = new Duration(ChartType.DAY, 10); instance.durationProperty.setValue(duration); assertEquals(true, instance.isDurationSet()); }
// isRankSet: a default Rank does not count; it only counts once an Order has been set.
/** * Test of isRankSet method, of class Condition. */ @Test public void testIsRankSet() { System.out.println("isRankSet"); Condition instance = new Condition(); assertEquals(false, instance.isRankSet()); Rank rank = new Rank(); instance.rankProperty.setValue(rank); assertEquals(false, instance.isRankSet()); rank.setOrder(Order.ASCENDING); assertEquals(true, instance.isRankSet()); } }
package com.reactnativenavigation.screens;

import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.RelativeLayout;
import android.widget.RelativeLayout.LayoutParams;

import com.reactnativenavigation.NavigationApplication;
import com.reactnativenavigation.params.FabParams;
import com.reactnativenavigation.params.ScreenParams;
import com.reactnativenavigation.params.StyleParams;
import com.reactnativenavigation.params.TitleBarButtonParams;
import com.reactnativenavigation.params.TitleBarLeftButtonParams;
import com.reactnativenavigation.utils.KeyboardVisibilityDetector;
import com.reactnativenavigation.utils.Task;
import com.reactnativenavigation.views.LeftButtonOnClickListener;
import com.reactnativenavigation.views.MenuButtonOnClickListener;

import java.util.List;
import java.util.Stack;

/**
 * A stack of {@link Screen}s hosted inside a single parent layout. Manages push/pop
 * transitions (including shared-element transitions), keyboard dismissal before pops,
 * willAppear/didAppear/willDisappear/didDisappear event emission to JS, and per-screen
 * style/title-bar mutations addressed by screen instance id.
 *
 * Threading: all methods are expected to run on the Android main thread (view
 * mutations throughout) — no internal synchronization.
 */
public class ScreenStack {

    private static final String TAG = "ScreenStack";

    /** Callback fired when a pop completes (see NOTE(review) on swapScreens). */
    public interface OnScreenPop {
        void onScreenPopAnimationEnd();
    }

    private final AppCompatActivity activity;
    private RelativeLayout parent;
    private LeftButtonOnClickListener leftButtonOnClickListener;
    private MenuButtonOnClickListener rightButtonsClickListener;
    // Bottom of the stack is index 0; top is peek().
    private Stack<Screen> stack = new Stack<>();
    private boolean disableBackNavigation = false;
    private final KeyboardVisibilityDetector keyboardVisibilityDetector;
    // True while this stack is the one currently shown to the user; gates event emission.
    private boolean isStackVisible = false;
    private final String navigatorId;

    public String getNavigatorId() {
        return navigatorId;
    }

    public ScreenStack(AppCompatActivity activity,
                       RelativeLayout parent,
                       String navigatorId,
                       LeftButtonOnClickListener leftButtonOnClickListener,
                       MenuButtonOnClickListener rightButtonsClickListener) {
        this.activity = activity;
        this.parent = parent;
        this.navigatorId = navigatorId;
        this.leftButtonOnClickListener = leftButtonOnClickListener;
        this.rightButtonsClickListener = rightButtonsClickListener;
        keyboardVisibilityDetector = new KeyboardVisibilityDetector(parent);
    }

    /**
     * Replaces the whole stack with a single new screen. The previous screens are
     * removed only after the new screen is displayed (when visible) so the user
     * never sees an empty parent.
     * NOTE(review): stack.peek() throws EmptyStackException if called on an empty
     * stack — callers apparently guarantee at least one screen is present; confirm.
     */
    public void newStack(final ScreenParams params, LayoutParams layoutParams) {
        final Screen nextScreen = ScreenFactory.create(activity, params, leftButtonOnClickListener, rightButtonsClickListener);
        final Screen previousScreen = stack.peek();
        if (isStackVisible) {
            pushScreenToVisibleStack(layoutParams, nextScreen, previousScreen, new Screen.OnDisplayListener() {
                @Override
                public void onDisplay() {
                    removeElementsBelowTop();
                }
            });
        } else {
            pushScreenToInvisibleStack(layoutParams, nextScreen, previousScreen);
            removeElementsBelowTop();
        }
    }

    /** Destroys and removes every screen except the current top. */
    private void removeElementsBelowTop() {
        while (stack.size() > 1) {
            Screen screen = stack.get(0);
            parent.removeView(screen);
            screen.destroy();
            stack.remove(0);
        }
    }

    /** Pushes the first screen and animates it in once it has been laid out. */
    public void pushInitialScreenWithAnimation(final ScreenParams initialScreenParams, LayoutParams params) {
        isStackVisible = true;
        pushInitialScreen(initialScreenParams, params);
        final Screen screen = stack.peek();
        screen.setOnDisplayListener(new Screen.OnDisplayListener() {
            @Override
            public void onDisplay() {
                screen.show(initialScreenParams.animateScreenTransitions);
                screen.setStyle();
            }
        });
    }

    /**
     * Pushes the first screen invisibly; appear events are emitted on first display
     * only if this stack is the visible one.
     */
    public void pushInitialScreen(ScreenParams initialScreenParams, LayoutParams params) {
        Screen initialScreen = ScreenFactory.create(activity, initialScreenParams, leftButtonOnClickListener, rightButtonsClickListener);
        initialScreen.setVisibility(View.INVISIBLE);
        initialScreen.setOnDisplayListener(new Screen.OnDisplayListener() {
            @Override
            public void onDisplay() {
                if (isStackVisible) {
                    NavigationApplication.instance.getEventEmitter().sendScreenChangedEvent("willAppear", stack.peek().getNavigatorEventId());
                    NavigationApplication.instance.getEventEmitter().sendScreenChangedEvent("didAppear", stack.peek().getNavigatorEventId());
                }
            }
        });
        addScreen(initialScreen, params);
    }

    /**
     * Pushes a new screen on top. Chooses the plain, shared-element, or invisible
     * push path depending on stack visibility and the screen's transition params.
     * NOTE(review): like newStack, assumes the stack is non-empty (peek()).
     */
    public void push(final ScreenParams params, LayoutParams layoutParams) {
        Screen nextScreen = ScreenFactory.create(activity, params, leftButtonOnClickListener, rightButtonsClickListener);
        final Screen previousScreen = stack.peek();
        if (isStackVisible) {
            if (nextScreen.screenParams.sharedElementsTransitions.isEmpty()) {
                pushScreenToVisibleStack(layoutParams, nextScreen, previousScreen);
            } else {
                pushScreenToVisibleStackWithSharedElementTransition(layoutParams, nextScreen, previousScreen);
            }
        } else {
            pushScreenToInvisibleStack(layoutParams, nextScreen, previousScreen);
        }
    }

    private void pushScreenToVisibleStack(LayoutParams layoutParams, final Screen nextScreen, final Screen previousScreen) {
        pushScreenToVisibleStack(layoutParams, nextScreen, previousScreen, null);
    }

    /**
     * Visible-stack push: add the new screen invisibly, emit willDisappear for the
     * old top, then — once the new screen has actually been displayed — animate it
     * in, emit didDisappear, and detach the old top's view (it stays on the stack
     * for back navigation).
     */
    private void pushScreenToVisibleStack(LayoutParams layoutParams,
                                          final Screen nextScreen,
                                          final Screen previousScreen,
                                          @Nullable final Screen.OnDisplayListener onDisplay) {
        nextScreen.setVisibility(View.INVISIBLE);
        addScreen(nextScreen, layoutParams);
        NavigationApplication.instance.getEventEmitter().sendScreenChangedEvent("willDisappear", previousScreen.getNavigatorEventId());
        nextScreen.setOnDisplayListener(new Screen.OnDisplayListener() {
            @Override
            public void onDisplay() {
                nextScreen.show(nextScreen.screenParams.animateScreenTransitions, new Runnable() {
                    @Override
                    public void run() {
                        if (onDisplay != null) onDisplay.onDisplay();
                        NavigationApplication.instance.getEventEmitter().sendScreenChangedEvent("didDisappear", previousScreen.getNavigatorEventId());
                        parent.removeView(previousScreen);
                    }
                });
            }
        });
    }

    /** Same as the visible push but drives a shared-element transition instead. */
    private void pushScreenToVisibleStackWithSharedElementTransition(LayoutParams layoutParams,
                                                                     final Screen nextScreen,
                                                                     final Screen previousScreen) {
        nextScreen.setVisibility(View.INVISIBLE);
        nextScreen.setOnDisplayListener(new Screen.OnDisplayListener() {
            @Override
            public void onDisplay() {
                nextScreen.showWithSharedElementsTransitions(previousScreen.sharedElements.getToElements(), new Runnable() {
                    @Override
                    public void run() {
                        parent.removeView(previousScreen);
                    }
                });
            }
        });
        addScreen(nextScreen, layoutParams);
    }

    /** Invisible-stack push: no events, no animation — just swap the attached view. */
    private void pushScreenToInvisibleStack(LayoutParams layoutParams, Screen nextScreen, Screen previousScreen) {
        nextScreen.setVisibility(View.INVISIBLE);
        addScreen(nextScreen, layoutParams);
        parent.removeView(previousScreen);
    }

    private void addScreen(Screen screen, LayoutParams layoutParams) {
        parent.addView(screen, layoutParams);
        stack.push(screen);
    }

    public void pop(boolean animated) {
        pop(animated, null);
    }

    /**
     * Pops the top screen. If the soft keyboard is open it is closed first and the
     * pop is deferred until the close completes (prevents layout jumps).
     */
    public void pop(final boolean animated, @Nullable final OnScreenPop onScreenPop) {
        if (!canPop()) {
            return;
        }
        if (keyboardVisibilityDetector.isKeyboardVisible()) {
            keyboardVisibilityDetector.setKeyboardCloseListener(new Runnable() {
                @Override
                public void run() {
                    // One-shot: clear the listener before popping.
                    keyboardVisibilityDetector.setKeyboardCloseListener(null);
                    popInternal(animated, onScreenPop);
                }
            });
            keyboardVisibilityDetector.closeKeyboard();
        } else {
            popInternal(animated, onScreenPop);
        }
    }

    private void popInternal(final boolean animated, @Nullable final OnScreenPop onScreenPop) {
        final Screen toRemove = stack.pop();
        final Screen previous = stack.peek();
        swapScreens(animated, toRemove, previous, onScreenPop);
    }

    /**
     * Re-attaches the previous screen, restores its style, and hides the removed one.
     * NOTE(review): onScreenPopAnimationEnd() is invoked synchronously here — i.e.
     * before the hide animation actually ends, despite the callback's name. Confirm
     * whether callers rely on this ordering before changing it.
     */
    private void swapScreens(boolean animated, final Screen toRemove, Screen previous, OnScreenPop onScreenPop) {
        readdPrevious(previous);
        previous.setStyle();
        hideScreen(animated, toRemove, previous);
        if (onScreenPop != null) {
            onScreenPop.onScreenPopAnimationEnd();
        }
    }

    /** Hides (optionally animated) then destroys and detaches the popped screen. */
    private void hideScreen(boolean animated, final Screen toRemove, Screen previous) {
        Runnable onAnimationEnd = new Runnable() {
            @Override
            public void run() {
                toRemove.destroy();
                parent.removeView(toRemove);
            }
        };
        if (animated) {
            toRemove.animateHide(previous.sharedElements.getToElements(), onAnimationEnd);
        } else {
            toRemove.hide(previous.sharedElements.getToElements(), onAnimationEnd);
        }
    }

    public Screen peek() {
        return stack.peek();
    }

    public Screen bottom() {
        return stack.get(0);
    }

    public boolean empty() {
        return stack.empty();
    }

    /**
     * Re-attaches the screen being revealed by a pop, behind the outgoing screen
     * (index 0), and emits its appear events.
     */
    private void readdPrevious(Screen previous) {
        previous.setVisibility(View.VISIBLE);
        NavigationApplication.instance.getEventEmitter().sendScreenChangedEvent("willAppear", previous.getNavigatorEventId());
        NavigationApplication.instance.getEventEmitter().sendScreenChangedEvent("didAppear", previous.getNavigatorEventId());
        parent.addView(previous, 0);
    }

    /** Pops every screen above the root, closing the keyboard first if needed. */
    public void popToRoot(final boolean animated, @Nullable final OnScreenPop onScreenPop) {
        if (keyboardVisibilityDetector.isKeyboardVisible()) {
            keyboardVisibilityDetector.setKeyboardCloseListener(new Runnable() {
                @Override
                public void run() {
                    keyboardVisibilityDetector.setKeyboardCloseListener(null);
                    popToRootInternal(animated, onScreenPop);
                }
            });
            keyboardVisibilityDetector.closeKeyboard();
        } else {
            popToRootInternal(animated, onScreenPop);
        }
    }

    private void popToRootInternal(final boolean animated, @Nullable final OnScreenPop onScreenPop) {
        while (canPop()) {
            // Only the final pop (down to the root) reports back to the caller.
            if (stack.size() == 2) {
                popInternal(animated, onScreenPop);
            } else {
                popInternal(animated, null);
            }
        }
    }

    /** Destroys all screens and empties the stack. */
    public void destroy() {
        for (Screen screen : stack) {
            screen.destroy();
            parent.removeView(screen);
        }
        stack.clear();
    }

    public boolean canPop() {
        return stack.size() > 1 && !isPreviousScreenAttachedToWindow() && !disableBackNavigation;
    }

    // A pop mid-transition would double-attach the previous screen; refuse it.
    private boolean isPreviousScreenAttachedToWindow() {
        Screen previousScreen = stack.get(stack.size() - 2);
        if (previousScreen.getParent() != null) {
            Log.w(TAG, "Can't pop stack. reason: previous screen is already attached");
            return true;
        }
        return false;
    }

    public void setScreenTopBarVisible(String screenInstanceId, final boolean visible, final boolean animate) {
        performOnScreen(screenInstanceId, new Task<Screen>() {
            @Override
            public void run(Screen param) {
                param.setTopBarVisible(visible, animate);
            }
        });
    }

    public void setScreenTitleBarTitle(String screenInstanceId, final String title) {
        performOnScreen(screenInstanceId, new Task<Screen>() {
            @Override
            public void run(Screen param) {
                param.setTitleBarTitle(title);
            }
        });
    }

    public void setScreenTitleBarSubtitle(String screenInstanceId, final String subtitle) {
        performOnScreen(screenInstanceId, new Task<Screen>() {
            @Override
            public void run(Screen param) {
                param.setTitleBarSubtitle(subtitle);
            }
        });
    }

    public void setScreenTitleBarRightButtons(String screenInstanceId, final String navigatorEventId, final List<TitleBarButtonParams> titleBarButtons) {
        performOnScreen(screenInstanceId, new Task<Screen>() {
            @Override
            public void run(Screen param) {
                param.setTitleBarRightButtons(navigatorEventId, titleBarButtons);
            }
        });
    }

    public void setScreenTitleBarLeftButton(String screenInstanceId, final String navigatorEventId, final TitleBarLeftButtonParams titleBarLeftButtonParams) {
        performOnScreen(screenInstanceId, new Task<Screen>() {
            @Override
            public void run(Screen screen) {
                screen.setTitleBarLeftButton(navigatorEventId, leftButtonOnClickListener, titleBarLeftButtonParams);
            }
        });
    }

    public void setFab(String screenInstanceId, final FabParams fabParams) {
        performOnScreen(screenInstanceId, new Task<Screen>() {
            @Override
            public void run(Screen screen) {
                screen.setFab(fabParams);
            }
        });
    }

    /**
     * Enables/disables hardware-back pops and mirrors the choice into the top
     * screen's style (hides the back button when navigation is disabled).
     */
    public void setDisableBackNavigation(boolean disableBackNavigation) {
        this.disableBackNavigation = disableBackNavigation;
        StyleParams params = stack.peek().getStyleParams();
        params.backButtonHidden = disableBackNavigation;
        stack.peek().getTopBar().setStyle(params);
    }

    public boolean getDisableBackNavigation() {
        return this.disableBackNavigation;
    }

    public void updateScreenStyle(String screenInstanceId, final Bundle styleParams) {
        performOnScreen(screenInstanceId, new Task<Screen>() {
            @Override
            public void run(Screen screen) {
                if (isScreenVisible(screen)) {
                    screen.updateVisibleScreenStyle(styleParams);
                } else {
                    screen.updateInvisibleScreenStyle(styleParams);
                }
            }
        });
    }

    private boolean isScreenVisible(Screen screen) {
        return isStackVisible && peek() == screen;
    }

    public void selectTopTabByTabIndex(String screenInstanceId, final int index) {
        performOnScreen(screenInstanceId, new Task<Screen>() {
            @Override
            public void run(Screen screen) {
                if (screen.screenParams.hasTopTabs()) {
                    ((ViewPagerScreen) screen).selectTopTabByTabIndex(index);
                }
            }
        });
    }

    // NOTE(review): unlike selectTopTabByTabIndex, this cast is unguarded by
    // hasTopTabs() — a non-ViewPagerScreen match would throw ClassCastException.
    public void selectTopTabByScreen(final String screenInstanceId) {
        performOnScreen(screenInstanceId, new Task<Screen>() {
            @Override
            public void run(Screen screen) {
                ((ViewPagerScreen) screen).selectTopTabByTabByScreen(screenInstanceId);
            }
        });
    }

    public StyleParams getCurrentScreenStyleParams() {
        return stack.peek().getStyleParams();
    }

    /**
     * If the current screen overrides back-press in JS, emit the event there and
     * report true (the native back action is suppressed by the caller).
     */
    public boolean handleBackPressInJs() {
        ScreenParams currentScreen = stack.peek().screenParams;
        if (currentScreen.overrideBackPressInJs) {
            NavigationApplication.instance.getEventEmitter().sendNavigatorEvent("backPress", currentScreen.getNavigatorEventId());
            return true;
        }
        return false;
    }

    /** Runs the task on the first stack entry matching the instance id (if any). */
    private void performOnScreen(String screenInstanceId, Task<Screen> task) {
        if (stack.isEmpty()) {
            return;
        }
        for (Screen screen : stack) {
            if (screen.hasScreenInstance(screenInstanceId)) {
                task.run(screen);
                return;
            }
        }
    }

    /** Makes this stack the visible one and emits appear events for its top screen. */
    public void show() {
        isStackVisible = true;
        if (!stack.empty()) {
            stack.peek().setStyle();
            stack.peek().setVisibility(View.VISIBLE);
            NavigationApplication.instance.getEventEmitter().sendScreenChangedEvent("willAppear", stack.peek().getNavigatorEventId());
            NavigationApplication.instance.getEventEmitter().sendScreenChangedEvent("didAppear", stack.peek().getNavigatorEventId());
        }
    }

    /** Hides this stack and emits disappear events for its top screen. */
    public void hide() {
        if (!stack.empty()) {
            NavigationApplication.instance.getEventEmitter().sendScreenChangedEvent("willDisappear", stack.peek().getNavigatorEventId());
            NavigationApplication.instance.getEventEmitter().sendScreenChangedEvent("didDisappear", stack.peek().getNavigatorEventId());
            isStackVisible = false;
            stack.peek().setVisibility(View.INVISIBLE);
        }
    }

    public String rootScreenId() {
        return stack.get(0).screenParams.screenId;
    }
}
package com.eboji.persist.pojo;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * MyBatis-Generator "Example" (query-by-criteria) class for the gg_room table.
 * Builds WHERE clauses as OR-joined groups ({@link #or()}) of AND-joined
 * {@link Criterion} conditions. This class follows the standard generator
 * template — prefer regenerating over hand-editing the criterion builders.
 */
public class GgRoomExample {
    // Raw ORDER BY fragment appended to the generated query (caller-supplied; not escaped).
    protected String orderByClause;
    // Whether SELECT DISTINCT is emitted.
    protected boolean distinct;
    // Each Criteria is one OR-group; its criterion list is AND-joined.
    protected List<Criteria> oredCriteria;

    public GgRoomExample() {
        oredCriteria = new ArrayList<Criteria>();
    }

    public void setOrderByClause(String orderByClause) {
        this.orderByClause = orderByClause;
    }

    public String getOrderByClause() {
        return orderByClause;
    }

    public void setDistinct(boolean distinct) {
        this.distinct = distinct;
    }

    public boolean isDistinct() {
        return distinct;
    }

    public List<Criteria> getOredCriteria() {
        return oredCriteria;
    }

    /** Appends an externally built OR-group. */
    public void or(Criteria criteria) {
        oredCriteria.add(criteria);
    }

    /** Starts a new OR-group and returns it for chaining. */
    public Criteria or() {
        Criteria criteria = createCriteriaInternal();
        oredCriteria.add(criteria);
        return criteria;
    }

    /** Returns the first OR-group, creating and registering it only if none exists yet. */
    public Criteria createCriteria() {
        Criteria criteria = createCriteriaInternal();
        if (oredCriteria.size() == 0) {
            oredCriteria.add(criteria);
        }
        return criteria;
    }

    protected Criteria createCriteriaInternal() {
        Criteria criteria = new Criteria();
        return criteria;
    }

    /** Resets the example to its freshly constructed state for reuse. */
    public void clear() {
        oredCriteria.clear();
        orderByClause = null;
        distinct = false;
    }

    /**
     * Generated builder of AND-joined conditions. One method per (column, operator)
     * pair; each returns {@code (Criteria) this} for chaining. Columns covered:
     * id (Long), roomno/status/gameid/gametype/round/players (Integer),
     * gameprice (Long), createtime (Date), remark (String, incl. LIKE).
     */
    protected abstract static class GeneratedCriteria {
        protected List<Criterion> criteria;

        protected GeneratedCriteria() {
            super();
            criteria = new ArrayList<Criterion>();
        }

        public boolean isValid() {
            return criteria.size() > 0;
        }

        public List<Criterion> getAllCriteria() {
            return criteria;
        }

        public List<Criterion> getCriteria() {
            return criteria;
        }

        // No-value condition (IS NULL / IS NOT NULL).
        protected void addCriterion(String condition) {
            if (condition == null) {
                throw new RuntimeException("Value for condition cannot be null");
            }
            criteria.add(new Criterion(condition));
        }

        // Single-value or list-value condition; null values are rejected eagerly.
        protected void addCriterion(String condition, Object value, String property) {
            if (value == null) {
                throw new RuntimeException("Value for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value));
        }

        // BETWEEN condition; both bounds required.
        protected void addCriterion(String condition, Object value1, Object value2, String property) {
            if (value1 == null || value2 == null) {
                throw new RuntimeException("Between values for " + property + " cannot be null");
            }
            criteria.add(new Criterion(condition, value1, value2));
        }

        // ---- id (Long) ----
        public Criteria andIdIsNull() { addCriterion("id is null"); return (Criteria) this; }
        public Criteria andIdIsNotNull() { addCriterion("id is not null"); return (Criteria) this; }
        public Criteria andIdEqualTo(Long value) { addCriterion("id =", value, "id"); return (Criteria) this; }
        public Criteria andIdNotEqualTo(Long value) { addCriterion("id <>", value, "id"); return (Criteria) this; }
        public Criteria andIdGreaterThan(Long value) { addCriterion("id >", value, "id"); return (Criteria) this; }
        public Criteria andIdGreaterThanOrEqualTo(Long value) { addCriterion("id >=", value, "id"); return (Criteria) this; }
        public Criteria andIdLessThan(Long value) { addCriterion("id <", value, "id"); return (Criteria) this; }
        public Criteria andIdLessThanOrEqualTo(Long value) { addCriterion("id <=", value, "id"); return (Criteria) this; }
        public Criteria andIdIn(List<Long> values) { addCriterion("id in", values, "id"); return (Criteria) this; }
        public Criteria andIdNotIn(List<Long> values) { addCriterion("id not in", values, "id"); return (Criteria) this; }
        public Criteria andIdBetween(Long value1, Long value2) { addCriterion("id between", value1, value2, "id"); return (Criteria) this; }
        public Criteria andIdNotBetween(Long value1, Long value2) { addCriterion("id not between", value1, value2, "id"); return (Criteria) this; }

        // ---- roomno (Integer) ----
        public Criteria andRoomnoIsNull() { addCriterion("roomno is null"); return (Criteria) this; }
        public Criteria andRoomnoIsNotNull() { addCriterion("roomno is not null"); return (Criteria) this; }
        public Criteria andRoomnoEqualTo(Integer value) { addCriterion("roomno =", value, "roomno"); return (Criteria) this; }
        public Criteria andRoomnoNotEqualTo(Integer value) { addCriterion("roomno <>", value, "roomno"); return (Criteria) this; }
        public Criteria andRoomnoGreaterThan(Integer value) { addCriterion("roomno >", value, "roomno"); return (Criteria) this; }
        public Criteria andRoomnoGreaterThanOrEqualTo(Integer value) { addCriterion("roomno >=", value, "roomno"); return (Criteria) this; }
        public Criteria andRoomnoLessThan(Integer value) { addCriterion("roomno <", value, "roomno"); return (Criteria) this; }
        public Criteria andRoomnoLessThanOrEqualTo(Integer value) { addCriterion("roomno <=", value, "roomno"); return (Criteria) this; }
        public Criteria andRoomnoIn(List<Integer> values) { addCriterion("roomno in", values, "roomno"); return (Criteria) this; }
        public Criteria andRoomnoNotIn(List<Integer> values) { addCriterion("roomno not in", values, "roomno"); return (Criteria) this; }
        public Criteria andRoomnoBetween(Integer value1, Integer value2) { addCriterion("roomno between", value1, value2, "roomno"); return (Criteria) this; }
        public Criteria andRoomnoNotBetween(Integer value1, Integer value2) { addCriterion("roomno not between", value1, value2, "roomno"); return (Criteria) this; }

        // ---- status (Integer) ----
        public Criteria andStatusIsNull() { addCriterion("status is null"); return (Criteria) this; }
        public Criteria andStatusIsNotNull() { addCriterion("status is not null"); return (Criteria) this; }
        public Criteria andStatusEqualTo(Integer value) { addCriterion("status =", value, "status"); return (Criteria) this; }
        public Criteria andStatusNotEqualTo(Integer value) { addCriterion("status <>", value, "status"); return (Criteria) this; }
        public Criteria andStatusGreaterThan(Integer value) { addCriterion("status >", value, "status"); return (Criteria) this; }
        public Criteria andStatusGreaterThanOrEqualTo(Integer value) { addCriterion("status >=", value, "status"); return (Criteria) this; }
        public Criteria andStatusLessThan(Integer value) { addCriterion("status <", value, "status"); return (Criteria) this; }
        public Criteria andStatusLessThanOrEqualTo(Integer value) { addCriterion("status <=", value, "status"); return (Criteria) this; }
        public Criteria andStatusIn(List<Integer> values) { addCriterion("status in", values, "status"); return (Criteria) this; }
        public Criteria andStatusNotIn(List<Integer> values) { addCriterion("status not in", values, "status"); return (Criteria) this; }
        public Criteria andStatusBetween(Integer value1, Integer value2) { addCriterion("status between", value1, value2, "status"); return (Criteria) this; }
        public Criteria andStatusNotBetween(Integer value1, Integer value2) { addCriterion("status not between", value1, value2, "status"); return (Criteria) this; }

        // ---- gameid (Integer) ----
        public Criteria andGameidIsNull() { addCriterion("gameid is null"); return (Criteria) this; }
        public Criteria andGameidIsNotNull() { addCriterion("gameid is not null"); return (Criteria) this; }
        public Criteria andGameidEqualTo(Integer value) { addCriterion("gameid =", value, "gameid"); return (Criteria) this; }
        public Criteria andGameidNotEqualTo(Integer value) { addCriterion("gameid <>", value, "gameid"); return (Criteria) this; }
        public Criteria andGameidGreaterThan(Integer value) { addCriterion("gameid >", value, "gameid"); return (Criteria) this; }
        public Criteria andGameidGreaterThanOrEqualTo(Integer value) { addCriterion("gameid >=", value, "gameid"); return (Criteria) this; }
        public Criteria andGameidLessThan(Integer value) { addCriterion("gameid <", value, "gameid"); return (Criteria) this; }
        public Criteria andGameidLessThanOrEqualTo(Integer value) { addCriterion("gameid <=", value, "gameid"); return (Criteria) this; }
        public Criteria andGameidIn(List<Integer> values) { addCriterion("gameid in", values, "gameid"); return (Criteria) this; }
        public Criteria andGameidNotIn(List<Integer> values) { addCriterion("gameid not in", values, "gameid"); return (Criteria) this; }
        public Criteria andGameidBetween(Integer value1, Integer value2) { addCriterion("gameid between", value1, value2, "gameid"); return (Criteria) this; }
        public Criteria andGameidNotBetween(Integer value1, Integer value2) { addCriterion("gameid not between", value1, value2, "gameid"); return (Criteria) this; }

        // ---- gametype (Integer) ----
        public Criteria andGametypeIsNull() { addCriterion("gametype is null"); return (Criteria) this; }
        public Criteria andGametypeIsNotNull() { addCriterion("gametype is not null"); return (Criteria) this; }
        public Criteria andGametypeEqualTo(Integer value) { addCriterion("gametype =", value, "gametype"); return (Criteria) this; }
        public Criteria andGametypeNotEqualTo(Integer value) { addCriterion("gametype <>", value, "gametype"); return (Criteria) this; }
        public Criteria andGametypeGreaterThan(Integer value) { addCriterion("gametype >", value, "gametype"); return (Criteria) this; }
        public Criteria andGametypeGreaterThanOrEqualTo(Integer value) { addCriterion("gametype >=", value, "gametype"); return (Criteria) this; }
        public Criteria andGametypeLessThan(Integer value) { addCriterion("gametype <", value, "gametype"); return (Criteria) this; }
        public Criteria andGametypeLessThanOrEqualTo(Integer value) { addCriterion("gametype <=", value, "gametype"); return (Criteria) this; }
        public Criteria andGametypeIn(List<Integer> values) { addCriterion("gametype in", values, "gametype"); return (Criteria) this; }
        public Criteria andGametypeNotIn(List<Integer> values) { addCriterion("gametype not in", values, "gametype"); return (Criteria) this; }
        public Criteria andGametypeBetween(Integer value1, Integer value2) { addCriterion("gametype between", value1, value2, "gametype"); return (Criteria) this; }
        public Criteria andGametypeNotBetween(Integer value1, Integer value2) { addCriterion("gametype not between", value1, value2, "gametype"); return (Criteria) this; }

        // ---- gameprice (Long) ----
        public Criteria andGamepriceIsNull() { addCriterion("gameprice is null"); return (Criteria) this; }
        public Criteria andGamepriceIsNotNull() { addCriterion("gameprice is not null"); return (Criteria) this; }
        public Criteria andGamepriceEqualTo(Long value) { addCriterion("gameprice =", value, "gameprice"); return (Criteria) this; }
        public Criteria andGamepriceNotEqualTo(Long value) { addCriterion("gameprice <>", value, "gameprice"); return (Criteria) this; }
        public Criteria andGamepriceGreaterThan(Long value) { addCriterion("gameprice >", value, "gameprice"); return (Criteria) this; }
        public Criteria andGamepriceGreaterThanOrEqualTo(Long value) { addCriterion("gameprice >=", value, "gameprice"); return (Criteria) this; }
        public Criteria andGamepriceLessThan(Long value) { addCriterion("gameprice <", value, "gameprice"); return (Criteria) this; }
        public Criteria andGamepriceLessThanOrEqualTo(Long value) { addCriterion("gameprice <=", value, "gameprice"); return (Criteria) this; }
        public Criteria andGamepriceIn(List<Long> values) { addCriterion("gameprice in", values, "gameprice"); return (Criteria) this; }
        public Criteria andGamepriceNotIn(List<Long> values) { addCriterion("gameprice not in", values, "gameprice"); return (Criteria) this; }
        public Criteria andGamepriceBetween(Long value1, Long value2) { addCriterion("gameprice between", value1, value2, "gameprice"); return (Criteria) this; }
        public Criteria andGamepriceNotBetween(Long value1, Long value2) { addCriterion("gameprice not between", value1, value2, "gameprice"); return (Criteria) this; }

        // ---- round (Integer) ----
        public Criteria andRoundIsNull() { addCriterion("round is null"); return (Criteria) this; }
        public Criteria andRoundIsNotNull() { addCriterion("round is not null"); return (Criteria) this; }
        public Criteria andRoundEqualTo(Integer value) { addCriterion("round =", value, "round"); return (Criteria) this; }
        public Criteria andRoundNotEqualTo(Integer value) { addCriterion("round <>", value, "round"); return (Criteria) this; }
        public Criteria andRoundGreaterThan(Integer value) { addCriterion("round >", value, "round"); return (Criteria) this; }
        public Criteria andRoundGreaterThanOrEqualTo(Integer value) { addCriterion("round >=", value, "round"); return (Criteria) this; }
        public Criteria andRoundLessThan(Integer value) { addCriterion("round <", value, "round"); return (Criteria) this; }
        public Criteria andRoundLessThanOrEqualTo(Integer value) { addCriterion("round <=", value, "round"); return (Criteria) this; }
        public Criteria andRoundIn(List<Integer> values) { addCriterion("round in", values, "round"); return (Criteria) this; }
        public Criteria andRoundNotIn(List<Integer> values) { addCriterion("round not in", values, "round"); return (Criteria) this; }
        public Criteria andRoundBetween(Integer value1, Integer value2) { addCriterion("round between", value1, value2, "round"); return (Criteria) this; }
        public Criteria andRoundNotBetween(Integer value1, Integer value2) { addCriterion("round not between", value1, value2, "round"); return (Criteria) this; }

        // ---- createtime (Date) ----
        public Criteria andCreatetimeIsNull() { addCriterion("createtime is null"); return (Criteria) this; }
        public Criteria andCreatetimeIsNotNull() { addCriterion("createtime is not null"); return (Criteria) this; }
        public Criteria andCreatetimeEqualTo(Date value) { addCriterion("createtime =", value, "createtime"); return (Criteria) this; }
        public Criteria andCreatetimeNotEqualTo(Date value) { addCriterion("createtime <>", value, "createtime"); return (Criteria) this; }
        public Criteria andCreatetimeGreaterThan(Date value) { addCriterion("createtime >", value, "createtime"); return (Criteria) this; }
        public Criteria andCreatetimeGreaterThanOrEqualTo(Date value) { addCriterion("createtime >=", value, "createtime"); return (Criteria) this; }
        public Criteria andCreatetimeLessThan(Date value) { addCriterion("createtime <", value, "createtime"); return (Criteria) this; }
        public Criteria andCreatetimeLessThanOrEqualTo(Date value) { addCriterion("createtime <=", value, "createtime"); return (Criteria) this; }
        public Criteria andCreatetimeIn(List<Date> values) { addCriterion("createtime in", values, "createtime"); return (Criteria) this; }
        public Criteria andCreatetimeNotIn(List<Date> values) { addCriterion("createtime not in", values, "createtime"); return (Criteria) this; }
        public Criteria andCreatetimeBetween(Date value1, Date value2) { addCriterion("createtime between", value1, value2, "createtime"); return (Criteria) this; }
        public Criteria andCreatetimeNotBetween(Date value1, Date value2) { addCriterion("createtime not between", value1, value2, "createtime"); return (Criteria) this; }

        // ---- remark (String; string columns additionally get LIKE variants) ----
        public Criteria andRemarkIsNull() { addCriterion("remark is null"); return (Criteria) this; }
        public Criteria andRemarkIsNotNull() { addCriterion("remark is not null"); return (Criteria) this; }
        public Criteria andRemarkEqualTo(String value) { addCriterion("remark =", value, "remark"); return (Criteria) this; }
        public Criteria andRemarkNotEqualTo(String value) { addCriterion("remark <>", value, "remark"); return (Criteria) this; }
        public Criteria andRemarkGreaterThan(String value) { addCriterion("remark >", value, "remark"); return (Criteria) this; }
        public Criteria andRemarkGreaterThanOrEqualTo(String value) { addCriterion("remark >=", value, "remark"); return (Criteria) this; }
        public Criteria andRemarkLessThan(String value) { addCriterion("remark <", value, "remark"); return (Criteria) this; }
        public Criteria andRemarkLessThanOrEqualTo(String value) { addCriterion("remark <=", value, "remark"); return (Criteria) this; }
        public Criteria andRemarkLike(String value) { addCriterion("remark like", value, "remark"); return (Criteria) this; }
        public Criteria andRemarkNotLike(String value) { addCriterion("remark not like", value, "remark"); return (Criteria) this; }
        public Criteria andRemarkIn(List<String> values) { addCriterion("remark in", values, "remark"); return (Criteria) this; }
        public Criteria andRemarkNotIn(List<String> values) { addCriterion("remark not in", values, "remark"); return (Criteria) this; }
        public Criteria andRemarkBetween(String value1, String value2) { addCriterion("remark between", value1, value2, "remark"); return (Criteria) this; }
        public Criteria andRemarkNotBetween(String value1, String value2) { addCriterion("remark not between", value1, value2, "remark"); return (Criteria) this; }

        // ---- players (Integer) ----
        public Criteria andPlayersIsNull() { addCriterion("players is null"); return (Criteria) this; }
        public Criteria andPlayersIsNotNull() { addCriterion("players is not null"); return (Criteria) this; }
        public Criteria andPlayersEqualTo(Integer value) { addCriterion("players =", value, "players"); return (Criteria) this; }
        public Criteria andPlayersNotEqualTo(Integer value) { addCriterion("players <>", value, "players"); return (Criteria) this; }
        public Criteria andPlayersGreaterThan(Integer value) { addCriterion("players >", value, "players"); return (Criteria) this; }
        public Criteria andPlayersGreaterThanOrEqualTo(Integer value) { addCriterion("players >=", value, "players"); return (Criteria) this; }
        public Criteria andPlayersLessThan(Integer value) { addCriterion("players <", value, "players"); return (Criteria) this; }
        public Criteria andPlayersLessThanOrEqualTo(Integer value) { addCriterion("players <=", value, "players"); return (Criteria) this; }
        public Criteria andPlayersIn(List<Integer> values) { addCriterion("players in", values, "players"); return (Criteria) this; }
        public Criteria andPlayersNotIn(List<Integer> values) { addCriterion("players not in", values, "players"); return (Criteria) this; }
        public Criteria andPlayersBetween(Integer value1, Integer value2) { addCriterion("players between", value1, value2, "players"); return (Criteria) this; }
        public Criteria andPlayersNotBetween(Integer value1, Integer value2) { addCriterion("players not between", value1, value2, "players"); return (Criteria) this; }
    }

    /** Concrete OR-group; kept separate so user additions survive regeneration. */
    public static class Criteria extends GeneratedCriteria {
        protected Criteria() {
            super();
        }
    }

    /**
     * One rendered condition: an SQL fragment plus 0, 1, 2 (between), or N (in)
     * values. Exactly one of noValue/singleValue/betweenValue/listValue is true,
     * which the generated mapper XML switches on.
     */
    public static class Criterion {
        private String condition;
        private Object value;
        private Object secondValue;
        private boolean noValue;
        private boolean singleValue;
        private boolean betweenValue;
        private boolean listValue;
        private String typeHandler;

        public String getCondition() {
            return condition;
        }

        public Object getValue() {
            return value;
        }

        public Object getSecondValue() {
            return secondValue;
        }

        public boolean isNoValue() {
            return noValue;
        }

        public boolean isSingleValue() {
            return singleValue;
        }

        public boolean isBetweenValue() {
            return betweenValue;
        }

        public boolean isListValue() {
            return listValue;
        }

        public String getTypeHandler() {
            return typeHandler;
        }

        // Value-less condition, e.g. "id is null".
        protected Criterion(String condition) {
            super();
            this.condition = condition;
            this.typeHandler = null;
            this.noValue = true;
        }

        // Single- or list-valued condition; a List value marks it as an IN clause.
        protected Criterion(String condition, Object value, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.typeHandler = typeHandler;
            if (value instanceof List<?>) {
                this.listValue = true;
            } else {
                this.singleValue = true;
            }
        }

        protected Criterion(String condition, Object value) {
            this(condition, value, null);
        }

        // BETWEEN condition with two bounds.
        protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
            super();
            this.condition = condition;
            this.value = value;
            this.secondValue = secondValue;
            this.typeHandler = typeHandler;
            this.betweenValue = true;
        }

        protected Criterion(String condition, Object value, Object secondValue) {
            this(condition, value, secondValue, null);
        }
    }
}
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.apple; import com.facebook.buck.apple.toolchain.AppleCxxPlatformsProvider; import com.facebook.buck.apple.toolchain.ApplePlatform; import com.facebook.buck.apple.toolchain.CodeSignIdentityStore; import com.facebook.buck.apple.toolchain.ProvisioningProfileStore; import com.facebook.buck.apple.toolchain.UnresolvedAppleCxxPlatform; import com.facebook.buck.core.cell.CellPathResolver; import com.facebook.buck.core.cell.nameresolver.CellNameResolver; import com.facebook.buck.core.description.arg.HasContacts; import com.facebook.buck.core.description.attr.ImplicitDepsInferringDescription; import com.facebook.buck.core.description.attr.ImplicitFlavorsInferringDescription; import com.facebook.buck.core.description.impl.DescriptionCache; import com.facebook.buck.core.description.metadata.MetadataProvidingDescription; import com.facebook.buck.core.exceptions.HumanReadableException; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.model.Flavor; import com.facebook.buck.core.model.FlavorDomain; import com.facebook.buck.core.model.FlavorSet; import com.facebook.buck.core.model.Flavored; import com.facebook.buck.core.model.InternalFlavor; import com.facebook.buck.core.model.TargetConfiguration; import com.facebook.buck.core.model.impl.BuildTargetPaths; import com.facebook.buck.core.model.targetgraph.TargetGraph; import 
com.facebook.buck.core.rules.ActionGraphBuilder; import com.facebook.buck.core.rules.BuildRule; import com.facebook.buck.core.rules.BuildRuleCreationContextWithTargetGraph; import com.facebook.buck.core.rules.BuildRuleParams; import com.facebook.buck.core.rules.BuildRuleResolver; import com.facebook.buck.core.rules.DescriptionWithTargetGraph; import com.facebook.buck.core.sourcepath.SourcePath; import com.facebook.buck.core.toolchain.ToolchainProvider; import com.facebook.buck.core.util.immutables.RuleArg; import com.facebook.buck.cxx.CxxBinaryDescription; import com.facebook.buck.cxx.CxxBinaryDescriptionArg; import com.facebook.buck.cxx.CxxBinaryFactory; import com.facebook.buck.cxx.CxxBinaryFlavored; import com.facebook.buck.cxx.CxxBinaryImplicitFlavors; import com.facebook.buck.cxx.CxxBinaryMetadataFactory; import com.facebook.buck.cxx.CxxCompilationDatabase; import com.facebook.buck.cxx.FrameworkDependencies; import com.facebook.buck.cxx.HasAppleDebugSymbolDeps; import com.facebook.buck.cxx.config.CxxBuckConfig; import com.facebook.buck.cxx.toolchain.CxxPlatform; import com.facebook.buck.cxx.toolchain.CxxPlatformsProvider; import com.facebook.buck.cxx.toolchain.LinkerMapMode; import com.facebook.buck.cxx.toolchain.StripStyle; import com.facebook.buck.cxx.toolchain.impl.CxxPlatforms; import com.facebook.buck.file.WriteFile; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.rules.macros.StringWithMacros; import com.facebook.buck.swift.SwiftBuckConfig; import com.facebook.buck.swift.SwiftLibraryDescription; import com.facebook.buck.util.types.Either; import com.facebook.buck.versions.Version; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Predicates; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import 
com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Optional; import java.util.Set; public class AppleBinaryDescription implements DescriptionWithTargetGraph<AppleBinaryDescriptionArg>, Flavored, ImplicitDepsInferringDescription<AppleBinaryDescription.AbstractAppleBinaryDescriptionArg>, ImplicitFlavorsInferringDescription, MetadataProvidingDescription<AppleBinaryDescriptionArg> { public static final Flavor APP_FLAVOR = InternalFlavor.of("app"); public static final Sets.SetView<Flavor> NON_DELEGATE_FLAVORS = Sets.union(AppleDebugFormat.FLAVOR_DOMAIN.getFlavors(), ImmutableSet.of(APP_FLAVOR)); public static final Flavor LEGACY_WATCH_FLAVOR = InternalFlavor.of("legacy_watch"); @SuppressWarnings("PMD") // PMD doesn't understand method references private static final Set<Flavor> SUPPORTED_FLAVORS = ImmutableSet.of( APP_FLAVOR, CxxCompilationDatabase.COMPILATION_DATABASE, CxxCompilationDatabase.UBER_COMPILATION_DATABASE, AppleDebugFormat.DWARF_AND_DSYM.getFlavor(), AppleDebugFormat.DWARF.getFlavor(), AppleDebugFormat.NONE.getFlavor(), LinkerMapMode.NO_LINKER_MAP.getFlavor()); private final ToolchainProvider toolchainProvider; private final XCodeDescriptions xcodeDescriptions; private final Optional<SwiftLibraryDescription> swiftDelegate; private final AppleConfig appleConfig; private final CxxBuckConfig cxxBuckConfig; private final SwiftBuckConfig swiftBuckConfig; private final CxxBinaryImplicitFlavors cxxBinaryImplicitFlavors; private final CxxBinaryFactory cxxBinaryFactory; private final CxxBinaryMetadataFactory cxxBinaryMetadataFactory; private final CxxBinaryFlavored cxxBinaryFlavored; public AppleBinaryDescription( ToolchainProvider toolchainProvider, XCodeDescriptions xcodeDescriptions, SwiftLibraryDescription swiftDelegate, AppleConfig 
appleConfig, CxxBuckConfig cxxBuckConfig, SwiftBuckConfig swiftBuckConfig, CxxBinaryImplicitFlavors cxxBinaryImplicitFlavors, CxxBinaryFactory cxxBinaryFactory, CxxBinaryMetadataFactory cxxBinaryMetadataFactory, CxxBinaryFlavored cxxBinaryFlavored) { this.toolchainProvider = toolchainProvider; this.xcodeDescriptions = xcodeDescriptions; // TODO(T22135033): Make apple_binary not use a Swift delegate this.swiftDelegate = Optional.of(swiftDelegate); this.appleConfig = appleConfig; this.cxxBuckConfig = cxxBuckConfig; this.swiftBuckConfig = swiftBuckConfig; this.cxxBinaryImplicitFlavors = cxxBinaryImplicitFlavors; this.cxxBinaryFactory = cxxBinaryFactory; this.cxxBinaryMetadataFactory = cxxBinaryMetadataFactory; this.cxxBinaryFlavored = cxxBinaryFlavored; } @Override public Class<AppleBinaryDescriptionArg> getConstructorArgType() { return AppleBinaryDescriptionArg.class; } @Override public Optional<ImmutableSet<FlavorDomain<?>>> flavorDomains( TargetConfiguration toolchainTargetConfiguration) { ImmutableSet.Builder<FlavorDomain<?>> builder = ImmutableSet.builder(); ImmutableSet<FlavorDomain<?>> localDomains = ImmutableSet.of(AppleDebugFormat.FLAVOR_DOMAIN); builder.addAll(localDomains); cxxBinaryFlavored .flavorDomains(toolchainTargetConfiguration) .ifPresent(domains -> builder.addAll(domains)); swiftDelegate .flatMap(swift -> swift.flavorDomains(toolchainTargetConfiguration)) .ifPresent(domains -> builder.addAll(domains)); ImmutableSet<FlavorDomain<?>> result = builder.build(); // Drop StripStyle because it's overridden by AppleDebugFormat result = result.stream() .filter(domain -> !domain.equals(StripStyle.FLAVOR_DOMAIN)) .collect(ImmutableSet.toImmutableSet()); return Optional.of(result); } @Override public boolean hasFlavors( ImmutableSet<Flavor> flavors, TargetConfiguration toolchainTargetConfiguration) { Set<Flavor> unmatchedFlavors = Sets.difference(flavors, SUPPORTED_FLAVORS); if (unmatchedFlavors.isEmpty()) { return true; } ImmutableSet<Flavor> delegateFlavors 
= ImmutableSet.copyOf(Sets.difference(flavors, NON_DELEGATE_FLAVORS)); ImmutableSet<Flavor> supportedDelegateFlavors = swiftDelegate .map(swift -> swift.getSupportedFlavors(delegateFlavors, toolchainTargetConfiguration)) .orElse(ImmutableSet.<Flavor>of()); unmatchedFlavors = Sets.difference(unmatchedFlavors, supportedDelegateFlavors); if (unmatchedFlavors.isEmpty()) { return true; } ImmutableSet<Flavor> immutableUnmatchedFlavors = ImmutableSet.<Flavor>copyOf(unmatchedFlavors); ImmutableList<ImmutableSortedSet<Flavor>> thinFlavorSets = generateThinDelegateFlavors(immutableUnmatchedFlavors); if (thinFlavorSets.size() > 0) { return Iterables.all( thinFlavorSets, inputFlavors -> cxxBinaryFlavored.hasFlavors(inputFlavors, toolchainTargetConfiguration)); } else { return cxxBinaryFlavored.hasFlavors(immutableUnmatchedFlavors, toolchainTargetConfiguration); } } private ImmutableList<ImmutableSortedSet<Flavor>> generateThinDelegateFlavors( ImmutableSet<Flavor> delegateFlavors) { return MultiarchFileInfos.generateThinFlavors(ImmutableSortedSet.copyOf(delegateFlavors)); } @Override public BuildRule createBuildRule( BuildRuleCreationContextWithTargetGraph context, BuildTarget buildTarget, BuildRuleParams params, AppleBinaryDescriptionArg args) { FlavorDomain<UnresolvedAppleCxxPlatform> appleCxxPlatformsFlavorDomain = getAppleCxxPlatformsFlavorDomain(buildTarget.getTargetConfiguration()); ActionGraphBuilder actionGraphBuilder = context.getActionGraphBuilder(); args.checkDuplicateSources(actionGraphBuilder.getSourcePathResolver()); if (buildTarget.getFlavors().contains(APP_FLAVOR)) { return createBundleBuildRule( context.getTargetGraph(), buildTarget, context.getProjectFilesystem(), params, actionGraphBuilder, appleCxxPlatformsFlavorDomain, args); } else { return createBinaryBuildRule( context, buildTarget, context.getProjectFilesystem(), params, actionGraphBuilder, context.getCellPathResolver(), appleCxxPlatformsFlavorDomain, args); } } private 
FlavorDomain<UnresolvedAppleCxxPlatform> getAppleCxxPlatformsFlavorDomain( TargetConfiguration toolchainTargetConfiguration) { AppleCxxPlatformsProvider appleCxxPlatformsProvider = toolchainProvider.getByName( AppleCxxPlatformsProvider.DEFAULT_NAME, toolchainTargetConfiguration, AppleCxxPlatformsProvider.class); return appleCxxPlatformsProvider.getUnresolvedAppleCxxPlatforms(); } // We want to wrap only if we have explicit debug flavor. This is because we don't want to // force dSYM generation in case if its enabled by default in config. We just want the binary, // so unless flavor is explicitly set, lets just produce binary! private boolean shouldWrapIntoAppleDebuggableBinary( BuildTarget buildTarget, BuildRule binaryBuildRule) { Optional<AppleDebugFormat> explicitDebugInfoFormat = AppleDebugFormat.FLAVOR_DOMAIN.getValue(buildTarget); boolean binaryIsWrappable = AppleDebuggableBinary.canWrapBinaryBuildRule(binaryBuildRule); return explicitDebugInfoFormat.isPresent() && binaryIsWrappable; } private BuildRule createBinaryBuildRule( BuildRuleCreationContextWithTargetGraph context, BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleParams params, ActionGraphBuilder graphBuilder, CellPathResolver cellRoots, FlavorDomain<UnresolvedAppleCxxPlatform> appleCxxPlatformsFlavorDomain, AppleBinaryDescriptionArg args) { args.checkDuplicateSources(graphBuilder.getSourcePathResolver()); // remove some flavors so binary will have the same output regardless their values BuildTarget unstrippedBinaryBuildTarget = buildTarget .withoutFlavors(AppleDebugFormat.FLAVOR_DOMAIN.getFlavors()) .withoutFlavors(StripStyle.FLAVOR_DOMAIN.getFlavors()); BuildRule unstrippedBinaryRule = createBinary( context, unstrippedBinaryBuildTarget, projectFilesystem, params, graphBuilder, cellRoots, appleCxxPlatformsFlavorDomain, args); if (shouldWrapIntoAppleDebuggableBinary(buildTarget, unstrippedBinaryRule)) { return createAppleDebuggableBinary( context, buildTarget, projectFilesystem, 
params, graphBuilder, cellRoots, appleCxxPlatformsFlavorDomain, args, unstrippedBinaryBuildTarget, (HasAppleDebugSymbolDeps) unstrippedBinaryRule); } else { return unstrippedBinaryRule; } } private BuildRule createAppleDebuggableBinary( BuildRuleCreationContextWithTargetGraph context, BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleParams params, ActionGraphBuilder graphBuilder, CellPathResolver cellRoots, FlavorDomain<UnresolvedAppleCxxPlatform> appleCxxPlatformsFlavorDomain, AppleBinaryDescriptionArg args, BuildTarget unstrippedBinaryBuildTarget, HasAppleDebugSymbolDeps unstrippedBinaryRule) { CxxPlatformsProvider cxxPlatformsProvider = getCxxPlatformsProvider(buildTarget.getTargetConfiguration()); BuildTarget strippedBinaryBuildTarget = unstrippedBinaryBuildTarget.withAppendedFlavors( StripStyle.FLAVOR_DOMAIN .getFlavor(buildTarget.getFlavors()) .orElse(StripStyle.NON_GLOBAL_SYMBOLS.getFlavor())); BuildRule strippedBinaryRule = createBinary( context, strippedBinaryBuildTarget, projectFilesystem, params, graphBuilder, cellRoots, appleCxxPlatformsFlavorDomain, args); return AppleDescriptions.createAppleDebuggableBinary( unstrippedBinaryBuildTarget, projectFilesystem, graphBuilder, strippedBinaryRule, unstrippedBinaryRule, AppleDebugFormat.FLAVOR_DOMAIN.getRequiredValue(buildTarget), cxxPlatformsProvider, appleCxxPlatformsFlavorDomain, cxxBuckConfig.shouldCacheStrip()); } private BuildRule createBundleBuildRule( TargetGraph targetGraph, BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleParams params, ActionGraphBuilder graphBuilder, FlavorDomain<UnresolvedAppleCxxPlatform> appleCxxPlatformsFlavorDomain, AppleBinaryDescriptionArg args) { if (!args.getInfoPlist().isPresent()) { throw new HumanReadableException( "Cannot create application for apple_binary '%s':\n", "No value specified for 'info_plist' attribute.", buildTarget.getUnflavoredBuildTarget()); } AppleDebugFormat flavoredDebugFormat = AppleDebugFormat.FLAVOR_DOMAIN 
.getValue(buildTarget) .orElse(appleConfig.getDefaultDebugInfoFormatForBinaries()); if (!buildTarget.getFlavors().contains(flavoredDebugFormat.getFlavor())) { return graphBuilder.requireRule( buildTarget.withAppendedFlavors(flavoredDebugFormat.getFlavor())); } CxxPlatformsProvider cxxPlatformsProvider = getCxxPlatformsProvider(buildTarget.getTargetConfiguration()); if (!AppleDescriptions.INCLUDE_FRAMEWORKS.getValue(buildTarget).isPresent()) { CxxPlatform cxxPlatform = ApplePlatforms.getCxxPlatformForBuildTarget( cxxPlatformsProvider, buildTarget, Optional.empty()) .resolve(graphBuilder, buildTarget.getTargetConfiguration()); ApplePlatform applePlatform = appleCxxPlatformsFlavorDomain .getValue(cxxPlatform.getFlavor()) .resolve(graphBuilder) .getAppleSdk() .getApplePlatform(); if (applePlatform.getAppIncludesFrameworks()) { return graphBuilder.requireRule( buildTarget.withAppendedFlavors(AppleDescriptions.INCLUDE_FRAMEWORKS_FLAVOR)); } return graphBuilder.requireRule( buildTarget.withAppendedFlavors(AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR)); } BuildTarget binaryTarget = buildTarget.withoutFlavors(APP_FLAVOR); return AppleDescriptions.createAppleBundle( xcodeDescriptions, getCxxPlatformsProvider(buildTarget.getTargetConfiguration()), appleCxxPlatformsFlavorDomain, targetGraph, buildTarget, projectFilesystem, params, graphBuilder, toolchainProvider.getByName( CodeSignIdentityStore.DEFAULT_NAME, buildTarget.getTargetConfiguration(), CodeSignIdentityStore.class), toolchainProvider.getByName( ProvisioningProfileStore.DEFAULT_NAME, buildTarget.getTargetConfiguration(), ProvisioningProfileStore.class), Optional.of(binaryTarget), Optional.empty(), args.getDefaultPlatform(), Either.ofLeft(AppleBundleExtension.APP), Optional.empty(), args.getInfoPlist().get(), args.getInfoPlistSubstitutions(), args.getDeps(), args.getTests(), flavoredDebugFormat, appleConfig.useDryRunCodeSigning(), appleConfig.cacheBundlesAndPackages(), appleConfig.shouldVerifyBundleResources(), 
appleConfig.assetCatalogValidation(), AppleAssetCatalogsCompilationOptions.builder().build(), ImmutableList.of(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), appleConfig.getCodesignTimeout(), swiftBuckConfig.getCopyStdlibToFrameworks(), swiftBuckConfig.getUseLipoThin(), cxxBuckConfig.shouldCacheStrip(), appleConfig.useEntitlementsWhenAdhocCodeSigning(), Predicates.alwaysTrue()); } private BuildRule createBinary( BuildRuleCreationContextWithTargetGraph context, BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleParams params, ActionGraphBuilder graphBuilder, CellPathResolver cellRoots, FlavorDomain<UnresolvedAppleCxxPlatform> appleCxxPlatformsFlavorDomain, AppleBinaryDescriptionArg args) { if (AppleDescriptions.flavorsDoNotAllowLinkerMapMode(buildTarget)) { buildTarget = buildTarget.withoutFlavors(LinkerMapMode.NO_LINKER_MAP.getFlavor()); } Optional<MultiarchFileInfo> fatBinaryInfo = MultiarchFileInfos.create(appleCxxPlatformsFlavorDomain, buildTarget); if (fatBinaryInfo.isPresent()) { if (shouldUseStubBinary(buildTarget, args)) { BuildTarget thinTarget = Iterables.getFirst(fatBinaryInfo.get().getThinTargets(), null); return requireThinBinary( context, thinTarget, projectFilesystem, params, graphBuilder, cellRoots, appleCxxPlatformsFlavorDomain, args); } ImmutableSortedSet.Builder<BuildRule> thinRules = ImmutableSortedSet.naturalOrder(); for (BuildTarget thinTarget : fatBinaryInfo.get().getThinTargets()) { thinRules.add( requireThinBinary( context, thinTarget, projectFilesystem, params, graphBuilder, cellRoots, appleCxxPlatformsFlavorDomain, args)); } return MultiarchFileInfos.requireMultiarchRule( buildTarget, projectFilesystem, params, graphBuilder, fatBinaryInfo.get(), thinRules.build(), cxxBuckConfig, appleCxxPlatformsFlavorDomain); } else { return requireThinBinary( context, buildTarget, projectFilesystem, params, graphBuilder, cellRoots, appleCxxPlatformsFlavorDomain, args); } } private BuildRule 
requireThinBinary( BuildRuleCreationContextWithTargetGraph context, BuildTarget buildTarget, ProjectFilesystem projectFilesystem, BuildRuleParams params, ActionGraphBuilder graphBuilder, CellPathResolver cellRoots, FlavorDomain<UnresolvedAppleCxxPlatform> appleCxxPlatformsFlavorDomain, AppleBinaryDescriptionArg args) { return graphBuilder.computeIfAbsent( buildTarget, ignored -> { ImmutableSortedSet<BuildTarget> extraCxxDeps; Optional<BuildRule> swiftCompanionBuildRule = swiftDelegate.flatMap( swift -> swift.createCompanionBuildRule( context, buildTarget, params, graphBuilder, args, args.getTargetSdkVersion())); if (swiftCompanionBuildRule.isPresent() && SwiftLibraryDescription.isSwiftTarget(buildTarget)) { // when creating a swift target, there is no need to proceed with apple binary rules, return swiftCompanionBuildRule.get(); } else if (swiftCompanionBuildRule.isPresent()) { // otherwise, add this swift rule as a dependency. extraCxxDeps = ImmutableSortedSet.of(swiftCompanionBuildRule.get().getBuildTarget()); } else { extraCxxDeps = ImmutableSortedSet.of(); } Optional<Path> stubBinaryPath = getStubBinaryPath(buildTarget, appleCxxPlatformsFlavorDomain, args, graphBuilder); if (shouldUseStubBinary(buildTarget, args) && stubBinaryPath.isPresent()) { try { return new WriteFile( buildTarget, projectFilesystem, Files.readAllBytes(stubBinaryPath.get()), BuildTargetPaths.getGenPath(projectFilesystem, buildTarget, "%s"), true); } catch (IOException e) { throw new HumanReadableException( "Could not read stub binary " + stubBinaryPath.get()); } } else { CxxBinaryDescriptionArg.Builder delegateArg = CxxBinaryDescriptionArg.builder().from(args); Optional<UnresolvedAppleCxxPlatform> appleCxxPlatform = getAppleCxxPlatformFromParams(appleCxxPlatformsFlavorDomain, buildTarget); AppleDescriptions.populateCxxBinaryDescriptionArg( graphBuilder, delegateArg, appleCxxPlatform, args, buildTarget); Optional<ApplePlatform> applePlatform = getApplePlatformForTarget( buildTarget, 
args.getDefaultPlatform(), appleCxxPlatformsFlavorDomain, graphBuilder); if (applePlatform.isPresent() && ApplePlatform.needsEntitlementsInBinary(applePlatform.get().getName())) { Optional<SourcePath> entitlements = args.getEntitlementsFile(); if (entitlements.isPresent()) { ImmutableList<String> flags = ImmutableList.of( "-Xlinker", "-sectcreate", "-Xlinker", "__TEXT", "-Xlinker", "__entitlements", "-Xlinker", graphBuilder .getSourcePathResolver() .getAbsolutePath(entitlements.get()) .toString()); delegateArg.addAllLinkerFlags( Iterables.transform(flags, StringWithMacros::ofConstantString)); } } return cxxBinaryFactory.createBuildRule( context.getTargetGraph(), buildTarget, projectFilesystem, graphBuilder, cellRoots, delegateArg.build(), extraCxxDeps); } }); } private boolean shouldUseStubBinary(BuildTarget buildTarget, AppleBinaryDescriptionArg args) { // If the target has sources, it's not a watch app, it might be a watch extension instead. // In this case, we don't need to add a watch kit stub. 
if (!args.getSrcs().isEmpty()) { return false; } FlavorSet flavors = buildTarget.getFlavors(); return (flavors.contains(AppleBundleDescription.WATCH_OS_FLAVOR) || flavors.contains(AppleBundleDescription.WATCH_OS_64_32_FLAVOR) || flavors.contains(AppleBundleDescription.WATCH_SIMULATOR_FLAVOR) || flavors.contains(LEGACY_WATCH_FLAVOR)); } private Optional<Path> getStubBinaryPath( BuildTarget buildTarget, FlavorDomain<UnresolvedAppleCxxPlatform> appleCxxPlatformsFlavorDomain, AppleBinaryDescriptionArg args, ActionGraphBuilder graphBuilder) { Optional<Path> stubBinaryPath = Optional.empty(); Optional<UnresolvedAppleCxxPlatform> appleCxxPlatform = getAppleCxxPlatformFromParams(appleCxxPlatformsFlavorDomain, buildTarget); if (appleCxxPlatform.isPresent() && args.getSrcs().isEmpty()) { stubBinaryPath = appleCxxPlatform.get().resolve(graphBuilder).getStubBinary(); } return stubBinaryPath; } private Optional<ApplePlatform> getApplePlatformForTarget( BuildTarget buildTarget, Optional<Flavor> defaultPlatform, FlavorDomain<UnresolvedAppleCxxPlatform> appleCxxPlatformsFlavorDomain, BuildRuleResolver ruleResolver) { CxxPlatformsProvider cxxPlatformsProvider = getCxxPlatformsProvider(buildTarget.getTargetConfiguration()); CxxPlatform cxxPlatform = ApplePlatforms.getCxxPlatformForBuildTarget( cxxPlatformsProvider, buildTarget, defaultPlatform) .resolve(ruleResolver, buildTarget.getTargetConfiguration()); if (!appleCxxPlatformsFlavorDomain.contains(cxxPlatform.getFlavor())) { return Optional.empty(); } return Optional.of( appleCxxPlatformsFlavorDomain .getValue(cxxPlatform.getFlavor()) .resolve(ruleResolver) .getAppleSdk() .getApplePlatform()); } private Optional<UnresolvedAppleCxxPlatform> getAppleCxxPlatformFromParams( FlavorDomain<UnresolvedAppleCxxPlatform> appleCxxPlatformsFlavorDomain, BuildTarget buildTarget) { return appleCxxPlatformsFlavorDomain.getValue(buildTarget); } @Override public <U> Optional<U> createMetadata( BuildTarget buildTarget, ActionGraphBuilder 
graphBuilder, CellPathResolver cellRoots, AppleBinaryDescriptionArg args, Optional<ImmutableMap<BuildTarget, Version>> selectedVersions, Class<U> metadataClass) { if (!metadataClass.isAssignableFrom(FrameworkDependencies.class)) { CxxBinaryDescriptionArg.Builder delegateArg = CxxBinaryDescriptionArg.builder().from(args); Optional<UnresolvedAppleCxxPlatform> appleCxxPlatform = getAppleCxxPlatformFromParams( getAppleCxxPlatformsFlavorDomain(buildTarget.getTargetConfiguration()), buildTarget); AppleDescriptions.populateCxxBinaryDescriptionArg( graphBuilder, delegateArg, appleCxxPlatform, args, buildTarget); return cxxBinaryMetadataFactory.createMetadata( buildTarget, graphBuilder, delegateArg.build().getDeps(), metadataClass); } if (metadataClass.isAssignableFrom(HasEntitlementsFile.class)) { return Optional.of(metadataClass.cast(args)); } Optional<Flavor> cxxPlatformFlavor = getCxxPlatformsProvider(buildTarget.getTargetConfiguration()) .getUnresolvedCxxPlatforms() .getFlavor(buildTarget); Preconditions.checkState( cxxPlatformFlavor.isPresent(), "Could not find cxx platform in:\n%s", Joiner.on(", ").join(buildTarget.getFlavors().getSet())); ImmutableSet.Builder<SourcePath> sourcePaths = ImmutableSet.builder(); for (BuildTarget dep : args.getDeps()) { Optional<FrameworkDependencies> frameworks = graphBuilder.requireMetadata( dep.withAppendedFlavors( AppleDescriptions.NO_INCLUDE_FRAMEWORKS_FLAVOR, cxxPlatformFlavor.get()), FrameworkDependencies.class); if (frameworks.isPresent()) { sourcePaths.addAll(frameworks.get().getSourcePaths()); } } return Optional.of(metadataClass.cast(FrameworkDependencies.of(sourcePaths.build()))); } @Override public ImmutableSortedSet<Flavor> addImplicitFlavors( ImmutableSortedSet<Flavor> argDefaultFlavors, TargetConfiguration toolchainTargetConfiguration) { // Use defaults.apple_binary if present, but fall back to defaults.cxx_binary otherwise. 
return cxxBinaryImplicitFlavors.addImplicitFlavorsForRuleTypes( argDefaultFlavors, toolchainTargetConfiguration, DescriptionCache.getRuleType(this), DescriptionCache.getRuleType(CxxBinaryDescription.class)); } @Override public void findDepsForTargetFromConstructorArgs( BuildTarget buildTarget, CellNameResolver cellRoots, AbstractAppleBinaryDescriptionArg constructorArg, ImmutableCollection.Builder<BuildTarget> extraDepsBuilder, ImmutableCollection.Builder<BuildTarget> targetGraphOnlyDepsBuilder) { ImmutableList<ImmutableSortedSet<Flavor>> thinFlavorSets = generateThinDelegateFlavors(buildTarget.getFlavors().getSet()); CxxPlatformsProvider cxxPlatformsProvider = getCxxPlatformsProvider(buildTarget.getTargetConfiguration()); if (thinFlavorSets.size() > 0) { for (ImmutableSortedSet<Flavor> flavors : thinFlavorSets) { extraDepsBuilder.addAll( CxxPlatforms.findDepsForTargetFromConstructorArgs( cxxPlatformsProvider, buildTarget.withFlavors(flavors), Optional.empty())); } } else { extraDepsBuilder.addAll( CxxPlatforms.findDepsForTargetFromConstructorArgs( cxxPlatformsProvider, buildTarget, Optional.empty())); } getAppleCxxPlatformsFlavorDomain(buildTarget.getTargetConfiguration()) .getValues() .forEach( platform -> targetGraphOnlyDepsBuilder.addAll( platform.getParseTimeDeps(buildTarget.getTargetConfiguration()))); } private CxxPlatformsProvider getCxxPlatformsProvider( TargetConfiguration toolchainTargetConfiguration) { return toolchainProvider.getByName( CxxPlatformsProvider.DEFAULT_NAME, toolchainTargetConfiguration, CxxPlatformsProvider.class); } @RuleArg interface AbstractAppleBinaryDescriptionArg extends AppleNativeTargetDescriptionArg, HasContacts, HasEntitlementsFile { Optional<SourcePath> getInfoPlist(); ImmutableMap<String, String> getInfoPlistSubstitutions(); } }
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.bbg.historicaltimeseries; import static com.opengamma.bbg.BloombergConstants.BLOOMBERG_DATA_SOURCE_NAME; import static com.opengamma.bbg.BloombergConstants.BLOOMBERG_FIELDS_REQUEST; import static com.opengamma.bbg.BloombergConstants.BLOOMBERG_HISTORICAL_DATA_REQUEST; import static com.opengamma.bbg.BloombergConstants.BLOOMBERG_SECURITIES_REQUEST; import static com.opengamma.bbg.BloombergConstants.DATA_PROVIDER_UNKNOWN; import static com.opengamma.bbg.BloombergConstants.DEFAULT_DATA_PROVIDER; import static com.opengamma.bbg.BloombergConstants.EID_DATA; import static com.opengamma.bbg.BloombergConstants.ERROR_INFO; import static com.opengamma.bbg.BloombergConstants.FIELD_DATA; import static com.opengamma.bbg.BloombergConstants.FIELD_EXCEPTIONS; import static com.opengamma.bbg.BloombergConstants.FIELD_ID; import static com.opengamma.bbg.BloombergConstants.RESPONSE_ERROR; import static com.opengamma.bbg.BloombergConstants.SECURITY_DATA; import static com.opengamma.bbg.BloombergConstants.SECURITY_ERROR; import static com.opengamma.bbg.util.BloombergDataUtils.toBloombergDate; import static com.opengamma.core.id.ExternalSchemes.BLOOMBERG_BUID; import static com.opengamma.core.id.ExternalSchemes.BLOOMBERG_TICKER; import java.text.MessageFormat; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ExecutionException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.Lifecycle; import org.threeten.bp.LocalDate; import com.bloomberglp.blpapi.Datetime; import com.bloomberglp.blpapi.Element; import com.bloomberglp.blpapi.Request; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import 
com.opengamma.OpenGammaRuntimeException;
import com.opengamma.bbg.AbstractBloombergStaticDataProvider;
import com.opengamma.bbg.BloombergConnector;
import com.opengamma.bbg.BloombergConstants;
import com.opengamma.bbg.BloombergPermissions;
import com.opengamma.bbg.referencedata.statistics.BloombergReferenceDataStatistics;
import com.opengamma.bbg.util.BloombergDomainIdentifierResolver;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.provider.historicaltimeseries.HistoricalTimeSeriesProviderGetRequest;
import com.opengamma.provider.historicaltimeseries.HistoricalTimeSeriesProviderGetResult;
import com.opengamma.provider.historicaltimeseries.impl.AbstractHistoricalTimeSeriesProvider;
import com.opengamma.timeseries.date.localdate.ImmutableLocalDateDoubleTimeSeries;
import com.opengamma.timeseries.date.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.timeseries.date.localdate.LocalDateDoubleTimeSeriesBuilder;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.time.LocalDateRange;

/**
 * Provider of time-series from the Bloomberg data source.
 * <p>
 * Lifecycle calls are delegated to the nested {@link BloombergHistoricalDataRequestService},
 * which owns the Bloomberg session.
 */
public class BloombergHistoricalTimeSeriesProvider extends AbstractHistoricalTimeSeriesProvider implements Lifecycle {

  /** Logger. */
  private static final Logger LOGGER = LoggerFactory.getLogger(BloombergHistoricalTimeSeriesProvider.class);

  /**
   * Default start date for loading time-series.
   * Used to widen requests that do not specify an explicit start.
   */
  private static final LocalDate DEFAULT_START_DATE = LocalDate.of(1900, 1, 1);

  /**
   * Implementation class that performs the actual Bloomberg requests.
   */
  private final BloombergHistoricalDataRequestService _historicalDataService;

  /**
   * Creates an instance.
   * <p>
   * This will use the statistics tool in the connector.
   *
   * @param bloombergConnector
   *          the bloomberg connector, not null
   */
  public BloombergHistoricalTimeSeriesProvider(final BloombergConnector bloombergConnector) {
    // notNull is evaluated before getReferenceDataStatistics() is called, giving a clear
    // error message rather than an NPE on the statistics lookup
    this(ArgumentChecker.notNull(bloombergConnector, "bloombergConnector"), bloombergConnector.getReferenceDataStatistics());
  }

  /**
   * Creates an instance.
   *
   * @param bloombergConnector
   *          the bloomberg connector, not null
   * @param statistics
   *          the statistics, not null
   */
  public BloombergHistoricalTimeSeriesProvider(final BloombergConnector bloombergConnector, final BloombergReferenceDataStatistics statistics) {
    super(BLOOMBERG_DATA_SOURCE_NAME);
    _historicalDataService = new BloombergHistoricalDataRequestService(bloombergConnector, statistics);
  }

  // -------------------------------------------------------------------------
  /**
   * Performs the bulk fetch: normalizes the request date range, delegates to the
   * request service, then filters the result to the requested range/point count.
   *
   * @param request the provider request, not null
   * @return the filtered result, not null
   */
  @Override
  protected HistoricalTimeSeriesProviderGetResult doBulkGet(final HistoricalTimeSeriesProviderGetRequest request) {
    fixRequestDateRange(request, DEFAULT_START_DATE);
    final HistoricalTimeSeriesProviderGetResult result = _historicalDataService.doBulkGet(
        request.getExternalIdBundles(), request.getDataProvider(), request.getDataField(),
        request.getDateRange(), request.getMaxPoints());
    return filterResult(result, request.getDateRange(), request.getMaxPoints());
  }

  // -------------------------------------------------------------------------
  @Override
  public void start() {
    _historicalDataService.start();
  }

  @Override
  public void stop() {
    _historicalDataService.stop();
  }

  @Override
  public boolean isRunning() {
    return _historicalDataService.isRunning();
  }

  /**
   * Service that submits historical data requests to Bloomberg and converts
   * the responses into time-series and permission maps.
   */
  static class BloombergHistoricalDataRequestService extends AbstractBloombergStaticDataProvider {

    /**
     * The format of error messages: code:category/subcategory - message.
     */
    private static final String ERROR_MESSAGE_FORMAT = "{0}:{1}/{2} - {3}";

    /**
     * Bloomberg statistics.
     */
    private final BloombergReferenceDataStatistics _statistics;

    BloombergHistoricalDataRequestService(final BloombergConnector bloombergConnector) {
      this(ArgumentChecker.notNull(bloombergConnector, "bloombergConnector"), bloombergConnector.getReferenceDataStatistics());
    }

    /**
     * Creates an instance.
     *
     * @param bloombergConnector
     *          the bloomberg connector, not null
     * @param statistics
     *          the statistics, not null
     */
    BloombergHistoricalDataRequestService(final BloombergConnector bloombergConnector, final BloombergReferenceDataStatistics statistics) {
      super(bloombergConnector, BloombergConstants.REF_DATA_SVC_NAME);
      ArgumentChecker.notNull(statistics, "statistics");
      _statistics = statistics;
    }

    // -------------------------------------------------------------------------
    @Override
    protected Logger getLogger() {
      return LOGGER;
    }

    // -------------------------------------------------------------------------
    /**
     * Get time-series from Bloomberg.
     *
     * @param externalIdBundle
     *          the identifier bundle, not null
     * @param dataProvider
     *          the data provider, not null
     * @param dataField
     *          the dataField, not null
     * @param dateRange
     *          the date range to obtain, not null
     * @param maxPoints
     *          the maximum number of points required, negative back from the end date, null for all
     * @return a map of each supplied identifier bundle to the corresponding time-series, not null
     */
    public HistoricalTimeSeriesProviderGetResult doBulkGet(final Set<ExternalIdBundle> externalIdBundle, final String dataProvider,
        final String dataField, final LocalDateRange dateRange, final Integer maxPoints) {
      ensureStarted();
      getLogger().debug("Getting historical data for {}", externalIdBundle);

      if (externalIdBundle.isEmpty()) {
        getLogger().info("Historical data request for empty identifier set");
        return new HistoricalTimeSeriesProviderGetResult();
      }

      final Map<String, ExternalIdBundle> reverseBundleMap = Maps.newHashMap();
      final Request request = createRequest(externalIdBundle, dataProvider, dataField, dateRange, maxPoints, reverseBundleMap);
      _statistics.recordStatistics(reverseBundleMap.keySet(), Collections.singleton(dataField));

      // empty result is returned on any failure below
      HistoricalTimeSeriesProviderGetResult result = new HistoricalTimeSeriesProviderGetResult();
      try {
        final List<Element> responseElements = submitRequest(request).get();
        final Map<ExternalIdBundle, LocalDateDoubleTimeSeries> tsMap =
            extractTimeSeries(externalIdBundle, dataField, reverseBundleMap, responseElements);
        final Map<ExternalIdBundle, Set<String>> permissions = extractPermissions(reverseBundleMap, responseElements);
        if (tsMap != null) {
          result = permissions == null
              ? new HistoricalTimeSeriesProviderGetResult(tsMap)
              : new HistoricalTimeSeriesProviderGetResult(tsMap, permissions);
        }
      } catch (final InterruptedException ex) {
        // FIX: restore the interrupt status so callers can observe the interruption;
        // the previous multi-catch silently swallowed the cancellation signal
        Thread.currentThread().interrupt();
        getLogger().warn(String.format("Error getting bulk historical data for %s %s %s %s %s",
            externalIdBundle, dataProvider, dataField, dateRange, maxPoints), ex);
      } catch (final ExecutionException ex) {
        getLogger().warn(String.format("Error getting bulk historical data for %s %s %s %s %s",
            externalIdBundle, dataProvider, dataField, dateRange, maxPoints), ex);
      }
      return result;
    }

    // -------------------------------------------------------------------------
    /**
     * Creates the Bloomberg request.
     *
     * @param externalIdBundle
     *          the external bundles, not null
     * @param dataProvider
     *          the data provider, not null
     * @param dataField
     *          the data field, not null
     * @param dateRange
     *          the date range, not null
     * @param maxPoints
     *          the maximum points
     * @param reverseBundleMap
     *          the reverse bundle map, not null; populated here with Bloomberg-key to bundle mappings
     *
     * @return the bloomberg request
     */
    protected Request createRequest(final Set<ExternalIdBundle> externalIdBundle, final String dataProvider, final String dataField,
        final LocalDateRange dateRange, final Integer maxPoints, final Map<String, ExternalIdBundle> reverseBundleMap) {

      // create request
      final Request request = getService().createRequest(BLOOMBERG_HISTORICAL_DATA_REQUEST);
      final Element securitiesElem = request.getElement(BLOOMBERG_SECURITIES_REQUEST);

      // identifiers
      for (final ExternalIdBundle identifiers : externalIdBundle) {
        final ExternalId preferredId = getPreferredIdentifier(identifiers, dataProvider);
        getLogger().debug("Resolved preferred identifier {} from identifier bundle {}", preferredId, identifiers);
        final String bbgKey = BloombergDomainIdentifierResolver.toBloombergKeyWithDataProvider(preferredId, dataProvider);
        securitiesElem.appendValue(bbgKey);
        reverseBundleMap.put(bbgKey, identifiers);
      }

      // field required
      final Element fieldElem = request.getElement(BLOOMBERG_FIELDS_REQUEST);
      fieldElem.appendValue(dataField);

      // general settings
      request.set("periodicityAdjustment", "ACTUAL");
      request.set("periodicitySelection", "DAILY");
      request.set("startDate", toBloombergDate(dateRange.getStartDateInclusive()));
      if (!dateRange.isEndDateMaximum()) {
        request.set("endDate", toBloombergDate(dateRange.getEndDateInclusive()));
      }
      request.set("adjustmentSplit", true);
      // negative maxPoints means "count back from the end date"; Bloomberg wants a positive count
      if (maxPoints != null && maxPoints <= 0) {
        request.set("maxDataPoints", -maxPoints);
      }
      request.set("returnEids", true);
      return request;
    }

    /**
     * Resolves the identifier from the bundle that should be sent to Bloomberg.
     * Prefers BUID for default/unknown providers, then a single ticker, then the
     * shortest of multiple tickers, finally the domain resolver's preference.
     *
     * @param identifiers the identifier bundle, not null
     * @param dataProvider the data provider, may be null
     * @return the preferred identifier, not null
     * @throws OpenGammaRuntimeException if no identifier can be established
     */
    private ExternalId getPreferredIdentifier(final ExternalIdBundle identifiers, final String dataProvider) {
      ExternalId preferredId = null;
      if (dataProvider == null || dataProvider.equalsIgnoreCase(DATA_PROVIDER_UNKNOWN) || dataProvider.equalsIgnoreCase(DEFAULT_DATA_PROVIDER)) {
        preferredId = identifiers.getExternalId(BLOOMBERG_BUID);
      }
      if (preferredId == null) {
        final Set<ExternalId> tickers = identifiers.getExternalIds(BLOOMBERG_TICKER);
        if (tickers == null || tickers.isEmpty()) {
          preferredId = BloombergDomainIdentifierResolver.resolvePreferredIdentifier(identifiers);
        } else if (tickers.size() == 1) {
          preferredId = tickers.iterator().next();
        } else {
          // multiple matches, find the shortest code and use that.
          int minLength = Integer.MAX_VALUE;
          for (final ExternalId id : tickers) {
            if (id.getValue().length() <= minLength) {
              preferredId = id;
              minLength = id.getValue().length();
            }
          }
        }
      }
      if (preferredId == null) {
        throw new OpenGammaRuntimeException("Couldn't establish preferred identifier, this should not happen and indicates a code logic error");
      }
      return preferredId;
    }

    /**
     * Convert response to time-series.
     * Returns null when there were no response elements at all.
     */
    private Map<ExternalIdBundle, LocalDateDoubleTimeSeries> extractTimeSeries(final Set<ExternalIdBundle> externalIdBundle, final String dataField,
        final Map<String, ExternalIdBundle> reverseBundleMap, final List<Element> resultElements) {

      // handle empty case
      if (resultElements == null || resultElements.isEmpty()) {
        getLogger().warn("Unable to get historical data for {}", externalIdBundle);
        return null;
      }

      // parse data
      final Map<ExternalIdBundle, LocalDateDoubleTimeSeriesBuilder> result = Maps.newHashMap();
      for (final Element resultElem : resultElements) {
        if (resultElem.hasElement(RESPONSE_ERROR)) {
          getLogger().warn("Response error");
          extractError(resultElem.getElement(RESPONSE_ERROR));
          continue;
        }
        final Element securityElem = resultElem.getElement(SECURITY_DATA);
        if (securityElem.hasElement(SECURITY_ERROR)) {
          // security-level error is logged but the element is still inspected for field data
          extractError(securityElem.getElement(SECURITY_ERROR));
        }
        if (securityElem.hasElement(FIELD_EXCEPTIONS)) {
          final Element fieldExceptions = securityElem.getElement(FIELD_EXCEPTIONS);
          for (int i = 0; i < fieldExceptions.numValues(); i++) {
            final Element fieldException = fieldExceptions.getValueAsElement(i);
            final String fieldId = fieldException.getElementAsString(FIELD_ID);
            getLogger().warn("Field error on {}", fieldId);
            final Element errorInfo = fieldException.getElement(ERROR_INFO);
            extractError(errorInfo);
          }
        }
        if (securityElem.hasElement(FIELD_DATA)) {
          extractFieldData(securityElem, dataField, reverseBundleMap, result);
        }
      }
      if (externalIdBundle.size() != result.size()) {
        getLogger().warn("Failed to get time series results for ({}/{}) {}",
            externalIdBundle.size() - result.size(), externalIdBundle.size(),
            Sets.difference(externalIdBundle, result.keySet()));
      }
      return convertResult(result);
    }

    /**
     * Builds each accumulated {@link LocalDateDoubleTimeSeriesBuilder} in place,
     * reusing the same map instance for the built series values.
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    private static Map<ExternalIdBundle, LocalDateDoubleTimeSeries> convertResult(final Map result) {
      // ignore generics, which is safe as of JDK8
      for (final Object o : result.entrySet()) {
        final Entry entry = (Entry) o;
        final LocalDateDoubleTimeSeriesBuilder bld = (LocalDateDoubleTimeSeriesBuilder) entry.getValue();
        entry.setValue(bld.build());
      }
      return result;
    }

    /**
     * Extracts time-series points for one security element into the result map.
     */
    private void extractFieldData(final Element securityElem, final String field, final Map<String, ExternalIdBundle> reverseBundleMap,
        final Map<ExternalIdBundle, LocalDateDoubleTimeSeriesBuilder> result) {

      final String secDes = securityElem.getElementAsString(BloombergConstants.SECURITY);
      final ExternalIdBundle identifiers = reverseBundleMap.get(secDes);
      if (identifiers == null) {
        final String message = "Found time series data for unrecognized security" + secDes + " " + reverseBundleMap;
        throw new OpenGammaRuntimeException(message);
      }
      LocalDateDoubleTimeSeriesBuilder bld = result.get(identifiers);
      if (bld == null) {
        bld = ImmutableLocalDateDoubleTimeSeries.builder();
        result.put(identifiers, bld);
      }
      final Element fieldDataArray = securityElem.getElement(FIELD_DATA);
      final int numValues = fieldDataArray.numValues();
      for (int i = 0; i < numValues; i++) {
        final Element fieldData = fieldDataArray.getValueAsElement(i);
        final Datetime date = fieldData.getElementAsDate("date");
        final LocalDate ldate = LocalDate.of(date.year(), date.month(), date.dayOfMonth());
        final double lastPrice = fieldData.getElementAsFloat64(field);
        bld.put(ldate, lastPrice);
      }
    }

    /**
     * Process an error.
     *
     * @param element
     *          the error element, not null
     */
    private static void extractError(final Element element) {
      final int code = element.getElementAsInt32("code");
      final String category = element.getElementAsString("category");
      final String subcategory = element.getElementAsString("subcategory");
      final String message = element.getElementAsString("message");

      final String errorMessage = MessageFormat.format(ERROR_MESSAGE_FORMAT, code, category, subcategory, message);
      LOGGER.warn(errorMessage);
    }

    /**
     * Extracts EID-based permission strings per identifier bundle from the response.
     *
     * @param reverseBundleMap map from Bloomberg key to identifier bundle, not null
     * @param responseElements the response elements, not null
     * @return map of bundle to permission strings, not null (may be empty)
     */
    protected Map<ExternalIdBundle, Set<String>> extractPermissions(final Map<String, ExternalIdBundle> reverseBundleMap,
        final List<Element> responseElements) {
      final Map<ExternalIdBundle, Set<String>> result = new HashMap<>();
      for (final Element resultElem : responseElements) {
        if (resultElem.hasElement(SECURITY_DATA)) {
          final Element securityElem = resultElem.getElement(SECURITY_DATA);
          final String secDes = securityElem.getElementAsString(BloombergConstants.SECURITY);
          final ExternalIdBundle identifiers = reverseBundleMap.get(secDes);
          if (identifiers != null) {
            if (securityElem.hasElement(EID_DATA)) {
              final Element eidData = securityElem.getElement(EID_DATA);
              final Set<String> eids = new HashSet<>();
              final int numValues = eidData.numValues();
              for (int i = 0; i < numValues; i++) {
                try {
                  final int eid = eidData.getValueAsInt32(i);
                  eids.add(BloombergPermissions.createEidPermissionString(eid));
                } catch (final Exception ex) {
                  // best-effort: a single bad EID should not abort permission extraction
                  getLogger().warn("Error extracting EID from {} for security:{}", eidData, identifiers);
                }
              }
              getLogger().debug("EIDS {} return for security {}", eids, identifiers);
              result.put(identifiers, eids);
            }
          }
        }
      }
      return result;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators;

import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhyPlanVisitor;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.builtin.PoissonSampleLoader;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.impl.plan.VisitorException;

/**
 * Physical operator that emits a memory-size-based sample of its input tuples.
 * <p>
 * The first qualifying tuple becomes the initial sample and determines how many
 * subsequent tuples to skip between samples (derived from available heap, the
 * sample rate, and the running average tuple memory size). After all input is
 * consumed, a final special tuple is emitted that carries the marker string
 * {@link PoissonSampleLoader#NUMROWS_TUPLE_MARKER} and the total row count seen.
 */
public class POPoissonSample extends PhysicalOperator {

    private static final long serialVersionUID = 1L;

    // shared factory for building the final marker tuple
    private static final TupleFactory tf = TupleFactory.getInstance();

    // shared end-of-processing result returned once the marker tuple has been emitted
    private static Result eop = new Result(POStatus.STATUS_EOP, null);

    // num of rows sampled so far
    private int numRowsSampled = 0;

    // average size of tuple in memory, for tuples sampled
    private long avgTupleMemSz = 0;

    // current row number
    private long rowNum = 0;

    // number of tuples to skip after each sample; -1 means "not yet initialized"
    private long skipInterval = -1;

    // bytes in input to skip after every sample.
    // divide this by avgTupleMemSize to get skipInterval
    private long memToSkipPerSample = 0;

    // has the special row with row number information been returned
    private boolean numRowSplTupleReturned = false;

    // 17 is not a magic number. It can be obtained by using a poisson
    // cumulative distribution function with the mean set to 10 (empirically,
    // minimum number of samples) and the confidence set to 95%
    public static final int DEFAULT_SAMPLE_RATE = 17;

    // samples wanted per unit of skippable memory (see DEFAULT_SAMPLE_RATE)
    private int sampleRate = 0;

    // fraction of max heap considered available for the skip-size calculation
    private float heapPerc = 0f;

    // new Sample result; the most recently selected sample, held back one step
    // so the final sample can be replaced by the marker tuple at end of input
    private Result newSample = null;

    /**
     * Creates the operator.
     *
     * @param k operator key
     * @param rp requested parallelism
     * @param sr sample rate (see {@link #DEFAULT_SAMPLE_RATE})
     * @param hp fraction of heap used when computing the skip interval
     */
    public POPoissonSample(OperatorKey k, int rp, int sr, float hp) {
        super(k, rp, null);
        numRowsSampled = 0;
        avgTupleMemSz = 0;
        rowNum = 0;
        skipInterval = -1;
        memToSkipPerSample = 0;
        numRowSplTupleReturned = false;
        newSample = null;
        sampleRate = sr;
        heapPerc = hp;
    }

    @Override
    public Tuple illustratorMarkup(Object in, Object out, int eqClassIndex) {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public void visit(PhyPlanVisitor v) throws VisitorException {
        v.visitPoissonSample(this);
    }

    /**
     * Returns the next sampled tuple, skipping {@code skipInterval} input rows
     * between samples. Each returned sample lags one step behind the input so
     * that, at end of all input, the held-back sample is converted into the
     * special row-count marker tuple via {@link #createNumRowTuple(Tuple)}.
     *
     * @return a sampled tuple result, the marker tuple at end of input, or EOP
     * @throws ExecException on processing errors from upstream operators
     */
    @Override
    public Result getNextTuple() throws ExecException {
        if (numRowSplTupleReturned) {
            // row num special row has been returned after all inputs
            // were read, nothing more to read
            return eop;
        }

        Result res = null;
        if (skipInterval == -1) {
            // select first tuple as sample and calculate
            // number of tuples to be skipped
            while (true) {
                res = processInput();
                if (res.returnStatus == POStatus.STATUS_NULL) {
                    continue;
                } else if (res.returnStatus == POStatus.STATUS_EOP) {
                    if (this.parentPlan.endOfAllInput) {
                        // no input at all: nothing to sample, no marker row emitted
                        return eop;
                    } else {
                        continue;
                    }
                } else if (res.returnStatus == POStatus.STATUS_ERR) {
                    return res;
                }
                if (res.result == null) {
                    continue;
                }
                // memory budget per sample = (usable heap fraction) / sampleRate
                long availRedMem = (long) (Runtime.getRuntime().maxMemory() * heapPerc);
                memToSkipPerSample = availRedMem/sampleRate;
                // NOTE(review): updateSkipInterval divides by avgTupleMemSz — presumably
                // Tuple.getMemorySize() is always > 0; confirm to rule out divide-by-zero
                updateSkipInterval((Tuple)res.result);

                rowNum++;
                newSample = res;
                break;
            }
        }

        // skip tuples; a STATUS_NULL row still advances numSkipped (the for-update runs on continue)
        for (long numSkipped = 0; numSkipped < skipInterval; numSkipped++) {
            res = processInput();
            if (res.returnStatus == POStatus.STATUS_NULL) {
                continue;
            } else if (res.returnStatus == POStatus.STATUS_EOP) {
                if (this.parentPlan.endOfAllInput) {
                    // input exhausted mid-skip: emit the marker row built from the held-back sample
                    return createNumRowTuple((Tuple)newSample.result);
                } else {
                    return res;
                }
            } else if (res.returnStatus == POStatus.STATUS_ERR){
                return res;
            }
            rowNum++;
        }

        // skipped enough, get new sample
        while (true) {
            res = processInput();
            if (res.returnStatus == POStatus.STATUS_NULL) {
                continue;
            } else if (res.returnStatus == POStatus.STATUS_EOP) {
                if (this.parentPlan.endOfAllInput) {
                    return createNumRowTuple((Tuple)newSample.result);
                } else {
                    return res;
                }
            } else if (res.returnStatus == POStatus.STATUS_ERR){
                return res;
            }
            if (res.result == null) {
                continue;
            }
            updateSkipInterval((Tuple)res.result);
            // return the previously held sample and hold back the new one
            Result currentSample = newSample;
            rowNum++;
            newSample = res;
            return currentSample;
        }
    }

    @Override
    public boolean supportsMultipleInputs() {
        return false;
    }

    @Override
    public boolean supportsMultipleOutputs() {
        return false;
    }

    @Override
    public String name() {
        return getAliasString() + "PoissonSample - " + mKey.toString();
    }

    /**
     * Update the average tuple size base on newly sampled tuple t
     * and recalculate skipInterval
     * @param t - tuple
     */
    private void updateSkipInterval(Tuple t) {
        // running mean of tuple memory size over all samples so far
        avgTupleMemSz =
            ((avgTupleMemSz*numRowsSampled) + t.getMemorySize())/(numRowsSampled + 1);
        skipInterval = memToSkipPerSample/avgTupleMemSz;

        // skipping fewer number of rows the first few times, to reduce the
        // probability of first tuples size (if much smaller than rest)
        // resulting in very few samples being sampled. Sampling a little extra
        // is OK
        if(numRowsSampled < 5) {
            skipInterval = skipInterval/(10-numRowsSampled);
        }
        ++numRowsSampled;
    }

    /**
     * @param sample - sample tuple
     * @return - Tuple appended with special marker string column, num-rows column
     * @throws ExecException
     */
    private Result createNumRowTuple(Tuple sample) throws ExecException {
        int sz = (sample == null) ? 0 : sample.size();
        // copy the sample's columns, then append the marker and the total row count
        Tuple t = tf.newTuple(sz + 2);

        if (sample != null) {
            for (int i=0; i<sample.size(); i++){
                t.set(i, sample.get(i));
            }
        }

        t.set(sz, PoissonSampleLoader.NUMROWS_TUPLE_MARKER);
        t.set(sz + 1, rowNum);
        // after this, getNextTuple() returns EOP forever
        numRowSplTupleReturned = true;
        return new Result(POStatus.STATUS_OK, t);
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * UserTeamAssociationPage.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */

package com.google.api.ads.admanager.axis.v202111;


/**
 * Captures a page of {@link UserTeamAssociation} objects.
 * <p>
 * NOTE: auto-generated Axis bean — do not hand-edit logic; regenerate from the
 * WSDL instead. Serialization metadata is declared in the static type
 * descriptor below.
 */
public class UserTeamAssociationPage  implements java.io.Serializable , Iterable<com.google.api.ads.admanager.axis.v202111.UserTeamAssociation>{
    /* The size of the total result set to which this page belongs. */
    private java.lang.Integer totalResultSetSize;

    /* The absolute index in the total result set on which this page
     * begins. */
    private java.lang.Integer startIndex;

    /* The collection of user team associations contained within this
     * page. */
    private com.google.api.ads.admanager.axis.v202111.UserTeamAssociation[] results;

    public UserTeamAssociationPage() {
    }

    public UserTeamAssociationPage(
           java.lang.Integer totalResultSetSize,
           java.lang.Integer startIndex,
           com.google.api.ads.admanager.axis.v202111.UserTeamAssociation[] results) {
           this.totalResultSetSize = totalResultSetSize;
           this.startIndex = startIndex;
           this.results = results;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            // Only include length of results to avoid overly verbose output
            .add("results.length", getResults() == null ? 0 : getResults().length)
            .add("startIndex", getStartIndex())
            .add("totalResultSetSize", getTotalResultSetSize())
            .toString();
    }

    /**
     * Gets the totalResultSetSize value for this UserTeamAssociationPage.
     *
     * @return totalResultSetSize   * The size of the total result set to which this page belongs.
     */
    public java.lang.Integer getTotalResultSetSize() {
        return totalResultSetSize;
    }


    /**
     * Sets the totalResultSetSize value for this UserTeamAssociationPage.
     *
     * @param totalResultSetSize   * The size of the total result set to which this page belongs.
     */
    public void setTotalResultSetSize(java.lang.Integer totalResultSetSize) {
        this.totalResultSetSize = totalResultSetSize;
    }


    /**
     * Gets the startIndex value for this UserTeamAssociationPage.
     *
     * @return startIndex   * The absolute index in the total result set on which this page
     * begins.
     */
    public java.lang.Integer getStartIndex() {
        return startIndex;
    }


    /**
     * Sets the startIndex value for this UserTeamAssociationPage.
     *
     * @param startIndex   * The absolute index in the total result set on which this page
     * begins.
     */
    public void setStartIndex(java.lang.Integer startIndex) {
        this.startIndex = startIndex;
    }


    /**
     * Gets the results value for this UserTeamAssociationPage.
     *
     * @return results   * The collection of user team associations contained within this
     * page.
     */
    public com.google.api.ads.admanager.axis.v202111.UserTeamAssociation[] getResults() {
        return results;
    }


    /**
     * Sets the results value for this UserTeamAssociationPage.
     *
     * @param results   * The collection of user team associations contained within this
     * page.
     */
    public void setResults(com.google.api.ads.admanager.axis.v202111.UserTeamAssociation[] results) {
        this.results = results;
    }

    /** Gets the result at index {@code i}; throws NullPointerException if results is null. */
    public com.google.api.ads.admanager.axis.v202111.UserTeamAssociation getResults(int i) {
        return this.results[i];
    }

    /** Sets the result at index {@code i}; throws NullPointerException if results is null. */
    public void setResults(int i, com.google.api.ads.admanager.axis.v202111.UserTeamAssociation _value) {
        this.results[i] = _value;
    }

    /**
     * Returns an iterator over this page's {@code results} that:
     * <ul>
     * <li>Will not be {@code null}.</li>
     * <li>Will not support {@link java.util.Iterator#remove()}.</li>
     * </ul>
     *
     * @return a non-null iterator.
     */
    @Override
    public java.util.Iterator<com.google.api.ads.admanager.axis.v202111.UserTeamAssociation> iterator() {
      if (results == null) {
        return java.util.Collections.<com.google.api.ads.admanager.axis.v202111.UserTeamAssociation>emptyIterator();
      }
      // Arrays.asList view is fixed-size, so its iterator does not support remove()
      return java.util.Arrays.<com.google.api.ads.admanager.axis.v202111.UserTeamAssociation>asList(results).iterator();
    }

    // Axis-generated cycle guard: holds the object currently being compared so a
    // self-referential object graph terminates instead of recursing forever.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof UserTeamAssociationPage)) return false;
        UserTeamAssociationPage other = (UserTeamAssociationPage) obj;
        // NOTE: generated code — the instanceof test above already rejects null,
        // so this null check is redundant but harmless
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.totalResultSetSize==null && other.getTotalResultSetSize()==null) ||
             (this.totalResultSetSize!=null &&
              this.totalResultSetSize.equals(other.getTotalResultSetSize()))) &&
            ((this.startIndex==null && other.getStartIndex()==null) ||
             (this.startIndex!=null &&
              this.startIndex.equals(other.getStartIndex()))) &&
            ((this.results==null && other.getResults()==null) ||
             (this.results!=null &&
              java.util.Arrays.equals(this.results, other.getResults())));
        __equalsCalc = null;
        return _equals;
    }

    // Axis-generated cycle guard for hashCode: returns 0 on re-entry to
    // terminate self-referential object graphs.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getTotalResultSetSize() != null) {
            _hashCode += getTotalResultSetSize().hashCode();
        }
        if (getStartIndex() != null) {
            _hashCode += getStartIndex().hashCode();
        }
        if (getResults() != null) {
            for (int i=0;
                 i<java.lang.reflect.Array.getLength(getResults());
                 i++) {
                java.lang.Object obj = java.lang.reflect.Array.get(getResults(), i);
                // nested arrays are excluded from the hash, matching equals' use of
                // the one-dimensional Arrays.equals
                if (obj != null  &&
                    !obj.getClass().isArray()) {
                    _hashCode += obj.hashCode();
                }
            }
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata describing how Axis (de)serializes this bean to/from XML
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(UserTeamAssociationPage.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "UserTeamAssociationPage"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("totalResultSetSize");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "totalResultSetSize"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("startIndex");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "startIndex"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "int"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("results");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "results"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "UserTeamAssociation"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        elemField.setMaxOccursUnbounded(true);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
package com.badlogic.gdx.backends.iosrobovm; import java.nio.Buffer; import java.nio.FloatBuffer; import java.nio.IntBuffer; import com.badlogic.gdx.graphics.GL10; import com.badlogic.gdx.graphics.GL11; public class IOSGLES10 implements GL10, GL11 { // @off /*JNI #ifdef __APPLE__ #include <TargetConditionals.h> #if TARGET_OS_IPHONE #include <OpenGLES/ES1/gl.h> #include <OpenGLES/ES1/glext.h> #include <stdio.h> */ @Override public native void glActiveTexture (int texture); /* glActiveTexture(texture); */ @Override public native void glBindTexture (int target, int texture); /* glBindTexture(target, texture); */ @Override public native void glBlendFunc (int sfactor, int dfactor); /* glBlendFunc(sfactor, dfactor); */ @Override public native void glClear (int mask); /* glClear(mask); */ @Override public native void glClearColor (float red, float green, float blue, float alpha); /* glClearColor(red, green, blue, alpha); */ @Override public native void glClearDepthf (float depth); /* glClearDepthf(depth); */ @Override public native void glClearStencil (int s); /* glClearStencil(s); */ @Override public native void glColorMask (boolean red, boolean green, boolean blue, boolean alpha); /* glColorMask(red, green, blue, alpha); */ @Override public native void glCompressedTexImage2D (int target, int level, int internalformat, int width, int height, int border, int imageSize, Buffer data); /* // FIXME */ @Override public native void glCompressedTexSubImage2D (int target, int level, int xoffset, int yoffset, int width, int height, int format, int imageSize, Buffer data); /* // FIXME */ @Override public native void glCopyTexImage2D (int target, int level, int internalformat, int x, int y, int width, int height, int border); /* // FIXME */ @Override public native void glCopyTexSubImage2D (int target, int level, int xoffset, int yoffset, int x, int y, int width, int height); /* glCopyTexSubImage2D(target, level, xoffset, yoffset, x, y, width, height); */ @Override public native void 
glCullFace (int mode); /* glCullFace(mode); */ @Override public native void glDeleteTextures (int n, IntBuffer textures); /* // FIXME */ @Override public native void glDepthFunc (int func); /* glDepthFunc(func); */ @Override public native void glDepthMask (boolean flag); /* glDepthMask(flag); */ @Override public native void glDepthRangef (float zNear, float zFar); /* glDepthRangef(zNear, zFar); */ @Override public native void glDisable (int cap); /* glDisable(cap); */ @Override public native void glDrawArrays (int mode, int first, int count); /* glDrawArrays(mode, first, count); */ @Override public native void glDrawElements (int mode, int count, int type, Buffer indices); /* // FIXME */ @Override public native void glEnable (int cap); /* glEnable(cap); */ @Override public native void glFinish (); /* glFinish(); */ @Override public native void glFlush (); /* glFlush(); */ @Override public native void glFrontFace (int mode); /* glFrontFace(mode); */ @Override public native void glGenTextures (int n, IntBuffer textures); /* // FIXME */ @Override public native int glGetError (); /* return glGetError(); */ @Override public native void glGetIntegerv (int pname, IntBuffer params); /* // FIXME */ @Override public native String glGetString (int name); /* // FIXME */ @Override public native void glHint (int target, int mode); /* glHint(target, mode); */ @Override public native void glLineWidth (float width); /* glLineWidth(width); */ @Override public native void glPixelStorei (int pname, int param); /* glPixelStorei(pname, param); */ @Override public native void glPolygonOffset (float factor, float units); /* glPolygonOffset(factor, units); */ @Override public native void glReadPixels (int x, int y, int width, int height, int format, int type, Buffer pixels); /* // FIXME */ @Override public native void glScissor (int x, int y, int width, int height); /* glScissor(x, y, width, height); */ @Override public native void glStencilFunc (int func, int ref, int mask); /* 
glStencilFunc(func, ref, mask); */

	// NOTE(review): the /* ... */ comment after each native method below is NOT
	// ordinary documentation — gdx-jnigen extracts it as the C body of the
	// generated JNI function. A body of "// FIXME" means the native side is
	// unimplemented and the call is effectively a no-op. Do not edit or delete
	// these comments casually; doing so changes the generated native code.

	@Override public native void glStencilMask (int mask); /* glStencilMask(mask); */

	@Override public native void glStencilOp (int fail, int zfail, int zpass); /* glStencilOp(fail, zfail, zpass); */

	@Override public native void glTexImage2D (int target, int level, int internalformat, int width, int height, int border, int format, int type, Buffer pixels); /* // FIXME */

	@Override public native void glTexParameterf (int target, int pname, float param); /* glTexParameterf(target, pname, param); */

	@Override public native void glTexSubImage2D (int target, int level, int xoffset, int yoffset, int width, int height, int format, int type, Buffer pixels); /* // FIXME */

	@Override public native void glViewport (int x, int y, int width, int height); /* glViewport(x, y, width, height); */

	// Fixed-function GL10 additions below; most buffer/array variants still lack
	// native bodies (see FIXME markers).
	@Override public native void glClipPlanef (int plane, float[] equation, int offset); /* // FIXME */

	@Override public native void glClipPlanef (int plane, FloatBuffer equation); /* // FIXME */

	@Override public native void glGetClipPlanef (int pname, float[] eqn, int offset); /* // FIXME */

	@Override public native void glGetClipPlanef (int pname, FloatBuffer eqn); /* // FIXME */

	@Override public native void glGetFloatv (int pname, float[] params, int offset); /* // FIXME */

	@Override public native void glGetFloatv (int pname, FloatBuffer params); /* // FIXME */

	@Override public native void glGetLightfv (int light, int pname, float[] params, int offset); /* // FIXME */

	@Override public native void glGetLightfv (int light, int pname, FloatBuffer params); /* // FIXME */

	@Override public native void glGetMaterialfv (int face, int pname, float[] params, int offset); /* // FIXME */

	@Override public native void glGetMaterialfv (int face, int pname, FloatBuffer params); /* // FIXME */

	@Override public native void glGetTexParameterfv (int target, int pname, float[] params, int offset) ; /* // FIXME */

	@Override public native void glGetTexParameterfv (int target, int pname, FloatBuffer params); /* // FIXME */

	@Override public native void glPointParameterf (int pname, float param) ; /* // FIXME */

	@Override public native void glPointParameterfv (int pname, float[] params, int offset) ; /* // FIXME */

	@Override public native void glPointParameterfv (int pname, FloatBuffer params) ; /* // FIXME */

	@Override public native void glTexParameterfv (int target, int pname, float[] params, int offset); /* // FIXME */

	@Override public native void glTexParameterfv (int target, int pname, FloatBuffer params) ; /* // FIXME */

	@Override public native void glBindBuffer (int target, int buffer) ; /* // FIXME */

	@Override public native void glBufferData (int target, int size, Buffer data, int usage) ; /* // FIXME */

	@Override public native void glBufferSubData (int target, int offset, int size, Buffer data) ; /* // FIXME */

	@Override public native void glColor4ub (byte red, byte green, byte blue, byte alpha) ; /* // FIXME */

	@Override public native void glDeleteBuffers (int n, int[] buffers, int offset) ; /* // FIXME */

	@Override public native void glDeleteBuffers (int n, IntBuffer buffers) ; /* // FIXME */

	@Override public native void glGetBooleanv (int pname, boolean[] params, int offset) ; /* // FIXME */

	@Override public native void glGetBooleanv (int pname, IntBuffer params) ; /* // FIXME */

	@Override public native void glGetBufferParameteriv (int target, int pname, int[] params, int offset) ; /* // FIXME */

	@Override public native void glGetBufferParameteriv (int target, int pname, IntBuffer params) ; /* // FIXME */

	@Override public native void glGenBuffers (int n, int[] buffers, int offset) ; /* // FIXME */

	@Override public native void glGenBuffers (int n, IntBuffer buffers) ; /* // FIXME */

	// Plain Java stub (not native): unimplemented, silently does nothing.
	@Override public void glGetPointerv (int pname, Buffer[] params) {
		// FIXME
	}

	@Override public native void glGetTexEnviv (int envi, int pname, int[] params, int offset) ; /* // FIXME */

	@Override public native void glGetTexEnviv (int envi, int pname, IntBuffer params) ; /* // FIXME */

	@Override public native void glGetTexParameteriv (int target, int pname, int[] params, int offset) ; /* // FIXME */

	@Override public native void glGetTexParameteriv (int target, int pname, IntBuffer params) ; /* // FIXME */

	@Override public native boolean glIsBuffer (int buffer) ; /* return glIsBuffer(buffer); */

	@Override public native boolean glIsEnabled (int cap) ; /* return glIsEnabled(cap); */

	// NOTE(review): the JNI body below is missing 'return' — the generated C
	// function would return an undefined value. It should presumably read
	// 'return glIsTexture(texture);' like glIsBuffer/glIsEnabled above — confirm
	// against the jnigen output before changing.
	@Override public native boolean glIsTexture (int texture) ; /* glIsTexture(texture); */

	@Override public native void glTexEnvi (int target, int pname, int param) ; /* glTexEnvi(target, pname, param); */

	@Override public native void glTexEnviv (int target, int pname, int[] params, int offset) ; /* // FIXME */

	@Override public native void glTexEnviv (int target, int pname, IntBuffer params) ; /* // FIXME */

	@Override public native void glTexParameteri (int target, int pname, int param) ; /* glTexParameteri(target, pname, param); */

	@Override public native void glTexParameteriv (int target, int pname, int[] params, int offset) ; /* // FIXME */

	@Override public native void glTexParameteriv (int target, int pname, IntBuffer params) ; /* // FIXME */

	@Override public native void glPointSizePointerOES (int type, int stride, Buffer pointer) ; /* // FIXME */

	// Pointer-as-int variants: the int is reinterpreted as a native pointer/offset
	// in the JNI body via the (void*) cast.
	@Override public native void glVertexPointer (int size, int type, int stride, int pointer) ; /* glVertexPointer(size, type, stride, (void*)pointer); */

	@Override public native void glColorPointer (int size, int type, int stride, int pointer) ; /* glColorPointer(size, type, stride, (void*)pointer); */

	@Override public native void glNormalPointer (int type, int stride, int pointer); /* glNormalPointer(type, stride, (void*)pointer); */

	@Override public native void glTexCoordPointer (int size, int type, int stride, int pointer); /* glTexCoordPointer(size, type, stride, (void*)pointer); */

	@Override public native void glDrawElements (int mode, int count, int type, int indices) ; /* glDrawElements(mode, count, type, (void*)indices); */

	@Override public native void glAlphaFunc (int func, float ref) ; /* glAlphaFunc(func, ref); */

	@Override public native void glClientActiveTexture (int texture) ; /* glClientActiveTexture(texture); */

	@Override public native void glColor4f (float red, float green, float blue, float alpha) ; /* glColor4f(red, green, blue, alpha); */

	@Override public native void glColorPointer (int size, int type, int stride, Buffer pointer) ; /* glColorPointer(size, type, stride, pointer); */

	@Override public native void glDeleteTextures (int n, int[] textures, int offset) ; /* // FIXME */

	@Override public native void glDisableClientState (int array) ; /* glDisableClientState(array); */

	@Override public native void glEnableClientState (int array) ; /* glEnableClientState(array); */

	@Override public native void glFogf (int pname, float param) ; /* glFogf(pname, param); */

	@Override public native void glFogfv (int pname, float[] params, int offset) ; /* // FIXME */

	@Override public native void glFogfv (int pname, FloatBuffer params); /* // FIXME */

	@Override public native void glFrustumf (float left, float right, float bottom, float top, float zNear, float zFar) ; /* glFrustumf(left, right, bottom, top, zNear, zFar); */

	@Override public native void glGenTextures (int n, int[] textures, int offset) ; /* // FIXME */

	@Override public native void glGetIntegerv (int pname, int[] params, int offset) ; /* // FIXME */

	@Override public native void glLightModelf (int pname, float param) ; /* glLightModelf(pname, param); */

	@Override public native void glLightModelfv (int pname, float[] params, int offset) ; /* // FIXME */

	@Override public native void glLightModelfv (int pname, FloatBuffer params) ; /* // FIXME */

	@Override public native void glLightf (int light, int pname, float param) ; /* glLightf(light, pname, param); */

	@Override public native void glLightfv (int light, int pname, float[] params, int offset) ; /* // FIXME */

	@Override public native void glLightfv (int light, int pname, FloatBuffer params) ; /* // FIXME */

	@Override public native void glLoadIdentity () ; /* glLoadIdentity(); */

	@Override public native void glLoadMatrixf (float[] m, int offset) ; /* // FIXME */

	@Override public native void glLoadMatrixf (FloatBuffer m) ; /* // FIXME */

	@Override public native void glLogicOp (int opcode) ; /* glLogicOp(opcode); */

	@Override public native void glMaterialf (int face, int pname, float param) ; /* glMaterialf(face, pname, param); */

	@Override public native void glMaterialfv (int face, int pname, float[] params, int offset) ; /* // FIXME */

	@Override public native void glMaterialfv (int face, int pname, FloatBuffer params) ; /* // FIXME */

	@Override public native void glMatrixMode (int mode) ; /* glMatrixMode(mode); */

	@Override public native void glMultMatrixf (float[] m, int offset) ; /* // FIXME */

	@Override public native void glMultMatrixf (FloatBuffer m) ; /* // FIXME */

	@Override public native void glMultiTexCoord4f (int target, float s, float t, float r, float q) ; /* glMultiTexCoord4f(target, s, t, r, q); */

	@Override public native void glNormal3f (float nx, float ny, float nz) ; /* glNormal3f(nx, ny, nz); */

	@Override public native void glNormalPointer (int type, int stride, Buffer pointer) ; /* // FIXME */

	@Override public native void glOrthof (float left, float right, float bottom, float top, float zNear, float zFar) ; /* glOrthof(left, right, bottom, top, zNear, zFar); */

	@Override public native void glPointSize (float size) ; /* glPointSize(size); */

	@Override public native void glPopMatrix () ; /* glPopMatrix(); */

	@Override public native void glPushMatrix () ; /* glPushMatrix(); */

	@Override public native void glRotatef (float angle, float x, float y, float z) ; /* glRotatef(angle, x, y, z); */

	@Override public native void glSampleCoverage (float value, boolean invert) ; /* // FIXME */

	@Override public native void glScalef (float x, float y, float z) ; /* glScalef(x, y, z); */

	@Override public native void glShadeModel (int mode) ; /* glShadeModel(mode); */

	@Override public native void glTexCoordPointer (int size, int type, int stride, Buffer pointer) ; /* // FIXME */

	@Override public native void glTexEnvf (int target, int pname, float param) ; /* glTexEnvf(target, pname, param); */

	@Override public native void glTexEnvfv (int target, int pname, float[] params, int offset) ; /* // FIXME */

	@Override public native void glTexEnvfv (int target, int pname, FloatBuffer params) ; /* // FIXME */

	@Override public native void glTranslatef (float x, float y, float z) ; /* glTranslatef(x, y, z); */

	@Override public native void glVertexPointer (int size, int type, int stride, Buffer pointer) ; /* // FIXME */

	@Override public native void glPolygonMode (int face, int mode) ; /* */

	// @off
	/*JNI
	#endif
	#endif
	 */
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Autogenerated by Thrift Compiler (0.13.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 * @generated
 */
package org.apache.storm.generated;

// NOTE(review): Thrift-generated struct. To change fields, edit the .thrift IDL
// and regenerate; do not hand-edit this class, or the next regeneration will
// clobber the changes.
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)")
public class CommonAggregateStats implements org.apache.storm.thrift.TBase<CommonAggregateStats, CommonAggregateStats._Fields>, java.io.Serializable, Cloneable, Comparable<CommonAggregateStats> {
  private static final org.apache.storm.thrift.protocol.TStruct STRUCT_DESC = new org.apache.storm.thrift.protocol.TStruct("CommonAggregateStats");

  // Wire-level field descriptors: (name, thrift type, field id).
  private static final org.apache.storm.thrift.protocol.TField NUM_EXECUTORS_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("num_executors", org.apache.storm.thrift.protocol.TType.I32, (short)1);
  private static final org.apache.storm.thrift.protocol.TField NUM_TASKS_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("num_tasks", org.apache.storm.thrift.protocol.TType.I32, (short)2);
  private static final org.apache.storm.thrift.protocol.TField EMITTED_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("emitted", org.apache.storm.thrift.protocol.TType.I64, (short)3);
  private static final org.apache.storm.thrift.protocol.TField TRANSFERRED_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("transferred", org.apache.storm.thrift.protocol.TType.I64, (short)4);
  private static final org.apache.storm.thrift.protocol.TField ACKED_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("acked", org.apache.storm.thrift.protocol.TType.I64, (short)5);
  private static final org.apache.storm.thrift.protocol.TField FAILED_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("failed", org.apache.storm.thrift.protocol.TType.I64, (short)6);
  private static final org.apache.storm.thrift.protocol.TField RESOURCES_MAP_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("resources_map", org.apache.storm.thrift.protocol.TType.MAP, (short)7);

  // Two serialization strategies: field-tagged (standard) and compact bitset (tuple).
  private static final org.apache.storm.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new CommonAggregateStatsStandardSchemeFactory();
  private static final org.apache.storm.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new CommonAggregateStatsTupleSchemeFactory();

  // All fields are optional in the IDL; primitives track presence via
  // __isset_bitfield, resources_map via null.
  private int num_executors; // optional
  private int num_tasks; // optional
  private long emitted; // optional
  private long transferred; // optional
  private long acked; // optional
  private long failed; // optional
  private @org.apache.storm.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.Double> resources_map; // optional

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.storm.thrift.TFieldIdEnum {
    NUM_EXECUTORS((short)1, "num_executors"),
    NUM_TASKS((short)2, "num_tasks"),
    EMITTED((short)3, "emitted"),
    TRANSFERRED((short)4, "transferred"),
    ACKED((short)5, "acked"),
    FAILED((short)6, "failed"),
    RESOURCES_MAP((short)7, "resources_map");

    // Lookup table from thrift field name to enum constant.
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.storm.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // NUM_EXECUTORS
          return NUM_EXECUTORS;
        case 2: // NUM_TASKS
          return NUM_TASKS;
        case 3: // EMITTED
          return EMITTED;
        case 4: // TRANSFERRED
          return TRANSFERRED;
        case 5: // ACKED
          return ACKED;
        case 6: // FAILED
          return FAILED;
        case 7: // RESOURCES_MAP
          return RESOURCES_MAP;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.storm.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final java.lang.String _fieldName;

    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  // Bit positions within __isset_bitfield recording which primitive optional
  // fields have been assigned a value.
  private static final int __NUM_EXECUTORS_ISSET_ID = 0;
  private static final int __NUM_TASKS_ISSET_ID = 1;
  private static final int __EMITTED_ISSET_ID = 2;
  private static final int __TRANSFERRED_ISSET_ID = 3;
  private static final int __ACKED_ISSET_ID = 4;
  private static final int __FAILED_ISSET_ID = 5;
  private byte __isset_bitfield = 0;
  private static final _Fields optionals[] = {_Fields.NUM_EXECUTORS,_Fields.NUM_TASKS,_Fields.EMITTED,_Fields.TRANSFERRED,_Fields.ACKED,_Fields.FAILED,_Fields.RESOURCES_MAP};
  // Reflection-style metadata about each field, registered with the Thrift runtime.
  public static final java.util.Map<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.NUM_EXECUTORS, new org.apache.storm.thrift.meta_data.FieldMetaData("num_executors", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I32)));
    tmpMap.put(_Fields.NUM_TASKS, new org.apache.storm.thrift.meta_data.FieldMetaData("num_tasks", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I32)));
    tmpMap.put(_Fields.EMITTED, new org.apache.storm.thrift.meta_data.FieldMetaData("emitted", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I64)));
    tmpMap.put(_Fields.TRANSFERRED, new org.apache.storm.thrift.meta_data.FieldMetaData("transferred", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I64)));
    tmpMap.put(_Fields.ACKED, new org.apache.storm.thrift.meta_data.FieldMetaData("acked", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I64)));
    tmpMap.put(_Fields.FAILED, new org.apache.storm.thrift.meta_data.FieldMetaData("failed", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I64)));
    tmpMap.put(_Fields.RESOURCES_MAP, new org.apache.storm.thrift.meta_data.FieldMetaData("resources_map", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.storm.thrift.meta_data.MapMetaData(org.apache.storm.thrift.protocol.TType.MAP,
            new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING),
            new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.DOUBLE))));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.storm.thrift.meta_data.FieldMetaData.addStructMetaDataMap(CommonAggregateStats.class, metaDataMap);
  }

  public CommonAggregateStats() {
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public CommonAggregateStats(CommonAggregateStats other) {
    __isset_bitfield = other.__isset_bitfield;
    this.num_executors = other.num_executors;
    this.num_tasks = other.num_tasks;
    this.emitted = other.emitted;
    this.transferred = other.transferred;
    this.acked = other.acked;
    this.failed = other.failed;
    if (other.is_set_resources_map()) {
      // Copy the map itself so the two instances do not share mutable state.
      java.util.Map<java.lang.String,java.lang.Double> __this__resources_map = new java.util.HashMap<java.lang.String,java.lang.Double>(other.resources_map);
      this.resources_map = __this__resources_map;
    }
  }

  public CommonAggregateStats deepCopy() {
    return new CommonAggregateStats(this);
  }

  // Resets every field to its unset state (zeros for primitives, null for the map).
  @Override
  public void clear() {
    set_num_executors_isSet(false);
    this.num_executors = 0;
    set_num_tasks_isSet(false);
    this.num_tasks = 0;
    set_emitted_isSet(false);
    this.emitted = 0;
    set_transferred_isSet(false);
    this.transferred = 0;
    set_acked_isSet(false);
    this.acked = 0;
    set_failed_isSet(false);
    this.failed = 0;
    this.resources_map = null;
  }

  // Generated accessor quintet per primitive field: get / set / unset /
  // is_set / set_isSet, with presence tracked as a bit in __isset_bitfield.
  public int get_num_executors() {
    return this.num_executors;
  }

  public void set_num_executors(int num_executors) {
    this.num_executors = num_executors;
    set_num_executors_isSet(true);
  }

  public void unset_num_executors() {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.clearBit(__isset_bitfield, __NUM_EXECUTORS_ISSET_ID);
  }

  /** Returns true if field num_executors is set (has been assigned a value) and false otherwise */
  public boolean is_set_num_executors() {
    return org.apache.storm.thrift.EncodingUtils.testBit(__isset_bitfield, __NUM_EXECUTORS_ISSET_ID);
  }

  public void set_num_executors_isSet(boolean value) {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.setBit(__isset_bitfield, __NUM_EXECUTORS_ISSET_ID, value);
  }

  public int get_num_tasks() {
    return this.num_tasks;
  }

  public void set_num_tasks(int num_tasks) {
    this.num_tasks = num_tasks;
    set_num_tasks_isSet(true);
  }

  public void unset_num_tasks() {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.clearBit(__isset_bitfield, __NUM_TASKS_ISSET_ID);
  }

  /** Returns true if field num_tasks is set (has been assigned a value) and false otherwise */
  public boolean is_set_num_tasks() {
    return org.apache.storm.thrift.EncodingUtils.testBit(__isset_bitfield, __NUM_TASKS_ISSET_ID);
  }

  public void set_num_tasks_isSet(boolean value) {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.setBit(__isset_bitfield, __NUM_TASKS_ISSET_ID, value);
  }

  public long get_emitted() {
    return this.emitted;
  }

  public void set_emitted(long emitted) {
    this.emitted = emitted;
    set_emitted_isSet(true);
  }

  public void unset_emitted() {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.clearBit(__isset_bitfield, __EMITTED_ISSET_ID);
  }

  /** Returns true if field emitted is set (has been assigned a value) and false otherwise */
  public boolean is_set_emitted() {
    return org.apache.storm.thrift.EncodingUtils.testBit(__isset_bitfield, __EMITTED_ISSET_ID);
  }

  public void set_emitted_isSet(boolean value) {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.setBit(__isset_bitfield, __EMITTED_ISSET_ID, value);
  }

  public long get_transferred() {
    return this.transferred;
  }

  public void set_transferred(long transferred) {
    this.transferred = transferred;
    set_transferred_isSet(true);
  }

  public void unset_transferred() {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.clearBit(__isset_bitfield, __TRANSFERRED_ISSET_ID);
  }

  /** Returns true if field transferred is set (has been assigned a value) and false otherwise */
  public boolean is_set_transferred() {
    return org.apache.storm.thrift.EncodingUtils.testBit(__isset_bitfield, __TRANSFERRED_ISSET_ID);
  }

  public void set_transferred_isSet(boolean value) {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.setBit(__isset_bitfield, __TRANSFERRED_ISSET_ID, value);
  }

  public long get_acked() {
    return this.acked;
  }

  public void set_acked(long acked) {
    this.acked = acked;
    set_acked_isSet(true);
  }

  public void unset_acked() {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.clearBit(__isset_bitfield, __ACKED_ISSET_ID);
  }

  /** Returns true if field acked is set (has been assigned a value) and false otherwise */
  public boolean is_set_acked() {
    return org.apache.storm.thrift.EncodingUtils.testBit(__isset_bitfield, __ACKED_ISSET_ID);
  }

  public void set_acked_isSet(boolean value) {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.setBit(__isset_bitfield, __ACKED_ISSET_ID, value);
  }

  public long get_failed() {
    return this.failed;
  }

  public void set_failed(long failed) {
    this.failed = failed;
    set_failed_isSet(true);
  }

  public void unset_failed() {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.clearBit(__isset_bitfield, __FAILED_ISSET_ID);
  }

  /** Returns true if field failed is set (has been assigned a value) and false otherwise */
  public boolean is_set_failed() {
    return org.apache.storm.thrift.EncodingUtils.testBit(__isset_bitfield, __FAILED_ISSET_ID);
  }

  public void set_failed_isSet(boolean value) {
    __isset_bitfield = org.apache.storm.thrift.EncodingUtils.setBit(__isset_bitfield, __FAILED_ISSET_ID, value);
  }

  public int get_resources_map_size() {
    return (this.resources_map == null) ? 0 : this.resources_map.size();
  }

  public void put_to_resources_map(java.lang.String key, double val) {
    if (this.resources_map == null) {
      this.resources_map = new java.util.HashMap<java.lang.String,java.lang.Double>();
    }
    this.resources_map.put(key, val);
  }

  @org.apache.storm.thrift.annotation.Nullable
  public java.util.Map<java.lang.String,java.lang.Double> get_resources_map() {
    return this.resources_map;
  }

  public void set_resources_map(@org.apache.storm.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.Double> resources_map) {
    this.resources_map = resources_map;
  }

  public void unset_resources_map() {
    this.resources_map = null;
  }

  /** Returns true if field resources_map is set (has been assigned a value) and false otherwise */
  public boolean is_set_resources_map() {
    return this.resources_map != null;
  }

  public void set_resources_map_isSet(boolean value) {
    if (!value) {
      this.resources_map = null;
    }
  }

  // Reflective setter used by the Thrift runtime; null unsets the field.
  public void setFieldValue(_Fields field, @org.apache.storm.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    case NUM_EXECUTORS:
      if (value == null) {
        unset_num_executors();
      } else {
        set_num_executors((java.lang.Integer)value);
      }
      break;

    case NUM_TASKS:
      if (value == null) {
        unset_num_tasks();
      } else {
        set_num_tasks((java.lang.Integer)value);
      }
      break;

    case EMITTED:
      if (value == null) {
        unset_emitted();
      } else {
        set_emitted((java.lang.Long)value);
      }
      break;

    case TRANSFERRED:
      if (value == null) {
        unset_transferred();
      } else {
        set_transferred((java.lang.Long)value);
      }
      break;

    case ACKED:
      if (value == null) {
        unset_acked();
      } else {
        set_acked((java.lang.Long)value);
      }
      break;

    case FAILED:
      if (value == null) {
        unset_failed();
      } else {
        set_failed((java.lang.Long)value);
      }
      break;

    case RESOURCES_MAP:
      if (value == null) {
        unset_resources_map();
      } else {
        set_resources_map((java.util.Map<java.lang.String,java.lang.Double>)value);
      }
      break;

    }
  }

  // Reflective getter counterpart of setFieldValue.
  @org.apache.storm.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case NUM_EXECUTORS:
      return get_num_executors();

    case NUM_TASKS:
      return get_num_tasks();

    case EMITTED:
      return get_emitted();

    case TRANSFERRED:
      return get_transferred();

    case ACKED:
      return get_acked();

    case FAILED:
      return get_failed();

    case RESOURCES_MAP:
      return get_resources_map();

    }
    throw new java.lang.IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }

    switch (field) {
    case NUM_EXECUTORS:
      return is_set_num_executors();
    case NUM_TASKS:
      return is_set_num_tasks();
    case EMITTED:
      return is_set_emitted();
    case TRANSFERRED:
      return is_set_transferred();
    case ACKED:
      return is_set_acked();
    case FAILED:
      return is_set_failed();
    case RESOURCES_MAP:
      return is_set_resources_map();
    }
    throw new java.lang.IllegalStateException();
  }

  @Override
  public boolean equals(java.lang.Object that) {
    if (that == null)
      return false;
    if (that instanceof CommonAggregateStats)
      return this.equals((CommonAggregateStats)that);
    return false;
  }

  // Field-by-field equality: two instances are equal only if each field has the
  // same set/unset status and, when set, the same value.
  public boolean equals(CommonAggregateStats that) {
    if (that == null)
      return false;
    if (this == that)
      return true;

    boolean this_present_num_executors = true && this.is_set_num_executors();
    boolean that_present_num_executors = true && that.is_set_num_executors();
    if (this_present_num_executors || that_present_num_executors) {
      if (!(this_present_num_executors && that_present_num_executors))
        return false;
      if (this.num_executors != that.num_executors)
        return false;
    }

    boolean this_present_num_tasks = true && this.is_set_num_tasks();
    boolean that_present_num_tasks = true && that.is_set_num_tasks();
    if (this_present_num_tasks || that_present_num_tasks) {
      if (!(this_present_num_tasks && that_present_num_tasks))
        return false;
      if (this.num_tasks != that.num_tasks)
        return false;
    }

    boolean this_present_emitted = true && this.is_set_emitted();
    boolean that_present_emitted = true &&
that.is_set_emitted();
    if (this_present_emitted || that_present_emitted) {
      if (!(this_present_emitted && that_present_emitted))
        return false;
      if (this.emitted != that.emitted)
        return false;
    }

    boolean this_present_transferred = true && this.is_set_transferred();
    boolean that_present_transferred = true && that.is_set_transferred();
    if (this_present_transferred || that_present_transferred) {
      if (!(this_present_transferred && that_present_transferred))
        return false;
      if (this.transferred != that.transferred)
        return false;
    }

    boolean this_present_acked = true && this.is_set_acked();
    boolean that_present_acked = true && that.is_set_acked();
    if (this_present_acked || that_present_acked) {
      if (!(this_present_acked && that_present_acked))
        return false;
      if (this.acked != that.acked)
        return false;
    }

    boolean this_present_failed = true && this.is_set_failed();
    boolean that_present_failed = true && that.is_set_failed();
    if (this_present_failed || that_present_failed) {
      if (!(this_present_failed && that_present_failed))
        return false;
      if (this.failed != that.failed)
        return false;
    }

    boolean this_present_resources_map = true && this.is_set_resources_map();
    boolean that_present_resources_map = true && that.is_set_resources_map();
    if (this_present_resources_map || that_present_resources_map) {
      if (!(this_present_resources_map && that_present_resources_map))
        return false;
      if (!this.resources_map.equals(that.resources_map))
        return false;
    }

    return true;
  }

  // Hash mixes the set/unset status of every field plus the value of each set
  // field; consistent with equals above.
  @Override
  public int hashCode() {
    int hashCode = 1;

    hashCode = hashCode * 8191 + ((is_set_num_executors()) ? 131071 : 524287);
    if (is_set_num_executors())
      hashCode = hashCode * 8191 + num_executors;

    hashCode = hashCode * 8191 + ((is_set_num_tasks()) ? 131071 : 524287);
    if (is_set_num_tasks())
      hashCode = hashCode * 8191 + num_tasks;

    hashCode = hashCode * 8191 + ((is_set_emitted()) ? 131071 : 524287);
    if (is_set_emitted())
      hashCode = hashCode * 8191 + org.apache.storm.thrift.TBaseHelper.hashCode(emitted);

    hashCode = hashCode * 8191 + ((is_set_transferred()) ? 131071 : 524287);
    if (is_set_transferred())
      hashCode = hashCode * 8191 + org.apache.storm.thrift.TBaseHelper.hashCode(transferred);

    hashCode = hashCode * 8191 + ((is_set_acked()) ? 131071 : 524287);
    if (is_set_acked())
      hashCode = hashCode * 8191 + org.apache.storm.thrift.TBaseHelper.hashCode(acked);

    hashCode = hashCode * 8191 + ((is_set_failed()) ? 131071 : 524287);
    if (is_set_failed())
      hashCode = hashCode * 8191 + org.apache.storm.thrift.TBaseHelper.hashCode(failed);

    hashCode = hashCode * 8191 + ((is_set_resources_map()) ? 131071 : 524287);
    if (is_set_resources_map())
      hashCode = hashCode * 8191 + resources_map.hashCode();

    return hashCode;
  }

  // Orders first by set/unset status of each field (in declaration order), then
  // by field value when both sides have the field set.
  @Override
  public int compareTo(CommonAggregateStats other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = java.lang.Boolean.valueOf(is_set_num_executors()).compareTo(other.is_set_num_executors());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_num_executors()) {
      lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.num_executors, other.num_executors);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(is_set_num_tasks()).compareTo(other.is_set_num_tasks());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_num_tasks()) {
      lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.num_tasks, other.num_tasks);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(is_set_emitted()).compareTo(other.is_set_emitted());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_emitted()) {
      lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.emitted, other.emitted);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(is_set_transferred()).compareTo(other.is_set_transferred());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_transferred()) {
      lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.transferred, other.transferred);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(is_set_acked()).compareTo(other.is_set_acked());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_acked()) {
      lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.acked, other.acked);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(is_set_failed()).compareTo(other.is_set_failed());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_failed()) {
      lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.failed, other.failed);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(is_set_resources_map()).compareTo(other.is_set_resources_map());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (is_set_resources_map()) {
      lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.resources_map, other.resources_map);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  @org.apache.storm.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  // Serialization entry points: delegate to the scheme (standard or tuple)
  // selected for the given protocol.
  public void read(org.apache.storm.thrift.protocol.TProtocol iprot) throws org.apache.storm.thrift.TException {
    scheme(iprot).read(iprot, this);
  }

  public void write(org.apache.storm.thrift.protocol.TProtocol oprot) throws org.apache.storm.thrift.TException {
    scheme(oprot).write(oprot, this);
  }

  // Renders only the fields that are currently set.
  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("CommonAggregateStats(");
    boolean first = true;

    if (is_set_num_executors()) {
      sb.append("num_executors:");
      sb.append(this.num_executors);
      first = false;
    }
    if (is_set_num_tasks()) {
      if (!first) sb.append(", ");
      sb.append("num_tasks:");
      sb.append(this.num_tasks);
      first = false;
    }
    if (is_set_emitted()) {
      if (!first) sb.append(", ");
      sb.append("emitted:");
      sb.append(this.emitted);
      first = false;
    }
    if (is_set_transferred()) {
      if (!first) sb.append(", ");
      sb.append("transferred:");
      sb.append(this.transferred);
      first = false;
    }
    if (is_set_acked()) {
      if (!first) sb.append(", ");
      sb.append("acked:");
      sb.append(this.acked);
      first = false;
    }
    if (is_set_failed()) {
      if (!first) sb.append(", ");
      sb.append("failed:");
      sb.append(this.failed);
      first = false;
    }
    if (is_set_resources_map()) {
      if (!first) sb.append(", ");
      sb.append("resources_map:");
      if (this.resources_map == null) {
        sb.append("null");
      } else {
        sb.append(this.resources_map);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.storm.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }

  // Java serialization is bridged through Thrift's compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.storm.thrift.protocol.TCompactProtocol(new org.apache.storm.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.storm.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.storm.thrift.protocol.TCompactProtocol(new org.apache.storm.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.storm.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class CommonAggregateStatsStandardSchemeFactory implements org.apache.storm.thrift.scheme.SchemeFactory {
    public CommonAggregateStatsStandardScheme getScheme() {
      return new CommonAggregateStatsStandardScheme();
    }
  }

  // Standard scheme: every field is written with an explicit field header and
  // unknown/mismatched fields are skipped on read (forward compatibility).
  private static class CommonAggregateStatsStandardScheme extends org.apache.storm.thrift.scheme.StandardScheme<CommonAggregateStats> {

    public void read(org.apache.storm.thrift.protocol.TProtocol iprot, CommonAggregateStats struct) throws org.apache.storm.thrift.TException {
      org.apache.storm.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.storm.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // NUM_EXECUTORS
            if (schemeField.type == org.apache.storm.thrift.protocol.TType.I32) {
              struct.num_executors = iprot.readI32();
              struct.set_num_executors_isSet(true);
            } else {
              org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // NUM_TASKS
            if (schemeField.type == org.apache.storm.thrift.protocol.TType.I32) {
              struct.num_tasks = iprot.readI32();
              struct.set_num_tasks_isSet(true);
            } else {
              org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // EMITTED
            if (schemeField.type == org.apache.storm.thrift.protocol.TType.I64) {
              struct.emitted = iprot.readI64();
              struct.set_emitted_isSet(true);
            } else {
              org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 4: // TRANSFERRED
            if (schemeField.type == org.apache.storm.thrift.protocol.TType.I64) {
              struct.transferred = iprot.readI64();
              struct.set_transferred_isSet(true);
            } else {
              org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 5: // ACKED
            if (schemeField.type == org.apache.storm.thrift.protocol.TType.I64) {
              struct.acked = iprot.readI64();
              struct.set_acked_isSet(true);
            } else {
              org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 6: // FAILED
            if (schemeField.type == org.apache.storm.thrift.protocol.TType.I64) {
              struct.failed = iprot.readI64();
              struct.set_failed_isSet(true);
            } else {
              org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 7: // RESOURCES_MAP
            if (schemeField.type == org.apache.storm.thrift.protocol.TType.MAP) {
              {
                org.apache.storm.thrift.protocol.TMap _map426 = iprot.readMapBegin();
                struct.resources_map = new java.util.HashMap<java.lang.String,java.lang.Double>(2*_map426.size);
                @org.apache.storm.thrift.annotation.Nullable java.lang.String _key427;
                double _val428;
                for (int _i429 = 0; _i429 < _map426.size; ++_i429)
                {
                  _key427 = iprot.readString();
                  _val428 = iprot.readDouble();
                  struct.resources_map.put(_key427, _val428);
                }
                iprot.readMapEnd();
              }
              struct.set_resources_map_isSet(true);
            } else {
              org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }

    public void write(org.apache.storm.thrift.protocol.TProtocol oprot, CommonAggregateStats struct) throws org.apache.storm.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      // Optional fields are only written when set.
      if (struct.is_set_num_executors()) {
        oprot.writeFieldBegin(NUM_EXECUTORS_FIELD_DESC);
        oprot.writeI32(struct.num_executors);
        oprot.writeFieldEnd();
      }
      if (struct.is_set_num_tasks()) {
        oprot.writeFieldBegin(NUM_TASKS_FIELD_DESC);
        oprot.writeI32(struct.num_tasks);
        oprot.writeFieldEnd();
      }
      if (struct.is_set_emitted()) {
        oprot.writeFieldBegin(EMITTED_FIELD_DESC);
        oprot.writeI64(struct.emitted);
        oprot.writeFieldEnd();
      }
      if (struct.is_set_transferred()) {
        oprot.writeFieldBegin(TRANSFERRED_FIELD_DESC);
        oprot.writeI64(struct.transferred);
        oprot.writeFieldEnd();
      }
      if (struct.is_set_acked()) {
        oprot.writeFieldBegin(ACKED_FIELD_DESC);
        oprot.writeI64(struct.acked);
        oprot.writeFieldEnd();
      }
      if (struct.is_set_failed()) {
        oprot.writeFieldBegin(FAILED_FIELD_DESC);
        oprot.writeI64(struct.failed);
        oprot.writeFieldEnd();
      }
      if (struct.resources_map != null) {
        if (struct.is_set_resources_map()) {
          oprot.writeFieldBegin(RESOURCES_MAP_FIELD_DESC);
          {
            oprot.writeMapBegin(new org.apache.storm.thrift.protocol.TMap(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.DOUBLE, struct.resources_map.size()));
            for (java.util.Map.Entry<java.lang.String, java.lang.Double> _iter430 : struct.resources_map.entrySet())
            {
              oprot.writeString(_iter430.getKey());
              oprot.writeDouble(_iter430.getValue());
            }
            oprot.writeMapEnd();
          }
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class CommonAggregateStatsTupleSchemeFactory implements org.apache.storm.thrift.scheme.SchemeFactory {
    public CommonAggregateStatsTupleScheme getScheme() {
      return new CommonAggregateStatsTupleScheme();
    }
  }

  // Tuple scheme: a leading bitset records which optional fields follow, then
  // the set fields are written back-to-back without per-field headers.
  private static class CommonAggregateStatsTupleScheme extends org.apache.storm.thrift.scheme.TupleScheme<CommonAggregateStats> {

    @Override
    public void write(org.apache.storm.thrift.protocol.TProtocol prot, CommonAggregateStats struct) throws org.apache.storm.thrift.TException {
      org.apache.storm.thrift.protocol.TTupleProtocol oprot = (org.apache.storm.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.is_set_num_executors()) {
        optionals.set(0);
      }
      if (struct.is_set_num_tasks()) {
        optionals.set(1);
      }
      if (struct.is_set_emitted()) {
        optionals.set(2);
      }
      if (struct.is_set_transferred()) {
        optionals.set(3);
      }
      if (struct.is_set_acked()) {
        optionals.set(4);
      }
      if (struct.is_set_failed()) {
        optionals.set(5);
      }
      if (struct.is_set_resources_map()) {
        optionals.set(6);
      }
oprot.writeBitSet(optionals, 7); if (struct.is_set_num_executors()) { oprot.writeI32(struct.num_executors); } if (struct.is_set_num_tasks()) { oprot.writeI32(struct.num_tasks); } if (struct.is_set_emitted()) { oprot.writeI64(struct.emitted); } if (struct.is_set_transferred()) { oprot.writeI64(struct.transferred); } if (struct.is_set_acked()) { oprot.writeI64(struct.acked); } if (struct.is_set_failed()) { oprot.writeI64(struct.failed); } if (struct.is_set_resources_map()) { { oprot.writeI32(struct.resources_map.size()); for (java.util.Map.Entry<java.lang.String, java.lang.Double> _iter431 : struct.resources_map.entrySet()) { oprot.writeString(_iter431.getKey()); oprot.writeDouble(_iter431.getValue()); } } } } @Override public void read(org.apache.storm.thrift.protocol.TProtocol prot, CommonAggregateStats struct) throws org.apache.storm.thrift.TException { org.apache.storm.thrift.protocol.TTupleProtocol iprot = (org.apache.storm.thrift.protocol.TTupleProtocol) prot; java.util.BitSet incoming = iprot.readBitSet(7); if (incoming.get(0)) { struct.num_executors = iprot.readI32(); struct.set_num_executors_isSet(true); } if (incoming.get(1)) { struct.num_tasks = iprot.readI32(); struct.set_num_tasks_isSet(true); } if (incoming.get(2)) { struct.emitted = iprot.readI64(); struct.set_emitted_isSet(true); } if (incoming.get(3)) { struct.transferred = iprot.readI64(); struct.set_transferred_isSet(true); } if (incoming.get(4)) { struct.acked = iprot.readI64(); struct.set_acked_isSet(true); } if (incoming.get(5)) { struct.failed = iprot.readI64(); struct.set_failed_isSet(true); } if (incoming.get(6)) { { org.apache.storm.thrift.protocol.TMap _map432 = new org.apache.storm.thrift.protocol.TMap(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.DOUBLE, iprot.readI32()); struct.resources_map = new java.util.HashMap<java.lang.String,java.lang.Double>(2*_map432.size); @org.apache.storm.thrift.annotation.Nullable java.lang.String _key433; double 
_val434; for (int _i435 = 0; _i435 < _map432.size; ++_i435) { _key433 = iprot.readString(); _val434 = iprot.readDouble(); struct.resources_map.put(_key433, _val434); } } struct.set_resources_map_isSet(true); } } } private static <S extends org.apache.storm.thrift.scheme.IScheme> S scheme(org.apache.storm.thrift.protocol.TProtocol proto) { return (org.apache.storm.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.java.decompiler.struct;

import org.jetbrains.java.decompiler.code.*;
import org.jetbrains.java.decompiler.struct.attr.StructGeneralAttribute;
import org.jetbrains.java.decompiler.struct.attr.StructLocalVariableTableAttribute;
import org.jetbrains.java.decompiler.struct.consts.ConstantPool;
import org.jetbrains.java.decompiler.util.DataInputFullStream;
import org.jetbrains.java.decompiler.util.VBStyleCollection;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import static org.jetbrains.java.decompiler.code.CodeConstants.*;

/*
  method_info {
    u2 access_flags;
    u2 name_index;
    u2 descriptor_index;
    u2 attributes_count;
    attribute_info attributes[attributes_count];
  }
*/
// Represents one method_info entry of a parsed class file. Bytecode is loaded
// lazily: the Code attribute is skipped on first parse and re-read on demand
// via expandData().
public class StructMethod extends StructMember {
  // iconst_m1..iconst_5 -> the constant each opcode pushes.
  private static final int[] opr_iconst = {-1, 0, 1, 2, 3, 4, 5};
  // {i,l,f,d,a}{load,store}_0..3 -> the implicit local-variable slot (pattern repeats every 4 opcodes).
  private static final int[] opr_loadstore = {0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1, 2, 3};
  // Generic load/store opcodes the short forms are normalized to, indexed by type.
  private static final int[] opcs_load = {opc_iload, opc_lload, opc_fload, opc_dload, opc_aload};
  private static final int[] opcs_store = {opc_istore, opc_lstore, opc_fstore, opc_dstore, opc_astore};

  private final StructClass classStruct;
  private final String name;
  private final String descriptor;

  private boolean containsCode = false;    // true when a Code attribute was seen (own classes only)
  private int localVariables = 0;          // max_locals from the Code attribute
  private int codeLength = 0;              // length of the bytecode array
  private int codeFullLength = 0;          // bytecode + exception table + its 2-byte count
  private InstructionSequence seq;         // parsed instructions; non-null only while expanded
  private boolean expanded = false;
  private Map<String, StructGeneralAttribute> codeAttributes;  // attributes nested inside Code; merged into 'attributes'

  public StructMethod(DataInputFullStream in, StructClass clStruct) throws IOException {
    classStruct = clStruct;

    accessFlags = in.readUnsignedShort();
    int nameIndex = in.readUnsignedShort();
    int descriptorIndex = in.readUnsignedShort();

    ConstantPool pool = clStruct.getPool();

    String[] values = pool.getClassElement(ConstantPool.METHOD, clStruct.qualifiedName, nameIndex, descriptorIndex);
    name = values[0];
    descriptor = values[1];

    attributes = readAttributes(in, pool);
    // Attributes found inside the Code attribute (see readAttribute) are folded
    // into the method's attribute map.
    if (codeAttributes != null) {
      attributes.putAll(codeAttributes);
      codeAttributes = null;
    }
  }

  @Override
  protected StructGeneralAttribute readAttribute(DataInputFullStream in, ConstantPool pool, String name) throws IOException {
    if (StructGeneralAttribute.ATTRIBUTE_CODE.getName().equals(name)) {
      if (!classStruct.isOwn()) {
        // skip code in foreign classes
        in.discard(8);                          // max_stack, max_locals, code_length
        in.discard(in.readInt());               // wait: code_length then code -- NOTE(review): 8 bytes cover attribute
        in.discard(8 * in.readUnsignedShort()); // exception table entries
      } else {
        containsCode = true;
        in.discard(6);                          // attribute_length (u4) + max_stack (u2)
        localVariables = in.readUnsignedShort();
        codeLength = in.readInt();
        in.discard(codeLength);                 // bytecode itself is skipped; reloaded lazily
        int excLength = in.readUnsignedShort();
        in.discard(excLength * 8);
        codeFullLength = codeLength + excLength * 8 + 2;
      }
      // Attributes nested inside Code (e.g. LocalVariableTable) are kept aside
      // and merged by the constructor.
      codeAttributes = readAttributes(in, pool);
      return null;
    }
    return super.readAttribute(in, pool, name);
  }

  /** Lazily loads and parses this method's bytecode; no-op if already expanded or codeless. */
  public void expandData() throws IOException {
    if (containsCode && !expanded) {
      byte[] code = classStruct.getLoader().loadBytecode(this, codeFullLength);
      seq = parseBytecode(new DataInputFullStream(code), codeLength, classStruct.getPool());
      expanded = true;
    }
  }

  /** Drops the parsed instruction sequence so it can be re-expanded later. */
  public void releaseResources() {
    if (containsCode && expanded) {
      seq = null;
      expanded = false;
    }
  }

  // Decodes 'length' bytes of JVM bytecode into an InstructionSequence.
  // 'i' tracks the byte offset of the current instruction and MUST advance by
  // exactly the number of operand bytes consumed in each case.
  @SuppressWarnings("AssignmentToForLoopParameter")
  private InstructionSequence parseBytecode(DataInputFullStream in, int length, ConstantPool pool) throws IOException {
    VBStyleCollection<Instruction, Integer> instructions = new VBStyleCollection<>();
    int bytecode_version = classStruct.getBytecodeVersion();

    for (int i = 0; i < length; ) {
      int offset = i;

      int opcode = in.readUnsignedByte();
      int group = GROUP_GENERAL;

      boolean wide = (opcode == opc_wide);
      if (wide) {
        i++;
        opcode = in.readUnsignedByte();  // wide prefixes the real opcode
      }

      List<Integer> operands = new ArrayList<>();

      // Normalize the implicit-operand short forms to their generic opcode +
      // explicit operand.
      if (opcode >= opc_iconst_m1 && opcode <= opc_iconst_5) {
        operands.add(opr_iconst[opcode - opc_iconst_m1]);
        opcode = opc_bipush;
      }
      else if (opcode >= opc_iload_0 && opcode <= opc_aload_3) {
        operands.add(opr_loadstore[opcode - opc_iload_0]);
        opcode = opcs_load[(opcode - opc_iload_0) / 4];
      }
      else if (opcode >= opc_istore_0 && opcode <= opc_astore_3) {
        operands.add(opr_loadstore[opcode - opc_istore_0]);
        opcode = opcs_store[(opcode - opc_istore_0) / 4];
      }
      else {
        switch (opcode) {
          case opc_bipush:
            operands.add((int)in.readByte());  // signed byte operand
            i++;
            break;
          case opc_ldc:
          case opc_newarray:
            operands.add(in.readUnsignedByte());
            i++;
            break;
          case opc_sipush:
          case opc_ifeq:
          case opc_ifne:
          case opc_iflt:
          case opc_ifge:
          case opc_ifgt:
          case opc_ifle:
          case opc_if_icmpeq:
          case opc_if_icmpne:
          case opc_if_icmplt:
          case opc_if_icmpge:
          case opc_if_icmpgt:
          case opc_if_icmple:
          case opc_if_acmpeq:
          case opc_if_acmpne:
          case opc_goto:
          case opc_jsr:
          case opc_ifnull:
          case opc_ifnonnull:
            // signed 16-bit operand; everything here except sipush is a jump
            if (opcode != opc_sipush) {
              group = GROUP_JUMP;
            }
            operands.add((int)in.readShort());
            i += 2;
            break;
          case opc_ldc_w:
          case opc_ldc2_w:
          case opc_getstatic:
          case opc_putstatic:
          case opc_getfield:
          case opc_putfield:
          case opc_invokevirtual:
          case opc_invokespecial:
          case opc_invokestatic:
          case opc_new:
          case opc_anewarray:
          case opc_checkcast:
          case opc_instanceof:
            // 16-bit constant-pool index
            operands.add(in.readUnsignedShort());
            i += 2;
            if (opcode >= opc_getstatic && opcode <= opc_putfield) {
              group = GROUP_FIELDACCESS;
            }
            else if (opcode >= opc_invokevirtual && opcode <= opc_invokestatic) {
              group = GROUP_INVOCATION;
            }
            break;
          case opc_invokedynamic:
            if (classStruct.isVersionGE_1_7()) { // instruction unused in Java 6 and before
              operands.add(in.readUnsignedShort());
              in.discard(2);  // two zero padding bytes
              group = GROUP_INVOCATION;
              i += 4;
            }
            break;
          case opc_iload:
          case opc_lload:
          case opc_fload:
          case opc_dload:
          case opc_aload:
          case opc_istore:
          case opc_lstore:
          case opc_fstore:
          case opc_dstore:
          case opc_astore:
          case opc_ret:
            // local slot index: 16-bit when wide-prefixed, 8-bit otherwise
            if (wide) {
              operands.add(in.readUnsignedShort());
              i += 2;
            }
            else {
              operands.add(in.readUnsignedByte());
              i++;
            }
            if (opcode == opc_ret) {
              group = GROUP_RETURN;
            }
            break;
          case opc_iinc:
            if (wide) {
              operands.add(in.readUnsignedShort());
              operands.add((int)in.readShort());
              i += 4;
            }
            else {
              operands.add(in.readUnsignedByte());
              operands.add((int)in.readByte());
              i += 2;
            }
            break;
          case opc_goto_w:
          case opc_jsr_w:
            // 32-bit offset forms are normalized to the plain goto/jsr opcodes
            opcode = opcode == opc_jsr_w ? opc_jsr : opc_goto;
            operands.add(in.readInt());
            group = GROUP_JUMP;
            i += 4;
            break;
          case opc_invokeinterface:
            operands.add(in.readUnsignedShort());
            operands.add(in.readUnsignedByte());  // 'count' operand
            in.discard(1);                        // trailing zero byte
            group = GROUP_INVOCATION;
            i += 4;
            break;
          case opc_multianewarray:
            operands.add(in.readUnsignedShort());
            operands.add(in.readUnsignedByte());  // dimensions
            i += 3;
            break;
          case opc_tableswitch:
            in.discard((4 - (i + 1) % 4) % 4);
            i += ((4 - (i + 1) % 4) % 4); // padding
            operands.add(in.readInt());   // default offset
            i += 4;
            int low = in.readInt();
            operands.add(low);
            i += 4;
            int high = in.readInt();
            operands.add(high);
            i += 4;
            for (int j = 0; j < high - low + 1; j++) {
              operands.add(in.readInt()); // one jump offset per value in [low, high]
              i += 4;
            }
            group = GROUP_SWITCH;
            break;
          case opc_lookupswitch:
            in.discard((4 - (i + 1) % 4) % 4);
            i += ((4 - (i + 1) % 4) % 4); // padding
            operands.add(in.readInt());   // default offset
            i += 4;
            int npairs = in.readInt();
            operands.add(npairs);
            i += 4;
            for (int j = 0; j < npairs; j++) {
              operands.add(in.readInt()); // match value
              i += 4;
              operands.add(in.readInt()); // jump offset
              i += 4;
            }
            group = GROUP_SWITCH;
            break;
          case opc_ireturn:
          case opc_lreturn:
          case opc_freturn:
          case opc_dreturn:
          case opc_areturn:
          case opc_return:
          case opc_athrow:
            group = GROUP_RETURN;
        }
      }

      int[] ops = null;
      if (!operands.isEmpty()) {
        ops = new int[operands.size()];
        for (int j = 0; j < operands.size(); j++) {
          ops[j] = operands.get(j);
        }
      }

      Instruction instr = Instruction.create(opcode, wide, group, bytecode_version, ops);
      instructions.addWithKey(instr, offset);

      i++;  // account for the opcode byte itself
    }

    // initialize exception table
    List<ExceptionHandler> lstHandlers = new ArrayList<>();

    int exception_count = in.readUnsignedShort();
    for (int i = 0; i < exception_count; i++) {
      ExceptionHandler handler = new ExceptionHandler();
      handler.from = in.readUnsignedShort();
      handler.to = in.readUnsignedShort();
      handler.handler = in.readUnsignedShort();

      int excclass = in.readUnsignedShort();
      if (excclass != 0) {
        // catch_type == 0 means "catch all" (finally); otherwise resolve the class name
        handler.exceptionClass = pool.getPrimitiveConstant(excclass).getString();
      }

      lstHandlers.add(handler);
    }

    InstructionSequence seq = new FullInstructionSequence(instructions, new ExceptionTable(lstHandlers));

    // initialize instructions
    int i = seq.length() - 1;
    seq.setPointer(i);

    while (i >= 0) {
      Instruction instr = seq.getInstr(i--);
      if (instr.group != GROUP_GENERAL) {
        instr.initInstruction(seq);
      }
      seq.addToPointer(-1);
    }

    return seq;
  }

  public StructClass getClassStruct() {
    return classStruct;
  }

  public String getName() {
    return name;
  }

  public String getDescriptor() {
    return descriptor;
  }

  public boolean containsCode() {
    return containsCode;
  }

  public int getLocalVariables() {
    return localVariables;
  }

  /** Parsed bytecode, or null unless expandData() has been called. */
  public InstructionSequence getInstructionSequence() {
    return seq;
  }

  public StructLocalVariableTableAttribute getLocalVariableAttr() {
    return (StructLocalVariableTableAttribute)getAttribute(StructGeneralAttribute.ATTRIBUTE_LOCAL_VARIABLE_TABLE);
  }

  @Override
  public String toString() {
    return name;
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs; import org.apache.hadoop.classification.InterfaceAudience; /** * This class contains constants for configuration keys used * in the common code. * * It includes all publicly documented configuration keys. 
In general * this class should not be used directly (use CommonConfigurationKeys * instead) * */ @InterfaceAudience.Public public class CommonConfigurationKeysPublic { // The Keys /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IO_NATIVE_LIB_AVAILABLE_KEY = "io.native.lib.available"; /** Default value for IO_NATIVE_LIB_AVAILABLE_KEY */ public static final boolean IO_NATIVE_LIB_AVAILABLE_DEFAULT = true; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_KEY = "net.topology.script.number.args"; /** Default value for NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_KEY */ public static final int NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_DEFAULT = 100; //FS keys /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String FS_DEFAULT_NAME_KEY = "fs.defaultFS"; /** Default value for FS_DEFAULT_NAME_KEY */ public static final String FS_DEFAULT_NAME_DEFAULT = "file:///"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String FS_DF_INTERVAL_KEY = "fs.df.interval"; /** Default value for FS_DF_INTERVAL_KEY */ public static final long FS_DF_INTERVAL_DEFAULT = 60000; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String FS_DU_INTERVAL_KEY = "fs.du.interval"; /** Default value for FS_DU_INTERVAL_KEY */ public static final long FS_DU_INTERVAL_DEFAULT = 600000; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String FS_CLIENT_RESOLVE_REMOTE_SYMLINKS_KEY = "fs.client.resolve.remote.symlinks"; /** Default value for FS_CLIENT_RESOLVE_REMOTE_SYMLINKS_KEY */ public static final boolean FS_CLIENT_RESOLVE_REMOTE_SYMLINKS_DEFAULT = true; //Defaults are not specified for following keys /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String 
NET_TOPOLOGY_SCRIPT_FILE_NAME_KEY = "net.topology.script.file.name"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY = "net.topology.node.switch.mapping.impl"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String NET_TOPOLOGY_IMPL_KEY = "net.topology.impl"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY = "net.topology.table.file.name"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String FS_TRASH_CHECKPOINT_INTERVAL_KEY = "fs.trash.checkpoint.interval"; /** Default value for FS_TRASH_CHECKPOINT_INTERVAL_KEY */ public static final long FS_TRASH_CHECKPOINT_INTERVAL_DEFAULT = 0; // TBD: Code is still using hardcoded values (e.g. "fs.automatic.close") // instead of constant (e.g. FS_AUTOMATIC_CLOSE_KEY) // /** Not used anywhere, looks like default value for FS_LOCAL_BLOCK_SIZE */ public static final long FS_LOCAL_BLOCK_SIZE_DEFAULT = 32*1024*1024; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String FS_AUTOMATIC_CLOSE_KEY = "fs.automatic.close"; /** Default value for FS_AUTOMATIC_CLOSE_KEY */ public static final boolean FS_AUTOMATIC_CLOSE_DEFAULT = true; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String FS_FILE_IMPL_KEY = "fs.file.impl"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String FS_FTP_HOST_KEY = "fs.ftp.host"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String FS_FTP_HOST_PORT_KEY = "fs.ftp.host.port"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String FS_TRASH_INTERVAL_KEY = "fs.trash.interval"; /** Default value for 
FS_TRASH_INTERVAL_KEY */ public static final long FS_TRASH_INTERVAL_DEFAULT = 0; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IO_MAPFILE_BLOOM_SIZE_KEY = "io.mapfile.bloom.size"; /** Default value for IO_MAPFILE_BLOOM_SIZE_KEY */ public static final int IO_MAPFILE_BLOOM_SIZE_DEFAULT = 1024*1024; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IO_MAPFILE_BLOOM_ERROR_RATE_KEY = "io.mapfile.bloom.error.rate" ; /** Default value for IO_MAPFILE_BLOOM_ERROR_RATE_KEY */ public static final float IO_MAPFILE_BLOOM_ERROR_RATE_DEFAULT = 0.005f; /** Codec class that implements Lzo compression algorithm */ public static final String IO_COMPRESSION_CODEC_LZO_CLASS_KEY = "io.compression.codec.lzo.class"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IO_MAP_INDEX_INTERVAL_KEY = "io.map.index.interval"; /** Default value for IO_MAP_INDEX_INTERVAL_DEFAULT */ public static final int IO_MAP_INDEX_INTERVAL_DEFAULT = 128; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IO_MAP_INDEX_SKIP_KEY = "io.map.index.skip"; /** Default value for IO_MAP_INDEX_SKIP_KEY */ public static final int IO_MAP_INDEX_SKIP_DEFAULT = 0; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IO_SEQFILE_COMPRESS_BLOCKSIZE_KEY = "io.seqfile.compress.blocksize"; /** Default value for IO_SEQFILE_COMPRESS_BLOCKSIZE_KEY */ public static final int IO_SEQFILE_COMPRESS_BLOCKSIZE_DEFAULT = 1000000; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IO_FILE_BUFFER_SIZE_KEY = "io.file.buffer.size"; /** Default value for IO_FILE_BUFFER_SIZE_KEY */ public static final int IO_FILE_BUFFER_SIZE_DEFAULT = 4096; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String 
IO_SKIP_CHECKSUM_ERRORS_KEY = "io.skip.checksum.errors"; /** Default value for IO_SKIP_CHECKSUM_ERRORS_KEY */ public static final boolean IO_SKIP_CHECKSUM_ERRORS_DEFAULT = false; /** * @deprecated Moved to mapreduce, see mapreduce.task.io.sort.mb * in mapred-default.xml * See https://issues.apache.org/jira/browse/HADOOP-6801 */ public static final String IO_SORT_MB_KEY = "io.sort.mb"; /** Default value for IO_SORT_MB_DEFAULT */ public static final int IO_SORT_MB_DEFAULT = 100; /** * @deprecated Moved to mapreduce, see mapreduce.task.io.sort.factor * in mapred-default.xml * See https://issues.apache.org/jira/browse/HADOOP-6801 */ public static final String IO_SORT_FACTOR_KEY = "io.sort.factor"; /** Default value for IO_SORT_FACTOR_DEFAULT */ public static final int IO_SORT_FACTOR_DEFAULT = 100; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IO_SERIALIZATIONS_KEY = "io.serializations"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String TFILE_IO_CHUNK_SIZE_KEY = "tfile.io.chunk.size"; /** Default value for TFILE_IO_CHUNK_SIZE_DEFAULT */ public static final int TFILE_IO_CHUNK_SIZE_DEFAULT = 1024*1024; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String TFILE_FS_INPUT_BUFFER_SIZE_KEY = "tfile.fs.input.buffer.size"; /** Default value for TFILE_FS_INPUT_BUFFER_SIZE_KEY */ public static final int TFILE_FS_INPUT_BUFFER_SIZE_DEFAULT = 256*1024; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String TFILE_FS_OUTPUT_BUFFER_SIZE_KEY = "tfile.fs.output.buffer.size"; /** Default value for TFILE_FS_OUTPUT_BUFFER_SIZE_KEY */ public static final int TFILE_FS_OUTPUT_BUFFER_SIZE_DEFAULT = 256*1024; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY = "ipc.client.connection.maxidletime"; /** Default 
value for IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY */ public static final int IPC_CLIENT_CONNECTION_MAXIDLETIME_DEFAULT = 10000; // 10s /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IPC_CLIENT_CONNECT_TIMEOUT_KEY = "ipc.client.connect.timeout"; /** Default value for IPC_CLIENT_CONNECT_TIMEOUT_KEY */ public static final int IPC_CLIENT_CONNECT_TIMEOUT_DEFAULT = 20000; // 20s /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IPC_CLIENT_CONNECT_MAX_RETRIES_KEY = "ipc.client.connect.max.retries"; /** Default value for IPC_CLIENT_CONNECT_MAX_RETRIES_KEY */ public static final int IPC_CLIENT_CONNECT_MAX_RETRIES_DEFAULT = 10; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY = "ipc.client.connect.retry.interval"; /** Default value for IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY */ public static final int IPC_CLIENT_CONNECT_RETRY_INTERVAL_DEFAULT = 1000; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_KEY = "ipc.client.connect.max.retries.on.timeouts"; /** Default value for IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_KEY */ public static final int IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_DEFAULT = 45; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IPC_CLIENT_TCPNODELAY_KEY = "ipc.client.tcpnodelay"; /** Defalt value for IPC_CLIENT_TCPNODELAY_KEY */ public static final boolean IPC_CLIENT_TCPNODELAY_DEFAULT = false; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IPC_SERVER_LISTEN_QUEUE_SIZE_KEY = "ipc.server.listen.queue.size"; /** Default value for IPC_SERVER_LISTEN_QUEUE_SIZE_KEY */ public static final int IPC_SERVER_LISTEN_QUEUE_SIZE_DEFAULT = 128; /** See <a 
href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IPC_CLIENT_KILL_MAX_KEY = "ipc.client.kill.max"; /** Default value for IPC_CLIENT_KILL_MAX_KEY */ public static final int IPC_CLIENT_KILL_MAX_DEFAULT = 10; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IPC_CLIENT_IDLETHRESHOLD_KEY = "ipc.client.idlethreshold"; /** Default value for IPC_CLIENT_IDLETHRESHOLD_DEFAULT */ public static final int IPC_CLIENT_IDLETHRESHOLD_DEFAULT = 4000; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String IPC_SERVER_TCPNODELAY_KEY = "ipc.server.tcpnodelay"; /** Default value for IPC_SERVER_TCPNODELAY_KEY */ public static final boolean IPC_SERVER_TCPNODELAY_DEFAULT = false; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY = "hadoop.rpc.socket.factory.class.default"; public static final String HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_DEFAULT = "org.apache.hadoop.net.StandardSocketFactory"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String HADOOP_SOCKS_SERVER_KEY = "hadoop.socks.server"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String HADOOP_UTIL_HASH_TYPE_KEY = "hadoop.util.hash.type"; /** Default value for HADOOP_UTIL_HASH_TYPE_KEY */ public static final String HADOOP_UTIL_HASH_TYPE_DEFAULT = "murmur"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String HADOOP_SECURITY_GROUP_MAPPING = "hadoop.security.group.mapping"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String HADOOP_SECURITY_GROUPS_CACHE_SECS = "hadoop.security.groups.cache.secs"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final long 
HADOOP_SECURITY_GROUPS_CACHE_SECS_DEFAULT = 300; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String HADOOP_SECURITY_GROUPS_CACHE_WARN_AFTER_MS = "hadoop.security.groups.cache.warn.after.ms"; public static final long HADOOP_SECURITY_GROUPS_CACHE_WARN_AFTER_MS_DEFAULT = 5000; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String HADOOP_SECURITY_AUTHORIZATION = "hadoop.security.authorization"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN = "hadoop.security.instrumentation.requires.admin"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String HADOOP_SECURITY_SERVICE_USER_NAME_KEY = "hadoop.security.service.user.name.key"; /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ public static final String HADOOP_SECURITY_AUTH_TO_LOCAL = "hadoop.security.auth_to_local"; public static final String HADOOP_SSL_ENABLED_KEY = "hadoop.ssl.enabled"; public static final boolean HADOOP_SSL_ENABLED_DEFAULT = false; // HTTP policies to be used in configuration public static final String HTTP_POLICY_HTTP_ONLY = "HTTP_ONLY"; public static final String HTTP_POLICY_HTTPS_ONLY = "HTTPS_ONLY"; }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.refactoring.introduce.inplace;

import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.codeInsight.template.ExpressionContext;
import com.intellij.codeInsight.template.TextResult;
import com.intellij.codeInsight.template.impl.TemplateManagerImpl;
import com.intellij.codeInsight.template.impl.TemplateState;
import com.intellij.lang.*;
import com.intellij.lexer.Lexer;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.command.impl.StartMarkAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.refactoring.rename.NameSuggestionProvider;
import com.intellij.refactoring.rename.inplace.InplaceRefactoring;
import com.intellij.refactoring.rename.inplace.MyLookupExpression;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;

/**
 * Base class for in-place "introduce variable"-style refactorings: it tracks the
 * introduced expression and its occurrences via {@link RangeMarker}s while the user
 * edits the new name inside an inline template.
 *
 * @param <E> the PSI element type of the expression being introduced
 */
public abstract class InplaceVariableIntroducer<E extends PsiElement> extends InplaceRefactoring {
  /** The expression being introduced; stays null when no expression was supplied. */
  protected E myExpr;
  /** Marker over {@link #myExpr}'s text range; null when the expression is absent or not physical. */
  protected RangeMarker myExprMarker;
  /** All occurrences of the expression that the refactoring may replace. */
  protected E[] myOccurrences;
  /** Lazily created markers over the occurrences; see {@link #initOccurrencesMarkers()}. */
  protected List<RangeMarker> myOccurrenceMarkers;

  /**
   * Sets up the introducer and, if needed, normalizes whitespace in front of {@code expr}
   * before any markers are created.
   *
   * @param elementToRename the named element whose name is edited inline
   * @param editor          the editor hosting the inline template
   * @param project         current project
   * @param title           command/title shown for this refactoring
   * @param occurrences     all occurrences of the expression
   * @param expr            the expression being introduced, or null
   */
  public InplaceVariableIntroducer(PsiNamedElement elementToRename,
                                   Editor editor,
                                   final Project project,
                                   String title,
                                   E[] occurrences,
                                   @Nullable E expr) {
    super(editor, elementToRename, project);
    myTitle = title;
    myOccurrences = occurrences;
    if (expr != null) {
      final ASTNode node = expr.getNode();
      if (node != null) {
        ASTNode prev = node.getTreePrev();
        // If the previous sibling is not already whitespace, ask the language's token
        // separator generator whether a separator must be inserted between the tokens.
        final ASTNode astNode = prev instanceof PsiWhiteSpace
                                ? null
                                : LanguageTokenSeparatorGenerators.INSTANCE.forLanguage(expr.getLanguage())
                                  .generateWhitespaceBetweenTokens(prev, node);
        if (astNode != null) {
          final Lexer lexer = LanguageParserDefinitions.INSTANCE.forLanguage(expr.getLanguage()).createLexer(project);
          // Only insert the separator when the lexer says the tokens MUST be separated.
          // NOTE(review): both arguments are `prev` here — presumably intentional, but
          // canStickTokensTogetherByLexer(prev, node, lexer) would look more natural; confirm.
          if (LanguageUtil.canStickTokensTogetherByLexer(prev, prev, lexer) == ParserDefinition.SpaceRequirements.MUST) {
            // The tree mutation must run in a write command; postprocess formatting is
            // disabled so the inserted whitespace is not immediately reformatted away.
            PostprocessReformattingAspect.getInstance(project).disablePostprocessFormattingInside(
              () -> WriteCommandAction.writeCommandAction(project).withName("Normalize declaration").run(
                () -> node.getTreeParent().addChild(astNode, node)));
          }
        }
      }
      myExpr = expr;
    }
    // Only physical elements have document ranges a marker can track.
    myExprMarker = myExpr != null && myExpr.isPhysical() ? createMarker(myExpr) : null;
    initOccurrencesMarkers();
  }

  /** The whole identifier is selected when the inline template starts. */
  @Override
  protected boolean shouldSelectAll() {
    return true;
  }

  /** No start-mark action is needed for variable introduction (unlike inline rename). */
  @Override
  protected StartMarkAction startRename() throws StartMarkAction.AlreadyStartedException {
    return null;
  }

  public void setOccurrenceMarkers(List<RangeMarker> occurrenceMarkers) {
    myOccurrenceMarkers = occurrenceMarkers;
  }

  public void setExprMarker(RangeMarker exprMarker) {
    myExprMarker = exprMarker;
  }

  /** @return the introduced expression, or null once it became invalid or non-physical. */
  @Nullable
  public E getExpr() {
    return myExpr != null && myExpr.isValid() && myExpr.isPhysical() ? myExpr : null;
  }

  public E[] getOccurrences() {
    return myOccurrences;
  }

  /** @return occurrence markers, creating them on first access. */
  public List<RangeMarker> getOccurrenceMarkers() {
    if (myOccurrenceMarkers == null) {
      initOccurrencesMarkers();
    }
    return myOccurrenceMarkers;
  }

  /** Creates one marker per occurrence; no-op when markers already exist. */
  protected void initOccurrencesMarkers() {
    if (myOccurrenceMarkers != null) return;
    myOccurrenceMarkers = new ArrayList<>();
    for (E occurrence : myOccurrences) {
      myOccurrenceMarkers.add(createMarker(occurrence));
    }
  }

  /** Creates a marker tracking the element's current text range in the editor's document. */
  protected RangeMarker createMarker(PsiElement element) {
    return myEditor.getDocument().createRangeMarker(element.getTextRange());
  }

  public RangeMarker getExprMarker() {
    return myExprMarker;
  }

  /** The actual replacement work happens elsewhere; this template hook does nothing. */
  @Override
  protected boolean performRefactoring() {
    return false;
  }

  @Override
  protected void collectAdditionalElementsToRename(@NotNull List<Pair<PsiElement, TextRange>> stringUsages) {
  }

  @Override
  protected String getCommandName() {
    return myTitle;
  }

  /** Disposes all markers once the template finishes (expr marker survives a restart). */
  @Override
  protected void moveOffsetAfter(boolean success) {
    super.moveOffsetAfter(success);
    if (myOccurrenceMarkers != null) {
      for (RangeMarker marker : myOccurrenceMarkers) {
        marker.dispose();
      }
    }
    if (myExprMarker != null && !isRestart()) {
      myExprMarker.dispose();
    }
  }

  @Override
  protected MyLookupExpression createLookupExpression(PsiElement selectedElement) {
    return new MyIntroduceLookupExpression(getInitialName(), myNameSuggestions, myElementToRename, shouldSelectAll(),
                                           myAdvertisementText);
  }

  /**
   * Lookup expression that re-suggests names based on what the user has already typed
   * into the template variable, falling back to the precomputed items otherwise.
   */
  private static class MyIntroduceLookupExpression extends MyLookupExpression {
    /** Smart pointer so the renamed element survives PSI reparses during editing. */
    private final SmartPsiElementPointer<PsiNamedElement> myPointer;

    MyIntroduceLookupExpression(final String initialName,
                                final LinkedHashSet<String> names,
                                final PsiNamedElement elementToRename,
                                final boolean shouldSelectAll,
                                final String advertisementText) {
      super(initialName, names, elementToRename, elementToRename, shouldSelectAll, advertisementText);
      myPointer = SmartPointerManager.getInstance(elementToRename.getProject()).createSmartPsiElementPointer(elementToRename);
    }

    @Override
    public LookupElement[] calculateLookupItems(ExpressionContext context) {
      return createLookupItems(myName, context.getEditor(), getElement());
    }

    /** @return the element being renamed, or null if it has been invalidated. */
    @Nullable
    public PsiNamedElement getElement() {
      return myPointer.getElement();
    }

    /**
     * Builds lookup items for the current template state. When the user has typed a
     * non-empty value different from the initial name, fresh suggestions are derived
     * from that text; otherwise the precomputed {@code myLookupItems} are reused.
     */
    private LookupElement @Nullable [] createLookupItems(String name, Editor editor, PsiNamedElement psiVariable) {
      TemplateState templateState = TemplateManagerImpl.getTemplateState(editor);
      if (psiVariable != null) {
        final TextResult insertedValue =
          templateState != null ? templateState.getVariableValue(PRIMARY_VARIABLE_NAME) : null;
        if (insertedValue != null) {
          final String text = insertedValue.getText();
          if (!text.isEmpty() && !Comparing.strEqual(text, name)) {
            final LinkedHashSet<String> names = new LinkedHashSet<>();
            names.add(text);  // the typed text is always the first suggestion
            NameSuggestionProvider.suggestNames(psiVariable, psiVariable, names);
            final LookupElement[] items = new LookupElement[names.size()];
            final Iterator<String> iterator = names.iterator();
            for (int i = 0; i < items.length; i++) {
              items[i] = LookupElementBuilder.create(iterator.next());
            }
            return items;
          }
        }
      }
      return myLookupItems;
    }
  }
}
// vim:filetype=java:ts=4
/*
	Copyright (c) 2006, 2007
	Conor McDermottroe.  All rights reserved.

	Redistribution and use in source and binary forms, with or without
	modification, are permitted provided that the following conditions
	are met:
	1. Redistributions of source code must retain the above copyright
	   notice, this list of conditions and the following disclaimer.
	2. Redistributions in binary form must reproduce the above copyright
	   notice, this list of conditions and the following disclaimer in the
	   documentation and/or other materials provided with the distribution.
	3. Neither the name of the author nor the names of any contributors to
	   the software may be used to endorse or promote products derived from
	   this software without specific prior written permission.

	THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
	"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
	LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
	A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
	HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
	SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
	LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
	DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
	THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
	(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
	OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.mcdermottroe.exemplar.output.java;

import java.io.File;
import java.text.ParseException;
import java.util.Map;

import com.mcdermottroe.exemplar.DBC;
import com.mcdermottroe.exemplar.model.XMLDocumentType;
import com.mcdermottroe.exemplar.model.XMLEntity;
import com.mcdermottroe.exemplar.model.XMLEntityType;
import com.mcdermottroe.exemplar.model.XMLExternalIdentifier;
import com.mcdermottroe.exemplar.output.OutputException;
import com.mcdermottroe.exemplar.output.OutputUtils;
import com.mcdermottroe.exemplar.output.XMLParserGeneratorException;
import com.mcdermottroe.exemplar.output.XMLParserSourceGenerator;
import com.mcdermottroe.exemplar.ui.Message;
import com.mcdermottroe.exemplar.ui.Options;
import com.mcdermottroe.exemplar.utils.Strings;
import com.mcdermottroe.exemplar.utils.XML;

import static com.mcdermottroe.exemplar.Constants.Character.EQUALS;
import static com.mcdermottroe.exemplar.Constants.Character.SPACE;
import static com.mcdermottroe.exemplar.Constants.EOL;
import static com.mcdermottroe.exemplar.Constants.Format.Code.Java.PACKAGE;
import static com.mcdermottroe.exemplar.Constants.Format.Filenames.JAVA_PARSER;
import static com.mcdermottroe.exemplar.Constants.Format.Filenames.JFLEX;
import static com.mcdermottroe.exemplar.Constants.NULL_STRING;
import static com.mcdermottroe.exemplar.Constants.Output.Java.BUFFER_SIZE;
import static com.mcdermottroe.exemplar.Constants.Output.Java.ENTITIES_FILE;
import static com.mcdermottroe.exemplar.Constants.PROGRAM_NAME;

/** A class which generates Java parsers that implement the SAX1 and SAX2
	Parser interfaces, depending on the properties files that are loaded.

	@author	Conor McDermottroe
	@since	0.1
	@param	<T>	The type of {@link XMLJavaSourceGenerator}.
*/
public abstract class XMLJavaSourceGenerator<T extends XMLParserSourceGenerator<T>>
extends XMLParserSourceGenerator<T>
{
	/** Creates a source generator which produces Java parsers. Protected as
		this is an abstract class.

		@throws XMLParserGeneratorException	if the super-class constructor
											throws one.
	*/
	protected XMLJavaSourceGenerator()
		throws XMLParserGeneratorException
	{
		// The parent does all the work.
		super();
	}

	/** Copy constructor, see {@link
		XMLParserSourceGenerator#XMLParserSourceGenerator(Map, String)} for
		details.

		@param	code	The code fragments.
		@param	time	The timestamp.
	*/
	protected XMLJavaSourceGenerator(Map<String, String> code, String time) {
		super(code, time);
	}

	/** Generates the Java source for a SAX parser and places the source in
		the given directory.

		@param	doctype				The description of the vocabulary of XML
									to generate a parser for.
		@param	targetDirectory		The directory in which to place the
									source.
		@throws XMLParserGeneratorException	if any of the generation methods
											throw one.
	*/
	@Override
	public void generateParser(XMLDocumentType doctype, File targetDirectory)
		throws XMLParserGeneratorException
	{
		DBC.REQUIRE(doctype != null);
		DBC.REQUIRE(targetDirectory != null);
		assert doctype != null;

		// Resolve the targetDirectory parameter into an absolute path that
		// exists.
		File sourceDirectory = getSourceDirectory(targetDirectory);
		DBC.ASSERT(sourceDirectory != null);

		// Get the vocabulary name
		String vocabulary = Options.getString("vocabulary");
		DBC.ASSERT(vocabulary != null);

		// The Files to write to.
		File classFile = new File(
			sourceDirectory,
			String.format(JAVA_PARSER, vocabulary)
		);
		File parseFile = new File(
			sourceDirectory,
			String.format(JFLEX, vocabulary)
		);
		File entitiesFile = new File(sourceDirectory, ENTITIES_FILE);

		// Generate the three output files. (Was mislabelled "four files";
		// only the parser class, the JFlex spec and the entities file are
		// produced here, and the entities file only when entities are
		// included.)
		generateParserJavaFile(vocabulary, classFile);
		generateParserJFlexFile(
			vocabulary,
			doctype.hasAttlists(),
			doctype.entities(),
			parseFile
		);
		generateEntitiesFile(doctype.entities(), entitiesFile);
	}

	/** Write the ${VOCABULARY}Parser.java file.

		@param	vocabulary	The name of the XML vocabulary that the parser
							will parse.
		@param	outputFile	The {@link File} to write out to
		@throws XMLParserGeneratorException	if the code fragments could not be
											loaded or if the file could not
											be written to.
	*/
	private void generateParserJavaFile(String vocabulary, File outputFile)
		throws XMLParserGeneratorException
	{
		DBC.REQUIRE(vocabulary != null);
		DBC.REQUIRE(outputFile != null);
		assert vocabulary != null;
		assert outputFile != null;

		// Get the template
		String messageFormatTemplate = loadCodeFragment("JAVA_MAIN_TEMPLATE");
		DBC.ASSERT(messageFormatTemplate != null);

		// Figure out the package statement, if an output package was set.
		String packageStatement = "";
		String pkg = Options.getString("output-package");
		if (pkg != null) {
			packageStatement = String.format(PACKAGE, pkg);
		}

		// Make the contents of the output file
		String outputFileContents = Strings.formatMessage(
			messageFormatTemplate,
			PROGRAM_NAME,
			timestamp,
			vocabulary,
			Integer.toString(BUFFER_SIZE),
			packageStatement
		);

		// Write out the file
		try {
			OutputUtils.writeStringToFile(outputFileContents, outputFile);
		} catch (OutputException e) {
			throw new XMLParserGeneratorException(
				Message.FILE_WRITE_FAILED(
					outputFile.getAbsolutePath()
				),
				e
			);
		}
	}

	/** Write the ${VOCABULARY}.jflex file.

		@param	vocabulary		The name of the XML vocabulary that the parser
								will parse.
		@param	usesAttlists	Whether or not the {@link XMLDocumentType}
								declares any attribute lists.
		@param	entities		The entities that are defined in the {@link
								XMLDocumentType}.
		@param	outputFile		The {@link File} to write out to.
		@throws XMLParserGeneratorException	if the code fragments could not be
											loaded or if the output file
											could not be written to.
	*/
	private void generateParserJFlexFile(
		String vocabulary,
		boolean usesAttlists,
		Map<String, XMLEntity> entities,
		File outputFile
	)
		throws XMLParserGeneratorException
	{
		DBC.REQUIRE(vocabulary != null);
		DBC.REQUIRE(outputFile != null);
		DBC.REQUIRE(codeFragments != null);

		// Get the template strings. Each optional language feature is only
		// included when it has not been excluded via the options.
		String messageFormatTemplate = loadCodeFragment("JFLEX_MAIN_TEMPLATE");
		String processingInstructionProcessor = EOL;
		if (!Options.isSet("exclude", "PI")) {
			processingInstructionProcessor = loadCodeFragment(
				"PROCESSING_INSTRUCTION_PROCESSOR"
			);
		}
		String commentProcessor = "";
		if (!Options.isSet("exclude", "Comment")) {
			commentProcessor = loadCodeFragment("COMMENT_PROCESSOR");
		}
		String doctypeDeclProcessor = "";
		if (!Options.isSet("exclude", "doctypedecl")) {
			doctypeDeclProcessor = loadCodeFragment("DOCTYPEDECL_PROCESSOR");
		}
		String cdSectProcessor = "";
		if (!Options.isSet("exclude", "CDSect")) {
			cdSectProcessor = loadCodeFragment("CDSECT_PROCESSOR");
		}
		String predefinedEntities = "";
		String externalEntityRules = "";
		String intEntResolver = "";
		if (!Options.isSet("exclude", "References")) {
			predefinedEntities = loadCodeFragment("PREDEFINED_ENTITIES");
			intEntResolver = loadCodeFragment("INTERNAL_ENT_RESOLVER");
			if (Options.isSet("include", "entities")) {
				if (entities != null) {
					StringBuilder extEntities = new StringBuilder();
					// Iterate over entrySet() to avoid a second lookup per
					// entity (was keySet() followed by get()).
					for	(
							Map.Entry<String, XMLEntity> entry :
							entities.entrySet()
						)
					{
						String entityName = entry.getKey();
						XMLEntity entity = entry.getValue();
						switch (entity.type()) {
							case INTERNAL:
								// Do nothing, internal entities
								// are handled elsewhere.
								break;
							case EXTERNAL_PARSED:
								// Get the external identifier (publicID and
								// systemID) for this entity.
								XMLExternalIdentifier extID;
								extID = entity.externalID();

								String extEntPropMessageFormat =
									"EXT_ENT_PROP_TEXT_FMT";
								Object[] args = {
									entityName,
									NULL_STRING,
									extID.systemID(),
								};
								if (extID.publicID() != null) {
									args[1] = extID.publicID();
								}
								extEntities.append(
									Strings.formatMessage(
										loadCodeFragment(
											extEntPropMessageFormat
										),
										args
									)
								);
								break;
							case EXTERNAL_UNPARSED:
								// These are not handled by SAX, and virtually
								// nobody uses them either.
								break;
						}
					}
					// Was `new String(extEntities)`; toString() is the
					// idiomatic conversion and avoids an extra copy ctor.
					externalEntityRules = extEntities.toString();
				}
			}
		}
		String charRefResolver = "";
		if (!Options.isSet("exclude", "CharRef")) {
			charRefResolver = loadCodeFragment("CHAR_REF_RESOLVER");
		}

		// Element rules: pick the attlist-aware variants only when the
		// document type actually declares attribute lists.
		String emptyElementRule;
		String startElementRule;
		if (usesAttlists) {
			startElementRule = loadCodeFragment("START_TAG_ATTLIST");
			emptyElementRule = loadCodeFragment("EMPTY_TAG_ATTLIST");
		} else {
			startElementRule = loadCodeFragment("START_TAG_NO_ATTLIST");
			emptyElementRule = loadCodeFragment("EMPTY_TAG_NO_ATTLIST");
		}

		// Figure out the package statement, if an output package was set.
		String packageStatement = "";
		String pkg = Options.getString("output-package");
		if (pkg != null) {
			packageStatement = String.format(PACKAGE, pkg);
		}

		// Make the contents of the output file
		String outputFileContents = Strings.formatMessage(
			messageFormatTemplate,
			PROGRAM_NAME,
			timestamp,
			vocabulary,
			processingInstructionProcessor,
			commentProcessor,
			doctypeDeclProcessor,
			cdSectProcessor,
			emptyElementRule,
			startElementRule,
			predefinedEntities,
			externalEntityRules,
			charRefResolver,
			intEntResolver,
			packageStatement
		);
		DBC.ASSERT(outputFileContents != null);

		// Write out the file
		try {
			OutputUtils.writeStringToFile(outputFileContents, outputFile);
		} catch (OutputException e) {
			throw new XMLParserGeneratorException(
				Message.FILE_WRITE_FAILED(
					outputFile.getAbsolutePath()
				),
				e
			);
		}
	}

	/** Generate the resource file containing all of the entities. If
		entities are not included, then no file is created.

		@param	entities	The entities declared in the DTD.
		@param	outputFile	The {@link File} to output to.
		@throws XMLParserGeneratorException	if the code fragments could not
											be loaded, if a character
											reference could not be resolved
											or if the output file could not
											be written to.
	*/
	private void generateEntitiesFile(
		Map<String, XMLEntity> entities,
		File outputFile
	)
		throws XMLParserGeneratorException
	{
		DBC.REQUIRE(entities != null);
		DBC.REQUIRE(outputFile != null);
		assert entities != null;
		assert outputFile != null;

		if (Options.isSet("include", "entities")) {
			// Get the template
			String messageFormatTemplate = loadCodeFragment(
				"ENTITIES_MAIN_TEMPLATE"
			);
			DBC.ASSERT(messageFormatTemplate != null);

			// Go through all of the entities and insert all of the internal
			// entities into the properties file string in the map's
			// iteration order.
			StringBuilder entityProperties = new StringBuilder();
			for	(
					Map.Entry<String, XMLEntity> entry :
					entities.entrySet()
				)
			{
				XMLEntity entity = entry.getValue();
				if (entity.type().equals(XMLEntityType.INTERNAL)) {
					entityProperties.append(entry.getKey());
					entityProperties.append(SPACE);
					entityProperties.append(EQUALS);
					entityProperties.append(SPACE);
					try {
						entityProperties.append(
							Strings.toCanonicalForm(
								XML.resolveCharacterReferences(
									entity.value()
								)
							)
						);
					} catch (ParseException e) {
						throw new XMLParserGeneratorException(e);
					}
					entityProperties.append(EOL);
				}
			}

			// Make the contents of the output file
			String outputFileContents = Strings.formatMessage(
				messageFormatTemplate,
				PROGRAM_NAME,
				timestamp,
				entityProperties
			);

			// Write out the file.
			try {
				OutputUtils.writeStringToFile(outputFileContents, outputFile);
			} catch (OutputException e) {
				throw new XMLParserGeneratorException(
					Message.FILE_WRITE_FAILED(
						outputFile.getAbsolutePath()
					),
					e
				);
			}
		}
	}

	/** {@inheritDoc} */
	@Override
	public String describeLanguage() {
		return Message.LANGUAGE_JAVA();
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.examples.datagrid.store.jdbc;

import org.apache.ignite.*;
import org.apache.ignite.cache.store.*;
import org.apache.ignite.examples.datagrid.store.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.resources.*;
import org.jetbrains.annotations.*;

import javax.cache.*;
import javax.cache.integration.*;
import java.sql.*;
import java.util.*;

/**
 * Example of {@link CacheStore} implementation that uses JDBC
 * transaction with cache transactions and maps {@link Long} to {@link Person}.
 */
public class CacheJdbcPersonStore extends CacheStoreAdapter<Long, Person> {
    /** Transaction metadata attribute name. */
    private static final String ATTR_NAME = "SIMPLE_STORE_CONNECTION";

    /** Auto-injected store session. */
    @CacheStoreSessionResource
    private CacheStoreSession ses;

    /**
     * Constructor.
     *
     * @throws IgniteException If failed.
     */
    public CacheJdbcPersonStore() throws IgniteException {
        prepareDb();
    }

    /**
     * Prepares database for example execution. This method will create a
     * table called "PERSONS" so it can be used by store implementation.
     *
     * @throws IgniteException If failed.
     */
    private void prepareDb() throws IgniteException {
        try (Connection conn = openConnection(false); Statement st = conn.createStatement()) {
            st.execute("create table if not exists PERSONS (id number unique, firstName varchar(255), " +
                "lastName varchar(255))");

            conn.commit();
        }
        catch (SQLException e) {
            throw new IgniteException("Failed to create database table.", e);
        }
    }

    /** {@inheritDoc} */
    @Override public void sessionEnd(boolean commit) {
        Map<String, Connection> props = ses.properties();

        // Remove the connection from the session so a later transaction on
        // the same session starts fresh; try-with-resources closes it.
        try (Connection conn = props.remove(ATTR_NAME)) {
            if (conn != null) {
                if (commit)
                    conn.commit();
                else
                    conn.rollback();
            }

            System.out.println(">>> Transaction ended [commit=" + commit + ']');
        }
        catch (SQLException e) {
            throw new CacheWriterException("Failed to end transaction: " + ses.transaction(), e);
        }
    }

    /** {@inheritDoc} */
    @Override public Person load(Long key) {
        System.out.println(">>> Loading key: " + key);

        Connection conn = null;

        try {
            conn = connection();

            try (PreparedStatement st = conn.prepareStatement("select * from PERSONS where id=?")) {
                // Bind the numeric id column as a long, consistently with
                // write() and delete() (was setString(1, key.toString()),
                // which relied on driver-side string-to-number coercion).
                st.setLong(1, key);

                try (ResultSet rs = st.executeQuery()) {
                    if (rs.next())
                        return new Person(rs.getLong(1), rs.getString(2), rs.getString(3));
                }
            }
        }
        catch (SQLException e) {
            throw new CacheLoaderException("Failed to load object: " + key, e);
        }
        finally {
            end(conn);
        }

        return null;
    }

    /** {@inheritDoc} */
    @Override public void write(Cache.Entry<? extends Long, ? extends Person> entry) {
        Long key = entry.getKey();
        Person val = entry.getValue();

        System.out.println(">>> Putting [key=" + key + ", val=" + val + ']');

        Connection conn = null;

        try {
            conn = connection();

            int updated;

            // Try update first. If it does not work, then try insert.
            // Some databases would allow these to be done in one 'upsert' operation.
            try (PreparedStatement st = conn.prepareStatement(
                "update PERSONS set firstName=?, lastName=? where id=?")) {
                st.setString(1, val.getFirstName());
                st.setString(2, val.getLastName());
                st.setLong(3, val.getId());

                updated = st.executeUpdate();
            }

            // If update failed, try to insert.
            if (updated == 0) {
                try (PreparedStatement st = conn.prepareStatement(
                    "insert into PERSONS (id, firstName, lastName) values(?, ?, ?)")) {
                    st.setLong(1, val.getId());
                    st.setString(2, val.getFirstName());
                    st.setString(3, val.getLastName());

                    st.executeUpdate();
                }
            }
        }
        catch (SQLException e) {
            // Write failures are reported as CacheWriterException, matching
            // delete() and the JCache CacheWriter contract (was
            // CacheLoaderException, which misclassified the error).
            throw new CacheWriterException("Failed to put object [key=" + key + ", val=" + val + ']', e);
        }
        finally {
            end(conn);
        }
    }

    /** {@inheritDoc} */
    @Override public void delete(Object key) {
        System.out.println(">>> Removing key: " + key);

        Connection conn = null;

        try {
            conn = connection();

            try (PreparedStatement st = conn.prepareStatement("delete from PERSONS where id=?")) {
                st.setLong(1, (Long)key);

                st.executeUpdate();
            }
        }
        catch (SQLException e) {
            throw new CacheWriterException("Failed to remove object: " + key, e);
        }
        finally {
            end(conn);
        }
    }

    /** {@inheritDoc} */
    @Override public void loadCache(IgniteBiInClosure<Long, Person> clo, Object... args) {
        if (args == null || args.length == 0 || args[0] == null)
            throw new CacheLoaderException("Expected entry count parameter is not provided.");

        final int entryCnt = (Integer)args[0];

        try (Connection conn = connection()) {
            try (PreparedStatement st = conn.prepareStatement("select * from PERSONS")) {
                try (ResultSet rs = st.executeQuery()) {
                    int cnt = 0;

                    while (cnt < entryCnt && rs.next()) {
                        Person person = new Person(rs.getLong(1), rs.getString(2), rs.getString(3));

                        clo.apply(person.getId(), person);

                        cnt++;
                    }

                    System.out.println(">>> Loaded " + cnt + " values into cache.");
                }
            }
        }
        catch (SQLException e) {
            throw new CacheLoaderException("Failed to load values from cache store.", e);
        }
    }

    /**
     * @return Connection.
     * @throws SQLException In case of error.
     */
    private Connection connection() throws SQLException {
        // If there is an ongoing transaction,
        // we must reuse the same connection.
        if (ses.isWithinTransaction()) {
            Map<Object, Object> props = ses.properties();

            Connection conn = (Connection)props.get(ATTR_NAME);

            if (conn == null) {
                conn = openConnection(false);

                // Store connection in session properties, so it can be accessed
                // for other operations on the same transaction.
                props.put(ATTR_NAME, conn);
            }

            return conn;
        }
        // Transaction can be null in case of simple load or put operation.
        else
            return openConnection(true);
    }

    /**
     * Closes allocated resources depending on transaction status.
     *
     * @param conn Allocated connection.
     */
    private void end(@Nullable Connection conn) {
        if (!ses.isWithinTransaction() && conn != null) {
            // Close connection right away if there is no transaction.
            try {
                conn.close();
            }
            catch (SQLException ignored) {
                // No-op.
            }
        }
    }

    /**
     * Gets connection from a pool.
     *
     * @param autocommit {@code true} If connection should use autocommit mode.
     * @return Pooled connection.
     * @throws SQLException In case of error.
     */
    private Connection openConnection(boolean autocommit) throws SQLException {
        Connection conn = DriverManager.getConnection("jdbc:h2:mem:example;DB_CLOSE_DELAY=-1");

        conn.setAutoCommit(autocommit);

        return conn;
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.plugins.groovy.swingBuilder; import com.google.common.collect.ImmutableMap; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.NlsSafe; import com.intellij.openapi.util.UserDataHolderEx; import com.intellij.psi.*; import com.intellij.psi.scope.ElementClassHint; import com.intellij.psi.scope.PsiScopeProcessor; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.MultiMap; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.extensions.NamedArgumentDescriptor; import org.jetbrains.plugins.groovy.extensions.impl.TypeCondition; import org.jetbrains.plugins.groovy.lang.psi.api.auxiliary.modifiers.GrModifierFlags; import org.jetbrains.plugins.groovy.lang.psi.impl.synthetic.GrLightMethodBuilder; import org.jetbrains.plugins.groovy.lang.psi.impl.synthetic.GrMethodWrapper; import org.jetbrains.plugins.groovy.lang.psi.util.GroovyCommonClassNames; import org.jetbrains.plugins.groovy.lang.resolve.NonCodeMembersContributor; import org.jetbrains.plugins.groovy.lang.resolve.ResolveUtil; import java.util.Collection; import java.util.HashMap; import java.util.Map; /** * @author Sergey Evdokimov */ public class SwingBuilderNonCodeMemberContributor extends NonCodeMembersContributor { private static final Key<MultiMap<String, PsiMethod>> KEY = Key.create("SwingBuilderNonCodeMemberContributor.KEY"); @NonNls private static final Object METHOD_KIND = "SwingBuilder_builder_method"; private static final class MyBuilder { private final PsiManager myManager; private final MultiMap<String, PsiMethod> myResult = new MultiMap<>(); private final GlobalSearchScope myResolveScope; private final PsiElementFactory myFactory; 
    private final PsiClass mySwingBuilderClass;

    // Synthetic `Object...` vararg type used as the trailing parameter of every builder method.
    private final PsiType MANY_OBJECTS;

    // Cache of fully-qualified name -> PsiType so each class reference is resolved once per run.
    private final Map<String, PsiType> myTypeMap = new HashMap<>();

    private MyBuilder(PsiClass swingBuilderClass) {
      myManager = swingBuilderClass.getManager();
      mySwingBuilderClass = swingBuilderClass;
      myResolveScope = swingBuilderClass.getResolveScope();
      myFactory = JavaPsiFacade.getElementFactory(myManager.getProject());
      // Must be initialized after the fields above: type() uses myFactory/myResolveScope/myTypeMap.
      MANY_OBJECTS = new PsiEllipsisType(type(CommonClassNames.JAVA_LANG_OBJECT));
    }

    /**
     * Light method whose navigation target is redirected to the {@code newInstance}
     * method of the corresponding {@code groovy.swing.factory.*} class, when one exists.
     */
    public static class MyMethodBuilder extends GrLightMethodBuilder {
      // Fully-qualified name of the groovy.swing factory class to navigate to; may stay null.
      private String myNavigationClass;

      public MyMethodBuilder(PsiManager manager, String name) {
        super(manager, name);
        setMethodKind(METHOD_KIND);
        setOriginInfo("SwingBuilder method");
      }

      @NotNull
      @Override
      public PsiElement getNavigationElement() {
        PsiElement res = super.getNavigationElement();
        // If a superclass already redirected navigation, or no factory class is set, keep it.
        if (res != this || myNavigationClass == null) return res;

        PsiClass aClass = JavaPsiFacade.getInstance(getProject()).findClass(myNavigationClass, getResolveScope());
        if (aClass == null) return res;

        // Navigate to the factory's single newInstance(...) when unambiguous, else to the class.
        PsiMethod[] methods = aClass.findMethodsByName("newInstance", false);
        if (methods.length != 1) return aClass;

        return methods[0];
      }

      public void setNavigationClass(String navigationClass) {
        myNavigationClass = navigationClass;
      }
    }

    /** Resolves (and caches) the PsiType for the given fully-qualified class name. */
    @NotNull
    private PsiType type(@NotNull String typeName) {
      PsiType res = myTypeMap.get(typeName);
      if (res == null) {
        res = myFactory.createTypeByFQClassName(typeName, myResolveScope);
        myTypeMap.put(typeName, res);
      }
      return res;
    }

    /** Records a generated method under its name in the result multi-map. */
    private void add(@NotNull PsiMethod method) {
      myResult.putValue(method.getName(), method);
    }

    /**
     * Creates a public light method with the given name and return type,
     * optionally wired to navigate to a groovy.swing factory class.
     */
    private MyMethodBuilder method(String name, String returnType, @Nullable String navigationClass) {
      MyMethodBuilder res = new MyMethodBuilder(myManager, name);
      res.setModifiers(GrModifierFlags.PUBLIC_MASK);
      res.setReturnType(type(returnType));
      res.setContainingClass(mySwingBuilderClass);
      if (navigationClass != null) {
        // Only groovy.swing factories are expected here; anything else is a programming error.
        assert navigationClass.startsWith("groovy.swing."); // NON-NLS
        res.setNavigationClass(navigationClass);
      }
      return res;
    }

    /** Convenience overload: builder node without named-argument descriptors. */
    private void methodObject(@NlsSafe String name, String returnType, @Nullable String navigationClass) {
      methodObject(name, returnType, navigationClass, null);
    }

    /**
     * Registers one builder node as a synthetic method taking an optional
     * attribute map plus an {@code Object...} tail, with optional named-argument
     * descriptors for completion of map keys.
     */
    private void methodObject(@NlsSafe String name,
                              @NlsSafe String returnType,
                              @Nullable String navigationClass,
                              @Nullable Map<@NlsSafe String, NamedArgumentDescriptor> namedArg) {
      MyMethodBuilder method = method(name, returnType, navigationClass);
      method.addParameter("map", type(CommonClassNames.JAVA_UTIL_MAP), true);
      method.addParameter("params", MANY_OBJECTS);
      if (namedArg != null) {
        method.setNamedParameters(namedArg);
      }
      add(method);
    }

    //private MyMethodBuilder methodWithAttr(String name, String returnType, @Nullable String navigationClass) {
    //  return method(name, returnType, navigationClass).addParameter("attr", CommonClassNames.JAVA_UTIL_MAP, true);
    //}

    //private MyMethodBuilder method(String name, String returnType, @Nullable String navigationClass, String paramName, @Nullable String paramType, boolean isOptional) {
    //  MyMethodBuilder res = methodWithAttr(name, returnType, navigationClass);
    //  res.addParameter(paramName, paramType == null ? returnType : paramType, isOptional);
    //  res.addClosureParam();
    //
    //  return res;
    //}

    //private void beanFactory(@Nullable String factoryName, String name, String returnType, boolean parameterOptional) {
    //  add(method(name, returnType, factoryName, "value", CommonClassNames.JAVA_LANG_STRING, false));
    //  add(method(name, returnType, factoryName,"value", returnType, parameterOptional));
    //}

    //private MyMethodBuilder acceptAllMethod(String name, String returnType, @Nullable String navigationClass) {
    //  return acceptAllMethodLeaf(name, returnType, navigationClass).addClosureParam();
    //}

    //private MyMethodBuilder acceptAllMethodLeaf(String name, String returnType, @Nullable String navigationClass) {
    //  return methodWithAttr(name, returnType, navigationClass).addParameter("value", CommonClassNames.JAVA_LANG_OBJECT, true);
    //}

    /**
     * Wraps the real methods of SwingBuilder named {@code realMethodName} so they
     * are contributed as-is.
     * NOTE(review): the {@code name} parameter is never used — the wrapped methods
     * keep their real name; confirm whether renaming was intended.
     */
    private void registerExplicitMethod(String name, String realMethodName) {
      for (PsiMethod method : mySwingBuilderClass.findMethodsByName(realMethodName, false)) {
        add(GrMethodWrapper.wrap(method));
      }
    }

    /**
     * Registers every SwingBuilder DSL node. Section comments mirror the
     * register*() grouping of the original Groovy SwingBuilder implementation.
     */
    private void generateMethods() {
      // registerSupportNodes()
      methodObject("action", "javax.swing.Action", "groovy.swing.factory.ActionFactory");
      methodObject("actions", CommonClassNames.JAVA_UTIL_LIST, "groovy.swing.factory.CollectionFactory");
      methodObject("map", CommonClassNames.JAVA_UTIL_MAP, "groovy.swing.factory.MapFactory");

      methodObject("imageIcon", "javax.swing.ImageIcon", "groovy.swing.factory.ImageIconFactory", Map.of(
        "image", new TypeCondition(type("java.awt.Image")),
        "url", new TypeCondition(type("java.net.URL")),
        "file", NamedArgumentDescriptor.SIMPLE_ON_TOP,
        "resource", NamedArgumentDescriptor.SIMPLE_ON_TOP,
        "class", NamedArgumentDescriptor.SIMPLE_ON_TOP,
        "description", NamedArgumentDescriptor.TYPE_STRING));

      // NOTE(review): BoxLayout as the return type of buttonGroup looks wrong
      // (ButtonGroup expected) but matches the historic registration below — confirm before changing.
      methodObject("buttonGroup", "javax.swing.BoxLayout", "groovy.swing.factory.ButtonGroupFactory");
      methodObject("noparent", CommonClassNames.JAVA_UTIL_LIST, "groovy.swing.factory.CollectionFactory");
      registerExplicitMethod("keyStrokeAction", "createKeyStrokeAction");
      //registerExplicitMethod("shortcut", "shortcut");

      // registerBinding()
      methodObject("bind", "org.codehaus.groovy.binding.FullBinding", "groovy.swing.factory.BindFactory",
                   ContainerUtil.<@NlsSafe String, NamedArgumentDescriptor>immutableMapBuilder()
                     .put("source", NamedArgumentDescriptor.SIMPLE_ON_TOP)
                     .put("target", NamedArgumentDescriptor.SIMPLE_ON_TOP)
                     .put("update", NamedArgumentDescriptor.SIMPLE_ON_TOP)
                     .put("targetProperty", NamedArgumentDescriptor.TYPE_STRING)
                     .put("mutual", NamedArgumentDescriptor.SIMPLE_ON_TOP)
                     .put("sourceEvent", NamedArgumentDescriptor.TYPE_STRING)
                     .put("sourceValue", NamedArgumentDescriptor.TYPE_CLOSURE)
                     .put("sourceProperty", NamedArgumentDescriptor.TYPE_STRING)
                     .put("value", NamedArgumentDescriptor.SIMPLE_ON_TOP)
                     .put("bind", NamedArgumentDescriptor.SIMPLE_ON_TOP)
                     .put("group", NamedArgumentDescriptor.SIMPLE_ON_TOP)
                     .build());

      methodObject("bindProxy", "org.codehaus.groovy.binding.BindingProxy", "groovy.swing.factory.BindProxyFactory",
                   ImmutableMap.of("bind", NamedArgumentDescriptor.SIMPLE_ON_TOP));
      methodObject("bindGroup", "org.codehaus.groovy.binding.AggregateBinding", "groovy.swing.factory.BindGroupFactory",
                   ImmutableMap.of("bind", NamedArgumentDescriptor.SIMPLE_ON_TOP));

      // registerPassThruNodes()
      methodObject("widget", "java.awt.Component", "groovy.swing.factory.WidgetFactory",
                   ImmutableMap.of("widget", new TypeCondition(type("java.awt.Component"))));
      methodObject("container", "java.awt.Component", "groovy.swing.factory.WidgetFactory",
                   ImmutableMap.of("container", new TypeCondition(type("java.awt.Component"))));
      methodObject("bean", CommonClassNames.JAVA_LANG_OBJECT, "groovy.swing.factory.WidgetFactory",
                   ImmutableMap.of("bean", NamedArgumentDescriptor.SIMPLE_ON_TOP));

      // registerWindows()
      methodObject("dialog", "javax.swing.JDialog", "groovy.swing.factory.DialogFactory",
                   Map.of("owner", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                          "defaultButtonProperty", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                          "pack", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                          "show", NamedArgumentDescriptor.SIMPLE_ON_TOP));
      methodObject("fileChooser", "javax.swing.JFileChooser", null);
      methodObject("frame", "javax.swing.JFrame", "groovy.swing.factory.FrameFactory",
                   Map.of("pack", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                          "show", NamedArgumentDescriptor.SIMPLE_ON_TOP));
      methodObject("optionPane", "javax.swing.JOptionPane", null);
      methodObject("window", "javax.swing.JWindow", "groovy.swing.factory.WindowFactory",
                   Map.of("pack", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                          "show", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                          "owner", NamedArgumentDescriptor.SIMPLE_ON_TOP));

      // registerActionButtonWidgets()
      methodObject("button", "javax.swing.JButton", "groovy.swing.factory.RichActionWidgetFactory");
      methodObject("checkBox", "javax.swing.JCheckBox", "groovy.swing.factory.RichActionWidgetFactory");
      methodObject("checkBoxMenuItem", "javax.swing.JCheckBoxMenuItem", "groovy.swing.factory.RichActionWidgetFactory");
      methodObject("menuItem", "javax.swing.JMenuItem", "groovy.swing.factory.RichActionWidgetFactory");
      methodObject("radioButton", "javax.swing.JRadioButton", "groovy.swing.factory.RichActionWidgetFactory");
      methodObject("radioButtonMenuItem", "javax.swing.JRadioButtonMenuItem", "groovy.swing.factory.RichActionWidgetFactory");
      methodObject("toggleButton", "javax.swing.JToggleButton", "groovy.swing.factory.RichActionWidgetFactory");

      // registerTextWidgets()
      methodObject("editorPane", "javax.swing.JEditorPane", "groovy.swing.factory.TextArgWidgetFactory");
      methodObject("label", "javax.swing.JLabel", "groovy.swing.factory.TextArgWidgetFactory");
      methodObject("passwordField", "javax.swing.JPasswordField", "groovy.swing.factory.TextArgWidgetFactory");
      methodObject("textArea", "javax.swing.JTextArea", "groovy.swing.factory.TextArgWidgetFactory");
      methodObject("textField", "javax.swing.JTextField", "groovy.swing.factory.TextArgWidgetFactory");
      methodObject("textPane", "javax.swing.JTextPane", "groovy.swing.factory.TextArgWidgetFactory");
      methodObject("formattedTextField", "javax.swing.JFormattedTextField", "groovy.swing.factory.FormattedTextFactory",
                   ImmutableMap.of(
                     "format", new TypeCondition(type("java.text.Format")),
                     "value", NamedArgumentDescriptor.SIMPLE_ON_TOP));

      // registerMDIWidgets()
      methodObject("desktopPane", "javax.swing.JDesktopPane", null);
      methodObject("internalFrame", "javax.swing.JInternalFrame", "groovy.swing.factory.InternalFrameFactory");

      // registerBasicWidgets()
      methodObject("colorChooser", "javax.swing.JColorChooser", null);
      methodObject("comboBox", "javax.swing.JComboBox", "groovy.swing.factory.ComboBoxFactory",
                   ImmutableMap.of("items", NamedArgumentDescriptor.SIMPLE_ON_TOP));
      methodObject("list", "javax.swing.JList", "groovy.swing.factory.ListFactory",
                   ImmutableMap.of("items", NamedArgumentDescriptor.SIMPLE_ON_TOP));
      methodObject("progressBar", "javax.swing.JProgressBar", null);
      methodObject("separator", "javax.swing.JSeparator", "groovy.swing.factory.SeparatorFactory");
      methodObject("scrollBar", "javax.swing.JScrollBar", null);
      methodObject("slider", "javax.swing.JSlider", null);
      methodObject("spinner", "javax.swing.JSpinner", null);
      methodObject("tree", "javax.swing.JTree", null);

      //registerMenuWidgets()
      methodObject("menu", "javax.swing.JMenu", null);
      methodObject("menuBar", "javax.swing.JMenuBar", null);
      methodObject("popupMenu", "javax.swing.JPopupMenu", null);

      // registerContainers()
      methodObject("panel", "javax.swing.JPanel", null);
      methodObject("scrollPane", "javax.swing.JScrollPane", "groovy.swing.factory.ScrollPaneFactory");
      methodObject("splitPane", "javax.swing.JSplitPane", "groovy.swing.factory.SplitPaneFactory");
      methodObject("tabbedPane", "javax.swing.JTabbedPane", "groovy.swing.factory.TabbedPaneFactory");
      methodObject("toolBar", "javax.swing.JToolBar", null);
      methodObject("viewport", "javax.swing.JViewport", null);
      methodObject("layeredPane", "javax.swing.JLayeredPane", null);

      // registerDataModels()
      methodObject("boundedRangeModel", "javax.swing.DefaultBoundedRangeModel", null);
      methodObject("spinnerDateModel", "javax.swing.SpinnerDateModel", null);
      methodObject("spinnerListModel", "javax.swing.SpinnerListModel", null);
      methodObject("spinnerNumberModel", "javax.swing.SpinnerNumberModel", null);

      // registerTableComponents()
      methodObject("table", "javax.swing.JTable", "groovy.swing.factory.TableFactory");
      methodObject("tableColumn", "javax.swing.table.TableColumn", null);
      methodObject("tableModel", "javax.swing.table.TableModel", "groovy.swing.factory.TableModelFactory",
                   ImmutableMap.of(
                     "tableModel", new TypeCondition(type("javax.swing.table.TableModel")),
                     "model", new TypeCondition(type("groovy.model.ValueModel")),
                     "list", NamedArgumentDescriptor.SIMPLE_ON_TOP
                   ));
      methodObject("propertyColumn", "javax.swing.table.TableColumn", "groovy.swing.factory.PropertyColumnFactory",
                   ImmutableMap.of(
                     "propertyName", NamedArgumentDescriptor.TYPE_STRING,
                     "header", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "type", new TypeCondition(type(CommonClassNames.JAVA_LANG_CLASS)),
                     "editable", NamedArgumentDescriptor.SIMPLE_ON_TOP
                   ));
      methodObject("closureColumn", "javax.swing.table.TableColumn", "groovy.swing.factory.ClosureColumnFactory",
                   ImmutableMap.of(
                     "header", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "read", new TypeCondition(type(GroovyCommonClassNames.GROOVY_LANG_CLOSURE)),
                     "write", new TypeCondition(type(GroovyCommonClassNames.GROOVY_LANG_CLOSURE)),
                     "type", new TypeCondition(type(CommonClassNames.JAVA_LANG_CLASS))
                   ));
      methodObject("columnModel", "javax.swing.table.TableColumnModel", "groovy.swing.factory.ColumnModelFactory");
      methodObject("column", "javax.swing.table.TableColumn", "groovy.swing.factory.ColumnFactory",
                   ImmutableMap.of("width", NamedArgumentDescriptor.SIMPLE_ON_TOP));

      // registerBasicLayouts()
      methodObject("borderLayout", "java.awt.BorderLayout", "groovy.swing.factory.LayoutFactory");
      methodObject("cardLayout", "java.awt.CardLayout", "groovy.swing.factory.LayoutFactory");
      methodObject("flowLayout", "java.awt.FlowLayout", "groovy.swing.factory.LayoutFactory");
      methodObject("gridLayout", "java.awt.GridLayout", "groovy.swing.factory.LayoutFactory");
      methodObject("overlayLayout", "javax.swing.OverlayLayout", "groovy.swing.factory.LayoutFactory");
      methodObject("springLayout", "javax.swing.SpringLayout", "groovy.swing.factory.LayoutFactory");
      methodObject("gridBagLayout", "java.awt.GridBagLayout", "groovy.swing.factory.LayoutFactory");
      methodObject("gridBagConstraints", "java.awt.GridBagConstraints", "groovy.swing.factory.LayoutFactory");
      methodObject("gbc", "java.awt.GridBagConstraints", "groovy.swing.factory.LayoutFactory");

      // registerBoxLayout()
      methodObject("boxLayout", "javax.swing.BoxLayout", "groovy.swing.factory.BoxLayoutFactory",
                   ImmutableMap.of("axis", NamedArgumentDescriptor.SIMPLE_ON_TOP));
      methodObject("box", "javax.swing.Box", "groovy.swing.factory.BoxFactory",
                   ImmutableMap.of("axis", new TypeCondition(type("java.lang.Number"))));
      methodObject("hbox", "javax.swing.Box", "groovy.swing.factory.HBoxFactory");
      methodObject("hglue", "java.awt.Component", "groovy.swing.factory.HGlueFactory");
      methodObject("hstrut", "java.awt.Component", "groovy.swing.factory.HStrutFactory",
                   ImmutableMap.of("width", new TypeCondition(type("java.lang.Number"))));
      methodObject("vbox", "javax.swing.Box", "groovy.swing.factory.VBoxFactory");
      methodObject("vglue", "java.awt.Component", "groovy.swing.factory.VGlueFactory");
      methodObject("vstrut", "java.awt.Component", "groovy.swing.factory.VStrutFactory",
                   ImmutableMap.of("height", new TypeCondition(type("java.lang.Number"))));
      methodObject("glue", "java.awt.Component", "groovy.swing.factory.GlueFactory");
      methodObject("rigidArea", "java.awt.Component", "groovy.swing.factory.RigidAreaFactory",
                   ImmutableMap.of(
                     "size", new TypeCondition(type("java.awt.Dimension")),
                     "height", new TypeCondition(type("java.lang.Number")),
                     "width", new TypeCondition(type("java.lang.Number"))
                   ));

      // registerTableLayout()
      methodObject("tableLayout", "groovy.swing.impl.TableLayout", "groovy.swing.factory.TableLayoutFactory");
      methodObject("tr", "groovy.swing.impl.TableLayoutRow", "groovy.swing.factory.TRFactory");
      methodObject("td", "groovy.swing.impl.TableLayoutCell", "groovy.swing.factory.TDFactory");

      // registerBorders()
      methodObject("lineBorder", "javax.swing.border.LineBorder", "groovy.swing.factory.LineBorderFactory",
                   ImmutableMap.of(
                     "parent", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "color", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "thickness", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "roundedCorners", NamedArgumentDescriptor.SIMPLE_ON_TOP
                   ));

      // Shared descriptor for every color-valued border argument below.
      NamedArgumentDescriptor namedArgColor = new TypeCondition(type("java.awt.Color"));

      // `m` is reused (and reassigned) for the border families that share named arguments.
      Map<@NlsSafe String, NamedArgumentDescriptor> m = Map.of(
        "parent", NamedArgumentDescriptor.SIMPLE_ON_TOP,
        "highlight", namedArgColor,
        "shadow", namedArgColor,
        "highlightOuter", namedArgColor,
        "highlightInner", namedArgColor,
        "shadowOuter", namedArgColor,
        "shadowInner", namedArgColor);

      methodObject("loweredBevelBorder", "javax.swing.border.Border", "groovy.swing.factory.BevelBorderFactory", m);
      methodObject("raisedBevelBorder", "javax.swing.border.Border", "groovy.swing.factory.BevelBorderFactory", m);

      m = Map.of(
        "parent", NamedArgumentDescriptor.SIMPLE_ON_TOP,
        "highlight", namedArgColor,
        "shadow", namedArgColor);

      methodObject("etchedBorder", "javax.swing.border.Border", "groovy.swing.factory.EtchedBorderFactory", m);
      methodObject("loweredEtchedBorder", "javax.swing.border.Border", "groovy.swing.factory.EtchedBorderFactory", m);
      methodObject("raisedEtchedBorder", "javax.swing.border.Border", "groovy.swing.factory.EtchedBorderFactory", m);

      methodObject("titledBorder", "javax.swing.border.TitledBorder", "groovy.swing.factory.TitledBorderFactory",
                   Map.of(
                     "parent", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "title", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "position", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "justification", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "border", new TypeCondition(type("javax.swing.border.Border")),
                     "color", namedArgColor,
                     "font", new TypeCondition(type("java.awt.Font"))));

      // NOTE(review): "emptyBorder" is registered three times — the first two calls are
      // byte-identical duplicates; confirm whether one can be dropped.
      methodObject("emptyBorder", "javax.swing.border.Border", "groovy.swing.factory.EmptyBorderFactory");
      methodObject("emptyBorder", "javax.swing.border.Border", "groovy.swing.factory.EmptyBorderFactory");
      methodObject("emptyBorder", "javax.swing.border.Border", "groovy.swing.factory.EmptyBorderFactory",
                   Map.of(
                     "parent", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "top", NamedArgumentDescriptor.TYPE_INTEGER,
                     "left", NamedArgumentDescriptor.TYPE_INTEGER,
                     "bottom", NamedArgumentDescriptor.TYPE_INTEGER,
                     "right", NamedArgumentDescriptor.TYPE_INTEGER
                   ));

      methodObject("compoundBorder", "javax.swing.border.CompoundBorder", "groovy.swing.factory.CompoundBorderFactory",
                   Map.of(
                     "parent", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "inner", new TypeCondition(type("javax.swing.border.Border")),
                     "outer", new TypeCondition(type("javax.swing.border.Border"))
                   ));

      methodObject("matteBorder", "javax.swing.border.Border", "groovy.swing.factory.MatteBorderFactory",
                   Map.of(
                     "parent", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "icon", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "color", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "size", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "top", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "left", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "bottom", NamedArgumentDescriptor.SIMPLE_ON_TOP,
                     "right", NamedArgumentDescriptor.SIMPLE_ON_TOP));

      // registerRenderers()
      methodObject("tableCellRenderer", "groovy.swing.impl.ClosureRenderer", "groovy.swing.factory.RendererFactory");
      methodObject("listCellRenderer", "groovy.swing.impl.ClosureRenderer", "groovy.swing.factory.RendererFactory");
      methodObject("cellRenderer", "groovy.swing.impl.ClosureRenderer", "groovy.swing.factory.RendererFactory");
      methodObject("headerRenderer", "groovy.swing.impl.ClosureRenderer", "groovy.swing.factory.RendererFactory");
      methodObject("onRender", CommonClassNames.JAVA_UTIL_MAP, "groovy.swing.factory.RendererUpdateFactory");

      // registerEditors()
      methodObject("cellEditor", "groovy.swing.impl.ClosureCellEditor", "groovy.swing.factory.CellEditorFactory");
      methodObject("editorValue", CommonClassNames.JAVA_UTIL_MAP, "groovy.swing.factory.CellEditorGetValueFactory");
      methodObject("prepareEditor", CommonClassNames.JAVA_UTIL_MAP, "groovy.swing.factory.CellEditorPrepareFactory");
    }

    //private void generateMethods() {
    //  // registerSupportNodes()
    //  beanFactory("groovy.swing.factory.ActionFactory", "action", "javax.swing.Action", true);
    //
    //  add(method("actions", CommonClassNames.JAVA_UTIL_LIST, "groovy.swing.factory.CollectionFactory").addClosureParam());
    //
    //  add(methodWithAttr("map", CommonClassNames.JAVA_UTIL_MAP, "groovy.swing.factory.MapFactory").addClosureParam());
    //
    //  add(acceptAllMethod("imageIcon", "javax.swing.ImageIcon", "groovy.swing.factory.ImageIconFactory")
    //        .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>builder()
    //                              .put("image", new NamedArgumentDescriptor.TypeCondition(type("java.awt.Image")))
    //                              .put("url", new NamedArgumentDescriptor.TypeCondition(type("java.net.URL")))
    //                              .put("file", NamedArgumentDescriptor.SIMPLE_ON_TOP)
    //                              .put("resource", NamedArgumentDescriptor.SIMPLE_ON_TOP)
    //                              .put("class", NamedArgumentDescriptor.SIMPLE_ON_TOP)
    //                              .put("description", NamedArgumentDescriptor.TYPE_STRING)
    //                              .build()
    //        ));
    //
    //  beanFactory("groovy.swing.factory.ButtonGroupFactory", "buttonGroup", "javax.swing.BoxLayout", true);
    //
    //  add(methodWithAttr("noparent", CommonClassNames.JAVA_UTIL_LIST, "groovy.swing.factory.CollectionFactory").addClosureParam());
    //
    //  registerExplicitMethod("keyStrokeAction", "createKeyStrokeAction");
    //  //registerExplicitMethod("shortcut", "shortcut");
    //
    //  // registerBinding()
    //  add(acceptAllMethod("bind", "org.codehaus.groovy.binding.FullBinding", "groovy.swing.factory.BindFactory")
// .setNamedParameters( // ImmutableMap.<String, NamedArgumentDescriptor>builder() // .put("source", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("target", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("update", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("targetProperty", NamedArgumentDescriptor.TYPE_STRING) // .put("mutual", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("sourceEvent", NamedArgumentDescriptor.TYPE_STRING) // .put("sourceValue", NamedArgumentDescriptor.TYPE_CLOSURE) // .put("sourceProperty", NamedArgumentDescriptor.TYPE_STRING) // .put("value", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("bind", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("group", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .build() // )); // // add(acceptAllMethodLeaf("bindProxy", "org.codehaus.groovy.binding.BindingProxy", "groovy.swing.factory.BindProxyFactory") // .setNamedParameters( // ImmutableMap.<String, NamedArgumentDescriptor>builder() // .put("bind", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .build() // )); // // add(acceptAllMethod("bindGroup", "org.codehaus.groovy.binding.AggregateBinding", "groovy.swing.factory.BindGroupFactory") // .setNamedParameters( // ImmutableMap.<String, NamedArgumentDescriptor>builder() // .put("bind", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .build() // )); // // // registerPassThruNodes() // add(methodWithAttr("widget", "java.awt.Component", "groovy.swing.factory.WidgetFactory") // .addParameter("component", "java.awt.Component", true) // .setNamedParameters( // ImmutableMap.<String, NamedArgumentDescriptor>of("widget", new NamedArgumentDescriptor.TypeCondition(type("java.awt.Component"))) // )); // add(methodWithAttr("container", "java.awt.Component", "groovy.swing.factory.WidgetFactory") // .addParameter("component", "java.awt.Component", true) // .addClosureParam() // .setNamedParameters( // ImmutableMap.<String, NamedArgumentDescriptor>of("container", new NamedArgumentDescriptor.TypeCondition(type("java.awt.Component"))) // 
)); // add(methodWithAttr("bean", CommonClassNames.JAVA_LANG_OBJECT, "groovy.swing.factory.WidgetFactory") // .addParameter("bean", CommonClassNames.JAVA_LANG_OBJECT, true) // .setNamedParameters( // ImmutableMap.<String, NamedArgumentDescriptor>builder() // .put("bean", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .build() // )); // // // registerWindows() // add(methodWithAttr("dialog", "javax.swing.JDialog", "groovy.swing.factory.DialogFactory").addParameter("dialog", "javax.swing.JDialog", true).addClosureParam() // .setNamedParameters( // ImmutableMap.<String, NamedArgumentDescriptor>builder() // .put("owner", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("defaultButtonProperty", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("pack", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("show", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .build() // )); // // beanFactory(null, "fileChooser", "javax.swing.JFileChooser", true); // // add(method("frame", "javax.swing.JFrame", "groovy.swing.factory.FrameFactory", "value", "javax.swing.JFrame", true) // .setNamedParameters( // ImmutableMap.<String, NamedArgumentDescriptor>of("pack", NamedArgumentDescriptor.SIMPLE_ON_TOP, "show", NamedArgumentDescriptor.SIMPLE_ON_TOP) // ) // ); // // beanFactory(null, "optionPane", "javax.swing.JOptionPane", true); // // add(method("window", "javax.swing.JWindow", "groovy.swing.factory.WindowFactory", "window", "javax.swing.JWindow", true) // .setNamedParameters( // ImmutableMap.<String, NamedArgumentDescriptor>of("pack", NamedArgumentDescriptor.SIMPLE_ON_TOP, // "show", NamedArgumentDescriptor.SIMPLE_ON_TOP, // "owner", NamedArgumentDescriptor.SIMPLE_ON_TOP) // ) // ); // // // registerActionButtonWidgets() // add(acceptAllMethod("button", "javax.swing.JButton", "groovy.swing.factory.RichActionWidgetFactory")); // add(acceptAllMethod("checkBox", "javax.swing.JCheckBox", "groovy.swing.factory.RichActionWidgetFactory")); // add(acceptAllMethod("checkBoxMenuItem", 
"javax.swing.JCheckBoxMenuItem", "groovy.swing.factory.RichActionWidgetFactory")); // add(acceptAllMethod("menuItem", "javax.swing.JMenuItem", "groovy.swing.factory.RichActionWidgetFactory")); // add(acceptAllMethod("radioButton", "javax.swing.JRadioButton", "groovy.swing.factory.RichActionWidgetFactory")); // add(acceptAllMethod("radioButtonMenuItem", "javax.swing.JRadioButtonMenuItem", "groovy.swing.factory.RichActionWidgetFactory")); // add(acceptAllMethod("toggleButton", "javax.swing.JToggleButton", "groovy.swing.factory.RichActionWidgetFactory")); // // // registerTextWidgets() // beanFactory("groovy.swing.factory.TextArgWidgetFactory", "editorPane", "javax.swing.JEditorPane", true); // beanFactory("groovy.swing.factory.TextArgWidgetFactory", "label", "javax.swing.JLabel", true); // beanFactory("groovy.swing.factory.TextArgWidgetFactory", "passwordField", "javax.swing.JPasswordField", true); // beanFactory("groovy.swing.factory.TextArgWidgetFactory", "textArea", "javax.swing.JTextArea", true); // beanFactory("groovy.swing.factory.TextArgWidgetFactory", "textField", "javax.swing.JTextField", true); // beanFactory("groovy.swing.factory.TextArgWidgetFactory", "textPane", "javax.swing.JTextPane", true); // add(methodWithAttr("formattedTextField", "javax.swing.JFormattedTextField", "groovy.swing.factory.FormattedTextFactory") // .addClosureParam() // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of( // "format", new NamedArgumentDescriptor.TypeCondition(type("java.text.Format")), // "value", NamedArgumentDescriptor.SIMPLE_ON_TOP) // ) // ); // // // registerMDIWidgets() // beanFactory(null, "desktopPane", "javax.swing.JDesktopPane", true); // add(method("internalFrame", "javax.swing.JInternalFrame", "groovy.swing.factory.InternalFrameFactory", "value", // "javax.swing.JInternalFrame", true)); // // // registerBasicWidgets() // beanFactory(null, "colorChooser", "javax.swing.JColorChooser", true); // add(method("comboBox", "javax.swing.JComboBox", 
"groovy.swing.factory.ComboBoxFactory", "value", "javax.swing.JComboBox", true) // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of("items", NamedArgumentDescriptor.SIMPLE_ON_TOP)) // ); // add(acceptAllMethod("list", "javax.swing.JList", "groovy.swing.factory.ListFactory") // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of("items", NamedArgumentDescriptor.SIMPLE_ON_TOP)) // ); // beanFactory(null, "progressBar", "javax.swing.JProgressBar", true); // add(methodWithAttr("separator", "javax.swing.JSeparator", "groovy.swing.factory.SeparatorFactory").addClosureParam()); // beanFactory(null, "scrollBar", "javax.swing.JScrollBar", true); // beanFactory(null, "slider", "javax.swing.JSlider", true); // beanFactory(null, "spinner", "javax.swing.JSpinner", true); // beanFactory(null, "tree", "javax.swing.JTree", true); // // //registerMenuWidgets() // beanFactory(null, "menu", "javax.swing.JMenu", true); // beanFactory(null, "menuBar", "javax.swing.JMenuBar", true); // beanFactory(null, "popupMenu", "javax.swing.JPopupMenu", true); // // // registerContainers() // beanFactory(null, "panel", "javax.swing.JPanel", true); // beanFactory("groovy.swing.factory.ScrollPaneFactory", "scrollPane", "javax.swing.JScrollPane", true); // beanFactory("groovy.swing.factory.SplitPaneFactory", "splitPane", "javax.swing.JSplitPane", true); // beanFactory("groovy.swing.factory.TabbedPaneFactory", "tabbedPane", "javax.swing.JTabbedPane", true); // // beanFactory(null, "toolBar", "javax.swing.JToolBar", true); // beanFactory(null, "viewport", "javax.swing.JViewport", true); // beanFactory(null, "layeredPane", "javax.swing.JLayeredPane", true); // // // registerDataModels() // beanFactory(null, "boundedRangeModel", "javax.swing.DefaultBoundedRangeModel", true); // beanFactory(null, "spinnerDateModel", "javax.swing.SpinnerDateModel", true); // beanFactory(null, "spinnerListModel", "javax.swing.SpinnerListModel", true); // beanFactory(null, 
"spinnerNumberModel", "javax.swing.SpinnerNumberModel", true); // // // registerTableComponents() // beanFactory("groovy.swing.factory.TableFactory", "table", "javax.swing.JTable", true); // beanFactory(null, "tableColumn", "javax.swing.table.TableColumn", true); // add(method("tableModel", "javax.swing.table.TableModel", "groovy.swing.factory.TableModelFactory", "model", // "javax.swing.table.TableModel", true) // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of( // "tableModel", new NamedArgumentDescriptor.TypeCondition(type("javax.swing.table.TableModel")), // "model", new NamedArgumentDescriptor.TypeCondition(type("groovy.model.ValueModel")), // "list", NamedArgumentDescriptor.SIMPLE_ON_TOP // )) // ); // add(methodWithAttr("propertyColumn", "javax.swing.table.TableColumn", "groovy.swing.factory.PropertyColumnFactory") // .addClosureParam() // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of( // "propertyName", NamedArgumentDescriptor.TYPE_STRING, // "header", NamedArgumentDescriptor.SIMPLE_ON_TOP, // "type", new NamedArgumentDescriptor.TypeCondition(type(CommonClassNames.JAVA_LANG_CLASS)), // "editable", NamedArgumentDescriptor.SIMPLE_ON_TOP // )) // ); // add(methodWithAttr("closureColumn", "javax.swing.table.TableColumn", "groovy.swing.factory.ClosureColumnFactory") // .addClosureParam() // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of( // "header", NamedArgumentDescriptor.SIMPLE_ON_TOP, // "read", new NamedArgumentDescriptor.TypeCondition(type(GroovyCommonClassNames.GROOVY_LANG_CLOSURE)), // "write", new NamedArgumentDescriptor.TypeCondition(type(GroovyCommonClassNames.GROOVY_LANG_CLOSURE)), // "type", new NamedArgumentDescriptor.TypeCondition(type(CommonClassNames.JAVA_LANG_CLASS)) // )) // ); // add(method("columnModel", "javax.swing.table.TableColumnModel", "groovy.swing.factory.ColumnModelFactory", "model", "javax.swing.table.TableColumnModel", true)); // // add(acceptAllMethod("column", 
"javax.swing.table.TableColumn", "groovy.swing.factory.ColumnFactory") // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of("width", NamedArgumentDescriptor.SIMPLE_ON_TOP)) // ); // // // registerBasicLayouts() // beanFactory("groovy.swing.factory.LayoutFactory", "borderLayout", "java.awt.BorderLayout", true); // beanFactory("groovy.swing.factory.LayoutFactory", "cardLayout", "java.awt.CardLayout", true); // beanFactory("groovy.swing.factory.LayoutFactory", "flowLayout", "java.awt.FlowLayout", true); // beanFactory("groovy.swing.factory.LayoutFactory", "gridLayout", "java.awt.GridLayout", true); // beanFactory("groovy.swing.factory.LayoutFactory", "overlayLayout", "javax.swing.OverlayLayout", true); // beanFactory("groovy.swing.factory.LayoutFactory", "springLayout", "javax.swing.SpringLayout", true); // beanFactory("groovy.swing.factory.LayoutFactory", "gridBagLayout", "java.awt.GridBagLayout", true); // beanFactory(null, "gridBagConstraints", "java.awt.GridBagConstraints", true); // beanFactory(null, "gbc", "java.awt.GridBagConstraints", true); // // // registerBoxLayout() // add(methodWithAttr("boxLayout", "javax.swing.BoxLayout", "groovy.swing.factory.BoxLayoutFactory").addClosureParam() // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of("axis", NamedArgumentDescriptor.SIMPLE_ON_TOP)) // ); // add(method("box", "javax.swing.Box", "groovy.swing.factory.BoxFactory", "box", null, true) // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of( // "axis", new NamedArgumentDescriptor.TypeCondition(type("java.lang.Number")))) // ); // add(methodWithAttr("hbox", "javax.swing.Box", "groovy.swing.factory.HBoxFactory").addClosureParam()); // add(methodWithAttr("hglue", "java.awt.Component", "groovy.swing.factory.HGlueFactory").addClosureParam()); // add(method("hstrut", "java.awt.Component", "groovy.swing.factory.HStrutFactory", "width", "java.lang.Number", true) // .setNamedParameters(ImmutableMap.<String, 
NamedArgumentDescriptor>of( // "width", new NamedArgumentDescriptor.TypeCondition(type("java.lang.Number")))) // ); // add(methodWithAttr("vbox", "javax.swing.Box", "groovy.swing.factory.VBoxFactory").addClosureParam()); // add(methodWithAttr("vglue", "java.awt.Component", "groovy.swing.factory.VGlueFactory").addClosureParam()); // add(method("vstrut", "java.awt.Component", "groovy.swing.factory.VStrutFactory", "height", "java.lang.Number", true) // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of( // "height", new NamedArgumentDescriptor.TypeCondition(type("java.lang.Number")))) // ); // add(methodWithAttr("glue", "java.awt.Component", "groovy.swing.factory.GlueFactory").addClosureParam()); // add(methodWithAttr("rigidArea", "java.awt.Component", "groovy.swing.factory.RigidAreaFactory").addClosureParam() // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of( // "size", new NamedArgumentDescriptor.TypeCondition(type("java.awt.Dimension")), // "height", new NamedArgumentDescriptor.TypeCondition(type("java.lang.Number")), // "width", new NamedArgumentDescriptor.TypeCondition(type("java.lang.Number")) // )) // ); // // // registerTableLayout() // add(method("tableLayout", "groovy.swing.impl.TableLayout", "groovy.swing.factory.TableLayoutFactory", "layout", null, true)); // add(methodWithAttr("tr", "groovy.swing.impl.TableLayoutRow", "groovy.swing.factory.TRFactory").addClosureParam()); // add(methodWithAttr("td", "groovy.swing.impl.TableLayoutCell", "groovy.swing.factory.TDFactory").addClosureParam()); // // // registerBorders() // add(acceptAllMethod("lineBorder", "javax.swing.border.LineBorder", "groovy.swing.factory.LineBorderFactory") // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>of( // "parent", NamedArgumentDescriptor.SIMPLE_ON_TOP, // "color", NamedArgumentDescriptor.SIMPLE_ON_TOP, // "thickness", NamedArgumentDescriptor.SIMPLE_ON_TOP, // "roundedCorners", NamedArgumentDescriptor.SIMPLE_ON_TOP // 
)).setMethodKind(null) // ); // // NamedArgumentDescriptor namedArgColor = new NamedArgumentDescriptor.TypeCondition(type("java.awt.Color")); // // Map<String, NamedArgumentDescriptor> m = ImmutableMap.<String, NamedArgumentDescriptor>builder() // .put("parent", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("highlight", namedArgColor) // .put("shadow", namedArgColor) // .put("highlightOuter", namedArgColor) // .put("highlightInner", namedArgColor) // .put("shadowOuter", namedArgColor) // .put("shadowInner", namedArgColor) // .build(); // // add(acceptAllMethod("loweredBevelBorder", "javax.swing.border.Border", "groovy.swing.factory.BevelBorderFactory") // .setNamedParameters(m).setMethodKind(null)); // add(acceptAllMethod("raisedBevelBorder", "javax.swing.border.Border", "groovy.swing.factory.BevelBorderFactory") // .setNamedParameters(m).setMethodKind(null)); // // // m = ImmutableMap.of( // "parent", NamedArgumentDescriptor.SIMPLE_ON_TOP, // "highlight", namedArgColor, // "shadow", namedArgColor // ); // // add(acceptAllMethod("etchedBorder", "javax.swing.border.Border", "groovy.swing.factory.EtchedBorderFactory") // .setNamedParameters(m).setMethodKind(null)); // add(acceptAllMethod("loweredEtchedBorder", "javax.swing.border.Border", "groovy.swing.factory.EtchedBorderFactory") // .setNamedParameters(m).setMethodKind(null)); // add(acceptAllMethod("raisedEtchedBorder", "javax.swing.border.Border", "groovy.swing.factory.EtchedBorderFactory") // .setNamedParameters(m).setMethodKind(null)); // // add(acceptAllMethod("titledBorder", "javax.swing.border.TitledBorder", "groovy.swing.factory.TitledBorderFactory") // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>builder() // .put("parent", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("title", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("position", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("justification", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("border", new 
NamedArgumentDescriptor.TypeCondition(type("javax.swing.border.Border"))) // .put("color", namedArgColor) // .put("font", new NamedArgumentDescriptor.TypeCondition(type("java.awt.Font"))) // .build() // ).setMethodKind(null)); // // add(method("emptyBorder", "javax.swing.border.Border", "groovy.swing.factory.EmptyBorderFactory", "size", // CommonClassNames.JAVA_LANG_INTEGER, false).setMethodKind(null)); // add(method("emptyBorder", "javax.swing.border.Border", "groovy.swing.factory.EmptyBorderFactory", "sizesList", // CommonClassNames.JAVA_UTIL_LIST, false) // .setMethodKind(null)); // add(methodWithAttr("emptyBorder", "javax.swing.border.Border", "groovy.swing.factory.EmptyBorderFactory").addClosureParam() // .setNamedParameters(ImmutableMap.of( // "parent", NamedArgumentDescriptor.SIMPLE_ON_TOP, // "top", NamedArgumentDescriptor.TYPE_INTEGER, // "left", NamedArgumentDescriptor.TYPE_INTEGER, // "bottom", NamedArgumentDescriptor.TYPE_INTEGER, // "right", NamedArgumentDescriptor.TYPE_INTEGER // )) // ); // add(method("compoundBorder", "javax.swing.border.CompoundBorder", "groovy.swing.factory.CompoundBorderFactory", "value",CommonClassNames.JAVA_UTIL_LIST, false) // .setNamedParameters(ImmutableMap.of( // "parent", NamedArgumentDescriptor.SIMPLE_ON_TOP, // "inner", new NamedArgumentDescriptor.TypeCondition(type("javax.swing.border.Border")), // "outer", new NamedArgumentDescriptor.TypeCondition(type("javax.swing.border.Border")) // ))); // // add(acceptAllMethod("matteBorder", "javax.swing.border.Border", "groovy.swing.factory.MatteBorderFactory") // .setNamedParameters(ImmutableMap.<String, NamedArgumentDescriptor>builder() // .put("parent", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("icon", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("color", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("size", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("top", NamedArgumentDescriptor.SIMPLE_ON_TOP) // .put("left", NamedArgumentDescriptor.SIMPLE_ON_TOP) // 
//        .put("bottom", NamedArgumentDescriptor.SIMPLE_ON_TOP)
//        .put("right", NamedArgumentDescriptor.SIMPLE_ON_TOP)
//        .build()
//      ).setMethodKind(null));
//
//      // registerRenderers()
//      add(methodWithAttr("tableCellRenderer", "groovy.swing.impl.ClosureRenderer", "groovy.swing.factory.RendererFactory").addClosureParam());
//      add(methodWithAttr("listCellRenderer", "groovy.swing.impl.ClosureRenderer", "groovy.swing.factory.RendererFactory").addClosureParam());
//      add(methodWithAttr("cellRenderer", "groovy.swing.impl.ClosureRenderer", "groovy.swing.factory.RendererFactory").addClosureParam());
//      add(methodWithAttr("headerRenderer", "groovy.swing.impl.ClosureRenderer", "groovy.swing.factory.RendererFactory").addClosureParam());
//      add(acceptAllMethod("onRender", CommonClassNames.JAVA_UTIL_MAP, "groovy.swing.factory.RendererUpdateFactory"));
//
//      // registerEditors()
//      add(methodWithAttr("cellEditor", "groovy.swing.impl.ClosureCellEditor", "groovy.swing.factory.CellEditorFactory").addClosureParam());
//      add(acceptAllMethod("editorValue", CommonClassNames.JAVA_UTIL_MAP, "groovy.swing.factory.CellEditorGetValueFactory"));
//      add(acceptAllMethod("prepareEditor", CommonClassNames.JAVA_UTIL_MAP, "groovy.swing.factory.CellEditorPrepareFactory"));
//}
  }

  /**
   * Feeds the synthetic SwingBuilder node methods for {@code aClass} into the
   * resolve {@code processor}.
   *
   * <p>The generated methods are cached on the class via user data under
   * {@code KEY}; on a cache miss a {@code MyBuilder} is run and the result is
   * installed with {@code putUserDataIfAbsent}, so concurrent callers agree on
   * a single cached map. If the processor supplies a name hint, only methods
   * with that exact name are offered; otherwise all generated methods are.
   * Processing stops as soon as {@code processor.execute} returns false.
   */
  @Override
  public void processDynamicElements(@NotNull PsiType qualifierType,
                                     @Nullable PsiClass aClass,
                                     @NotNull PsiScopeProcessor processor,
                                     @NotNull PsiElement place,
                                     @NotNull ResolveState state) {
    // Nothing to contribute without a resolved class.
    if (aClass == null) return;
    // Skip entirely when the processor is not interested in methods.
    if (!ResolveUtil.shouldProcessMethods(processor.getHint(ElementClassHint.KEY))) return;

    MultiMap<String, PsiMethod> methodMap = aClass.getUserData(KEY);
    if (methodMap == null) {
      // Cache miss: generate the builder methods once and publish atomically.
      MyBuilder builder = new MyBuilder(aClass);
      builder.generateMethods();
      methodMap = ((UserDataHolderEx)aClass).putUserDataIfAbsent(KEY, builder.myResult);
    }

    String nameHint = ResolveUtil.getNameHint(processor);
    Collection<? extends PsiMethod> methods = nameHint == null ? methodMap.values() : methodMap.get(nameHint);
    for (PsiMethod method : methods) {
      if (!processor.execute(method, state)) return;
    }
  }

  /**
   * @return the fully qualified name of the builder class whose call sites
   *         receive these dynamic members.
   */
  @Override
  protected String getParentClassName() {
    return "groovy.swing.SwingBuilder";
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.catalina.webresources;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;

import org.apache.catalina.WebResource;
import org.apache.catalina.WebResourceRoot;
import org.apache.catalina.util.ResourceSet;

/**
 * Base class for archive-backed (JAR) resource sets. Holds the shared state
 * (entry map, base URL, and a use-counted {@link JarFile} handle) and the
 * read-only {@link org.apache.catalina.WebResourceSet} operations that are
 * common to all archive implementations.
 */
public abstract class AbstractArchiveResourceSet extends AbstractResourceSet {

    // All entries of the archive, keyed by entry name (JAR paths never start
    // with '/'). Populated by subclasses via getJarFileEntries().
    private final HashMap<String,JarEntry> jarFileEntries = new HashMap<>();
    private URL baseUrl;
    // Cached string form of baseUrl, kept in sync by setBaseUrl().
    private String baseUrlString;

    // Lazily-opened archive handle shared by readers; guarded by archiveLock
    // and reference-counted via archiveUseCount so gc() only closes an idle
    // archive.
    private JarFile archive = null;
    private final Object archiveLock = new Object();
    private long archiveUseCount = 0;

    /** Sets the base URL and refreshes its cached string form (both null if the URL is null). */
    protected final void setBaseUrl(URL baseUrl) {
        this.baseUrl = baseUrl;
        if (baseUrl == null) {
            this.baseUrlString = null;
        } else {
            this.baseUrlString = baseUrl.toString();
        }
    }

    @Override
    public final URL getBaseUrl() {
        return baseUrl;
    }

    /** @return the cached {@code toString()} of the base URL, or null if unset */
    protected final String getBaseUrlString() {
        return baseUrlString;
    }

    /** @return the mutable map of archive entries for subclasses to populate/read */
    protected final HashMap<String,JarEntry> getJarFileEntries() {
        return jarFileEntries;
    }

    /**
     * Lists the immediate child names (no trailing '/') of the given web
     * application path that are provided by this archive, taking the web app
     * mount point and internal path into account.
     */
    @Override
    public final String[] list(String path) {
        checkPath(path);

        String webAppMount = getWebAppMount();
        ArrayList<String> result = new ArrayList<>();
        if (path.startsWith(webAppMount)) {
            String pathInJar =
                    getInternalPath() + path.substring(webAppMount.length());
            // Always strip off the leading '/' to get the JAR path
            if (pathInJar.length() > 0 && pathInJar.charAt(0) == '/') {
                pathInJar = pathInJar.substring(1);
            }
            Iterator<String> entries = jarFileEntries.keySet().iterator();
            while (entries.hasNext()) {
                String name = entries.next();
                if (name.length() > pathInJar.length() &&
                        name.startsWith(pathInJar)) {
                    // Strip the prefix (and trailing '/' of directory entries)
                    // to get the name relative to the requested path.
                    if (name.charAt(name.length() - 1) == '/') {
                        name = name.substring(
                                pathInJar.length(), name.length() - 1);
                    } else {
                        name = name.substring(pathInJar.length());
                    }
                    if (name.length() == 0) {
                        continue;
                    }
                    if (name.charAt(0) == '/') {
                        name = name.substring(1);
                    }
                    // Only immediate children: no remaining '/' in the name.
                    if (name.length() > 0 && name.lastIndexOf('/') == -1) {
                        result.add(name);
                    }
                }
            }
        } else {
            // Request is above the mount point: expose the next segment of the
            // mount path itself so directory listings can reach the archive.
            if (!path.endsWith("/")) {
                path = path + "/";
            }
            if (webAppMount.startsWith(path)) {
                int i = webAppMount.indexOf('/', path.length());
                if (i == -1) {
                    return new String[] {webAppMount.substring(path.length())};
                } else {
                    return new String[] {
                            webAppMount.substring(path.length(), i)};
                }
            }
        }
        return result.toArray(new String[result.size()]);
    }

    /**
     * Returns the set of web application paths (each prefixed with the web app
     * mount point) for entries directly under the given path. The returned set
     * is locked (read-only).
     */
    @Override
    public final Set<String> listWebAppPaths(String path) {
        checkPath(path);

        String webAppMount = getWebAppMount();
        ResourceSet<String> result = new ResourceSet<>();
        if (path.startsWith(webAppMount)) {
            String pathInJar =
                    getInternalPath() + path.substring(webAppMount.length());
            // Always strip off the leading '/' to get the JAR path and make
            // sure it ends in '/'
            if (pathInJar.length() > 0) {
                // NOTE(review): when pathInJar does not already end in '/',
                // this drops the first character while appending '/'. That is
                // only correct if the string is then guaranteed to start with
                // '/' — verify against the callers/mount configuration.
                if (pathInJar.charAt(pathInJar.length() - 1) != '/') {
                    pathInJar = pathInJar.substring(1) + '/';
                }
                if (pathInJar.charAt(0) == '/') {
                    pathInJar = pathInJar.substring(1);
                }
            }

            Iterator<String> entries = jarFileEntries.keySet().iterator();
            while (entries.hasNext()) {
                String name = entries.next();
                if (name.length() > pathInJar.length() &&
                        name.startsWith(pathInJar)) {
                    // Keep only direct children (next '/' absent or trailing).
                    int nextSlash = name.indexOf('/', pathInJar.length());
                    if (nextSlash == -1 || nextSlash == name.length() - 1) {
                        if (name.startsWith(pathInJar)) {
                            result.add(webAppMount + '/' +
                                    name.substring(getInternalPath().length()));
                        }
                    }
                }
            }
        } else {
            // Request is above the mount point: expose the next mount segment.
            if (!path.endsWith("/")) {
                path = path + "/";
            }
            if (webAppMount.startsWith(path)) {
                int i = webAppMount.indexOf('/', path.length());
                if (i == -1) {
                    result.add(webAppMount + "/");
                } else {
                    result.add(webAppMount.substring(0, i + 1));
                }
            }
        }
        result.setLocked(true);
        return result;
    }

    /** Archives are read-only: directory creation always fails. */
    @Override
    public final boolean mkdir(String path) {
        checkPath(path);
        return false;
    }

    /** Archives are read-only: writes always fail (after validating arguments). */
    @Override
    public final boolean write(String path, InputStream is, boolean overwrite) {
        checkPath(path);

        if (is == null) {
            throw new NullPointerException(
                    sm.getString("dirResourceSet.writeNpe"));
        }

        return false;
    }

    /**
     * Looks up the resource at the given web application path. Returns an
     * {@link EmptyResource} for paths outside the mount point or entries not
     * present in the archive; delegates to
     * {@link #createArchiveResource(JarEntry, String, Manifest)} for hits.
     */
    @Override
    public final WebResource getResource(String path) {
        checkPath(path);
        String webAppMount = getWebAppMount();
        WebResourceRoot root = getRoot();

        /*
         * Implementation notes
         *
         * The path parameter passed into this method always starts with '/'.
         *
         * The path parameter passed into this method may or may not end with a
         * '/'. JarFile.getEntry() will return a matching directory entry
         * whether or not the name ends in a '/'. However, if the entry is
         * requested without the '/' subsequent calls to JarEntry.isDirectory()
         * will return false.
         *
         * Paths in JARs never start with '/'. Leading '/' need to be removed
         * before any JarFile.getEntry() call.
         */

        // If the JAR has been mounted below the web application root, return
        // an empty resource for requests outside of the mount point.

        if (path.startsWith(webAppMount)) {
            String pathInJar = getInternalPath() + path.substring(
                    webAppMount.length(), path.length());
            // Always strip off the leading '/' to get the JAR path
            if (pathInJar.length() > 0 && pathInJar.charAt(0) == '/') {
                pathInJar = pathInJar.substring(1);
            }
            if (pathInJar.equals("")) {
                // Special case
                // This is a directory resource so the path must end with /
                if (!path.endsWith("/")) {
                    path = path + "/";
                }
                return new JarResourceRoot(root, new File(getBase()),
                        baseUrlString, path);
            } else {
                JarEntry jarEntry = null;
                if (!(pathInJar.charAt(pathInJar.length() - 1) == '/')) {
                    // Prefer the directory form of the entry so isDirectory()
                    // reports correctly (see implementation notes above).
                    jarEntry = jarFileEntries.get(pathInJar + '/');
                    if (jarEntry != null) {
                        path = path + '/';
                    }
                }
                if (jarEntry == null) {
                    jarEntry = jarFileEntries.get(pathInJar);
                }
                if (jarEntry == null) {
                    return new EmptyResource(root, path);
                } else {
                    return createArchiveResource(jarEntry, path, getManifest());
                }
            }
        } else {
            return new EmptyResource(root, path);
        }
    }

    /**
     * Creates the concrete resource wrapper for a matched archive entry.
     *
     * @param jarEntry   the matched entry
     * @param webAppPath the web application path of the resource
     * @param manifest   the archive manifest (may be used by implementations)
     */
    protected abstract WebResource createArchiveResource(JarEntry jarEntry,
            String webAppPath, Manifest manifest);

    @Override
    public final boolean isReadOnly() {
        return true;
    }

    /**
     * Archive resource sets are hard-wired read-only; only a no-op call with
     * {@code true} is accepted.
     */
    @Override
    public void setReadOnly(boolean readOnly) {
        if (readOnly) {
            // This is the hard-coded default - ignore the call
            return;
        }

        throw new IllegalArgumentException(
                sm.getString("abstractArchiveResourceSet.setReadOnlyFalse"));
    }

    /**
     * Opens (or reuses) the shared {@link JarFile} and increments the use
     * count. Callers must pair this with {@link #closeJarFile()}.
     */
    protected JarFile openJarFile() throws IOException {
        synchronized (archiveLock) {
            if (archive == null) {
                archive = new JarFile(getBase());
            }
            archiveUseCount++;
            return archive;
        }
    }

    /** Decrements the use count taken by {@link #openJarFile()}; closing happens in {@link #gc()}. */
    protected void closeJarFile() {
        synchronized (archiveLock) {
            archiveUseCount--;
        }
    }

    /** Closes the shared archive handle if it is currently unused. */
    @Override
    public void gc() {
        synchronized (archiveLock) {
            if (archive != null && archiveUseCount == 0) {
                try {
                    archive.close();
                } catch (IOException e) {
                    // Log at least WARN
                    // NOTE(review): exception is currently swallowed — the
                    // close failure is invisible to operators.
                }
                archive = null;
            }
        }
    }
}
package view_and_controller;

import model.*;

import java.awt.*;
import java.awt.event.*;
import java.util.Observable;
import java.util.Observer;

import javax.swing.*;
import javax.swing.border.LineBorder;

import org.jfree.chart.*;
import org.jfree.chart.axis.NumberAxis;
import org.jfree.chart.plot.XYPlot;
import org.jfree.data.general.*;

/**
 * A tab of the monitor UI showing a miniature time-series chart for one
 * electrical data type next to a title and its Mean/Min/Max statistics.
 * Registers itself as a series-change listener on the model so the statistics
 * refresh whenever the data changes.
 */
@SuppressWarnings("serial")
public class TabPanel extends JPanel implements SeriesChangeListener, Observer, MouseListener {

    /** The standard fonts used within this panel */
    private static final String STD_FONT_STR = "Planer";

    /** The chart data associated with this Tab */
    private ElectricalDataModel dataModel;
    /** The chart panel displayed within this Tab */
    private ChartPanel miniChartPanel;
    /** Contains the tab title and the data concerning the tabs associated electrical data type */
    private JPanel infoPanel;
    /** Displays the Mean, Min and Max value concerning the electrical data type */
    private JPanel statisticsPanel;
    /** The Electrical Data Type the tab panel represents */
    private ElectricalDataTypes elecDataType;
    /** The color of the Tab */
    private Color backColor;
    /** Font size of the data statistics */
    private int fontSize;

    /**
     *
     * @param monitorPanel register as a mouse click listener on the chart panel
     * @param dataModel used for the chart within the tab and the tab statistics
     * @param elecDataType the electrical type of the tab
     */
    public TabPanel(MonitorPanel monitorPanel, ElectricalDataModel dataModel,
            ElectricalDataTypes elecDataType) {
        fontSize = 14;
        backColor = Color.WHITE;

        // Store the dataModel and the Electrical Data Type of the model
        this.dataModel = dataModel;
        this.elecDataType = elecDataType;

        // The title of the tab e.g. Power, Current
        String tabTitle = dataModel.getElectricalDataTitle();

        // Initialise the chart content
        miniChartPanel = createMiniGraphPanel(monitorPanel, dataModel);
        // NOTE(review): this local shadows the infoPanel field; it works
        // because createInfoPanel() also assigns the field, but the local is
        // what gets added below.
        JPanel infoPanel = createInfoPanel(tabTitle);

        // Chart panel to the left of the title and statistics
        setLayout(new GridLayout(1,2));
        add(miniChartPanel);
        add(infoPanel);

        // Panel listens for change in power series data
        dataModel.addDataCollectionListener(this);
        addMouseListener(this);
    }

    /**
     * Initialise the text part of the tab
     * @param title of the tab
     * @return panel containing the text
     */
    private JPanel createInfoPanel(String title) {
        JLabel titleLbl = new JLabel(title);

        // stores the title on top of the data
        infoPanel = new JPanel(new GridLayout(2,1));
        // background color is initially white
        infoPanel.setBackground(backColor);

        // title
        titleLbl.setFont(new Font(STD_FONT_STR, Font.PLAIN, 28));
        titleLbl.setHorizontalAlignment(SwingConstants.CENTER);
        infoPanel.add(titleLbl);

        // Initialise the data panel with the three statistics
        updateTabInfo();

        return infoPanel;
    }

    /**
     * Creates a new tab with the updated statistical information,
     * old tab is then swapped out for the freshly created tab.
     */
    private void updateTabInfo() {
        JPanel statPanel;
        JPanel newStatisticsPanel = new JPanel(new GridLayout(3,1));

        // Add the mean, opaqueness ensures every pixel in the label is painted
        statPanel = createStatisticPanel("Mean");
        newStatisticsPanel.add(statPanel);

        // Add the min
        statPanel = createStatisticPanel("Min");
        newStatisticsPanel.add(statPanel);

        // Add the max
        statPanel = createStatisticPanel("Max");
        newStatisticsPanel.add(statPanel);

        // Swap the old statistics panel for the freshly built one.
        if(statisticsPanel != null)
            infoPanel.remove(statisticsPanel);

        infoPanel.add(newStatisticsPanel);
        statisticsPanel = newStatisticsPanel;

        infoPanel.revalidate();
    }

    /**
     * Builds a single statistic row ("Mean"/"Min"/"Max") with the title, the
     * formatted value and its unit, all using the panel's standard font.
     * @param title key into the model's formatted statistics map
     * @return the populated row panel
     */
    private JPanel createStatisticPanel(String title) {
        JPanel statPanel = new JPanel(new GridLayout(1,3));
        Font font = new Font(STD_FONT_STR, Font.PLAIN, fontSize);

        JLabel statTitle;
        // Pad short titles so the three rows align visually.
        if(title.length() < 4)
            statTitle = new JLabel(title + "    ");
        else
            statTitle = new JLabel(title);

        JLabel statValu = new JLabel(dataModel.getFormattedStatistics().get(title).getDataStr());
        JLabel statUnit = new JLabel(dataModel.getFormattedStatistics().get(title).getUnitStr() +"  ");
        statUnit.setHorizontalAlignment(SwingConstants.RIGHT);

        statTitle.setFont(font);
        statValu.setFont(font);
        statUnit.setFont(font);

        statTitle.setOpaque(false);
        statValu.setOpaque(false);
        statUnit.setOpaque(false);

        statPanel.setBackground(backColor);
        statPanel.setOpaque(true);

        statPanel.add(statTitle);
        statPanel.add(statValu);
        statPanel.add(statUnit);

        return statPanel;
    }

    /**
     *
     * @param monitorPanel registered as a listener for clicks on the tab chart
     * @param dataModel contains the chart panel for this tab
     * @return the initialised mini-chart panel
     */
    private ChartPanel createMiniGraphPanel(MonitorPanel monitorPanel, ElectricalDataModel dataModel) {
        ChartPanel chartPanel;
        Color lineColor = null;

        // generate the graph
        JFreeChart lineChart = ChartFactory.createTimeSeriesChart(
            "",                             // Title
            "",                             // x-axis Label
            "",                             // y-axis Label
            dataModel.getDataCollection(),  // data
            false,                          // show legend?
            false,                          // use tooltips?
            false );                        // generate URLs?

        XYPlot plot = lineChart.getXYPlot();
        //XYItemRenderer renderer = plot.getRenderer();
        MyXYAreaRenderer renderer = new MyXYAreaRenderer();
        plot.setRenderer(renderer);

        // initialise the plot settings
        plot.setBackgroundPaint(Color.WHITE);
        plot.setDomainGridlinePaint(Color.GRAY);
        plot.setRangeGridlinePaint(Color.GRAY);

        // hide the x and y axis
        plot.getDomainAxis().setVisible(false);
        plot.getRangeAxis().setVisible(false);
        plot.getDomainAxis().setLowerMargin(0.0);
        plot.getDomainAxis().setUpperMargin(0.0);
        ((NumberAxis)plot.getRangeAxis()).setAutoRangeIncludesZero(true);

        // increase the thickness of the graph line
        lineColor = dataModel.getElectricalColour();
        renderer.setSeriesOutlinePaint(0, lineColor);
        renderer.setBaseOutlineStroke(new BasicStroke(1.0f));
        // Fill uses the line colour at low alpha (50) for a subtle area shade.
        renderer.setSeriesFillPaint(0, new Color(lineColor.getRed(),
                lineColor.getGreen(), lineColor.getBlue(), 50));
        renderer.setOutline(true);

        // add the graph to the panel
        chartPanel = new ChartPanel(lineChart);
        chartPanel.addMouseListener(monitorPanel);

        // disable the chart options and zoom for side tab charts
        chartPanel.setPopupMenu(null);
        chartPanel.setDomainZoomable(false);
        chartPanel.setRangeZoomable(false);

        return chartPanel;
    }

    /* Get and Set */

    /**
     * Used by the monitor panel to determine which electrical data type is currently
     * being displayed in the main panel.
     * @return the electrical data type this tab represents
     */
    public ElectricalDataTypes getElecDataType() {
        return elecDataType;
    }

    /**
     * Used by the MonitorPanel to set the tab panel background color based on whether it is selected,
     * the mouse is hovering over the tab, or it is NOT selected.
* @param color */ public void setTabBackground(Color color) { backColor = color; // background color of the chart miniChartPanel.getChart().setBackgroundPaint(color); infoPanel.setBackground(color); for(Component comp : statisticsPanel.getComponents()) { comp.setBackground(color); } if(color.getAlpha()+50 <= 255) setBorder(new LineBorder( new Color(color.getRed(),color.getGreen(), color.getGreen(), (int) (color.getAlpha()+100)),2)); else setBorder(new LineBorder(color,2)); } /* Listeners */ @Override public void update(Observable o, Object arg) { if(o instanceof ObTimeSeriesCollection) { // A channel has been added or removed from the model // Set the default statistics listeners to the first channel dataModel.addSeriesChangeListener(this, 0); } } @Override public void seriesChanged(SeriesChangeEvent arg0) { updateTabInfo(); } /* Unused mouse listener methods*/ @Override public void mouseClicked(MouseEvent arg0) {} @Override public void mouseEntered(MouseEvent e) {} @Override public void mouseExited(MouseEvent arg0) {} @Override public void mousePressed(MouseEvent arg0) {} @Override public void mouseReleased(MouseEvent arg0) {} }
/*
 * Copyright 2014 LinkedIn, Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.executor;

import azkaban.jobExecutor.ProcessJob;
import azkaban.utils.Props;
import java.io.BufferedOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Layout;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

/**
 * Standalone wrapper process that reflectively instantiates and runs a job
 * class. Configuration is read from the properties file named by the
 * {@code ProcessJob.JOB_PROP_ENV} environment variable; generated output
 * properties are written (as hand-rolled JSON) to the file named by
 * {@code ProcessJob.JOB_OUTPUT_PROP_FILE}. A shutdown hook invokes the job's
 * cancel method if the job has not finished.
 */
public class JavaJobRunnerMain {

  public static final String JOB_CLASS = "job.class";
  public static final String DEFAULT_RUN_METHOD = "run";
  public static final String DEFAULT_CANCEL_METHOD = "cancel";

  // This is the Job interface method to get the properties generated by the
  // job.
  public static final String GET_GENERATED_PROPERTIES_METHOD =
      "getJobGeneratedProperties";
  public static final String CANCEL_METHOD_PARAM = "method.cancel";
  public static final String RUN_METHOD_PARAM = "method.run";
  // Candidate Props classes tried, in order, when choosing a constructor.
  public static final String[] PROPS_CLASSES = new String[]{
      "azkaban.utils.Props", "azkaban.common.utils.Props"};

  private static final Layout DEFAULT_LAYOUT = new PatternLayout("%p %m\n");

  public final Logger _logger;

  public String _cancelMethod;
  public String _jobName;
  public Object _javaObject;
  // Set once the run method returns (or construction fails); gates cancelJob().
  private boolean _isFinished = false;

  /**
   * Reads the job configuration, constructs the job object, invokes its run
   * method, then collects and writes its generated properties. Any failure is
   * rethrown after marking the job finished so the shutdown hook does not try
   * to cancel it.
   */
  public JavaJobRunnerMain() throws Exception {
    Runtime.getRuntime().addShutdownHook(new Thread() {
      @Override
      public void run() {
        cancelJob();
      }
    });

    try {
      this._jobName = System.getenv(ProcessJob.JOB_NAME_ENV);
      final String propsFile = System.getenv(ProcessJob.JOB_PROP_ENV);

      // Replace any inherited appenders with a single console appender.
      this._logger = Logger.getRootLogger();
      this._logger.removeAllAppenders();
      final ConsoleAppender appender = new ConsoleAppender(DEFAULT_LAYOUT);
      appender.activateOptions();
      this._logger.addAppender(appender);

      final Properties prop = new Properties();
      prop.load(Files.newBufferedReader(Paths.get(propsFile), StandardCharsets.UTF_8));

      this._logger.info("Running job " + this._jobName);
      final String className = prop.getProperty(JOB_CLASS);
      if (className == null) {
        throw new Exception("Class name is not set.");
      }
      this._logger.info("Class name " + className);

      // Create the object using proxy
      this._javaObject = getObject(this._jobName, className, prop, this._logger);

      if (this._javaObject == null) {
        this._logger.info("Could not create java object to run job: " + className);
        throw new Exception("Could not create running object");
      }

      this._cancelMethod =
          prop.getProperty(CANCEL_METHOD_PARAM, DEFAULT_CANCEL_METHOD);

      final String runMethod =
          prop.getProperty(RUN_METHOD_PARAM, DEFAULT_RUN_METHOD);
      this._logger.info("Invoking method " + runMethod);
      this._logger.info("Proxy check failed, not proxying run.");
      runMethod(this._javaObject, runMethod);

      this._isFinished = true;

      // Get the generated properties and store them to disk, to be read
      // by ProcessJob.
      try {
        final Method generatedPropertiesMethod =
            this._javaObject.getClass().getMethod(GET_GENERATED_PROPERTIES_METHOD,
                new Class<?>[]{});
        final Object outputGendProps =
            generatedPropertiesMethod.invoke(this._javaObject, new Object[]{});
        if (outputGendProps != null) {
          final Method toPropertiesMethod =
              outputGendProps.getClass().getMethod("toProperties",
                  new Class<?>[]{});
          final Properties properties =
              (Properties) toPropertiesMethod.invoke(outputGendProps,
                  new Object[]{});

          final Props outputProps = new Props(null, properties);
          outputGeneratedProperties(outputProps);
        } else {
          outputGeneratedProperties(new Props());
        }
      } catch (final NoSuchMethodException e) {
        // Jobs without a generated-properties accessor fall back to empty.
        this._logger
            .info(String
                .format(
                    "Apparently there isn't a method[%s] on object[%s], using empty Props object instead.",
                    GET_GENERATED_PROPERTIES_METHOD, this._javaObject));
        outputGeneratedProperties(new Props());
      }
    } catch (final Exception e) {
      this._isFinished = true;
      throw e;
    }
  }

  /** Entry point: all work happens in the constructor of the wrapper. */
  public static void main(final String[] args) throws Exception {
    // The local reference is intentionally unused; construction runs the job.
    final JavaJobRunnerMain wrapper = new JavaJobRunnerMain();
  }

  /**
   * Reflectively instantiates {@code className}, preferring (in order) a
   * (String, Props) constructor, (String, Properties), (String, Map),
   * (String), then the no-arg constructor. Returns null (after logging the
   * available constructors) when none match.
   */
  private static Object getObject(final String jobName, final String className,
      final Properties properties, final Logger logger)
      throws Exception {

    final Class<?> runningClass =
        JavaJobRunnerMain.class.getClassLoader().loadClass(className);

    if (runningClass == null) {
      throw new Exception("Class " + className + " was not found. Cannot run job.");
    }

    Class<?> propsClass = null;
    for (final String propClassName : PROPS_CLASSES) {
      try {
        propsClass =
            JavaJobRunnerMain.class.getClassLoader().loadClass(propClassName);
      } catch (final ClassNotFoundException e) {
        // NOTE(review): intentionally ignored — absence of one candidate
        // Props class simply moves on to the next.
      }

      if (propsClass != null
          && getConstructor(runningClass, String.class, propsClass) != null) {
        // is this the props class
        break;
      }
      propsClass = null;
    }

    Object obj = null;
    if (propsClass != null
        && getConstructor(runningClass, String.class, propsClass) != null) {
      // Create props class
      final Constructor<?> propsCon =
          getConstructor(propsClass, propsClass, Properties[].class);
      final Object props =
          propsCon.newInstance(null, new Properties[]{properties});

      final Constructor<?> con =
          getConstructor(runningClass, String.class, propsClass);
      logger.info("Constructor found " + con.toGenericString());
      obj = con.newInstance(jobName, props);
    } else if (getConstructor(runningClass, String.class, Properties.class) != null) {

      final Constructor<?> con =
          getConstructor(runningClass, String.class, Properties.class);
      logger.info("Constructor found " + con.toGenericString());
      obj = con.newInstance(jobName, properties);
    } else if (getConstructor(runningClass, String.class, Map.class) != null) {
      final Constructor<?> con =
          getConstructor(runningClass, String.class, Map.class);
      logger.info("Constructor found " + con.toGenericString());

      final HashMap<Object, Object> map = new HashMap<>();
      for (final Map.Entry<Object, Object> entry : properties.entrySet()) {
        map.put(entry.getKey(), entry.getValue());
      }
      obj = con.newInstance(jobName, map);
    } else if (getConstructor(runningClass, String.class) != null) {
      final Constructor<?> con = getConstructor(runningClass, String.class);
      logger.info("Constructor found " + con.toGenericString());
      obj = con.newInstance(jobName);
    } else if (getConstructor(runningClass) != null) {
      final Constructor<?> con = getConstructor(runningClass);
      logger.info("Constructor found " + con.toGenericString());
      obj = con.newInstance();
    } else {
      logger.error("Constructor not found. Listing available Constructors.");
      for (final Constructor<?> c : runningClass.getConstructors()) {
        logger.info(c.toGenericString());
      }
    }
    return obj;
  }

  /** @return the matching public constructor, or null if none exists */
  private static Constructor<?> getConstructor(final Class<?> c, final Class<?>... args) {
    try {
      final Constructor<?> cons = c.getConstructor(args);
      return cons;
    } catch (final NoSuchMethodException e) {
      return null;
    }
  }

  /** Invokes the named no-arg run method on the job object. */
  private void runMethod(final Object obj, final String runMethod)
      throws IllegalAccessException, InvocationTargetException,
      NoSuchMethodException {
    obj.getClass().getMethod(runMethod, new Class<?>[]{}).invoke(obj);
  }

  /**
   * Writes the job's generated properties as JSON to the file named by the
   * {@code ProcessJob.JOB_OUTPUT_PROP_FILE} environment variable; a no-op when
   * the variable is unset or the properties are null.
   */
  private void outputGeneratedProperties(final Props outputProperties) {

    if (outputProperties == null) {
      this._logger.info("  no gend props");
      return;
    }
    for (final String key : outputProperties.getKeySet()) {
      this._logger
          .info("  gend prop " + key + " value:" + outputProperties.get(key));
    }

    final String outputFileStr = System.getenv(ProcessJob.JOB_OUTPUT_PROP_FILE);
    if (outputFileStr == null) {
      return;
    }

    this._logger.info("Outputting generated properties to " + outputFileStr);

    final Map<String, String> properties = new LinkedHashMap<>();
    for (final String key : outputProperties.getKeySet()) {
      properties.put(key, outputProperties.get(key));
    }

    OutputStream writer = null;
    try {
      writer = new BufferedOutputStream(new FileOutputStream(outputFileStr));

      // Manually serialize into JSON instead of adding org.json to
      // external classpath. Reduces one dependency for something that's
      // essentially easy.
      // NOTE(review): every entry (including the last) is followed by a
      // trailing comma, and '"' is replaced with the two characters '\\"',
      // so the output is not strictly valid JSON — presumably the consumer
      // (ProcessJob) tolerates this; verify before changing.
      writer.write("{\n".getBytes(StandardCharsets.UTF_8));
      for (final Map.Entry<String, String> entry : properties.entrySet()) {
        writer.write(String.format("  \"%s\":\"%s\",\n",
            entry.getKey().replace("\"", "\\\\\""),
            entry.getValue().replace("\"", "\\\\\""))
            .getBytes(StandardCharsets.UTF_8));
      }
      writer.write("}".getBytes(StandardCharsets.UTF_8));
    } catch (final Exception e) {
      // NOTE(review): original cause is dropped here; only the path survives.
      throw new RuntimeException("Unable to store output properties to: "
          + outputFileStr);
    } finally {
      try {
        if (writer != null) {
          writer.close();
        }
      } catch (final IOException e) {
        // Ignored: nothing useful can be done if close fails at this point.
      }
    }
  }

  /**
   * Invoked by the shutdown hook: reflectively calls the configured cancel
   * method on the job object unless the job already finished.
   */
  public void cancelJob() {
    if (this._isFinished) {
      return;
    }
    this._logger.info("Attempting to call cancel on this job");
    if (this._javaObject != null) {
      Method method = null;

      try {
        method = this._javaObject.getClass().getMethod(this._cancelMethod);
      } catch (final SecurityException e) {
        // Ignored: handled below as "no cancel method".
      } catch (final NoSuchMethodException e) {
        // Ignored: handled below as "no cancel method".
      }

      if (method != null) {
        try {
          method.invoke(this._javaObject);
        } catch (final Exception e) {
          if (this._logger != null) {
            this._logger.error("Cancel method failed! ", e);
          }
        }
      } else {
        throw new RuntimeException("Job " + this._jobName
            + " does not have cancel method " + this._cancelMethod);
      }
    }
  }
}
package com.thinkaurelius.titan.diskstorage;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.*;

import com.google.common.collect.ImmutableList;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.ttl.TTLKVCSManager;
import com.thinkaurelius.titan.diskstorage.util.*;
import com.thinkaurelius.titan.testcategory.BrittleTests;
import com.thinkaurelius.titan.testutil.TestGraphConfigs;
import org.junit.*;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.*;
import com.thinkaurelius.titan.testcategory.OrderedKeyStoreTests;
import com.thinkaurelius.titan.testcategory.UnorderedKeyStoreTests;
import com.thinkaurelius.titan.testutil.RandomGenerator;

/**
 * Abstract test suite exercising whatever {@link KeyColumnValueStoreManager}
 * implementation the concrete subclass supplies via
 * {@link #openStorageManager()}.
 */
public abstract class KeyColumnValueStoreTest extends AbstractKCVSTest {

    @Rule
    public TestName name = new TestName();

    private Logger log = LoggerFactory.getLogger(KeyColumnValueStoreTest.class);

    // Dimensions of the generated fixture: numKeys rows x numColumns columns.
    int numKeys = 500;
    int numColumns = 50;

    protected String storeName = "testStore1";

    public KeyColumnValueStoreManager manager;
    public StoreTransaction tx;
    public KeyColumnValueStore store;

    /**
     * Wipes any data left over from a previous run, then opens a fresh
     * manager, store, and transaction for the test.
     */
    @Before
    public void setUp() throws Exception {
        StoreManager m = openStorageManager();
        m.clearStorage();
        m.close();
        open();
    }

    /** Supplies the backend-specific store manager under test. */
    public abstract KeyColumnValueStoreManager openStorageManager() throws BackendException;

    // Opens the manager, the store, and an initial transaction.
    public void open() throws BackendException {
        manager = openStorageManager();
        store = manager.openDatabase(storeName);
        tx = startTx();
    }

    public StoreTransaction startTx() throws BackendException {
        return manager.beginTransaction(getTxConfig());
    }

    public StoreFeatures storeFeatures() {
        return manager.getFeatures();
    }

    // Close and reopen everything; used to verify data survives a restart.
    public void clopen() throws BackendException {
        close();
        open();
    }

    @After
    public void tearDown() throws Exception {
        close();
    }

    // Commits the open transaction (if any) and releases store and manager.
    public void close() throws BackendException {
        if (tx != null) tx.commit();
        if (null != store) store.close();
        if (null != manager) manager.close();
    }

    // Commits the current transaction and begins a new one.
    public void newTx() throws BackendException {
        if (tx!=null) tx.commit();
        tx = startTx();
    }

    @Test
    public void createDatabase() {
        //Just setup and shutdown
    }

    public String[][] generateValues() {
        return KeyValueStoreUtil.generateData(numKeys, numColumns);
    }

    public void loadValues(String[][] values) throws BackendException {
        loadValues(store,values);
    }

    public void loadValues(KeyColumnValueStore store, String[][] values) throws BackendException {
        // -1/-1 disables the column-shifting behavior of the full overload.
        loadValues(store, values, -1, -1);
    }

    public void loadValues(String[][] values, int shiftEveryNthRow,
                           int shiftSliceLength) throws BackendException {
        loadValues(store, values, shiftEveryNthRow, shiftSliceLength);
    }

    /**
     * Writes values[i][j] under key i / column j. When shiftEveryNthRow is
     * positive, every n-th row's column names are prefixed with
     * shiftSliceLength 0xFF bytes, pushing those columns past the slice
     * range that other tests scan over.
     */
    public void loadValues(KeyColumnValueStore store, String[][] values, int shiftEveryNthRow,
                           int shiftSliceLength) throws BackendException {
        for (int i = 0; i < values.length; i++) {
            List<Entry> entries = new ArrayList<Entry>();
            for (int j = 0; j < values[i].length; j++) {
                StaticBuffer col;
                if (0 < shiftEveryNthRow && 0 == i/* +1 */ % shiftEveryNthRow) {
                    // Build a shifted column name: shiftSliceLength 0xFF bytes
                    // followed by the ordinary column buffer for j + 1.
                    ByteBuffer bb = ByteBuffer.allocate(shiftSliceLength + 9);
                    for (int s = 0; s < shiftSliceLength; s++) {
                        bb.put((byte) -1);
                    }
                    bb.put(KeyValueStoreUtil.getBuffer(j + 1).asByteBuffer());
                    bb.flip();
                    col = StaticArrayBuffer.of(bb);

                    // col = KeyValueStoreUtil.getBuffer(j + values[i].length +
                    // 100);
                } else {
                    col = KeyValueStoreUtil.getBuffer(j);
                }
                entries.add(StaticArrayEntry.of(col, KeyValueStoreUtil
                        .getBuffer(values[i][j])));
            }
            if (!entries.isEmpty()) {
                store.mutate(KeyValueStoreUtil.getBuffer(i), entries,
                        KeyColumnValueStore.NO_DELETIONS, tx);
            }
        }
    }

    /**
     * Load a bunch of key-column-values in a way that vaguely resembles a lower
     * triangular matrix.
     * <p/>
     * Iterate over key values {@code k} in the half-open long interval
     * {@code [offset, offset + dimension)}. For each {@code k}, iterate over
     * the column values {@code c} in the closed integer interval
     * {@code [offset, k]}.
     * <p/>
     * For each key-column coordinate specified by a {@code (k, c)} pair in the
     * iteration, write a value one byte long with all bits set (unsigned -1 or
     * signed 255).
     *
     * @param dimension size of loaded data (must be positive)
     * @param offset    offset (must be positive)
     * @throws BackendException unexpected failure
     */
    public void loadLowerTriangularValues(int dimension, int offset) throws BackendException {

        Preconditions.checkArgument(0 < dimension);
        ByteBuffer val = ByteBuffer.allocate(1);
        val.put((byte) -1);
        StaticBuffer staticVal = StaticArrayBuffer.of(val);

        List<Entry> rowAdditions = new ArrayList<Entry>(dimension);

        for (int k = 0; k < dimension; k++) {

            rowAdditions.clear();

            // 8-byte key: high int zero, low int = k + offset.
            ByteBuffer key = ByteBuffer.allocate(8);
            key.putInt(0);
            key.putInt(k + offset);
            key.flip();
            StaticBuffer staticKey = StaticArrayBuffer.of(key);

            for (int c = 0; c <= k; c++) {
                ByteBuffer col = ByteBuffer.allocate(4);
                col.putInt(c + offset);
                col.flip();
                StaticBuffer staticCol = StaticArrayBuffer.of(col);
                rowAdditions.add(StaticArrayEntry.of(staticCol, staticVal));
            }

            store.mutate(staticKey, rowAdditions, Collections.<StaticBuffer>emptyList(), tx);
        }
    }

    /**
     * Deletes every {@code every}-th column, counting across all rows, and
     * returns the deleted (key, column) coordinates for later assertions.
     */
    public Set<KeyColumn> deleteValues(int every) throws BackendException {
        Set<KeyColumn> removed = new HashSet<KeyColumn>();
        int counter = 0;
        for (int i = 0; i < numKeys; i++) {
            List<StaticBuffer> deletions = new ArrayList<StaticBuffer>();
            for (int j = 0; j < numColumns; j++) {
                counter++;
                if (counter % every == 0) { //remove
                    removed.add(new KeyColumn(i, j));
                    deletions.add(KeyValueStoreUtil.getBuffer(j));
                }
            }
            store.mutate(KeyValueStoreUtil.getBuffer(i), KeyColumnValueStore.NO_ADDITIONS, deletions, tx);
        }
        return removed;
    }

    // Deletes all columns of every n-th key; returns the deleted key indices.
    public Set<Integer> deleteKeys(int every) throws BackendException {
        Set<Integer> removed = new
HashSet<Integer>();
        for (int i = 0; i < numKeys; i++) {
            if (i % every == 0) {
                removed.add(i);
                List<StaticBuffer> deletions = new ArrayList<StaticBuffer>();
                for (int j = 0; j < numColumns; j++) {
                    deletions.add(KeyValueStoreUtil.getBuffer(j));
                }
                store.mutate(KeyValueStoreUtil.getBuffer(i), KeyColumnValueStore.NO_ADDITIONS, deletions, tx);
            }
        }
        return removed;
    }

    // Asserts that exactly the keys in `removed` are absent from the store.
    public void checkKeys(Set<Integer> removed) throws BackendException {
        for (int i = 0; i < numKeys; i++) {
            if (removed.contains(i)) {
                Assert.assertFalse(KCVSUtil.containsKey(store, KeyValueStoreUtil.getBuffer(i), tx));
            } else {
                Assert.assertTrue(KCVSUtil.containsKey(store, KeyValueStoreUtil.getBuffer(i), tx));
            }
        }
    }

    public void checkValueExistence(String[][] values) throws BackendException {
        checkValueExistence(values, new HashSet<KeyColumn>());
    }

    /**
     * Asserts that every (key, column) coordinate of the fixture exists,
     * except those listed in `removed`, which must be absent.
     */
    public void checkValueExistence(String[][] values, Set<KeyColumn> removed) throws BackendException {
        for (int i = 0; i < numKeys; i++) {
            for (int j = 0; j < numColumns; j++) {
                boolean result = KCVSUtil.containsKeyColumn(store, KeyValueStoreUtil.getBuffer(i),
                        KeyValueStoreUtil.getBuffer(j), tx);
                if (removed.contains(new KeyColumn(i, j))) {
                    Assert.assertFalse(result);
                } else {
                    Assert.assertTrue(result);
                }
            }
        }
    }

    public void checkValues(String[][] values) throws BackendException {
        checkValues(values, new HashSet<KeyColumn>());
    }

    /**
     * Asserts stored values equal the fixture for every coordinate, except
     * coordinates in `removed`, which must read back as null.
     */
    public void checkValues(String[][] values, Set<KeyColumn> removed) throws BackendException {
        for (int i = 0; i < numKeys; i++) {
            for (int j = 0; j < numColumns; j++) {
                StaticBuffer result = KCVSUtil.get(store, KeyValueStoreUtil.getBuffer(i),
                        KeyValueStoreUtil.getBuffer(j), tx);
                if (removed.contains(new KeyColumn(i, j))) {
                    Assert.assertNull(result);
                } else {
                    Assert.assertEquals(values[i][j], KeyValueStoreUtil.getString(result));
                }
            }
        }
    }

    @Test
    public void storeAndRetrieve() throws BackendException {
        String[][] values = generateValues();
        log.debug("Loading values...");
        loadValues(values);
        //print(values);
        log.debug("Checking values...");
        checkValueExistence(values);
        checkValues(values);
    }

    // Manual benchmark comparing row layouts; deliberately not run as a test.
    //@Test
    public void compareStores() throws BackendException {
        int keys = 1000, columns = 2000;
        boolean normalMode=true;
        String[][] values = new String[keys*2][];
        for (int i = 0; i < keys*2; i++) {
            // Alternate between wide and (nearly) empty rows; `normalMode`
            // flips which parity gets the wide rows.
            if(i%2==0) {
                if (normalMode) {
                    values[i]=new String[columns + 4];
                } else {
                    values[i]=new String[4];
                }
            } else {
                if (normalMode) {
                    values[i]=new String[0];
                } else {
                    values[i]=new String[columns];
                }
            }
            for (int j = 0; j < values[i].length; j++) {
                values[i][j] = RandomGenerator.randomString(30,35);
            }
        }
        log.debug("Loading values: " + keys + "x" + columns);
        long time = System.currentTimeMillis();
        loadValues(values);
        clopen();
        System.out.println("Loading time (ms): " + (System.currentTimeMillis() - time));
        //print(values);

        Random r = new Random();
        int trials = 500;
        log.debug("Reading values: " + trials + " trials");
        for (int i=0; i<10;i++) {
            time = System.currentTimeMillis();
            for (int t = 0; t < trials; t++) {
                int key = r.nextInt(keys)*2;
                assertEquals(2,store.getSlice(new KeySliceQuery(KeyValueStoreUtil.getBuffer(key),
                        KeyValueStoreUtil.getBuffer(2002), KeyValueStoreUtil.getBuffer(2004)), tx).size());
            }
            System.out.println("Reading time (ms): " + (System.currentTimeMillis() - time));
        }
    }

    // Rough load/read timing; prints to stdout rather than asserting timings.
    @Test
    public void storeAndRetrievePerformance() throws BackendException {
        int multiplier = 4;
        int keys = 50 * multiplier, columns = 200;
        String[][] values = KeyValueStoreUtil.generateData(keys, columns);
        log.debug("Loading values: " + keys + "x" + columns);
        long time = System.currentTimeMillis();
        loadValues(values);
        clopen();
        System.out.println("Loading time (ms): " + (System.currentTimeMillis() - time));
        //print(values);

        Random r = new Random();
        int trials = 500 * multiplier;
        int delta = 10;
        log.debug("Reading values: " + trials + " trials");
        for (int i=0; i<1;i++) {
            time = System.currentTimeMillis();
            for (int t = 0; t < trials; t++) {
                int key = r.nextInt(keys);
                int start = r.nextInt(columns - delta);
                store.getSlice(new KeySliceQuery(KeyValueStoreUtil.getBuffer(key),
                        KeyValueStoreUtil.getBuffer(start), KeyValueStoreUtil.getBuffer(start + delta)), tx);
            }
            //multiQuery version
//            List<StaticBuffer> keylist = new ArrayList<StaticBuffer>();
//            for (int t = 0; t < trials; t++) keylist.add(KeyValueStoreUtil.getBuffer(r.nextInt(keys)));
//            int start = r.nextInt(columns - delta);
//            store.getSlice(keylist, new SliceQuery(KeyValueStoreUtil.getBuffer(start), KeyValueStoreUtil.getBuffer(start + delta)), tx);
            System.out.println("Reading time (ms): " + (System.currentTimeMillis() - time));
        }
    }

    // Same as storeAndRetrieve, but reopens the backend before checking.
    @Test
    public void storeAndRetrieveWithClosing() throws BackendException {
        String[][] values = generateValues();
        log.debug("Loading values...");
        loadValues(values);
        clopen();
        log.debug("Checking values...");
        checkValueExistence(values);
        checkValues(values);
    }

    @Test
    public void deleteColumnsTest1() throws BackendException {
        String[][] values = generateValues();
        log.debug("Loading values...");
        loadValues(values);
        clopen();
        Set<KeyColumn> deleted = deleteValues(7);
        log.debug("Checking values...");
        checkValueExistence(values, deleted);
        checkValues(values, deleted);
    }

    // Variant of deleteColumnsTest1: deletes in a fresh tx before reopening.
    @Test
    public void deleteColumnsTest2() throws BackendException {
        String[][] values = generateValues();
        log.debug("Loading values...");
        loadValues(values);
        newTx();
        Set<KeyColumn> deleted = deleteValues(7);
        clopen();
        log.debug("Checking values...");
        checkValueExistence(values, deleted);
        checkValues(values, deleted);
    }

    @Test
    public void deleteKeys() throws BackendException {
        String[][] values = generateValues();
        log.debug("Loading values...");
        loadValues(values);
        newTx();
        Set<Integer> deleted = deleteKeys(11);
        clopen();
        checkKeys(deleted);
    }

    /**
     * Loads a block of data where keys are longs on [idOffset, idOffset +
     * numKeys) and the columns are longs on [idOffset, idOffset + numColumns).
     * {@code idOffset} is {@link KeyValueStoreUtil#idOffset}. Note that
     * identical columns appear on every key.
The loaded values are randomly * generated strings converted to bytes. * <p/> * Calls the store's supported {@code getKeys} method depending on whether * it supports ordered or unordered scan. This logic is delegated to * {@link KCVSUtil#getKeys(KeyColumnValueStore, StoreFeatures, int, int, StoreTransaction)} * . That method uses all-zero and all-one buffers for the key and column * limits and retrieves every key. * <p/> * This method does nothing and returns immediately if the store supports no * scans. */ @Test public void scanTest() throws BackendException { if (manager.getFeatures().hasScan()) { String[][] values = generateValues(); loadValues(values); KeyIterator iterator0 = KCVSUtil.getKeys(store, storeFeatures(), 8, 4, tx); verifyIterator(iterator0,numKeys,1); clopen(); KeyIterator iterator1 = KCVSUtil.getKeys(store, storeFeatures(), 8, 4, tx); KeyIterator iterator2 = KCVSUtil.getKeys(store, storeFeatures(), 8, 4, tx); // The idea is to open an iterator without using it // to make sure that closing a transaction will clean it up. // (important for BerkeleyJE where leaving cursors open causes exceptions) @SuppressWarnings("unused") KeyIterator iterator3 = KCVSUtil.getKeys(store, storeFeatures(), 8, 4, tx); verifyIterator(iterator1,numKeys,1); verifyIterator(iterator2,numKeys,1); } } private void verifyIterator(KeyIterator iter, int expectedKeys, int exepctedCols) { int keys = 0; while (iter.hasNext()) { StaticBuffer b = iter.next(); assertTrue(b!=null && b.length()>0); keys++; RecordIterator<Entry> entries = iter.getEntries(); int cols = 0; while (entries.hasNext()) { Entry e = entries.next(); assertTrue(e!=null && e.length()>0); cols++; } assertEquals(exepctedCols,cols); } assertEquals(expectedKeys,keys); } /** * Verify that * {@link KeyColumnValueStore#getKeys(KeyRangeQuery, StoreTransaction)} * treats the lower key bound as inclusive and the upper key bound as * exclusive. 
Verify that keys less than the start and greater than or equal
     * to the end containing matching columns are not returned.
     *
     * @throws BackendException
     */
    @Test
    @Category({OrderedKeyStoreTests.class})
    public void testOrderedGetKeysRespectsKeyLimit() throws BackendException {
        if (!manager.getFeatures().hasOrderedScan()) {
            log.warn("Can't test key-ordered features on incompatible store. "
                    + "This warning could indicate reduced test coverage and "
                    + "a broken JUnit configuration. Skipping test {}.",
                    name.getMethodName());
            return;
        }

        Preconditions.checkArgument(4 <= numKeys);
        Preconditions.checkArgument(4 <= numColumns);

        // Query a range that excludes the first and last fixture keys.
        final long minKey = KeyValueStoreUtil.idOffset + 1;
        final long maxKey = KeyValueStoreUtil.idOffset + numKeys - 2;
        final long expectedKeyCount = maxKey - minKey;

        String[][] values = generateValues();
        loadValues(values);
        final SliceQuery columnSlice = new SliceQuery(BufferUtil.zeroBuffer(8), BufferUtil.oneBuffer(8)).setLimit(1);

        KeyIterator keys;

        keys = store.getKeys(new KeyRangeQuery(BufferUtil.getLongBuffer(minKey), BufferUtil.getLongBuffer(maxKey), columnSlice), tx);
        assertEquals(expectedKeyCount, KeyValueStoreUtil.count(keys));

        // Repeat after a close/reopen to verify the persisted data.
        clopen();

        keys = store.getKeys(new KeyRangeQuery(BufferUtil.getLongBuffer(minKey), BufferUtil.getLongBuffer(maxKey), columnSlice), tx);
        assertEquals(expectedKeyCount, KeyValueStoreUtil.count(keys));
    }

    /**
     * Check that {@code getKeys} methods respect column slice bounds. Uses
     * nearly the same data as {@link #testOrderedGetKeysRespectsKeyLimit()},
     * except that all columns on every 10th row exceed the {@code getKeys}
     * slice limit.
     * <p/>
     * For each row in this test, either all columns match the slice bounds or
     * all columns fall outside the slice bounds. For this reason, it could be
     * described as a "coarse-grained" or "simple" test of {@code getKeys}'s
     * column bounds checking.
     *
     * @throws BackendException
     */
    @Test
    public void testGetKeysColumnSlicesSimple() throws BackendException {
        if (manager.getFeatures().hasScan()) {

            final int shiftEveryNthRows = 10;
            final int expectedKeyCount = numKeys / shiftEveryNthRows * (shiftEveryNthRows - 1);

            Preconditions.checkArgument(0 == numKeys % shiftEveryNthRows);
            Preconditions.checkArgument(10 < numKeys / shiftEveryNthRows);

            // Every 10th row has its columns shifted out of the slice range.
            String[][] values = generateValues();
            loadValues(values, shiftEveryNthRows, 4);

            RecordIterator<StaticBuffer> i;
            i = KCVSUtil.getKeys(store, storeFeatures(), 8, 4, tx);
            Assert.assertEquals(expectedKeyCount, KeyValueStoreUtil.count(i));

            clopen();

            i = KCVSUtil.getKeys(store, storeFeatures(), 8, 4, tx);
            Assert.assertEquals(expectedKeyCount, KeyValueStoreUtil.count(i));
        }
    }

    /**
     * Test {@code getKeys} with columns slice values chosen to trigger
     * potential fencepost bugs.
     * <p/>
     * Description of data generated for and queried by this test:
     * <p/>
     * Generate a sequence of keys as unsigned integers, starting at zero. Each
     * row has as many columns as the key value. The columns are generated in
     * the same way as the keys. This results in a sort of "lower triangular"
     * data space, with no values above the diagonal.
     *
     * @throws BackendException shouldn't happen
     * @throws IOException      shouldn't happen
     */
    @Test
    public void testGetKeysColumnSlicesOnLowerTriangular() throws BackendException, IOException {
        if (manager.getFeatures().hasScan()) {
            // Sweep slice bounds around the midpoint of the loaded triangle.
            final int offset = 10;
            final int size = 10;
            final int midpoint = size / 2 + offset;
            final int upper = offset + size;
            final int step = 1;
            Preconditions.checkArgument(0 == size % 2);
            Preconditions.checkArgument(0 == offset % 2);
            Preconditions.checkArgument(4 <= size);
            Preconditions.checkArgument(1 <= offset);

            loadLowerTriangularValues(size, offset);

            boolean executed = false;

            if (manager.getFeatures().hasUnorderedScan()) {

                Collection<StaticBuffer> expected = new HashSet<StaticBuffer>(size);

                for (int start = midpoint; start >= offset - step; start -= step) {
                    for (int end = midpoint + 1; end <= upper + step; end += step) {
                        Preconditions.checkArgument(start < end);

                        // Set column bounds
                        StaticBuffer startCol = BufferUtil.getIntBuffer(start);
                        StaticBuffer endCol = BufferUtil.getIntBuffer(end);
                        SliceQuery sq = new SliceQuery(startCol, endCol);

                        // Compute expectation
                        expected.clear();
                        for (long l = Math.max(start, offset); l < upper; l++) {
                            expected.add(BufferUtil.getLongBuffer(l));
                        }

                        // Compute actual
                        KeyIterator i = store.getKeys(sq, tx);
                        Collection<StaticBuffer> actual = Sets.newHashSet(i);

                        // Check
                        log.debug("Checking bounds [{}, {}) (expect {} keys)",
                                new Object[]{startCol, endCol, expected.size()});
                        Assert.assertEquals(expected, actual);
                        i.close();
                        executed = true;
                    }
                }

            } else if (manager.getFeatures().hasOrderedScan()) {

                Collection<StaticBuffer> expected = new ArrayList<StaticBuffer>(size);

                for (int start = midpoint; start >= offset - step; start -= step) {
                    for (int end = midpoint + 1; end <= upper + step; end += step) {
                        Preconditions.checkArgument(start < end);

                        // Set column bounds
                        StaticBuffer startCol = BufferUtil.getIntBuffer(start);
                        StaticBuffer endCol = BufferUtil.getIntBuffer(end);
                        SliceQuery sq = new SliceQuery(startCol, endCol);

                        // Set key bounds
                        StaticBuffer keyStart = BufferUtil.getLongBuffer(start);
                        StaticBuffer keyEnd = BufferUtil.getLongBuffer(end);
                        KeyRangeQuery krq = new KeyRangeQuery(keyStart, keyEnd, sq);

                        // Compute expectation
                        expected.clear();
                        for (long l = Math.max(start, offset); l < Math.min(upper, end); l++) {
                            expected.add(BufferUtil.getLongBuffer(l));
                        }

                        // Compute actual
                        KeyIterator i = store.getKeys(krq, tx);
                        Collection<StaticBuffer> actual = Lists.newArrayList(i);

                        log.debug("Checking bounds key:[{}, {}) & col:[{}, {}) (expect {} keys)",
                                new Object[]{keyStart, keyEnd, startCol, endCol, expected.size()});
                        Assert.assertEquals(expected, actual);
                        i.close();
                        executed = true;
                    }
                }

            } else {
                throw new UnsupportedOperationException(
                        "Illegal store configuration: supportsScan()=true but supportsOrderedScan()=supportsUnorderedScan()=false");
            }

            Preconditions.checkArgument(executed);
        }
    }

    /**
     * Fetches the slice [start, end) of `key` (with an optional limit) in a
     * fresh transaction and asserts the returned entries match the fixture,
     * skipping coordinates listed in `removed`.
     */
    public void checkSlice(String[][] values, Set<KeyColumn> removed, int key,
                           int start, int end, int limit) throws BackendException {
        tx.rollback();
        tx = startTx();
        List<Entry> entries;
        if (limit <= 0)
            entries = store.getSlice(new KeySliceQuery(KeyValueStoreUtil.getBuffer(key), KeyValueStoreUtil.getBuffer(start), KeyValueStoreUtil.getBuffer(end)), tx);
        else
            entries = store.getSlice(new KeySliceQuery(KeyValueStoreUtil.getBuffer(key), KeyValueStoreUtil.getBuffer(start), KeyValueStoreUtil.getBuffer(end)).setLimit(limit), tx);

        int pos = 0;
        for (int i = start; i < end; i++) {
            if (removed.contains(new KeyColumn(key, i))) {
                log.debug("Skipping deleted ({},{})", key, i);
                continue;
            }
            if (limit <= 0 || pos < limit) {
                log.debug("Checking k={}[c_start={},c_end={}](limit={}): column index={}/pos={}", key, start, end, limit, i, pos);
                Assert.assertTrue(entries.size() > pos);
                Entry entry = entries.get(pos);
                int col = KeyValueStoreUtil.getID(entry.getColumn());
                String str = KeyValueStoreUtil.getString(entry.getValueAs(StaticBuffer.STATIC_FACTORY));
                Assert.assertEquals(i, col);
                Assert.assertEquals(values[key][i], str);
            }
            pos++;
        }
Assert.assertNotNull(entries);
        // If a limit applied and more columns survived than the limit allows,
        // exactly `limit` entries must have come back; otherwise all of them.
        if (limit > 0 && pos > limit) Assert.assertEquals(limit, entries.size());
        else Assert.assertEquals(pos, entries.size());
    }

    // Randomized slice checks against an unmodified fixture.
    @Test
    public void intervalTest1() throws BackendException {
        String[][] values = generateValues();
        log.debug("Loading values...");
        loadValues(values);
        Set<KeyColumn> deleted = Sets.newHashSet();
        clopen();
        int trails = 5000;
        for (int t = 0; t < trails; t++) {
            int key = RandomGenerator.randomInt(0, numKeys);
            int start = RandomGenerator.randomInt(0, numColumns);
            int end = RandomGenerator.randomInt(start, numColumns);
            int limit = RandomGenerator.randomInt(1, 30);
            checkSlice(values, deleted, key, start, end, limit);
            checkSlice(values, deleted, key, start, end, -1);
        }
    }

    // Randomized slice checks after deleting every 7th column.
    @Test
    public void intervalTest2() throws BackendException {
        String[][] values = generateValues();
        log.debug("Loading values...");
        loadValues(values);
        newTx();
        Set<KeyColumn> deleted = deleteValues(7);
        clopen();
        int trails = 5000;
        for (int t = 0; t < trails; t++) {
            int key = RandomGenerator.randomInt(0, numKeys);
            int start = RandomGenerator.randomInt(0, numColumns);
            int end = RandomGenerator.randomInt(start, numColumns);
            int limit = RandomGenerator.randomInt(1, 30);
            checkSlice(values, deleted, key, start, end, limit);
            checkSlice(values, deleted, key, start, end, -1);
        }
    }

    @Test
    public void testConcurrentGetSlice() throws ExecutionException, InterruptedException, BackendException {
        testConcurrentStoreOps(false);
    }

    @Test
    public void testConcurrentGetSliceAndMutate() throws BackendException, ExecutionException, InterruptedException {
        testConcurrentStoreOps(true);
    }

    /**
     * Runs 64 concurrent slice readers (optionally also deleting columns)
     * against the loaded fixture and propagates any worker failure.
     */
    private void testConcurrentStoreOps(boolean deletionEnabled) throws BackendException, ExecutionException, InterruptedException {
        // Load data fixture
        String[][] values = generateValues();
        loadValues(values);

        /*
         * Must reopen transaction prior to deletes.
         *
         * This is due to the tx timestamps semantics. The timestamp is set once
         * during the lifetime of the transaction, and multiple calls to mutate will
         * use the same timestamp on each call. This causes deletions and additions of the
         * same k-v coordinates made in the same tx to conflict. On Cassandra, the
         * addition will win and the delete will appear to be dropped.
         *
         * The transaction open right now has already loaded the test fixtures, so any
         * attempt to delete some of the fixture will appear to fail if carried out in this
         * transaction.
         */
        tx.commit();
        tx = startTx();

        // Setup executor and runnables
        final int NUM_THREADS = 64;
        ExecutorService es = Executors.newFixedThreadPool(NUM_THREADS);
        List<Runnable> tasks = new ArrayList<Runnable>(NUM_THREADS);
        for (int i = 0; i < NUM_THREADS; i++) {
            Set<KeyColumn> deleted = Sets.newHashSet();
            if (!deletionEnabled) {
                tasks.add(new ConcurrentRandomSliceReader(values, deleted));
            } else {
                // Each deleting worker confines itself to a single key (i).
                tasks.add(new ConcurrentRandomSliceReader(values, deleted, i));
            }
        }
        List<Future<?>> futures = new ArrayList<Future<?>>(NUM_THREADS);

        // Execute
        for (Runnable r : tasks) {
            futures.add(es.submit(r));
        }

        // Block to completion (and propagate any ExecutionExceptions that fall out of get)
        int collected = 0;
        for (Future<?> f : futures) {
            f.get();
            collected++;
        }
        assertEquals(NUM_THREADS, collected);
    }

    /**
     * Worker that repeatedly reads random slices, optionally deleting a
     * random column from its assigned key range first.
     */
    private class ConcurrentRandomSliceReader implements Runnable {

        private final String[][] values;
        private final Set<KeyColumn> d;
        private final int startKey;
        private final int endKey;
        private final boolean deletionEnabled;

        // Read-only worker over the full key range.
        public ConcurrentRandomSliceReader(String[][] values, Set<KeyColumn> deleted) {
            this.values = values;
            this.d = deleted;
            this.startKey = 0;
            this.endKey = values.length;
            this.deletionEnabled = false;
        }

        // Deleting worker confined to the single key derived from `key`.
        public ConcurrentRandomSliceReader(String[][] values, Set<KeyColumn> deleted, int key) {
            this.values = values;
            this.d = deleted;
            this.startKey = key % values.length;
            this.endKey = startKey + 1;
            this.deletionEnabled = true;
        }

        @Override
        public void run() {
            int trials = 5000;
            for (int t =
0; t < trials; t++) { int key = RandomGenerator.randomInt(startKey, endKey); log.debug("Random key chosen: {} (start={}, end={})", key, startKey, endKey); int start = RandomGenerator.randomInt(0, numColumns); if (start == numColumns - 1) { start = numColumns - 2; } int end = RandomGenerator.randomInt(start + 1, numColumns); int limit = RandomGenerator.randomInt(1, 30); try { if (deletionEnabled) { int delCol = RandomGenerator.randomInt(start, end); ImmutableList<StaticBuffer> deletions = ImmutableList.of(KeyValueStoreUtil.getBuffer(delCol)); store.mutate(KeyValueStoreUtil.getBuffer(key), KeyColumnValueStore.NO_ADDITIONS, deletions, tx); log.debug("Deleting ({},{})", key, delCol); d.add(new KeyColumn(key, delCol)); tx.commit(); tx = startTx(); } //clopen(); checkSlice(values, d, key, start, end, limit); checkSlice(values, d, key, start, end, -1); } catch (BackendException e) { throw new RuntimeException(e); } } } } @Test public void getNonExistentKeyReturnsNull() throws Exception { Assert.assertEquals(null, KeyColumnValueStoreUtil.get(store, tx, 0, "col0")); Assert.assertEquals(null, KeyColumnValueStoreUtil.get(store, tx, 0, "col1")); } @Test public void insertingGettingAndDeletingSimpleDataWorks() throws Exception { KeyColumnValueStoreUtil.insert(store, tx, 0, "col0", "val0"); KeyColumnValueStoreUtil.insert(store, tx, 0, "col1", "val1"); tx.commit(); tx = startTx(); Assert.assertEquals("val0", KeyColumnValueStoreUtil.get(store, tx, 0, "col0")); Assert.assertEquals("val1", KeyColumnValueStoreUtil.get(store, tx, 0, "col1")); KeyColumnValueStoreUtil.delete(store, tx, 0, "col0"); KeyColumnValueStoreUtil.delete(store, tx, 0, "col1"); tx.commit(); tx = startTx(); Assert.assertEquals(null, KeyColumnValueStoreUtil.get(store, tx, 0, "col0")); Assert.assertEquals(null, KeyColumnValueStoreUtil.get(store, tx, 0, "col1")); } @Test public void getSliceRespectsColumnLimit() throws Exception { StaticBuffer key = KeyColumnValueStoreUtil.longToByteBuffer(0); final int cols = 1024; 
List<Entry> entries = new LinkedList<Entry>(); for (int i = 0; i < cols; i++) { StaticBuffer col = KeyColumnValueStoreUtil.longToByteBuffer(i); entries.add(StaticArrayEntry.of(col, col)); } store.mutate(key, entries, KeyColumnValueStore.NO_DELETIONS, tx); tx.commit(); tx = startTx(); /* * When limit is greater than or equal to the matching column count , * all matching columns must be returned. */ StaticBuffer columnStart = KeyColumnValueStoreUtil.longToByteBuffer(0); StaticBuffer columnEnd = KeyColumnValueStoreUtil.longToByteBuffer(cols); List<Entry> result = store.getSlice(new KeySliceQuery(key, columnStart, columnEnd).setLimit(cols), tx); Assert.assertEquals(cols, result.size()); for (int i = 0; i < result.size(); i++) { Entry src = entries.get(i); Entry dst = result.get(i); if (!src.equals(dst)) { int x = 1; } } Assert.assertEquals(entries, result); result = store.getSlice(new KeySliceQuery(key, columnStart, columnEnd).setLimit(cols + 10), tx); Assert.assertEquals(cols, result.size()); Assert.assertEquals(entries, result); /* * When limit is less the matching column count, the columns up to the * limit (ordered bytewise) must be returned. 
*/ result = store.getSlice(new KeySliceQuery(key, columnStart, columnEnd).setLimit(cols - 1), tx); Assert.assertEquals(cols - 1, result.size()); entries.remove(entries.size() - 1); Assert.assertEquals(entries, result); result = store.getSlice(new KeySliceQuery(key, columnStart, columnEnd).setLimit(1), tx); Assert.assertEquals(1, result.size()); List<Entry> firstEntrySingleton = Arrays.asList(entries.get(0)); Assert.assertEquals(firstEntrySingleton, result); } @Test public void getSliceRespectsAllBoundsInclusionArguments() throws Exception { // Test case where endColumn=startColumn+1 StaticBuffer key = KeyColumnValueStoreUtil.longToByteBuffer(0); StaticBuffer columnBeforeStart = KeyColumnValueStoreUtil.longToByteBuffer(776); StaticBuffer columnStart = KeyColumnValueStoreUtil.longToByteBuffer(777); StaticBuffer columnEnd = KeyColumnValueStoreUtil.longToByteBuffer(778); StaticBuffer columnAfterEnd = KeyColumnValueStoreUtil.longToByteBuffer(779); // First insert four test Entries List<Entry> entries = Arrays.asList( StaticArrayEntry.of(columnBeforeStart, columnBeforeStart), StaticArrayEntry.of(columnStart, columnStart), StaticArrayEntry.of(columnEnd, columnEnd), StaticArrayEntry.of(columnAfterEnd, columnAfterEnd)); store.mutate(key, entries, KeyColumnValueStore.NO_DELETIONS, tx); tx.commit(); // getSlice() with only start inclusive tx = startTx(); List<Entry> result = store.getSlice(new KeySliceQuery(key, columnStart, columnEnd), tx); Assert.assertEquals(1, result.size()); Assert.assertEquals(777, KeyColumnValueStoreUtil.bufferToLong(result.get(0).getColumn())); } @Test public void containsKeyReturnsTrueOnExtantKey() throws Exception { StaticBuffer key1 = KeyColumnValueStoreUtil.longToByteBuffer(1); Assert.assertFalse(KCVSUtil.containsKey(store, key1, tx)); KeyColumnValueStoreUtil.insert(store, tx, 1, "c", "v"); tx.commit(); tx = startTx(); Assert.assertTrue(KCVSUtil.containsKey(store, key1, tx)); } @Test public void containsKeyReturnsFalseOnNonexistentKey() throws 
Exception {
        StaticBuffer key1 = KeyColumnValueStoreUtil.longToByteBuffer(1);
        Assert.assertFalse(KCVSUtil.containsKey(store, key1, tx));
    }

    @Test
    public void containsKeyColumnReturnsFalseOnNonexistentInput() throws Exception {
        StaticBuffer key1 = KeyColumnValueStoreUtil.longToByteBuffer(1);
        StaticBuffer c = KeyColumnValueStoreUtil.stringToByteBuffer("c");
        Assert.assertFalse(KCVSUtil.containsKeyColumn(store, key1, c, tx));
    }

    @Test
    public void containsKeyColumnReturnsTrueOnExtantInput() throws Exception {
        KeyColumnValueStoreUtil.insert(store, tx, 1, "c", "v");
        tx.commit();
        tx = startTx();
        StaticBuffer key1 = KeyColumnValueStoreUtil.longToByteBuffer(1);
        StaticBuffer c = KeyColumnValueStoreUtil.stringToByteBuffer("c");
        Assert.assertTrue(KCVSUtil.containsKeyColumn(store, key1, c, tx));
    }

    // Multi-key slice query: each of the 100 keys must return 3 columns.
    @Test
    public void testGetSlices() throws Exception {
        if (!manager.getFeatures().hasMultiQuery()) return;

        populateDBWith100Keys();
        tx.commit();
        tx = startTx();

        List<StaticBuffer> keys = new ArrayList<StaticBuffer>(100);
        for (int i = 1; i <= 100; i++) {
            keys.add(KeyColumnValueStoreUtil.longToByteBuffer(i));
        }
        StaticBuffer start = KeyColumnValueStoreUtil.stringToByteBuffer("a");
        StaticBuffer end = KeyColumnValueStoreUtil.stringToByteBuffer("d");
        Map<StaticBuffer,EntryList> results = store.getSlice(keys, new SliceQuery(start, end), tx);
        Assert.assertEquals(100, results.size());
        for (List<Entry> entries : results.values()) {
            Assert.assertEquals(3, entries.size());
        }
    }

    @Test
    @Category({UnorderedKeyStoreTests.class})
    public void testGetKeysWithSliceQuery() throws Exception {
        if (!manager.getFeatures().hasUnorderedScan()) {
            log.warn("Can't test key-unordered features on incompatible store. "
                    + "This warning could indicate reduced test coverage and "
                    + "a broken JUnit configuration. Skipping test {}.",
                    name.getMethodName());
            return;
        }

        populateDBWith100Keys();
        tx.commit();
        tx = startTx();

        KeyIterator keyIterator = store.getKeys(
                new SliceQuery(new ReadArrayBuffer("b".getBytes()),
                        new ReadArrayBuffer("c".getBytes())), tx);

        examineGetKeysResults(keyIterator, 0, 100, 1);
    }

    @Test
    @Category({OrderedKeyStoreTests.class})
    public void testGetKeysWithKeyRange() throws Exception {
        if (!manager.getFeatures().hasOrderedScan()) {
            log.warn("Can't test ordered scans on incompatible store. "
                    + "This warning could indicate reduced test coverage and "
                    + "shouldn't happen in an ideal JUnit configuration. "
                    + "Skipping test {}.", name.getMethodName());
            return;
        }

        populateDBWith100Keys();
        tx.commit();
        tx = startTx();

        KeyIterator keyIterator = store.getKeys(new KeyRangeQuery(
                KeyColumnValueStoreUtil.longToByteBuffer(10), // key start
                KeyColumnValueStoreUtil.longToByteBuffer(40), // key end
                new ReadArrayBuffer("b".getBytes()), // column start
                new ReadArrayBuffer("c".getBytes())), tx);

        examineGetKeysResults(keyIterator, 10, 40, 1);
    }

    // Timing-sensitive (sleeps across TTL boundaries), hence BrittleTests.
    @Category({ BrittleTests.class })
    @Test
    public void testTtl() throws Exception {
        if (!manager.getFeatures().hasCellTTL()) {
            return;
        }
        StaticBuffer key = KeyColumnValueStoreUtil.longToByteBuffer(0);

        // TTLs in seconds per column; 0 means no expiration.
        int ttls[] = new int[]{0, 1, 2};
        List<Entry> additions = new LinkedList<Entry>();
        for (int i = 0; i < ttls.length; i++) {
            StaticBuffer col = KeyColumnValueStoreUtil.longToByteBuffer(i);
            StaticArrayEntry entry = (StaticArrayEntry) StaticArrayEntry.of(col, col);
            entry.setMetaData(EntryMetaData.TTL, ttls[i]);
            additions.add(entry);
        }

        store.mutate(key, additions, KeyColumnValueStore.NO_DELETIONS, tx);
        tx.commit();
        // commitTime starts just after the commit, so we won't check for expiration too early
        long commitTime = System.currentTimeMillis();

        tx = startTx();

        StaticBuffer columnStart = KeyColumnValueStoreUtil.longToByteBuffer(0);
        StaticBuffer columnEnd = KeyColumnValueStoreUtil.longToByteBuffer(ttls.length);
        List<Entry> result =
                store.getSlice(new KeySliceQuery(key, columnStart, columnEnd).setLimit(ttls.length), tx);
        Assert.assertEquals(ttls.length, result.size());

        // wait for one cell to expire
        Thread.sleep(commitTime + 1001 - System.currentTimeMillis());

        // cells immediately expire upon TTL, even before rollback()
        result = store.getSlice(new KeySliceQuery(key, columnStart, columnEnd).setLimit(ttls.length), tx);
        Assert.assertEquals(ttls.length - 1, result.size());

        tx.rollback();
        result = store.getSlice(new KeySliceQuery(key, columnStart, columnEnd).setLimit(ttls.length), tx);
        Assert.assertEquals(ttls.length - 1, result.size());

        Thread.sleep(commitTime + 2001 - System.currentTimeMillis());
        tx.rollback();
        result = store.getSlice(new KeySliceQuery(key, columnStart, columnEnd).setLimit(ttls.length), tx);
        Assert.assertEquals(ttls.length - 2, result.size());

        // cell 0 doesn't expire due to TTL of 0 (infinite)
        Thread.sleep(commitTime + 4001 - System.currentTimeMillis());
        tx.rollback();
        result = store.getSlice(new KeySliceQuery(key, columnStart, columnEnd).setLimit(ttls.length), tx);
        Assert.assertEquals(ttls.length - 2, result.size());
    }

    /**
     * Verifies a store-level TTL (wrapping a cell-TTL manager in
     * TTLKVCSManager when needed): data must vanish after the TTL elapses.
     */
    @Test
    public void testStoreTTL() throws Exception {
        KeyColumnValueStoreManager storeManager = manager;
        if (storeManager.getFeatures().hasCellTTL()) {
            storeManager = new TTLKVCSManager(storeManager,101);
        } else if (!storeManager.getFeatures().hasStoreTTL()) {
            return;
        }

        assertTrue(storeManager.getFeatures().hasStoreTTL());
        assertTrue(storeManager instanceof CustomizeStoreKCVSManager);

        final TimeUnit sec = TimeUnit.SECONDS;
        final int storeTTLSeconds = (int)TestGraphConfigs.getTTL(sec);
        KeyColumnValueStore storeWithTTL = ((CustomizeStoreKCVSManager) storeManager).openDatabase("testStore_with_TTL", storeTTLSeconds);

        populateDBWith100Keys(storeWithTTL);

        tx.commit();
        tx = startTx();

        final StaticBuffer key = KeyColumnValueStoreUtil.longToByteBuffer(2);

        StaticBuffer start = KeyColumnValueStoreUtil.stringToByteBuffer("a");
        StaticBuffer end = KeyColumnValueStoreUtil.stringToByteBuffer("d");
        EntryList results = storeWithTTL.getSlice(new KeySliceQuery(key, new SliceQuery(start, end)), tx);
        Assert.assertEquals(3, results.size());

        // Sleep 25% past the TTL to avoid a race with expiration.
        Thread.sleep(TimeUnit.MILLISECONDS.convert((long)Math.ceil(storeTTLSeconds * 1.25), sec));

        tx.commit();
        tx = startTx();

        results = storeWithTTL.getSlice(new KeySliceQuery(key, new SliceQuery(start, end)), tx);
        Assert.assertEquals(0, results.size()); // should be empty if TTL was applied properly

        storeWithTTL.close();
    }

    protected void populateDBWith100Keys() throws Exception {
        populateDBWith100Keys(store);
    }

    // Inserts keys 1..100, each with columns "a","b","c" and random values.
    protected void populateDBWith100Keys(KeyColumnValueStore store) throws Exception {
        Random random = new Random();
        for (int i = 1; i <= 100; i++) {
            KeyColumnValueStoreUtil.insert(store, tx, i, "a", "v" + random.nextLong());
            KeyColumnValueStoreUtil.insert(store, tx, i, "b", "v" + random.nextLong());
            KeyColumnValueStoreUtil.insert(store, tx, i, "c", "v" + random.nextLong());
        }
    }

    /**
     * Consumes the iterator and asserts it yields exactly the keys in
     * [startKey, endKey) with `expectedColumns` columns each.
     * NOTE(review): the body continues beyond this file chunk.
     */
    protected void examineGetKeysResults(KeyIterator keyIterator,
                                         long startKey, long endKey, int expectedColumns)
            throws BackendException {
        Assert.assertNotNull(keyIterator);

        int count = 0;
        int expectedNumKeys = (int) (endKey - startKey);
        List<StaticBuffer> existingKeys = new ArrayList<StaticBuffer>(expectedNumKeys);

        for (int i = (int) (startKey == 0 ?
1 : startKey); i <= endKey; i++) { existingKeys.add(KeyColumnValueStoreUtil.longToByteBuffer(i)); } while (keyIterator.hasNext()) { StaticBuffer key = keyIterator.next(); Assert.assertNotNull(key); Assert.assertTrue(existingKeys.contains(key)); RecordIterator<Entry> entries = keyIterator.getEntries(); Assert.assertNotNull(entries); int entryCount = 0; while (entries.hasNext()) { Assert.assertNotNull(entries.next()); entryCount++; } Assert.assertEquals(expectedColumns, entryCount); count++; } Assert.assertEquals(expectedNumKeys, count); } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.applicationcostprofiler.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request parameters for the UpdateReportDefinition operation. All fields are required by the service.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/AWSApplicationCostProfiler-2020-09-10/UpdateReportDefinition"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateReportDefinitionRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** Required. ID of the report to update. */
    private String reportId;
    /** Required. Description of the report. */
    private String reportDescription;
    /** Required. The cadence to generate the report. See {@link ReportFrequency}. */
    private String reportFrequency;
    /** Required. The format to use for the generated report. See {@link Format}. */
    private String format;
    /** Required. Amazon S3 location where Application Cost Profiler uploads the report. */
    private S3Location destinationS3Location;

    /**
     * Sets the ID of the report to update.
     *
     * @param reportId Required. ID of the report to update.
     */
    public void setReportId(String reportId) {
        this.reportId = reportId;
    }

    /**
     * Returns the ID of the report to update.
     *
     * @return Required. ID of the report to update.
     */
    public String getReportId() {
        return this.reportId;
    }

    /**
     * Fluent variant of {@link #setReportId(String)}.
     *
     * @param reportId Required. ID of the report to update.
     * @return this request, for call chaining.
     */
    public UpdateReportDefinitionRequest withReportId(String reportId) {
        setReportId(reportId);
        return this;
    }

    /**
     * Sets the description of the report.
     *
     * @param reportDescription Required. Description of the report.
     */
    public void setReportDescription(String reportDescription) {
        this.reportDescription = reportDescription;
    }

    /**
     * Returns the description of the report.
     *
     * @return Required. Description of the report.
     */
    public String getReportDescription() {
        return this.reportDescription;
    }

    /**
     * Fluent variant of {@link #setReportDescription(String)}.
     *
     * @param reportDescription Required. Description of the report.
     * @return this request, for call chaining.
     */
    public UpdateReportDefinitionRequest withReportDescription(String reportDescription) {
        setReportDescription(reportDescription);
        return this;
    }

    /**
     * Sets the cadence to generate the report.
     *
     * @param reportFrequency Required. The cadence to generate the report.
     * @see ReportFrequency
     */
    public void setReportFrequency(String reportFrequency) {
        this.reportFrequency = reportFrequency;
    }

    /**
     * Returns the cadence to generate the report.
     *
     * @return Required. The cadence to generate the report.
     * @see ReportFrequency
     */
    public String getReportFrequency() {
        return this.reportFrequency;
    }

    /**
     * Fluent variant of {@link #setReportFrequency(String)}.
     *
     * @param reportFrequency Required. The cadence to generate the report.
     * @return this request, for call chaining.
     * @see ReportFrequency
     */
    public UpdateReportDefinitionRequest withReportFrequency(String reportFrequency) {
        setReportFrequency(reportFrequency);
        return this;
    }

    /**
     * Fluent, enum-typed variant of {@link #setReportFrequency(String)}; stores the enum's string form.
     *
     * @param reportFrequency Required. The cadence to generate the report.
     * @return this request, for call chaining.
     * @see ReportFrequency
     */
    public UpdateReportDefinitionRequest withReportFrequency(ReportFrequency reportFrequency) {
        this.reportFrequency = reportFrequency.toString();
        return this;
    }

    /**
     * Sets the format to use for the generated report.
     *
     * @param format Required. The format to use for the generated report.
     * @see Format
     */
    public void setFormat(String format) {
        this.format = format;
    }

    /**
     * Returns the format to use for the generated report.
     *
     * @return Required. The format to use for the generated report.
     * @see Format
     */
    public String getFormat() {
        return this.format;
    }

    /**
     * Fluent variant of {@link #setFormat(String)}.
     *
     * @param format Required. The format to use for the generated report.
     * @return this request, for call chaining.
     * @see Format
     */
    public UpdateReportDefinitionRequest withFormat(String format) {
        setFormat(format);
        return this;
    }

    /**
     * Fluent, enum-typed variant of {@link #setFormat(String)}; stores the enum's string form.
     *
     * @param format Required. The format to use for the generated report.
     * @return this request, for call chaining.
     * @see Format
     */
    public UpdateReportDefinitionRequest withFormat(Format format) {
        this.format = format.toString();
        return this;
    }

    /**
     * Sets the Amazon S3 location where Application Cost Profiler uploads the report.
     *
     * @param destinationS3Location Required. Amazon S3 upload location for the report.
     */
    public void setDestinationS3Location(S3Location destinationS3Location) {
        this.destinationS3Location = destinationS3Location;
    }

    /**
     * Returns the Amazon S3 location where Application Cost Profiler uploads the report.
     *
     * @return Required. Amazon S3 upload location for the report.
     */
    public S3Location getDestinationS3Location() {
        return this.destinationS3Location;
    }

    /**
     * Fluent variant of {@link #setDestinationS3Location(S3Location)}.
     *
     * @param destinationS3Location Required. Amazon S3 upload location for the report.
     * @return this request, for call chaining.
     */
    public UpdateReportDefinitionRequest withDestinationS3Location(S3Location destinationS3Location) {
        setDestinationS3Location(destinationS3Location);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Field order and separators match the SDK's generated layout exactly.
        StringBuilder text = new StringBuilder("{");
        if (getReportId() != null)
            text.append("ReportId: ").append(getReportId()).append(",");
        if (getReportDescription() != null)
            text.append("ReportDescription: ").append(getReportDescription()).append(",");
        if (getReportFrequency() != null)
            text.append("ReportFrequency: ").append(getReportFrequency()).append(",");
        if (getFormat() != null)
            text.append("Format: ").append(getFormat()).append(",");
        if (getDestinationS3Location() != null)
            text.append("DestinationS3Location: ").append(getDestinationS3Location());
        return text.append("}").toString();
    }

    /** Null-safe equality check; equivalent to the generated XOR/equals idiom. */
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof (not getClass) comparison, matching the generated contract; also rejects null.
        if (!(obj instanceof UpdateReportDefinitionRequest))
            return false;
        UpdateReportDefinitionRequest that = (UpdateReportDefinitionRequest) obj;
        return fieldEquals(getReportId(), that.getReportId())
                && fieldEquals(getReportDescription(), that.getReportDescription())
                && fieldEquals(getReportFrequency(), that.getReportFrequency())
                && fieldEquals(getFormat(), that.getFormat())
                && fieldEquals(getDestinationS3Location(), that.getDestinationS3Location());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation (seed 1, null -> 0) as the generated per-field chain.
        int result = 1;
        for (Object field : new Object[] { getReportId(), getReportDescription(), getReportFrequency(), getFormat(),
                getDestinationS3Location() }) {
            result = 31 * result + (field == null ? 0 : field.hashCode());
        }
        return result;
    }

    @Override
    public UpdateReportDefinitionRequest clone() {
        return (UpdateReportDefinitionRequest) super.clone();
    }
}
package com.planet_ink.siplet.support;
import java.applet.*;
import java.net.*;
import java.util.*;

import com.planet_ink.siplet.applet.Siplet;

/*
Copyright 2000-2014 Bo Zimmerman

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

	http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Parser/driver for MSP (MUD Sound Protocol-style) SOUND/MUSIC triggers embedded
 * in the telnet stream. {@link #process} scans the buffer for a trigger, strips it
 * from the buffer, and queues javascript player commands in {@link #jscriptBuffer},
 * which the host applet drains via {@link #getAnyJScript()}.
 */
@SuppressWarnings({"unchecked","rawtypes"})
public class MSP
{
	public MSP(){super();}

	// Shared cache of previously-built players, keyed by UPPER-CASED sound/music key.
	private static Hashtable cache=new Hashtable();
	// Default URL prefixes for media files, settable via the V= parameter of an "off" trigger.
	private String defMusicPath=null;
	private String defSoundPath=null;
	private String defPath=null;
	// Currently active clip per channel (at most one MUSIC and one SOUND clip at a time).
	private MSPplayer musicClip = null;
	private MSPplayer soundClip = null;
	// Pending javascript commands; guarded by synchronized(jscriptBuffer).
	private final StringBuffer jscriptBuffer=new StringBuffer("");

	/**
	 * Drains and returns any queued javascript player commands, or "" if none.
	 * Thread-safe: synchronizes on the buffer it shares with process().
	 */
	public String getAnyJScript()
	{
		synchronized(jscriptBuffer)
		{
			if(jscriptBuffer.length()==0)
				return "";
			final String s=jscriptBuffer.toString();
			jscriptBuffer.setLength(0);
			return s;
		}
	}

	/**
	 * Trims whitespace and a single pair of surrounding double-quotes from s.
	 * The closing quote is only removed when an opening quote was present.
	 */
	public String trimQuotes(String s)
	{
		s=s.trim();
		if(s.startsWith("\""))
		{
			s=s.substring(1);
			if(s.endsWith("\""))
				s=s.substring(0,s.length()-1);
		}
		return s.trim();
	}

	/**
	 * Processes a SOUND/MUSIC trigger starting at index i of buf. The tag is read from
	 * buf[i+2..i+7), so the trigger is expected to carry a 2-character prefix
	 * (presumably "!!SOUND(" / "!!MUSIC(" — confirm against the caller).
	 *
	 * Parameters after the media key: V= volume, L= loop/repeat count, P= priority,
	 * C= continue flag, U= base URL. A first token of "off" stops the channel's clip
	 * (and may set default paths via V=).
	 *
	 * @param buf         incoming text buffer; a consumed trigger is deleted from it in place
	 * @param i           index of the start of the candidate trigger in buf
	 * @param applet      host applet, passed to new MSPplayer instances
	 * @param useExternal passed through to the players' start/stop javascript generators
	 * @return -2 if this is not an MSP trigger, -1 if the trigger was consumed,
	 *         or the original index i when the trigger is incomplete (no terminator
	 *         seen yet — presumably so the caller retries when more data arrives)
	 */
	public int process(StringBuffer buf, int i, Siplet applet, boolean useExternal)
	{
		final int oldI=i;
		if(i+12>=buf.length())
			return -2;
		final String tag=buf.substring(i+2,i+7).toUpperCase();
		if((!tag.equals("SOUND"))&&(!tag.equals("MUSIC")))
			return -2;
		if(buf.charAt(i+7)!='(')
			return -2;
		i+=7;
		// Tokenize the parenthesized argument list on spaces; ')' or '\n' terminates.
		final Vector parts=new Vector();
		final StringBuffer part=new StringBuffer("");
		boolean done=false;
		while(((++i)<buf.length())&&(!done))
		{
			switch(buf.charAt(i))
			{
			case ')':
				if(part.length()>0)
					parts.addElement(part.toString());
				done=true;
				break;
			case ' ':
				if(part.length()>0)
					parts.addElement(part.toString());
				part.setLength(0);
				break;
			case '\n':
				if(part.length()>0)
					parts.addElement(part.toString());
				done=true;
			//$FALL-THROUGH$
			default:
				part.append(buf.charAt(i));
				break;
			}
		}
		if(!done)
			return oldI; // incomplete trigger; leave buf untouched
		buf.delete(oldI,i+1); // strip the consumed trigger from the stream
		if(parts.size()==0)
			return -1;
		// Clear out a clip that has already finished playing on this channel.
		MSPplayer currentClip=tag.equals("MUSIC")?musicClip:soundClip;
		if((currentClip!=null)&&(!currentClip.playing))
		{
			if(tag.equals("MUSIC"))
				musicClip=null;
			else
			if(tag.equals("SOUND"))
				soundClip=null;
			currentClip=null;
		}
		// "off" stops the channel; its V= parameter sets the default media path.
		if(((String)parts.firstElement()).equalsIgnoreCase("off"))
		{
			if(tag.equals("MUSIC")&&(musicClip!=null))
			{
				jscriptBuffer.append(musicClip.stopPlaying("musicplayer",useExternal));
			}
			if(tag.equals("SOUND")&&(soundClip!=null))
			{
				jscriptBuffer.append(soundClip.stopPlaying("soundplayer",useExternal));
			}
			for(int v=1;v<parts.size();v++)
			{
				String s=((String)parts.elementAt(v)).toUpperCase();
				if(s.startsWith("V="))
				{
					s=trimQuotes(s.substring(2));
					if(!s.endsWith("/"))
						s=s+"/";
					if(tag.equals("MUSIC"))
						defMusicPath=s;
					else
					if(tag.equals("SOUND"))
						defSoundPath=s;
					if(defPath==null)
						defPath=s;
					break;
				}
			}
			return -1;
		}
		// Build a candidate player from the trigger's key and parameters.
		final MSPplayer newOne=new MSPplayer(applet);
		newOne.key=(String)parts.firstElement();
		newOne.url=(tag.equals("MUSIC")?defMusicPath:defSoundPath);
		if(newOne.url==null)
			newOne.url=defPath;
		final String defaultUrl=newOne.url;
		for(int v=1;v<parts.size();v++)
		{
			final String s=(String)parts.elementAt(v);
			if((s.startsWith("V="))||(s.startsWith("v=")))
				newOne.volume=Util.s_int(trimQuotes(s.substring(2)).trim());
			if((s.startsWith("L="))||(s.startsWith("l=")))
				newOne.repeats=Util.s_int(trimQuotes(s.substring(2)).trim());
			if((s.startsWith("P="))||(s.startsWith("p=")))
				newOne.priority=Util.s_int(trimQuotes(s.substring(2)).trim());
			if((s.startsWith("C="))||(s.startsWith("c=")))
				newOne.continueValue=Util.s_int(trimQuotes(s.substring(2)).trim());
			if((s.startsWith("U="))||(s.startsWith("u=")))
				newOne.url=trimQuotes(s.substring(2)).trim();
		}
		if((newOne.url!=null)&&(!newOne.url.trim().endsWith("/")))
			newOne.url=newOne.url.trim()+"/";
		// A lower-or-equal-priority request never preempts the clip already playing.
		if((currentClip!=null)&&(currentClip.priority>=newOne.priority))
			return -1;
		// Re-trigger of the same key just refreshes its repeat settings.
		if((currentClip!=null)&&(currentClip.key.equals(newOne.key)))
		{
			currentClip.repeats=newOne.repeats;
			if(currentClip.continueValue==0)
				currentClip.iterations=0;
			return -1;
		}
		// Different key with higher priority: stop whatever is playing on this channel.
		if(currentClip!=null)
		{
			jscriptBuffer.append(currentClip.stopPlaying(currentClip.tag.equals("MUSIC")?"musicplayer":"soundplayer",useExternal));
			if(tag.equals("MUSIC"))
				musicClip=null;
			else
			if(tag.equals("SOUND"))
				soundClip=null;
			currentClip=null;
		}
		currentClip=(MSPplayer)cache.get(newOne.key.toUpperCase());
		if(currentClip==null)
		{
			currentClip=newOne;
			currentClip.tag=tag;
			// BUGFIX: store under the upper-cased key — lookups above use
			// key.toUpperCase(), so caching under the raw key meant mixed-case
			// keys never hit the cache and duplicate entries accumulated.
			cache.put(newOne.key.toUpperCase(),currentClip);
		}
		else
		{
			// Cached player: only override settings the trigger explicitly changed
			// from their defaults (volume 100, repeats 1, priority 50, continue 1).
			if(newOne.volume!=100)
				currentClip.volume=newOne.volume;
			if(newOne.repeats!=1)
				currentClip.repeats=newOne.repeats;
			if(newOne.priority!=50)
				currentClip.priority=newOne.priority;
			if(newOne.continueValue!=1)
				currentClip.continueValue=newOne.continueValue;
			if( !newOne.url.equals( defaultUrl ) )
				currentClip.url=newOne.url;
		}
		jscriptBuffer.append(currentClip.startPlaying(tag.equals("MUSIC")?"musicplayer":"soundplayer",useExternal));
		if(tag.equals("MUSIC"))
			musicClip=currentClip;
		else
		if(tag.equals("SOUND"))
			soundClip=currentClip;
		return -1;
	}
}
package evemanutool.gui.manu.frameworks;

import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.event.TableModelEvent;
import javax.swing.event.TableModelListener;
import javax.swing.text.Document;

import evemanutool.constants.UserPrefConstants;
import evemanutool.data.database.Blueprint;
import evemanutool.data.database.ManuQuote;
import evemanutool.data.database.Material;
import evemanutool.gui.corp.ProductionPanel;
import evemanutool.gui.general.components.LabelBox;
import evemanutool.gui.general.components.NumberLabel;
import evemanutool.gui.general.tabel.BooleanCellRenderer;
import evemanutool.gui.general.tabel.ScrollableTablePanel;
import evemanutool.gui.main.EMT;
import evemanutool.gui.manu.components.MaterialModel;
import evemanutool.prefs.Preferences;
import evemanutool.utils.databases.BlueprintDB;
import evemanutool.utils.databases.CorpApiDB;
import evemanutool.utils.databases.PriceDB;

/**
 * Abstract Swing panel that displays a manufacturing quote for a blueprint:
 * a header (name/ID/max runs/NPC cost), per-run and all-run cost/income/profit
 * figures, profit-rate figures, a material table, and an "Add To Corp" action.
 * Subclasses supply the quote lifecycle via {@link #initQuote}, {@link #makeQuote}
 * and {@link #updateFields}, and register their own input widgets through the
 * assignListener() methods so any edit recomputes the quote.
 */
@SuppressWarnings("serial")
public abstract class InspectPanel extends JPanel implements UserPrefConstants{

	//DB:s and references.
	protected Preferences prefs;
	protected PriceDB pdb;
	protected BlueprintDB bdb;
	private CorpApiDB cdb;
	private ProductionPanel prodPanel;

	//Quote data. modBpo is a private copy of the inspected blueprint (see setBpo);
	//currentQuote is the quote currently rendered, or null before the first setBpo.
	protected Blueprint modBpo;
	private ManuQuote currentQuote;

	//Main panels, exposed to subclasses via the get*Row()/getMaterialPanel() accessors.
	private JPanel headerPanel = new JPanel();
	private JPanel pricePanel = new JPanel();
	private JPanel profitPanel = new JPanel();
	private JPanel actionPanel = new JPanel();
	private ScrollableTablePanel<Material> matPanel;

	//Graphical components.
	private JLabel bpoName = new JLabel("BPO name");
	private NumberLabel bpoId = new NumberLabel(false, "");
	private NumberLabel bpoMaxRuns = new NumberLabel(false, "");
	private NumberLabel bpoNPCCost = new NumberLabel(false, " ISK");
	private JButton addQuoteBtn = new JButton("Add To Corp");
	private NumberLabel manCost = new NumberLabel(true, " ISK");
	private NumberLabel manCostAll = new NumberLabel(true, " ISK");
	private NumberLabel sellPrice = new NumberLabel(true, " ISK");
	private NumberLabel sellPriceAll = new NumberLabel(true, " ISK");
	private NumberLabel profit = new NumberLabel(true, " ISK");
	private NumberLabel profitAll = new NumberLabel(true, " ISK");
	private NumberLabel profitPerH = new NumberLabel(true, " ISK");
	private NumberLabel profitPercent = new NumberLabel(true, "%");

	//Listener instance shared by every input source (documents, combo boxes,
	//the material table model and the add button).
	private InputListener listener = new InputListener();

	/**
	 * Builds the static layout (header, price, profit, action and material rows).
	 * Subclasses compose these rows into their own layout via the accessors.
	 *
	 * @param prefs     user preferences
	 * @param pdb       price database
	 * @param bdb       blueprint database
	 * @param cdb       corp API database; target of the "Add To Corp" action
	 * @param prodPanel production panel refreshed after a quote is added
	 */
	public InspectPanel(Preferences prefs, PriceDB pdb, BlueprintDB bdb, CorpApiDB cdb, ProductionPanel prodPanel) {

		//Set fields.
		this.prefs = prefs;
		this.pdb = pdb;
		this.bdb = bdb;
		this.cdb = cdb;
		this.prodPanel = prodPanel;

		//Setup Layout.
		setLayout(new BoxLayout(this, BoxLayout.Y_AXIS));

		//Header: bold BPO name on the first line, type id / max runs / NPC cost on the second.
		headerPanel.setLayout(new BoxLayout(headerPanel, BoxLayout.Y_AXIS));
		JPanel p1 = new JPanel(new FlowLayout(FlowLayout.LEADING, 15, 10));
		bpoName.setFont(new Font(Font.SANS_SERIF, Font.BOLD, 16));
		p1.add(bpoName);
		JPanel p2 = new JPanel(new FlowLayout(FlowLayout.LEADING, 15, 10));
		bpoId.setFont(new Font(Font.SANS_SERIF, Font.BOLD, 11));
		bpoMaxRuns.setFont(new Font(Font.SANS_SERIF, Font.BOLD, 11));
		bpoNPCCost.setFont(new Font(Font.SANS_SERIF, Font.BOLD, 11));
		p2.add(new LabelBox("Bpo Type Id", bpoId, BoxLayout.X_AXIS));
		p2.add(new LabelBox("Max Runs", bpoMaxRuns, BoxLayout.X_AXIS));
		p2.add(new LabelBox("NPC Cost", bpoNPCCost, BoxLayout.X_AXIS));
		headerPanel.add(p1);
		headerPanel.add(p2);

		//Price row: per-run figures (box 1) next to all-run figures (box 2).
		pricePanel.setLayout(new FlowLayout(FlowLayout.LEADING, 15, 10));

		//First box.
		JPanel labelBox1 = new JPanel();
		labelBox1.setLayout(new BoxLayout(labelBox1, BoxLayout.Y_AXIS));
		labelBox1.add(new JLabel("Manufacture Cost"));
		labelBox1.add(new JLabel("Sell Income"));
		labelBox1.add(new JLabel("Profit"));
		JPanel valueBox1 = new JPanel();
		valueBox1.setLayout(new BoxLayout(valueBox1, BoxLayout.Y_AXIS));
		valueBox1.setPreferredSize(new Dimension(120, 50));
		//Adjust components.
		manCost.setAlignmentX(RIGHT_ALIGNMENT);
		sellPrice.setAlignmentX(RIGHT_ALIGNMENT);
		profit.setAlignmentX(RIGHT_ALIGNMENT);
		valueBox1.add(manCost);
		valueBox1.add(sellPrice);
		valueBox1.add(profit);

		//Second box
		JPanel labelBox2 = new JPanel();
		labelBox2.setLayout(new BoxLayout(labelBox2, BoxLayout.Y_AXIS));
		labelBox2.add(new JLabel("Manufacture Cost (All runs)"));
		labelBox2.add(new JLabel("Sell Income (All runs)"));
		labelBox2.add(new JLabel("Profit (All runs)"));
		JPanel valueBox2 = new JPanel();
		valueBox2.setLayout(new BoxLayout(valueBox2, BoxLayout.Y_AXIS));
		valueBox2.setPreferredSize(new Dimension(120, 50));
		//Adjust components.
		manCostAll.setAlignmentX(RIGHT_ALIGNMENT);
		sellPriceAll.setAlignmentX(RIGHT_ALIGNMENT);
		profitAll.setAlignmentX(RIGHT_ALIGNMENT);
		valueBox2.add(manCostAll);
		valueBox2.add(sellPriceAll);
		valueBox2.add(profitAll);

		pricePanel.add(labelBox1);
		pricePanel.add(valueBox1);
		pricePanel.add(Box.createHorizontalStrut(10));
		pricePanel.add(labelBox2);
		pricePanel.add(valueBox2);

		//Profit row: profit/hour and profit percentage.
		profitPanel.setLayout(new FlowLayout(FlowLayout.LEADING, 15, 10));
		JPanel labelBox3 = new JPanel();
		labelBox3.setLayout(new BoxLayout(labelBox3, BoxLayout.Y_AXIS));
		labelBox3.add(new JLabel("Profit/hour"));
		labelBox3.add(new JLabel("Profit %"));
		JPanel valueBox3 = new JPanel();
		valueBox3.setLayout(new BoxLayout(valueBox3, BoxLayout.Y_AXIS));
		valueBox3.setPreferredSize(new Dimension(120, 30));
		//Adjust components.
		profitPerH.setAlignmentX(RIGHT_ALIGNMENT);
		profitPercent.setAlignmentX(RIGHT_ALIGNMENT);
		valueBox3.add(profitPerH);
		valueBox3.add(profitPercent);
		profitPanel.add(labelBox3);
		profitPanel.add(valueBox3);

		//Action panel.
		actionPanel.setLayout(new FlowLayout(FlowLayout.LEADING, 15, 0));
		actionPanel.add(addQuoteBtn);
		addQuoteBtn.addActionListener(listener);

		//Material panel.
		matPanel = new ScrollableTablePanel<>(new MaterialModel());
		matPanel.setBorder(BorderFactory.createTitledBorder("Materials"));
		//To make non-applicable checkboxes invisible.
		matPanel.getTable().getColumnModel().getColumn(2).setCellRenderer(new BooleanCellRenderer());
		//Set update listener.
		matPanel.getModel().addTableModelListener(listener);
	}

	/**
	 * Starts inspecting a blueprint: keeps a defensive copy, discards any previous
	 * quote and renders the initial quote produced by the subclass.
	 *
	 * @param b the blueprint to inspect (copied, not retained)
	 */
	public void setBpo(Blueprint b) {
		//Set bpo.
		modBpo = new Blueprint(b);
		currentQuote = null;
		updateMainComponents(initQuote(modBpo));
	}

	/** Creates the initial quote for a freshly selected blueprint. */
	protected abstract ManuQuote initQuote(Blueprint b);

	/**
	 * Recomputes a quote from the subclass' current input state; may return null
	 * to indicate the inputs are not (yet) valid, in which case nothing updates.
	 */
	protected abstract ManuQuote makeQuote(ManuQuote currentQuote);

	/** Lets the subclass refresh its own extra components from the new quote. */
	protected abstract void updateFields(ManuQuote q);

	/**
	 * Renders quote q into all shared components (header, material table,
	 * cost/income/profit labels) and makes it the current quote.
	 * Per-run figures are derived by dividing the all-run totals by the run count.
	 */
	protected void updateMainComponents(ManuQuote q) {

		//Set the current quote.
		currentQuote = q;

		matPanel.getModel().setData(q.getMatList());

		bpoName.setText(q.getBpo().getBlueprintItem().getName());
		bpoId.setValue(q.getBpo().getBlueprintItem().getTypeId());
		bpoMaxRuns.setValue(q.getBpo().getMaxRuns());

		// Display a message if it's not available.
		if (q.isBaseBPOSeededOnMarket()) {
			bpoNPCCost.setValue(q.getBpo().getBlueprintItem().getBasePrice());
		} else {
			bpoNPCCost.setText("Not available");
		}

		manCost.setValue(q.getManuCost() / q.getRuns());
		manCostAll.setValue(q.getManuCost());
		sellPrice.setValue(q.getSellIncome() / q.getRuns());
		sellPriceAll.setValue(q.getSellIncome());
		profit.setValue(q.getProfit() / q.getRuns());
		profitAll.setValue(q.getProfit());

		profitPerH.setValue(q.getProfitPerHour());
		profitPercent.setValue((q.getProfit() / q.getManuCost()) * 100);

		//Call subclass method.
		updateFields(q);
	}

	/**
	 * Rebuilds the quote from current inputs (via makeQuote) and re-renders it.
	 * No-op before the first setBpo or when makeQuote rejects the inputs.
	 */
	protected void updateQuote() {
		if (currentQuote != null) {
			ManuQuote q = makeQuote(currentQuote);
			if (q == null) {
				return;
			} else {
				updateMainComponents(q);
			}
		}
	}

	/** Hooks a subclass text document up so edits trigger a quote update. */
	protected void assignListener(Document d) {
		d.addDocumentListener(listener);
	}

	/** Hooks a subclass combo box up so selection changes trigger a quote update. */
	protected void assignListener(JComboBox<?> c) {
		c.addActionListener(listener);
	}

	/** @return the header row (name + id/runs/NPC-cost line) for subclass layout. */
	protected JPanel getHeaderRow() {
		return headerPanel;
	}

	/** @return the per-run/all-run price row for subclass layout. */
	protected JPanel getPriceRow() {
		return pricePanel;
	}

	/** @return the profit-rate row for subclass layout. */
	protected JPanel getProfitRow() {
		return profitPanel;
	}

	/** @return the action row containing the "Add To Corp" button. */
	protected JPanel getActionRow() {
		return actionPanel;
	}

	/** @return the material table panel for subclass layout. */
	protected JPanel getMaterialPanel() {
		return matPanel;
	}

	/**
	 * Single listener funneling every input source into updateQuote(), and
	 * handling the "Add To Corp" button by submitting the quote to the corp DB.
	 */
	private class InputListener implements DocumentListener, ActionListener, TableModelListener {

		@Override
		public void changedUpdate(DocumentEvent e) {}

		@Override
		public void insertUpdate(DocumentEvent e) {
			updateQuote();
		}

		@Override
		public void removeUpdate(DocumentEvent e) {
			updateQuote();
		}

		@Override
		public void actionPerformed(ActionEvent e) {

			//Subclass component i.e ComboBox or AddButton has called.
			//Create a new quote for the InspectPanel to avoid using the as same in production calculations.
			//NOTE(review): q is captured BEFORE updateQuote() replaces currentQuote, so the
			//quote submitted below is the pre-update one — presumably intentional; confirm.
			ManuQuote q = currentQuote;
			updateQuote();

			if (e.getSource() == addQuoteBtn) {
				//Add quote to db.
				if (q != null && cdb.isComplete()) {
					if (!cdb.addProductionQuote(q)) {
						//If not successful, show dialog.
						JOptionPane.showMessageDialog(EMT.MAIN,
								"The selected quote could not be added and may already exist", "Info",
								JOptionPane.INFORMATION_MESSAGE);
					}else {
						cdb.updateSupplyData();
						prodPanel.updateGUI();
					}
				}
			}
		}

		@Override
		public void tableChanged(TableModelEvent e) {
			//Ignore full-table resets (setData) to avoid recursive updates;
			//only react to individual cell edits.
			if (e.getColumn() != TableModelEvent.ALL_COLUMNS) {
				updateQuote();
			}
		}
	}
}
/* * ============================================================================= * * Copyright (c) 2014, Fernando Aspiazu * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ============================================================================= */ package it.f2informatica.core.model.builder; import it.f2informatica.core.model.*; import org.joda.time.DateTime; import org.joda.time.Period; import java.util.Collections; import java.util.Date; import java.util.List; public class ConsultantModelBuilder { private ConsultantModel consultant = new ConsultantModel(); public static ConsultantModelBuilder consultantModel() { return new ConsultantModelBuilder(); } public ConsultantModelBuilder withId(String id) { consultant.setId(id); return this; } public ConsultantModelBuilder withRegistrationDate(Date registrationDate) { consultant.setRegistrationDate(registrationDate); return this; } public ConsultantModelBuilder withConsultantNo(String consultantNo) { consultant.setConsultantNo(consultantNo); return this; } public ConsultantModelBuilder withFiscalCode(String fiscalCode) { consultant.setFiscalCode(fiscalCode); return this; } public ConsultantModelBuilder withEmail(String email) { consultant.setEmail(email); return this; } public ConsultantModelBuilder withFirstName(String firstName) { consultant.setFirstName(firstName); return this; } public ConsultantModelBuilder withLastName(String lastName) { consultant.setLastName(lastName); return this; } public ConsultantModelBuilder 
withGender(String gender) { consultant.setGender(gender); return this; } public ConsultantModelBuilder withPhoneNumber(String phoneNumber) { consultant.setPhoneNumber(phoneNumber); return this; } public ConsultantModelBuilder withMobileNo(String mobileNo) { consultant.setMobileNumber(mobileNo); return this; } public ConsultantModelBuilder withBirthDate(Date birthDate) { consultant.setBirthDate(birthDate); if (birthDate != null) { Period periodFromBirthdayToday = new Period(new DateTime(birthDate), new DateTime()); consultant.setAge(periodFromBirthdayToday.getYears()); } return this; } public ConsultantModelBuilder withBirthCountry(String birthCountry) { consultant.setBirthCountry(birthCountry); return this; } public ConsultantModelBuilder withBirthCity(String birthCity) { consultant.setBirthCity(birthCity); return this; } public ConsultantModelBuilder withNationality(String nationality) { consultant.setNationality(nationality); return this; } public ConsultantModelBuilder withIdentityCardNo(String identityCardNo) { consultant.setIdentityCardNo(identityCardNo); return this; } public ConsultantModelBuilder withPassportNo(String passportNo) { consultant.setPassportNo(passportNo); return this; } public ConsultantModelBuilder withExperienceIn(ExperienceModelBuilder experience) { return withExperienceIn(experience.build()); } public ConsultantModelBuilder withExperienceIn(ExperienceModel experience) { consultant.getExperiences().add(experience); return this; } public ConsultantModelBuilder withExperiencesIn(List<ExperienceModel> experiences) { Collections.sort(experiences, Collections.reverseOrder()); consultant.getExperiences().addAll(experiences); return this; } public ConsultantModelBuilder withEducationIn(EducationModelBuilder education) { return withEducationIn(education.build()); } public ConsultantModelBuilder withEducationIn(EducationModel education) { consultant.getEducationList().add(education); return this; } public ConsultantModelBuilder 
withEducationIn(List<EducationModel> educations) { // Collections.sort(educations, Collections.reverseOrder()); consultant.getEducationList().addAll(educations); return this; } public ConsultantModelBuilder withSkill(String skill) { consultant.getSkills().add(skill); return this; } public ConsultantModelBuilder withSkills(List<String> skills) { consultant.getSkills().addAll(skills); return this; } public ConsultantModelBuilder speaking(LanguageModelBuilder language) { return speaking(language.build()); } public ConsultantModelBuilder speaking(LanguageModel language) { consultant.getLanguages().add(language); return this; } public ConsultantModelBuilder speakingLanguages(List<LanguageModel> languages) { consultant.getLanguages().addAll(languages); return this; } public ConsultantModelBuilder withInterestsIn(String interestsIn) { consultant.setInterests(interestsIn); return this; } public ConsultantModelBuilder withResidence(AddressModelBuilder residence) { return withResidence(residence.build()); } public ConsultantModelBuilder withResidence(AddressModel residence) { consultant.setResidence(residence); return this; } public ConsultantModelBuilder withDomicile(AddressModelBuilder domicile) { return withDomicile(domicile.build()); } public ConsultantModelBuilder withDomicile(AddressModel domicile) { consultant.setDomicile(domicile); return this; } public ConsultantModel build() { return consultant; } }
package org.apache.velocity.runtime.parser.node; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ import java.io.IOException; import java.io.StringReader; import java.io.StringWriter; import org.apache.commons.lang3.text.StrBuilder; import org.apache.velocity.context.InternalContextAdapter; import org.apache.velocity.exception.TemplateInitException; import org.apache.velocity.exception.VelocityException; import org.apache.velocity.runtime.RuntimeConstants; import org.apache.velocity.runtime.log.Log; import org.apache.velocity.runtime.parser.ParseException; import org.apache.velocity.runtime.parser.Parser; import org.apache.velocity.runtime.parser.Token; /** * ASTStringLiteral support. Will interpolate! 
 *
 * @author <a href="mailto:geirm@optonline.net">Geir Magnusson Jr.</a>
 * @author <a href="mailto:jvanzyl@apache.org">Jason van Zyl</a>
 * @version $Id: ASTStringLiteral.java 1032134 2010-11-06 20:19:39Z cbrisson $
 */
public class ASTStringLiteral extends SimpleNode
{
    /* cache the value of the interpolation switch */
    private boolean interpolate = true;

    // parsed AST of the literal's contents; only built when interpolation applies
    private SimpleNode nodeTree = null;

    // literal text with the surrounding quotes stripped (and escapes resolved)
    private String image = "";

    // image variant actually fed to the parser (may carry a trailing pad space)
    private String interpolateimage = "";

    /** true if the string contains a line comment (##) */
    private boolean containsLineComment;

    /**
     * @param id
     */
    public ASTStringLiteral(int id)
    {
        super(id);
    }

    /**
     * @param p
     * @param id
     */
    public ASTStringLiteral(Parser p, int id)
    {
        super(p, id);
    }

    /**
     * init : we don't have to do much.  Init the tree (there shouldn't be one)
     * and then see if interpolation is turned on.
     *
     * @param context
     * @param data
     * @return Init result.
     * @throws TemplateInitException
     */
    public Object init(InternalContextAdapter context, Object data)
            throws TemplateInitException
    {
        /*
         * simple habit... we prollie don't have an AST beneath us
         */
        super.init(context, data);

        /*
         * the stringlit is set at template parse time, so we can do this here
         * for now. if things change and we can somehow create stringlits at
         * runtime, this must move to the runtime execution path
         *
         * so, only if interpolation is turned on AND it starts with a " AND it
         * has a directive or reference, then we can interpolate. Otherwise,
         * don't bother.
         */
        interpolate = rsvc.getBoolean(
                RuntimeConstants.INTERPOLATE_STRINGLITERALS, true)
                && getFirstToken().image.startsWith("\"")
                && ((getFirstToken().image.indexOf('$') != -1)
                        || (getFirstToken().image.indexOf('#') != -1));

        /*
         * get the contents of the string, minus the '/" at each end
         */
        String img = getFirstToken().image;

        image = img.substring(1, img.length() - 1);

        // only double-quoted literals undergo \\uXXXX unescaping
        if (img.startsWith("\""))
        {
            image = unescape(image);
        }
        if (img.charAt(0) == '"' || img.charAt(0) == '\'')
        {
            // replace double-double quotes like "" with a single double quote "
            // replace double single quotes '' with a single quote '
            image = replaceQuotes(image, img.charAt(0));
        }

        /**
         * note. A kludge on a kludge. The first part, Geir calls this the
         * dreaded <MORE> kludge. Basically, the use of the <MORE> token eats
         * the last character of an interpolated string. EXCEPT when a line
         * comment (##) is in the string this isn't an issue.
         *
         * So, to solve this we look for a line comment. If it isn't found we
         * add a space here and remove it later.
         */

        /**
         * Note - this should really use a regexp to look for [^\]## but
         * apparently escaping of line comments isn't working right now anyway.
         */
        containsLineComment = (image.indexOf("##") != -1);

        /*
         * if appropriate, tack a space on the end (dreaded <MORE> kludge);
         * value() strips it back off after rendering
         */
        if (!containsLineComment)
        {
            interpolateimage = image + " ";
        }
        else
        {
            interpolateimage = image;
        }

        if (interpolate)
        {
            /*
             * now parse and init the nodeTree
             */
            StringReader br = new StringReader(interpolateimage);

            /*
             * it's possible to not have an initialization context - or we don't
             * want to trust the caller - so have a fallback value if so
             *
             * Also, do *not* dump the VM namespace for this template
             */
            String templateName =
                    (context != null) ? context.getCurrentTemplateName()
                            : "StringLiteral";
            try
            {
                nodeTree = rsvc.parse(br, templateName, false);
            }
            catch (ParseException e)
            {
                String msg = "Failed to parse String literal at "+
                        Log.formatFileString(templateName, getLine(), getColumn());
                throw new TemplateInitException(msg, e, templateName,
                        getColumn(), getLine());
            }

            // shift the sub-tree's token positions to template coordinates
            adjTokenLineNums(nodeTree);

            /*
             * init with context. It won't modify anything
             */
            nodeTree.init(context, rsvc);
        }

        return data;
    }

    /**
     * Adjust all the line and column numbers that comprise a node so that they
     * are corrected for the string literals position within the template file.
     * This is neccessary if an exception is thrown while processing the node so
     * that the line and column position reported reflects the error position
     * within the template and not just relative to the error position within
     * the string literal.
     */
    public void adjTokenLineNums(Node node)
    {
        Token tok = node.getFirstToken();
        // Test against null is probably not neccessary, but just being safe
        while(tok != null && tok != node.getLastToken())
        {
            // If tok is on the first line, then the actual column is
            // offset by the template column.
            if (tok.beginLine == 1)
                tok.beginColumn += getColumn();

            if (tok.endLine == 1)
                tok.endColumn += getColumn();

            tok.beginLine += getLine()- 1;
            tok.endLine += getLine() - 1;
            tok = tok.next;
        }
    }

    /**
     * Replaces double double-quotes with a single double quote ("" to ").
     * Replaces double single quotes with a single quote ('' to ').
     *
     * @param s StringLiteral without the surrounding quotes
     * @param literalQuoteChar char that starts the StringLiteral (" or ')
     */
    private String replaceQuotes(String s, char literalQuoteChar)
    {
        // fast path: nothing to collapse if the literal's own quote char is absent
        if( (literalQuoteChar == '"' && s.indexOf("\"") == -1) ||
            (literalQuoteChar == '\'' && s.indexOf("'") == -1) )
        {
            return s;
        }

        StrBuilder result = new StrBuilder(s.length());
        char prev = ' '; // NOTE(review): unused local kept as-is (doc-only change)
        for(int i = 0, is = s.length(); i < is; i++)
        {
            char c = s.charAt(i);
            result.append(c);

            if( i + 1 < is )
            {
                char next = s.charAt(i + 1);
                // '""' -> "", "''" -> ''
                // thus it is not necessary to double quotes if the "surrounding" quotes
                // of the StringLiteral are different. See VELOCITY-785
                if( (literalQuoteChar == '"' && (next == '"' && c == '"')) ||
                    (literalQuoteChar == '\'' && (next == '\'' && c == '\'')) )
                {
                    i++; // skip the second quote of the doubled pair
                }
            }
        }
        return result.toString();
    }

    /**
     * Resolves every \\uXXXX escape in the input to its character; returns the
     * input unchanged when no escape is present.
     *
     * @since 1.6
     */
    public static String unescape(final String string)
    {
        int u = string.indexOf("\\u");
        if (u < 0) return string;

        StrBuilder result = new StrBuilder();

        int lastCopied = 0;

        for (;;)
        {
            result.append(string.substring(lastCopied, u));

            /* we don't worry about an exception here,
             * because the lexer checked that string is correct */
            char c = (char) Integer.parseInt(string.substring(u + 2, u + 6), 16);
            result.append(c);

            lastCopied = u + 6;

            u = string.indexOf("\\u", lastCopied);
            if (u < 0)
            {
                result.append(string.substring(lastCopied));
                return result.toString();
            }
        }
    }

    /**
     * @see org.apache.velocity.runtime.parser.node.SimpleNode#jjtAccept(org.apache.velocity.runtime.parser.node.ParserVisitor,
     *      java.lang.Object)
     */
    public Object jjtAccept(ParserVisitor visitor, Object data)
    {
        return visitor.visit(this, data);
    }

    /**
     * Check to see if this is an interpolated string.
     * @return true if this is constant (not an interpolated string)
     * @since 1.6
     */
    public boolean isConstant()
    {
        return !interpolate;
    }

    /**
     * renders the value of the string literal If the properties allow, and the
     * string literal contains a $ or a # the literal is rendered against the
     * context Otherwise, the stringlit is returned.
     *
     * @param context
     * @return result of the rendering.
     */
    public Object value(InternalContextAdapter context)
    {
        if (interpolate)
        {
            try
            {
                /*
                 * now render against the real context
                 */
                StringWriter writer = new StringWriter();
                nodeTree.render(context, writer);

                /*
                 * and return the result as a String
                 */
                String ret = writer.toString();

                /*
                 * if appropriate, remove the space from the end (dreaded <MORE>
                 * kludge part deux)
                 */
                if (!containsLineComment && ret.length() > 0)
                {
                    return ret.substring(0, ret.length() - 1);
                }
                else
                {
                    return ret;
                }
            }

            /**
             * pass through application level runtime exceptions
             */
            catch (RuntimeException e)
            {
                throw e;
            }

            catch (IOException e)
            {
                String msg = "Error in interpolating string literal";
                log.error(msg, e);
                throw new VelocityException(msg, e);
            }
        }

        /*
         * ok, either not allowed to interpolate, there wasn't a ref or
         * directive, or we failed, so just output the literal
         */
        return image;
    }
}
package coding.lien.charles.locationreportsender; import android.app.Activity; import android.content.Intent; import android.content.pm.PackageManager; import android.net.Uri; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.v4.app.ActivityCompat; import android.support.v4.content.ContextCompat; import android.util.Log; import android.view.View; import android.widget.AdapterView; import android.widget.AutoCompleteTextView; import android.widget.Button; import com.google.android.gms.appindexing.Action; import com.google.android.gms.appindexing.AppIndex; import com.google.android.gms.appindexing.Thing; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.location.LocationServices; import java.io.Serializable; import coding.lien.charles.locationreportsender.listener.StartTrackListener; import coding.lien.charles.locationreportsender.listener.StopTrackListener; import coding.lien.charles.locationreportsender.util.BookMarkAdapter; import coding.lien.charles.locationreportsender.util.BookMarkHolder; import coding.lien.charles.locationreportsender.util.CompassManager; import coding.lien.charles.locationreportsender.util.EnvironmentCheck; import coding.lien.charles.locationreportsender.util.IoEBookMarkHelper; import coding.lien.charles.locationreportsender.util.JSONSender; import coding.lien.charles.locationreportsender.util.LocationManager; import coding.lien.charles.locationreportsender.util.MessageWrapper; import io.realm.Realm; import io.realm.RealmConfiguration; /** * Author: lienching * Description: This class is MainActivity */ public class MainActivity extends Activity implements GoogleApiClient.ConnectionCallbacks, GoogleApiClient.OnConnectionFailedListener, Serializable { private final String TAG = "MainActivity"; private GoogleApiClient mGoogleApiClient; private AutoCompleteTextView 
serveraddress_ET; private AutoCompleteTextView groupid_ET; private AutoCompleteTextView memberid_ET; private AutoCompleteTextView status_ET; private AutoCompleteTextView intreval_ET; private Button starttrack_btn; private Button stoptrack_btn; private Button savebm_btn;// Save BookMark private Button iebm_btn; // Import / Export Bookmark Button private LocationManager locationManager; private EnvironmentCheck checker; private StartTrackListener startTrackListener; private StopTrackListener stopTrackListener; private CompassManager compassManager; private Realm realm; private GoogleApiClient client; private IoEBookMarkHelper helper; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); buildGoogleApiClient(); RealmConfiguration configuration = new RealmConfiguration.Builder(this).build(); Realm.setDefaultConfiguration(configuration); realm = Realm.getDefaultInstance(); LocationManager.initLocationManager(this, mGoogleApiClient); locationManager = LocationManager.getInstance(); EnvironmentCheck.initEnvironmentCheck(this, mGoogleApiClient, locationManager); checker = EnvironmentCheck.getInstance(); serveraddress_ET = (AutoCompleteTextView) findViewById(R.id.serveraddress_editText); groupid_ET = (AutoCompleteTextView) findViewById(R.id.partyid_editText); memberid_ET = (AutoCompleteTextView) findViewById(R.id.memberid_editText); status_ET = (AutoCompleteTextView) findViewById(R.id.devicestatus_editText); intreval_ET = (AutoCompleteTextView) findViewById(R.id.intervaltime_editText); starttrack_btn = (Button) findViewById(R.id.start_btn); stoptrack_btn = (Button) findViewById(R.id.stop_btn); savebm_btn = (Button) findViewById(R.id.savebk_btn); iebm_btn = (Button) findViewById(R.id.iebk_btn); startTrackListener = new StartTrackListener(this, realm, checker, serveraddress_ET, groupid_ET, memberid_ET, status_ET, intreval_ET, stoptrack_btn); 
starttrack_btn.setOnClickListener(startTrackListener); starttrack_btn.setClickable(true); stopTrackListener = new StopTrackListener(this, serveraddress_ET, groupid_ET, memberid_ET, status_ET, intreval_ET, starttrack_btn); stoptrack_btn.setOnClickListener(stopTrackListener); stoptrack_btn.setClickable(false); savebm_btn.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { String server, party, member; server = serveraddress_ET.getText().toString(); party = groupid_ET.getText().toString(); member = memberid_ET.getText().toString(); BookMarkHolder holder = new BookMarkHolder(server, party, member); realm.beginTransaction(); realm.insert(holder); realm.commitTransaction(); } }); helper = new IoEBookMarkHelper(realm, this); iebm_btn.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { getIOPermission(); helper.showDialog(); } }); JSONSender.CreateSender(this); CompassManager.initManager(this); compassManager = CompassManager.getInstance(); client = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build(); } // onCreate( Bundle ) protected void onStart() { mGoogleApiClient.connect(); super.onStart(); client.connect(); getLocationPermission(); // IP BookMarkAdapter serverAdapter = new BookMarkAdapter(this,realm.where(BookMarkHolder.class).findAll(),"IP"); serveraddress_ET.setAdapter(serverAdapter); serveraddress_ET.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) { serveraddress_ET.setText(realm.where(BookMarkHolder.class).findAll().get(i).serverip); } }); // GROUP BookMarkAdapter groupAdapter = new BookMarkAdapter(this,realm.where(BookMarkHolder.class).findAll(),"GROUP"); groupid_ET.setAdapter(groupAdapter); groupid_ET.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) { 
groupid_ET.setText(realm.where(BookMarkHolder.class).findAll().get(i).partyid); } }); // USER BookMarkAdapter userAdapter = new BookMarkAdapter(this,realm.where(BookMarkHolder.class).findAll(),"USER"); memberid_ET.setAdapter(userAdapter); memberid_ET.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) { memberid_ET.setText(realm.where(BookMarkHolder.class).findAll().get(i).memberid); } }); } // onStart() protected void onStop() { super.onStop(); client.disconnect(); } // onStop() protected void onDestroy() { if (mGoogleApiClient.isConnected()) { mGoogleApiClient.disconnect(); } super.onDestroy(); realm.close(); } @Override protected void onResume() { super.onResume(); compassManager.SensorResume(); } @Override protected void onPause() { super.onPause(); compassManager.SensorPause(); } protected void getLocationPermission() { int permissionCheck = ContextCompat.checkSelfPermission(this, "android.permission.ACCESS_FINE_LOCATION"); if (permissionCheck != PackageManager.PERMISSION_GRANTED) { if (ActivityCompat.shouldShowRequestPermissionRationale(this, "android.permission.ACCESS_FINE_LOCATION")) { MessageWrapper.SendMessage(this, "Location Permission Granted!"); } // if else { ActivityCompat.requestPermissions(this, new String[]{"android.permission.ACCESS_FINE_LOCATION"}, 1); } // else } // if } // getLocationPermission() protected void getIOPermission() { int readpermissionCheck = ContextCompat.checkSelfPermission(this, "android.permission.READ_EXTERNAL_STORAGE"); int writepermissionCheck = ContextCompat.checkSelfPermission(this, "android.permission.WRITE_EXTERNAL_STORAGE"); if (readpermissionCheck != PackageManager.PERMISSION_GRANTED) { if (ActivityCompat.shouldShowRequestPermissionRationale(this, "android.permission.READ_EXTERNAL_STORAGE")) { MessageWrapper.SendMessage(this, "Location Permission Granted!"); } // if else { ActivityCompat.requestPermissions(this, new 
String[]{"android.permission.READ_EXTERNAL_STORAGE"}, 1); } // else } // if if (writepermissionCheck != PackageManager.PERMISSION_GRANTED) { if (ActivityCompat.shouldShowRequestPermissionRationale(this, "android.permission.WRITE_EXTERNAL_STORAGE")) { MessageWrapper.SendMessage(this, "Location Permission Granted!"); } // if else { ActivityCompat.requestPermissions(this, new String[]{"android.permission.WRITE_EXTERNAL_STORAGE"}, 1); } // else } // if } // getIOPermission() // Google official example protected synchronized void buildGoogleApiClient() { mGoogleApiClient = new GoogleApiClient.Builder(this) .addConnectionCallbacks(this) .addOnConnectionFailedListener(this) .addApi(LocationServices.API) .build(); } // buildGoogleApiClient @Override public void onConnected(@Nullable Bundle bundle) { MessageWrapper.SendMessage(this, "ApiClient connect!"); } // onConnected( Bundle ) @Override public void onConnectionSuspended(int i) { mGoogleApiClient.connect(); } // onConnectionSuspended( int ) @Override public void onConnectionFailed(@NonNull ConnectionResult connectionResult) { Log.i(TAG, "Connection failed: ConnectionResult.getErrorCode() = " + connectionResult.getErrorCode()); } // onConnectionFailed( ConnectionResult ) @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { if ( requestCode == 1 ) { if ( resultCode == RESULT_OK ) { IoEBookMarkHelper.FILE_NAME = data.getDataString(); } } } public Action getIndexApiAction() { Thing object = new Thing.Builder() .setName("Main Page") // TODO: Define a title for the content shown. // TODO: Make sure this auto-generated URL is correct. .setUrl(Uri.parse("http://[ENTER-YOUR-URL-HERE]")) .build(); return new Action.Builder(Action.TYPE_VIEW) .setObject(object) .setActionStatus(Action.STATUS_TYPE_COMPLETED) .build(); } } // class MainActivity
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.synapse.util.xpath; import org.apache.axiom.om.OMAttribute; import org.apache.axiom.om.OMDocument; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMNamespace; import org.apache.axiom.om.OMText; import org.apache.axiom.om.xpath.AXIOMXPath; import org.apache.axiom.soap.SOAPEnvelope; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.synapse.MessageContext; import org.apache.synapse.SynapseException; import org.jaxen.*; import org.jaxen.util.SingletonList; import java.util.*; /** * <p>XPath that has been used inside Synapse xpath processing. This has a extension function named * <code>get-property</code> which is use to retrieve message context properties with the given * name from the function</p> * * <p>For example the following function <code>get-property('prop')</code> can be evaluatedd using * an XPath to retrieve the message context property value with the name <code>prop</code>.</p> * * <p>Apart from that this XPath has a certain set of XPath variables associated with it. 
They are
 * as follows;
 * <dl>
 *   <dt><tt>body</tt></dt>
 *   <dd>The SOAP 1.1 or 1.2 body element.</dd>
 *   <dt><tt>header</tt></dt>
 *   <dd>The SOAP 1.1 or 1.2 header element.</dd>
 * </dl>
 * </p>
 *
 * <p>Also there are some XPath prefixes defined in <code>SynapseXPath</code> to access various
 * properties using XPath variables, where the variable name represents the particular prefix and
 * the property name as the local part of the variable. Those variables are;
 * <dl>
 *   <dt><tt>ctx</tt></dt>
 *   <dd>Prefix for Synapse MessageContext properties</dd>
 *   <dt><tt>axis2</tt></dt>
 *   <dd>Prefix for Axis2 MessageContext properties</dd>
 *   <dt><tt>trp</tt></dt>
 *   <dd>Prefix for the transport headers</dd>
 * </dl>
 * </p>
 *
 * <p>This XPath is Thread Safe, and provides a special set of evaluate functions for the
 * <code>MessageContext</code> and <code>SOAPEnvelope</code> as well as a method to retrieve
 * string values of the evaluated XPaths</p>
 *
 * @see org.apache.axiom.om.xpath.AXIOMXPath
 * @see #getContext(Object)
 * @see org.apache.synapse.util.xpath.SynapseXPathFunctionContext
 * @see org.apache.synapse.util.xpath.SynapseXPathVariableContext
 */
public class SynapseXPath extends AXIOMXPath {

    private static final long serialVersionUID = 7639226137534334222L;

    private static final Log log = LogFactory.getLog(SynapseXPath.class);

    // true when evaluating this expression requires access to the message body
    private boolean contentAware;

    /**
     * <p>Initializes the <code>SynapseXPath</code> with the given <code>xpathString</code> as the
     * XPath</p>
     *
     * @param xpathString xpath in its string format
     * @throws JaxenException in case of an initialization failure
     */
    public SynapseXPath(String xpathString) throws JaxenException {
        super(xpathString);

        // Heuristic: any path step ("/"), or the From/FAULT properties, imply
        // the expression needs the message content.
        // TODO: Improve this
        if (xpathString.contains("/")) {
            contentAware = true;
        } else if (xpathString.contains("get-property('From'") ||
                xpathString.contains("get-property('FAULT')")) {
            // NOTE(review): "get-property('From'" is missing its closing paren —
            // looks like a deliberate prefix match or a typo; confirm intent.
            contentAware = true;
        } else {
            contentAware = false;
        }
        // $trp/$ctx/$axis2 variables are resolved from contexts/headers, so the
        // body is not needed even if the earlier checks said otherwise.
        if (xpathString.contains("$trp") || xpathString.contains("$ctx") ||
                xpathString.contains("$axis2")) {
            contentAware = false;
        }
    }

    /**
     * Construct an XPath expression from a given string and initialize its
     * namespace context based on a given element.
     *
     * @param element The element that determines the namespace context of the
     *                XPath expression. See {@link #addNamespaces(OMElement)}
     *                for more details.
     * @param xpathExpr the string representation of the XPath expression.
     * @throws JaxenException if there is a syntax error while parsing the expression
     *                        or if the namespace context could not be set up
     */
    public SynapseXPath(OMElement element, String xpathExpr) throws JaxenException {
        super(element, xpathExpr);
    }

    /**
     * Construct an XPath expression from a given attribute.
     * The string representation of the expression is taken from the attribute
     * value, while the attribute's owner element is used to determine the
     * namespace context of the expression.
     *
     * @param attribute the attribute to construct the expression from
     * @throws JaxenException if there is a syntax error while parsing the expression
     *                        or if the namespace context could not be set up
     */
    public SynapseXPath(OMAttribute attribute) throws JaxenException {
        super(attribute);
    }

    /**
     * Parses an XPath string that may embed namespace URIs in curly braces,
     * e.g. "//{http://ns}elem", rewriting each "{uri}" into a generated prefix
     * ("rp0", "rp1", ...) registered on the resulting SynapseXPath.
     *
     * @param xPathStr the (possibly brace-annotated) XPath string
     * @return a SynapseXPath with the generated namespace bindings applied
     * @throws JaxenException on a syntax error in the rewritten expression
     */
    public static SynapseXPath parseXPathString(String xPathStr) throws JaxenException {

        // no braces: plain XPath, no rewriting needed
        if (xPathStr.indexOf('{') == -1) {
            return new SynapseXPath(xPathStr);
        }

        int count = 0;
        StringBuffer newXPath = new StringBuffer();

        Map<String, String> nameSpaces = new HashMap<String, String>();
        String curSegment = null;
        boolean xPath = false;

        StringTokenizer st = new StringTokenizer(xPathStr, "{}", true);
        while (st.hasMoreTokens()) {
            String s = st.nextToken();
            if ("{".equals(s)) {
                xPath = true;
            } else if ("}".equals(s)) {
                xPath = false;
                String prefix = "rp" + count++;
                nameSpaces.put(prefix, curSegment);
                newXPath.append(prefix).append(":");
            } else {
                if (xPath) {
                    curSegment = s;       // text between braces is the namespace URI
                } else {
                    newXPath.append(s);   // text outside braces is copied verbatim
                }
            }
        }

        SynapseXPath synXPath = new SynapseXPath(newXPath.toString());
        for (Map.Entry<String,String> entry : nameSpaces.entrySet()) {
            synXPath.addNamespace(entry.getKey(), entry.getValue());
        }

        return synXPath;
    }

    /**
     * <P>Evaluates the XPath expression against the MessageContext of the current message and
     * returns a String representation of the result</p>
     *
     * @param synCtx the source message which holds the MessageContext against full context
     * @return a String representation of the result of evaluation
     */
    public String stringValueOf(MessageContext synCtx) {
        try {
            Object result = evaluate(synCtx);

            if (result == null) {
                return null;
            }

            StringBuffer textValue = new StringBuffer();
            if (result instanceof List) {

                List list = (List) result;
                for (Object o : list) {
                    // a single-element list holding null means "no value"
                    if (o == null && list.size() == 1) {
                        return null;
                    }
                    if (o instanceof OMText) {
                        textValue.append(((OMText) o).getText());
                    } else if (o instanceof OMElement) {
                        String s = ((OMElement) o).getText();
                        // element with no text content: fall back to its serialized form
                        if (s.trim().length() == 0) {
                            s = o.toString();
                        }
                        textValue.append(s);
                    } else if (o instanceof OMDocument) {
                        textValue.append(((OMDocument) o).getOMDocumentElement().toString());
                    } else if (o instanceof OMAttribute) {
                        textValue.append(((OMAttribute) o).getAttributeValue());
                    }
                }

            } else {
                textValue.append(result.toString());
            }

            return textValue.toString();

        } catch (JaxenException je) {
            handleException("Evaluation of the XPath expression " + this.toString() +
                    " resulted in an error", je);
        }

        return null;
    }

    /**
     * Specialized form of xpath evaluation function.An xpath evaluate() will be performed using two contexts
     * (ie:-soap-envelope and on Synapse Message Context). This is useful for evaluating xpath on a
     * nodeset for function contexts (we need both nodeset and synapse ctxts for evaluating function
     * scope expressions)
     * @param primaryContext a context object ie:- a soap envelope
     * @param secondaryContext a context object ie:-synapse message ctxt
     * @return result
     */
    public Object evaluate(Object primaryContext, MessageContext secondaryContext) {
        Object result = null;
        //if result is still not found use second ctxt ie:-syn-ctxt with a wrapper to evaluate
        if (secondaryContext != null) {
            try {
                //wrapper Context is used to evaluate 'dynamic' function scope objects
                result = evaluate(new ContextWrapper((SOAPEnvelope) primaryContext,secondaryContext));
            } catch (Exception e) {
                handleException("Evaluation of the XPath expression " + this.toString() +
                        " resulted in an error", e);
            }
        } else {
            try {
                result = evaluate(primaryContext);
            } catch (JaxenException e) {
                handleException("Evaluation of the XPath expression " + this.toString() +
                        " resulted in an error", e);
            }
        }
        return result;
    }

    /** Registers the given Axiom namespace (prefix → URI) on this expression. */
    public void addNamespace(OMNamespace ns) throws JaxenException {
        addNamespace(ns.getPrefix(), ns.getNamespaceURI());
    }

    /**
     * Create a {@link Context} wrapper for the provided object.
     * This methods implements the following class specific behavior:
     * <dl>
     *   <dt>{@link MessageContext}</dt>
     *   <dd>The XPath expression is evaluated against the SOAP envelope
     *       and the functions and variables defined by
     *       {@link SynapseXPathFunctionContext} and
     *       {@link SynapseXPathVariableContext} are available.</dd>
     *   <dt>{@link SOAPEnvelope}</dt>
     *   <dd>The variables defined by {@link SynapseXPathVariableContext}
     *       are available.</dd>
     * </dl>
     * For all other object types, the behavior is identical to
     * {@link BaseXPath#getContext(Object)}.
     * <p>
     * Note that the behavior described here also applies to all evaluation
     * methods such as {@link #evaluate(Object)} or {@link #selectSingleNode(Object)},
     * given that these methods all use {@link #getContext(Object)}.
     *
     * @see SynapseXPathFunctionContext#getFunction(String, String, String)
     * @see SynapseXPathVariableContext#getVariableValue(String, String, String)
     */
    @Override
    protected Context getContext(Object obj) {
        if (obj instanceof MessageContext) {
            MessageContext synCtx = (MessageContext)obj;
            ContextSupport baseContextSupport = getContextSupport();
            // full context: Synapse functions + variables, rooted at the envelope
            ContextSupport contextSupport =
                    new ContextSupport(baseContextSupport.getNamespaceContext(),
                            new SynapseXPathFunctionContext(baseContextSupport.getFunctionContext(), synCtx),
                            new SynapseXPathVariableContext(baseContextSupport.getVariableContext(), synCtx),
                            baseContextSupport.getNavigator());
            Context context = new Context(contextSupport);
            context.setNodeSet(new SingletonList(synCtx.getEnvelope()));
            return context;
        } else if (obj instanceof SOAPEnvelope) {
            SOAPEnvelope env = (SOAPEnvelope)obj;
            ContextSupport baseContextSupport = getContextSupport();
            // envelope-only context: Synapse variables, default functions
            ContextSupport contextSupport =
                    new ContextSupport(baseContextSupport.getNamespaceContext(),
                            baseContextSupport.getFunctionContext(),
                            new SynapseXPathVariableContext(baseContextSupport.getVariableContext(), env),
                            baseContextSupport.getNavigator());
            Context context = new Context(contextSupport);
            context.setNodeSet(new SingletonList(env));
            return context;
        } else if (obj instanceof ContextWrapper) {
            ContextWrapper wrapper = (ContextWrapper) obj;
            ContextSupport baseContextSupport = getContextSupport();
            // combined context: variables resolved against both msg ctx and envelope
            ContextSupport contextSupport =
                    new ContextSupport(baseContextSupport.getNamespaceContext(),
                            baseContextSupport.getFunctionContext(),
                            new SynapseXPathVariableContext(baseContextSupport.getVariableContext(),
                                    wrapper.getMessageCtxt(), wrapper.getEnvelope()),
                            baseContextSupport.getNavigator());
            Context context = new Context(contextSupport);
            context.setNodeSet(new SingletonList(wrapper.getEnvelope()));
            return context;
        } else {
            return super.getContext(obj);
        }
    }

    /** @return true when evaluating this expression requires the message content */
    public boolean isContentAware() {
        return contentAware;
    }

    // logs and rethrows as a SynapseException, preserving the cause
    private void handleException(String msg, Throwable e) {
        log.error(msg, e);
        throw new SynapseException(msg, e);
    }

    /**
     * This is a wrapper class used to inject both envelope and message contexts for xpath
     * We use this to resolve function scope xpath variables
     */
    private static class ContextWrapper{
        private MessageContext ctxt;
        private SOAPEnvelope env;

        public ContextWrapper(SOAPEnvelope env, MessageContext ctxt){
            this.env = env;
            this.ctxt = ctxt;
        }

        public SOAPEnvelope getEnvelope() {
            return env;
        }

        public MessageContext getMessageCtxt() {
            return ctxt;
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.quota.implementation;

import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.quota.fluent.QuotaRequestStatusClient;
import com.azure.resourcemanager.quota.fluent.models.QuotaRequestDetailsInner;
import com.azure.resourcemanager.quota.models.QuotaRequestDetailsList;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in QuotaRequestStatusClient. */
public final class QuotaRequestStatusClientImpl implements QuotaRequestStatusClient {
    // NOTE(review): this generated logger field is never referenced in this class.
    private final ClientLogger logger = new ClientLogger(QuotaRequestStatusClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final QuotaRequestStatusService service;

    /** The service client containing this operation class. */
    private final AzureQuotaExtensionApiImpl client;

    /**
     * Initializes an instance of QuotaRequestStatusClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    QuotaRequestStatusClientImpl(AzureQuotaExtensionApiImpl client) {
        // RestProxy turns the annotated service interface below into live REST calls.
        this.service =
            RestProxy.create(
                QuotaRequestStatusService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for AzureQuotaExtensionApiQuotaRequestStatus to be used by the proxy
     * service to perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "AzureQuotaExtensionA")
    private interface QuotaRequestStatusService {
        @Headers({"Content-Type: application/json"})
        @Get("/{scope}/providers/Microsoft.Quota/quotaRequests/{id}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<QuotaRequestDetailsInner>> get(
            @HostParam("$host") String endpoint,
            @PathParam("id") String id,
            @QueryParam("api-version") String apiVersion,
            @PathParam(value = "scope", encoded = true) String scope,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get("/{scope}/providers/Microsoft.Quota/quotaRequests")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<QuotaRequestDetailsList>> list(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam(value = "scope", encoded = true) String scope,
            @QueryParam("$filter") String filter,
            @QueryParam("$top") Integer top,
            @QueryParam("$skiptoken") String skiptoken,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<QuotaRequestDetailsList>> listNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink,
            @HostParam("$host") String endpoint,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * Get the quota request details and status by quota request ID for the resources of the resource provider at a
     * specific location. The quota request ID **id** is returned in the response of the PUT operation.
     *
     * @param id Quota request ID.
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the quota request details and status by quota request ID for the resources of the resource provider at a
     *     specific location.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<QuotaRequestDetailsInner>> getWithResponseAsync(String id, String scope) {
        // Parameter validation surfaces as Mono.error rather than a thrown
        // exception so failures stay inside the reactive pipeline.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (id == null) {
            return Mono.error(new IllegalArgumentException("Parameter id is required and cannot be null."));
        }
        if (scope == null) {
            return Mono.error(new IllegalArgumentException("Parameter scope is required and cannot be null."));
        }
        final String accept = "application/json";
        // withContext captures the subscriber's Reactor context; contextWrite then
        // merges the client's shared Context into that subscription context.
        return FluxUtil
            .withContext(
                context ->
                    service.get(this.client.getEndpoint(), id, this.client.getApiVersion(), scope, accept, context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get the quota request details and status by quota request ID for the resources of the resource provider at a
     * specific location. The quota request ID **id** is returned in the response of the PUT operation.
     *
     * @param id Quota request ID.
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the quota request details and status by quota request ID for the resources of the resource provider at a
     *     specific location.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<QuotaRequestDetailsInner>> getWithResponseAsync(String id, String scope, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (id == null) {
            return Mono.error(new IllegalArgumentException("Parameter id is required and cannot be null."));
        }
        if (scope == null) {
            return Mono.error(new IllegalArgumentException("Parameter scope is required and cannot be null."));
        }
        final String accept = "application/json";
        // Caller-supplied context is merged with the client's default context.
        context = this.client.mergeContext(context);
        return service.get(this.client.getEndpoint(), id, this.client.getApiVersion(), scope, accept, context);
    }

    /**
     * Get the quota request details and status by quota request ID for the resources of the resource provider at a
     * specific location. The quota request ID **id** is returned in the response of the PUT operation.
     *
     * @param id Quota request ID.
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the quota request details and status by quota request ID for the resources of the resource provider at a
     *     specific location.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<QuotaRequestDetailsInner> getAsync(String id, String scope) {
        // Unwraps the Response body; a null body completes empty instead of erroring.
        return getWithResponseAsync(id, scope)
            .flatMap(
                (Response<QuotaRequestDetailsInner> res) -> {
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Get the quota request details and status by quota request ID for the resources of the resource provider at a
     * specific location. The quota request ID **id** is returned in the response of the PUT operation.
     *
     * @param id Quota request ID.
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the quota request details and status by quota request ID for the resources of the resource provider at a
     *     specific location.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public QuotaRequestDetailsInner get(String id, String scope) {
        // Synchronous facade over the async pipeline.
        return getAsync(id, scope).block();
    }

    /**
     * Get the quota request details and status by quota request ID for the resources of the resource provider at a
     * specific location. The quota request ID **id** is returned in the response of the PUT operation.
     *
     * @param id Quota request ID.
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the quota request details and status by quota request ID for the resources of the resource provider at a
     *     specific location.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<QuotaRequestDetailsInner> getWithResponse(String id, String scope, Context context) {
        return getWithResponseAsync(id, scope, context).block();
    }

    /**
     * For the specified scope, get the current quota requests for a one year period ending at the time is made. Use the
     * **oData** filter to select quota requests.
     *
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @param filter | Field | Supported operators |---------------------|------------------------
     *     <p>|requestSubmitTime | ge, le, eq, gt, lt |provisioningState eq {QuotaRequestState} |resourceName eq
     *     {resourceName}.
     * @param top Number of records to return.
     * @param skiptoken The **Skiptoken** parameter is used only if a previous operation returned a partial result. If a
     *     previous response contains a **nextLink** element, its value includes a **skiptoken** parameter that
     *     specifies a starting point to use for subsequent calls.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return quota request information.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<QuotaRequestDetailsInner>> listSinglePageAsync(
        String scope, String filter, Integer top, String skiptoken) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (scope == null) {
            return Mono.error(new IllegalArgumentException("Parameter scope is required and cannot be null."));
        }
        final String accept = "application/json";
        // Fetches one page and re-wraps the raw list payload as a PagedResponse,
        // carrying the nextLink forward for continuation.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .list(
                            this.client.getEndpoint(),
                            this.client.getApiVersion(),
                            scope,
                            filter,
                            top,
                            skiptoken,
                            accept,
                            context))
            .<PagedResponse<QuotaRequestDetailsInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * For the specified scope, get the current quota requests for a one year period ending at the time is made. Use the
     * **oData** filter to select quota requests.
     *
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @param filter | Field | Supported operators |---------------------|------------------------
     *     <p>|requestSubmitTime | ge, le, eq, gt, lt |provisioningState eq {QuotaRequestState} |resourceName eq
     *     {resourceName}.
     * @param top Number of records to return.
     * @param skiptoken The **Skiptoken** parameter is used only if a previous operation returned a partial result. If a
     *     previous response contains a **nextLink** element, its value includes a **skiptoken** parameter that
     *     specifies a starting point to use for subsequent calls.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return quota request information.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<QuotaRequestDetailsInner>> listSinglePageAsync(
        String scope, String filter, Integer top, String skiptoken, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (scope == null) {
            return Mono.error(new IllegalArgumentException("Parameter scope is required and cannot be null."));
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .list(this.client.getEndpoint(), this.client.getApiVersion(), scope, filter, top, skiptoken, accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }

    /**
     * For the specified scope, get the current quota requests for a one year period ending at the time is made. Use the
     * **oData** filter to select quota requests.
     *
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @param filter | Field | Supported operators |---------------------|------------------------
     *     <p>|requestSubmitTime | ge, le, eq, gt, lt |provisioningState eq {QuotaRequestState} |resourceName eq
     *     {resourceName}.
     * @param top Number of records to return.
     * @param skiptoken The **Skiptoken** parameter is used only if a previous operation returned a partial result. If a
     *     previous response contains a **nextLink** element, its value includes a **skiptoken** parameter that
     *     specifies a starting point to use for subsequent calls.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return quota request information.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<QuotaRequestDetailsInner> listAsync(String scope, String filter, Integer top, String skiptoken) {
        // First supplier fetches page one; second follows nextLink for later pages.
        return new PagedFlux<>(
            () -> listSinglePageAsync(scope, filter, top, skiptoken),
            nextLink -> listNextSinglePageAsync(nextLink));
    }

    /**
     * For the specified scope, get the current quota requests for a one year period ending at the time is made. Use the
     * **oData** filter to select quota requests.
     *
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return quota request information.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<QuotaRequestDetailsInner> listAsync(String scope) {
        // Convenience overload: no filtering, no page-size cap, no continuation token.
        final String filter = null;
        final Integer top = null;
        final String skiptoken = null;
        return new PagedFlux<>(
            () -> listSinglePageAsync(scope, filter, top, skiptoken),
            nextLink -> listNextSinglePageAsync(nextLink));
    }

    /**
     * For the specified scope, get the current quota requests for a one year period ending at the time is made. Use the
     * **oData** filter to select quota requests.
     *
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @param filter | Field | Supported operators |---------------------|------------------------
     *     <p>|requestSubmitTime | ge, le, eq, gt, lt |provisioningState eq {QuotaRequestState} |resourceName eq
     *     {resourceName}.
     * @param top Number of records to return.
     * @param skiptoken The **Skiptoken** parameter is used only if a previous operation returned a partial result. If a
     *     previous response contains a **nextLink** element, its value includes a **skiptoken** parameter that
     *     specifies a starting point to use for subsequent calls.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return quota request information.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<QuotaRequestDetailsInner> listAsync(
        String scope, String filter, Integer top, String skiptoken, Context context) {
        return new PagedFlux<>(
            () -> listSinglePageAsync(scope, filter, top, skiptoken, context),
            nextLink -> listNextSinglePageAsync(nextLink, context));
    }

    /**
     * For the specified scope, get the current quota requests for a one year period ending at the time is made. Use the
     * **oData** filter to select quota requests.
     *
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return quota request information.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<QuotaRequestDetailsInner> list(String scope) {
        final String filter = null;
        final Integer top = null;
        final String skiptoken = null;
        return new PagedIterable<>(listAsync(scope, filter, top, skiptoken));
    }

    /**
     * For the specified scope, get the current quota requests for a one year period ending at the time is made. Use the
     * **oData** filter to select quota requests.
     *
     * @param scope The target Azure resource URI. For example,
     *     `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/qms-test/providers/Microsoft.Batch/batchAccounts/testAccount/`.
     *     This is the target Azure resource URI for the List GET operation. If a `{resourceName}` is added after
     *     `/quotas`, then it's the target Azure resource URI in the GET operation for the specific resource.
     * @param filter | Field | Supported operators |---------------------|------------------------
     *     <p>|requestSubmitTime | ge, le, eq, gt, lt |provisioningState eq {QuotaRequestState} |resourceName eq
     *     {resourceName}.
     * @param top Number of records to return.
     * @param skiptoken The **Skiptoken** parameter is used only if a previous operation returned a partial result. If a
     *     previous response contains a **nextLink** element, its value includes a **skiptoken** parameter that
     *     specifies a starting point to use for subsequent calls.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return quota request information.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<QuotaRequestDetailsInner> list(
        String scope, String filter, Integer top, String skiptoken, Context context) {
        return new PagedIterable<>(listAsync(scope, filter, top, skiptoken, context));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return quota request information.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<QuotaRequestDetailsInner>> listNextSinglePageAsync(String nextLink) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context))
            .<PagedResponse<QuotaRequestDetailsInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return quota request information.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<QuotaRequestDetailsInner>> listNextSinglePageAsync(String nextLink, Context context) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .listNext(nextLink, this.client.getEndpoint(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
}
package cat.nyaa.nyaacore.utils; // A stripped version from guava 6f22af40 as ClassPath contained in guava 21 suffers from https://github.com/google/guava/issues/2152 /* * Copyright (C) 2012 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ import cat.nyaa.nyaacore.NyaaCoreLoader; import com.google.common.annotations.Beta; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Splitter; import com.google.common.collect.*; import com.google.common.reflect.Reflection; import org.bukkit.plugin.Plugin; import java.io.File; import java.io.IOException; import java.lang.annotation.Annotation; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.net.URLClassLoader; import java.util.*; import java.util.jar.Attributes; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.jar.Manifest; import java.util.logging.Logger; import java.util.stream.Collectors; import java.util.stream.Stream; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.StandardSystemProperty.JAVA_CLASS_PATH; import static com.google.common.base.StandardSystemProperty.PATH_SEPARATOR; import static java.util.logging.Level.WARNING; public final class ClassPathUtils { /** * Separator for the Class-Path manifest attribute value in jar files. 
*/ private static final Splitter CLASS_PATH_ATTRIBUTE_SEPARATOR = Splitter.on(" ").omitEmptyStrings(); private static final String CLASS_FILE_NAME_EXTENSION = ".class"; private final ImmutableSet<ResourceInfo> resources; private ClassPathUtils(ImmutableSet<ResourceInfo> resources) { this.resources = resources; } @SuppressWarnings("unchecked") public static <T> Class<? extends T>[] scanSubclasses(File file, ClassLoader classLoader, String pack, Class<T> clazz) { try { Set<ClassPathUtils.ClassInfo> classInfos = from(file, classLoader).getAllClasses(); return (Class<? extends T>[]) loadClassesInPackage(pack, classInfos) .filter(c -> c != null && clazz.isAssignableFrom(c)) .toArray(Class<?>[]::new); } catch (IOException e) { throw new RuntimeException(e); } } public static Class<?>[] scanClassesWithAnnotations(File file, ClassLoader classLoader, String pack, Class<? extends Annotation> annotation) { try { Set<ClassPathUtils.ClassInfo> classInfos = from(file, classLoader).getAllClasses(); return loadClassesInPackage(pack, classInfos) .filter(c -> c != null && c.getAnnotation(annotation) != null) .toArray(Class<?>[]::new); } catch (IOException e) { throw new RuntimeException(e); } } public static <T> Class<? extends T>[] scanSubclasses(Plugin plugin, String pack, Class<T> clazz) { try { return scanSubclasses(new File(plugin.getClass().getProtectionDomain().getCodeSource().getLocation().toURI()), plugin.getClass().getClassLoader(), pack, clazz); } catch (URISyntaxException e) { throw new RuntimeException(e); } } public static Class<?>[] scanClassesWithAnnotations(Plugin plugin, String pack, Class<? extends Annotation> annotation) { try { return scanClassesWithAnnotations(new File(plugin.getClass().getProtectionDomain().getCodeSource().getLocation().toURI()), plugin.getClass().getClassLoader(), pack, annotation); } catch (URISyntaxException e) { throw new RuntimeException(e); } } private static Stream<? 
extends Class<?>> loadClassesInPackage(String pack, Set<ClassInfo> classInfos) { return classInfos .stream() .filter(c -> pack == null || c.getPackageName().startsWith(pack)) .map(ClassInfo::load); } /** * Returns a {@code ClassPathUtils} representing all classes and resources loadable from {@code * classloader} and its ancestor class loaders. * * <p><b>Warning:</b> {@code ClassPathUtils} can find classes and resources only from: * * <ul> * <li>{@link URLClassLoader} instances' {@code file:} URLs * <li>the {@linkplain ClassLoader#getSystemClassLoader() system class loader}. To search the * system class loader even when it is not a {@link URLClassLoader} (as in Java 9), {@code * ClassPathUtils} searches the files from the {@code java.class.path} system property. * </ul> * * @throws IOException if the attempt to read class path resources (jar files or directories) * failed. */ public static ClassPathUtils from(File file, ClassLoader classloader) throws IOException { DefaultScanner scanner = new DefaultScanner(); scanner.scan(file, classloader); return new ClassPathUtils(scanner.getResources()); } private static Logger getLogger() { return NyaaCoreLoader.getInstance().getLogger(); } @VisibleForTesting static String getClassName(String filename) { int classNameEnd = filename.length() - CLASS_FILE_NAME_EXTENSION.length(); return filename.substring(0, classNameEnd).replace('/', '.'); } static File toFile(URL url) { checkArgument(url.getProtocol().equals("file")); try { return new File(url.toURI()); // Accepts escaped characters like %20. } catch (URISyntaxException e) { // URL.toURI() doesn't escape chars. return new File(url.getPath()); // Accepts non-escaped chars like space. } } /** * Returns all classes loadable from the current class path. 
 *
 * @since 16.0
 */
public Set<ClassInfo> getAllClasses() {
  // Narrow the scanned resources down to just the ones that are class files.
  return resources.stream().filter(ClassInfo.class::isInstance).map(ClassInfo.class::cast).collect(Collectors.toSet());
}

/**
 * Represents a class path resource that can be either a class file or any other resource file
 * loadable from the class path.
 *
 * @since 14.0
 */
@Beta
public static class ResourceInfo {
  final ClassLoader loader;
  private final String resourceName;

  ResourceInfo(String resourceName, ClassLoader loader) {
    this.resourceName = checkNotNull(resourceName);
    this.loader = checkNotNull(loader);
  }

  // Factory: returns the ClassInfo subtype for ".class" resources, a plain
  // ResourceInfo for everything else.
  static ResourceInfo of(String resourceName, ClassLoader loader) {
    if (resourceName.endsWith(CLASS_FILE_NAME_EXTENSION)) {
      return new ClassInfo(resourceName, loader);
    } else {
      return new ResourceInfo(resourceName, loader);
    }
  }

  @Override
  public int hashCode() {
    return resourceName.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    if (obj instanceof ResourceInfo) {
      ResourceInfo that = (ResourceInfo) obj;
      // Identity comparison on the loader is intentional: the same resource
      // name seen through two different loaders is a different resource.
      return resourceName.equals(that.resourceName)
          && loader == that.loader;
    }
    return false;
  }

  // Do not change this arbitrarily. We rely on it for sorting ResourceInfo.
  @Override
  public String toString() {
    return resourceName;
  }
}

/**
 * Represents a class that can be loaded through {@link #load}.
 *
 * @since 14.0
 */
@Beta
public static final class ClassInfo extends ResourceInfo {
  private final String className;

  ClassInfo(String resourceName, ClassLoader loader) {
    super(resourceName, loader);
    this.className = getClassName(resourceName);
  }

  /**
   * Returns the package name of the class, without attempting to load the class.
   *
   * <p>Behaves identically to {@link Package#getName()} but does not require the class (or
   * package) to be loaded.
   */
  public String getPackageName() {
    return Reflection.getPackageName(className);
  }

  /**
   * Returns the fully qualified name of the class.
   *
   * <p>Behaves identically to {@link Class#getName()} but does not require the class to be
   * loaded.
   */
  public String getName() {
    return className;
  }

  /**
   * Loads (but doesn't link or initialize) the class.
   *
   * @throws LinkageError when there were errors in loading classes that this class depends on.
   *     For example, {@link NoClassDefFoundError}.
   */
  public Class<?> load() {
    try {
      return loader.loadClass(className);
    } catch (ClassNotFoundException e) {
      // Shouldn't happen, since the class name is read from the class path.
      throw new IllegalStateException(e);
    }
  }

  @Override
  public String toString() {
    return className;
  }
}

/**
 * Abstract class that scans through the class path represented by a {@link ClassLoader} and calls
 * {@link #scanDirectory} and {@link #scanJarFile} for directories and jar files on the class path
 * respectively.
 */
abstract static class Scanner {

  // We only scan each file once independent of the classloader that resource might be associated
  // with.
  private final Set<File> scannedUris = Sets.newHashSet();

  /**
   * Returns the class path URIs specified by the {@code Class-Path} manifest attribute, according
   * to <a
   * href="http://docs.oracle.com/javase/8/docs/technotes/guides/jar/jar.html#Main_Attributes">JAR
   * File Specification</a>. If {@code manifest} is null, it means the jar file has no manifest,
   * and an empty set will be returned.
   */
  static ImmutableSet<File> getClassPathFromManifest(
      File jarFile, Manifest manifest) {
    if (manifest == null) {
      return ImmutableSet.of();
    }
    ImmutableSet.Builder<File> builder = ImmutableSet.builder();
    String classpathAttribute =
        manifest.getMainAttributes().getValue(Attributes.Name.CLASS_PATH.toString());
    if (classpathAttribute != null) {
      for (String path : CLASS_PATH_ATTRIBUTE_SEPARATOR.split(classpathAttribute)) {
        URL url;
        try {
          url = getClassPathEntry(jarFile, path);
        } catch (MalformedURLException e) {
          // Ignore bad entry
          getLogger().warning("Invalid Class-Path entry: " + path);
          continue;
        }
        // Only local file: entries can be scanned; other protocols are skipped.
        if (url.getProtocol().equals("file")) {
          builder.add(toFile(url));
        }
      }
    }
    return builder.build();
  }

  // Maps each file on the class path to the first classloader that can serve it,
  // walking ancestors first so parent-delegation order is respected.
  static ImmutableMap<File, ClassLoader> getClassPathEntries(ClassLoader classloader) {
    LinkedHashMap<File, ClassLoader> entries = Maps.newLinkedHashMap();
    // Search parent first, since it's the order ClassLoader#loadClass() uses.
    ClassLoader parent = classloader.getParent();
    if (parent != null) {
      entries.putAll(getClassPathEntries(parent));
    }
    for (URL url : getClassLoaderUrls(classloader)) {
      if (url.getProtocol().equals("file")) {
        File file = toFile(url);
        if (!entries.containsKey(file)) {
          entries.put(file, classloader);
        }
      }
    }
    return ImmutableMap.copyOf(entries);
  }

  private static ImmutableList<URL> getClassLoaderUrls(ClassLoader classloader) {
    if (classloader instanceof URLClassLoader) {
      return ImmutableList.copyOf(((URLClassLoader) classloader).getURLs());
    }
    if (classloader.equals(ClassLoader.getSystemClassLoader())) {
      // System loader that is not a URLClassLoader (e.g. Java 9+): fall back to
      // parsing the java.class.path property, per the class-level javadoc.
      return parseJavaClassPath();
    }
    return ImmutableList.of();
  }

  /**
   * Returns the URLs in the class path specified by the {@code java.class.path} {@linkplain
   * System#getProperty system property}.
   */
  static ImmutableList<URL> parseJavaClassPath() {
    ImmutableList.Builder<URL> urls = ImmutableList.builder();
    for (String entry : Splitter.on(Objects.requireNonNull(PATH_SEPARATOR.value())).split(Objects.requireNonNull(JAVA_CLASS_PATH.value()))) {
      try {
        try {
          urls.add(new File(entry).toURI().toURL());
        } catch (SecurityException e) { // File.toURI checks to see if the file is a directory
          urls.add(new URL("file", null, new File(entry).getAbsolutePath()));
        }
      } catch (MalformedURLException e) {
        getLogger().log(WARNING, "malformed classpath entry: " + entry, e);
      }
    }
    return urls.build();
  }

  /**
   * Returns the absolute uri of the Class-Path entry value as specified in <a
   * href="http://docs.oracle.com/javase/8/docs/technotes/guides/jar/jar.html#Main_Attributes">JAR
   * File Specification</a>. Even though the specification only talks about relative urls,
   * absolute urls are actually supported too (for example, in Maven surefire plugin).
   */
  static URL getClassPathEntry(File jarFile, String path) throws MalformedURLException {
    return new URL(jarFile.toURI().toURL(), path);
  }

  /**
   * Called when a directory is scanned for resource files.
   */
  protected abstract void scanDirectory(ClassLoader loader, File directory) throws IOException;

  /**
   * Called when a jar file is scanned for resource entries.
   */
  protected abstract void scanJarFile(ClassLoader loader, JarFile file) throws IOException;

  final void scan(File file, ClassLoader classloader) throws IOException {
    // Canonicalize so the same physical file reached via different paths is
    // only scanned once.
    if (scannedUris.add(file.getCanonicalFile())) {
      scanFrom(file, classloader);
    }
  }

  private void scanFrom(File file, ClassLoader classloader) throws IOException {
    try {
      if (!file.exists()) {
        return;
      }
    } catch (SecurityException e) {
      getLogger().warning("Cannot access " + file + ": " + e);
      // TODO(emcmanus): consider whether to log other failure cases too.
      return;
    }
    if (file.isDirectory()) {
      scanDirectory(classloader, file);
    } else {
      scanJar(file, classloader);
    }
  }

  private void scanJar(File file, ClassLoader classloader) throws IOException {
    JarFile jarFile;
    try {
      jarFile = new JarFile(file);
    } catch (IOException e) {
      // Not a jar file
      return;
    }
    try {
      // A jar may pull in further class path entries through its manifest's
      // Class-Path attribute; scan those first.
      for (File path : getClassPathFromManifest(file, jarFile.getManifest())) {
        scan(path, classloader);
      }
      scanJarFile(classloader, jarFile);
    } finally {
      try {
        jarFile.close();
      } catch (IOException ignored) {
      }
    }
  }
}

// Concrete scanner that records every resource name it sees, keyed by loader.
static final class DefaultScanner extends Scanner {
  private final SetMultimap<ClassLoader, String> resources =
      MultimapBuilder.hashKeys().linkedHashSetValues().build();

  ImmutableSet<ResourceInfo> getResources() {
    ImmutableSet.Builder<ResourceInfo> builder = ImmutableSet.builder();
    for (Map.Entry<ClassLoader, String> entry : resources.entries()) {
      builder.add(ResourceInfo.of(entry.getValue(), entry.getKey()));
    }
    return builder.build();
  }

  @Override
  protected void scanJarFile(ClassLoader classloader, JarFile file) {
    Enumeration<JarEntry> entries = file.entries();
    while (entries.hasMoreElements()) {
      JarEntry entry = entries.nextElement();
      // Directories and the manifest itself are not resources.
      if (entry.isDirectory() || entry.getName().equals(JarFile.MANIFEST_NAME)) {
        continue;
      }
      resources.get(classloader).add(entry.getName());
    }
  }

  @Override
  protected void scanDirectory(ClassLoader classloader, File directory) throws IOException {
    // Seed the visited set with the root so a symlink back to it is skipped.
    Set<File> currentPath = new HashSet<>();
    currentPath.add(directory.getCanonicalFile());
    scanDirectory(directory, classloader, "", currentPath);
  }

  /**
   * Recursively scan the given directory, adding resources for each file encountered. Symlinks
   * which have already been traversed in the current tree path will be skipped to eliminate
   * cycles; otherwise symlinks are traversed.
   *
   * @param directory the root of the directory to scan
   * @param classloader the classloader that includes resources found in {@code directory}
   * @param packagePrefix resource path prefix inside {@code classloader} for any files found
   *     under {@code directory}
   * @param currentPath canonical files already visited in the current directory tree path, for
   *     cycle elimination
   */
  private void scanDirectory(
      File directory, ClassLoader classloader, String packagePrefix, Set<File> currentPath)
      throws IOException {
    File[] files = directory.listFiles();
    if (files == null) {
      getLogger().warning("Cannot read directory " + directory);
      // IO error, just skip the directory
      return;
    }
    for (File f : files) {
      String name = f.getName();
      if (f.isDirectory()) {
        File deref = f.getCanonicalFile();
        if (currentPath.add(deref)) {
          scanDirectory(deref, classloader, packagePrefix + name + "/", currentPath);
          currentPath.remove(deref);
        }
      } else {
        String resourceName = packagePrefix + name;
        if (!resourceName.equals(JarFile.MANIFEST_NAME)) {
          resources.get(classloader).add(resourceName);
        }
      }
    }
  }
}
}
package com.sunny.grokkingalgorithms.ctc.c2;

import com.sunny.grokkingalgorithms.ctc.c2.util.LinkedListUtil;
import com.sunny.grokkingalgorithms.ctc.c2.util.Node;

/**
 * Created by sundas on 3/15/2018.
 *
 * Adds two numbers represented as linked lists of decimal digits, in both
 * reverse (least-significant-digit-first) and forward storage orders.
 */
public class AddTwoNumbers {

    /*
    Add two numbers represented by linked list
    7->1->6 + 5->9->2
    617 + 295 = 912
    stored as 2->1->9 (least significant digit first)
    */

    /**
     * Adds two numbers whose digits are stored in reverse order (head is the
     * least significant digit). The two lists may have different lengths; a
     * shorter list simply contributes 0 once exhausted.
     *
     * @param numberA head of the first number, least significant digit first (may be null)
     * @param numberB head of the second number, least significant digit first (may be null)
     * @return head of the sum, least significant digit first; null when both inputs are null
     */
    public static Node addTwoNumbers(Node numberA, Node numberB) {
        Node sumRoot = null;
        Node previousDigitNode = null;
        int previousCarry = 0;
        Node aRunner = numberA;
        Node bRunner = numberB;
        while (aRunner != null || bRunner != null) {
            int tempSum = previousCarry;
            if (aRunner != null) {
                tempSum += aRunner.data;
            }
            if (bRunner != null) {
                tempSum += bRunner.data;
            }
            int currentDigit = tempSum % 10;
            previousCarry = tempSum / 10;
            Node currentDigitNode = new Node();
            currentDigitNode.data = currentDigit;
            if (sumRoot == null) {
                sumRoot = currentDigitNode;
            }
            if (previousDigitNode != null) {
                previousDigitNode.next = currentDigitNode;
            }
            previousDigitNode = currentDigitNode;
            if (aRunner != null) {
                aRunner = aRunner.next;
            }
            if (bRunner != null) {
                bRunner = bRunner.next;
            }
        }
        // A carry out of the most significant position becomes one more node.
        if (previousCarry > 0) {
            Node currentDigitNode = new Node();
            currentDigitNode.data = previousCarry;
            if (previousDigitNode != null) {
                previousDigitNode.next = currentDigitNode;
            }
        }
        return sumRoot;
    }

    /*
    Add two numbers represented by linked list stored in forward order
    6->1->7 + 2->9->5
    617 + 295 = 912
    stored as 9->1->2 (most significant digit first)
    */

    // Accumulator for the result list built while the backtracking recursion
    // unwinds. Reset at the start of every addTwoNumbersForward call.
    private static Node sumNodeRoot = null;

    /**
     * Adds two numbers whose digits are stored in forward order (head is the
     * most significant digit). The shorter input is first zero-padded at the
     * head so both lists have equal length.
     *
     * <p>BUG FIX: the static {@code sumNodeRoot} accumulator was previously
     * never cleared, so a second call without a manual external reset produced
     * a corrupted result (callers had to set it to null themselves). It is now
     * reset on entry; existing manual resets remain harmless.
     *
     * @param numberA head of the first number, most significant digit first
     * @param numberB head of the second number, most significant digit first
     * @return head of the sum, most significant digit first
     */
    public static Node addTwoNumbersForward(Node numberA, Node numberB) {
        // Start every computation from a clean accumulator.
        sumNodeRoot = null;
        /* Pad the shorter list with leading zeros so lengths match. */
        int lengthA = LinkedListUtil.findLengthOfLinkedList(numberA);
        int lengthB = LinkedListUtil.findLengthOfLinkedList(numberB);
        if (lengthA < lengthB) {
            numberA = LinkedListUtil.padLinkedListWIthZerosFromFromHead(numberA, (lengthB - lengthA));
        } else {
            // Equal lengths fall through here too; padding by zero is a no-op.
            numberB = LinkedListUtil.padLinkedListWIthZerosFromFromHead(numberB, (lengthA - lengthB));
        }
        int carry = addTwoNumbersUsingBacktracking(numberA, numberB);
        // A final carry becomes the new most significant digit.
        if (carry > 0) {
            Node node = new Node();
            node.data = carry;
            node.next = sumNodeRoot;
            sumNodeRoot = node;
        }
        return sumNodeRoot;
    }

    /**
     * Recurses to the tails of both (equal-length) lists and adds digit pairs
     * while unwinding, prepending each digit to {@link #sumNodeRoot}.
     *
     * <p>The returned integer represents the carry produced at this digit
     * position; we navigate to the end of the two lists and then keep adding
     * as the recursion unwinds.
     *
     * @param numberA current node of the first number (lists must be equal length)
     * @param numberB current node of the second number
     * @return the carry (0 or 1) out of this digit position
     */
    public static int addTwoNumbersUsingBacktracking(Node numberA, Node numberB) {
        if (numberA == null && numberB == null) {
            return 0;
        }
        int previousCarry = addTwoNumbersUsingBacktracking(numberA.next, numberB.next);
        int sum = numberA.data + numberB.data + previousCarry;
        int currentDigitSum = sum % 10;
        int carry = sum / 10;
        if (sumNodeRoot == null) {
            sumNodeRoot = new Node();
            sumNodeRoot.data = currentDigitSum;
        } else {
            // Prepend: digits are produced least-significant first while unwinding.
            Node currentDigit = new Node();
            currentDigit.data = currentDigitSum;
            currentDigit.next = sumNodeRoot;
            sumNodeRoot = currentDigit;
        }
        return carry;
    }

    /**
     * Demo driver exercising both storage orders.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        int[] input = new int[]{7, 1, 6};
        Node numberA = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberA);
        input = new int[]{5, 9, 2};
        Node numberB = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberB);
        Node sumRoot = addTwoNumbers(numberA, numberB);
        LinkedListUtil.printLinkedList(sumRoot);
        System.out.println("next.......");
        input = new int[]{7, 1, 9};
        numberA = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberA);
        input = new int[]{5, 9, 9};
        numberB = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberB);
        sumRoot = addTwoNumbers(numberA, numberB);
        LinkedListUtil.printLinkedList(sumRoot);
        System.out.println("next.......");
        input = new int[]{7, 1, 9};
        numberA = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberA);
        input = new int[]{9, 9};
        numberB = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberB);
        sumRoot = addTwoNumbers(numberA, numberB);
        LinkedListUtil.printLinkedList(sumRoot);
        System.out.println("----now stored in forward order");
        input = new int[]{6, 1, 7};
        numberA = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberA);
        input = new int[]{2, 9, 5};
        numberB = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberB);
        sumRoot = addTwoNumbersForward(numberA, numberB);
        LinkedListUtil.printLinkedList(sumRoot);
        System.out.println("next ----- ");
        // Redundant now that addTwoNumbersForward resets its own accumulator,
        // but kept to preserve the original driver's behavior.
        sumNodeRoot = null;
        input = new int[]{9, 1, 7};
        numberA = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberA);
        input = new int[]{9, 9, 5};
        numberB = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberB);
        sumRoot = addTwoNumbersForward(numberA, numberB);
        LinkedListUtil.printLinkedList(sumRoot);
        System.out.println("next ----- ");
        sumNodeRoot = null;
        input = new int[]{9, 1, 7};
        numberA = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberA);
        input = new int[]{9, 9};
        numberB = LinkedListUtil.createLinkedList(input);
        LinkedListUtil.printLinkedList(numberB);
        sumRoot = addTwoNumbersForward(numberA, numberB);
        LinkedListUtil.printLinkedList(sumRoot);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager; import java.nio.ByteBuffer; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter; import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher; import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager; import org.apache.hadoop.yarn.server.resourcemanager.recovery.NullRMStateStore; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore; import org.apache.hadoop.yarn.server.resourcemanager.reservation.ReservationSystem; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import 
org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.AMLivelinessMonitor;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.AMRMTokenSecretManager;
import org.apache.hadoop.yarn.server.resourcemanager.security.ClientToAMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.DelegationTokenRenewer;
import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMDelegationTokenSecretManager;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;

/**
 * The RMActiveServiceContext is the class that maintains all the
 * RMActiveService contexts. This is expected to be used only by ResourceManager
 * and RMContext.
*/ @Private @Unstable public class RMActiveServiceContext { private static final Log LOG = LogFactory .getLog(RMActiveServiceContext.class); private final ConcurrentMap<ApplicationId, RMApp> applications = new ConcurrentHashMap<ApplicationId, RMApp>(); private final ConcurrentMap<NodeId, RMNode> nodes = new ConcurrentHashMap<NodeId, RMNode>(); private final ConcurrentMap<NodeId, RMNode> inactiveNodes = new ConcurrentHashMap<NodeId, RMNode>(); private final ConcurrentMap<ApplicationId, ByteBuffer> systemCredentials = new ConcurrentHashMap<ApplicationId, ByteBuffer>(); private boolean isWorkPreservingRecoveryEnabled; private AMLivelinessMonitor amLivelinessMonitor; private AMLivelinessMonitor amFinishingMonitor; private RMStateStore stateStore = null; private ContainerAllocationExpirer containerAllocationExpirer; private DelegationTokenRenewer delegationTokenRenewer; private AMRMTokenSecretManager amRMTokenSecretManager; private RMContainerTokenSecretManager containerTokenSecretManager; private NMTokenSecretManagerInRM nmTokenSecretManager; private ClientToAMTokenSecretManagerInRM clientToAMTokenSecretManager; private ClientRMService clientRMService; private RMDelegationTokenSecretManager rmDelegationTokenSecretManager; private ResourceScheduler scheduler; private ReservationSystem reservationSystem; private NodesListManager nodesListManager; private ResourceTrackerService resourceTrackerService; private ApplicationMasterService applicationMasterService; private RMNodeLabelsManager nodeLabelManager; private long epoch; private Clock systemClock = new SystemClock(); private long schedulerRecoveryStartTime = 0; private long schedulerRecoveryWaitTime = 0; private boolean printLog = true; private boolean isSchedulerReady = false; public RMActiveServiceContext() { } @Private @Unstable public RMActiveServiceContext(Dispatcher rmDispatcher, ContainerAllocationExpirer containerAllocationExpirer, AMLivelinessMonitor amLivelinessMonitor, AMLivelinessMonitor amFinishingMonitor, 
DelegationTokenRenewer delegationTokenRenewer, AMRMTokenSecretManager appTokenSecretManager, RMContainerTokenSecretManager containerTokenSecretManager, NMTokenSecretManagerInRM nmTokenSecretManager, ClientToAMTokenSecretManagerInRM clientToAMTokenSecretManager, ResourceScheduler scheduler) { this(); this.setContainerAllocationExpirer(containerAllocationExpirer); this.setAMLivelinessMonitor(amLivelinessMonitor); this.setAMFinishingMonitor(amFinishingMonitor); this.setDelegationTokenRenewer(delegationTokenRenewer); this.setAMRMTokenSecretManager(appTokenSecretManager); this.setContainerTokenSecretManager(containerTokenSecretManager); this.setNMTokenSecretManager(nmTokenSecretManager); this.setClientToAMTokenSecretManager(clientToAMTokenSecretManager); this.setScheduler(scheduler); RMStateStore nullStore = new NullRMStateStore(); nullStore.setRMDispatcher(rmDispatcher); try { nullStore.init(new YarnConfiguration()); setStateStore(nullStore); } catch (Exception e) { assert false; } } @Private @Unstable public void setStateStore(RMStateStore store) { stateStore = store; } @Private @Unstable public ClientRMService getClientRMService() { return clientRMService; } @Private @Unstable public ApplicationMasterService getApplicationMasterService() { return applicationMasterService; } @Private @Unstable public ResourceTrackerService getResourceTrackerService() { return resourceTrackerService; } @Private @Unstable public RMStateStore getStateStore() { return stateStore; } @Private @Unstable public ConcurrentMap<ApplicationId, RMApp> getRMApps() { return this.applications; } @Private @Unstable public ConcurrentMap<NodeId, RMNode> getRMNodes() { return this.nodes; } @Private @Unstable public ConcurrentMap<NodeId, RMNode> getInactiveRMNodes() { return this.inactiveNodes; } @Private @Unstable public ContainerAllocationExpirer getContainerAllocationExpirer() { return this.containerAllocationExpirer; } @Private @Unstable public AMLivelinessMonitor getAMLivelinessMonitor() { return 
this.amLivelinessMonitor; } @Private @Unstable public AMLivelinessMonitor getAMFinishingMonitor() { return this.amFinishingMonitor; } @Private @Unstable public DelegationTokenRenewer getDelegationTokenRenewer() { return delegationTokenRenewer; } @Private @Unstable public AMRMTokenSecretManager getAMRMTokenSecretManager() { return this.amRMTokenSecretManager; } @Private @Unstable public RMContainerTokenSecretManager getContainerTokenSecretManager() { return this.containerTokenSecretManager; } @Private @Unstable public NMTokenSecretManagerInRM getNMTokenSecretManager() { return this.nmTokenSecretManager; } @Private @Unstable public ResourceScheduler getScheduler() { return this.scheduler; } @Private @Unstable public ReservationSystem getReservationSystem() { return this.reservationSystem; } @Private @Unstable public NodesListManager getNodesListManager() { return this.nodesListManager; } @Private @Unstable public ClientToAMTokenSecretManagerInRM getClientToAMTokenSecretManager() { return this.clientToAMTokenSecretManager; } @Private @Unstable public void setClientRMService(ClientRMService clientRMService) { this.clientRMService = clientRMService; } @Private @Unstable public RMDelegationTokenSecretManager getRMDelegationTokenSecretManager() { return this.rmDelegationTokenSecretManager; } @Private @Unstable public void setRMDelegationTokenSecretManager( RMDelegationTokenSecretManager delegationTokenSecretManager) { this.rmDelegationTokenSecretManager = delegationTokenSecretManager; } @Private @Unstable void setContainerAllocationExpirer( ContainerAllocationExpirer containerAllocationExpirer) { this.containerAllocationExpirer = containerAllocationExpirer; } @Private @Unstable void setAMLivelinessMonitor(AMLivelinessMonitor amLivelinessMonitor) { this.amLivelinessMonitor = amLivelinessMonitor; } @Private @Unstable void setAMFinishingMonitor(AMLivelinessMonitor amFinishingMonitor) { this.amFinishingMonitor = amFinishingMonitor; } @Private @Unstable void 
setContainerTokenSecretManager( RMContainerTokenSecretManager containerTokenSecretManager) { this.containerTokenSecretManager = containerTokenSecretManager; } @Private @Unstable void setNMTokenSecretManager(NMTokenSecretManagerInRM nmTokenSecretManager) { this.nmTokenSecretManager = nmTokenSecretManager; } @Private @Unstable void setScheduler(ResourceScheduler scheduler) { this.scheduler = scheduler; } @Private @Unstable void setReservationSystem(ReservationSystem reservationSystem) { this.reservationSystem = reservationSystem; } @Private @Unstable void setDelegationTokenRenewer(DelegationTokenRenewer delegationTokenRenewer) { this.delegationTokenRenewer = delegationTokenRenewer; } @Private @Unstable void setClientToAMTokenSecretManager( ClientToAMTokenSecretManagerInRM clientToAMTokenSecretManager) { this.clientToAMTokenSecretManager = clientToAMTokenSecretManager; } @Private @Unstable void setAMRMTokenSecretManager(AMRMTokenSecretManager amRMTokenSecretManager) { this.amRMTokenSecretManager = amRMTokenSecretManager; } @Private @Unstable void setNodesListManager(NodesListManager nodesListManager) { this.nodesListManager = nodesListManager; } @Private @Unstable void setApplicationMasterService( ApplicationMasterService applicationMasterService) { this.applicationMasterService = applicationMasterService; } @Private @Unstable void setResourceTrackerService(ResourceTrackerService resourceTrackerService) { this.resourceTrackerService = resourceTrackerService; } @Private @Unstable public void setWorkPreservingRecoveryEnabled(boolean enabled) { this.isWorkPreservingRecoveryEnabled = enabled; } @Private @Unstable public boolean isWorkPreservingRecoveryEnabled() { return this.isWorkPreservingRecoveryEnabled; } @Private @Unstable public long getEpoch() { return this.epoch; } @Private @Unstable void setEpoch(long epoch) { this.epoch = epoch; } @Private @Unstable public RMNodeLabelsManager getNodeLabelManager() { return nodeLabelManager; } @Private @Unstable public void 
setNodeLabelManager(RMNodeLabelsManager mgr) { nodeLabelManager = mgr; } @Private @Unstable public void setSchedulerRecoveryStartAndWaitTime(long waitTime) { this.schedulerRecoveryStartTime = systemClock.getTime(); this.schedulerRecoveryWaitTime = waitTime; } @Private @Unstable public boolean isSchedulerReadyForAllocatingContainers() { if (isSchedulerReady) { return isSchedulerReady; } isSchedulerReady = (systemClock.getTime() - schedulerRecoveryStartTime) > schedulerRecoveryWaitTime; if (!isSchedulerReady && printLog) { LOG.info("Skip allocating containers. Scheduler is waiting for recovery."); printLog = false; } if (isSchedulerReady) { LOG.info("Scheduler recovery is done. Start allocating new containers."); } return isSchedulerReady; } @Private @Unstable public void setSystemClock(Clock clock) { this.systemClock = clock; } @Private @Unstable public ConcurrentMap<ApplicationId, ByteBuffer> getSystemCredentialsForApps() { return systemCredentials; } }
package com.ctrip.platform.dal.dao.task;

import com.ctrip.platform.dal.dao.DalHints;
import com.ctrip.platform.dal.dao.ResultMerger;
import com.ctrip.platform.dal.dao.helper.DefaultResultCallback;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import static org.junit.Assert.*;

/**
 * Unit tests for {@code DalRequestExecutor}: validation failures (sync and async),
 * single-shard and cross-shard execution, nullable results, result callbacks, and
 * thread-pool sizing / cool-down behaviour.
 *
 * NOTE(review): the thread-pool tests below are timing-sensitive (they sleep and
 * sample pool size), so they may be flaky on heavily loaded machines.
 */
public class DalRequestExecutorTest {

    /**
     * Minimal {@code DalRequest} stub. If a non-null {@link SQLException} is supplied,
     * {@link #validateAndPrepare()} throws it; otherwise one task is produced per entry
     * of {@link #values}, each task simply returning its value.
     */
    private class TestDalRequest implements DalRequest<Integer> {
        // Exception to throw from validateAndPrepare(), or null for a valid request.
        private SQLException e;
        // One task per element; the element is the task's return value.
        public Integer[] values;

        private TestDalRequest(SQLException e, Integer[] values) {
            this.e = e;
            this.values = values;
        }

        @Override
        public void validateAndPrepare() throws SQLException {
            // Simulate a request that fails validation.
            if(e!= null)
                throw e;
        }

        @Override
        public String getLogicDbName() {
            return null;
        }

        @Override
        public boolean isCrossShard() throws SQLException {
            // More than one value means the request fans out across shards.
            return values.length > 1;
        }

        @Override
        public TaskCallable<Integer> createTask() throws SQLException {
            // Single-shard path: only the first value is used.
            return createInternalTask(values[0]);
        }

        @Override
        public Map<String, TaskCallable<Integer>> createTasks() throws SQLException {
            // Cross-shard path: one task per value, keyed by its index as a string.
            Map<String, TaskCallable<Integer>> tasks = new HashMap<>();
            for(int i = 0; i < values.length; i++) {
                final int k = values[i];
                tasks.put(String.valueOf(i), createInternalTask(k));
            }
            return tasks;
        }

        /** Builds a task whose call() simply returns {@code k}. Overridden by subclasses. */
        public TaskCallable<Integer> createInternalTask(final Integer k) throws SQLException {
            return new TaskCallable<Integer>() {
                private DalTaskContext dalTaskContext = new DefaultTaskContext();

                @Override
                public DalTaskContext getDalTaskContext() {
                    return this.dalTaskContext;
                }

                @Override
                public String getPreparedDbShard() {
                    return null;
                }

                public Integer call() throws Exception {
                    return k;
                }
            };
        }

        @Override
        public ResultMerger<Integer> getMerger() {
            // Cross-shard results are summed; a null partial result is treated as "no result".
            return new ResultMerger.IntSummary();
        }

        @Override
        public String getCaller() {
            // TODO Auto-generated method stub
            return null;
        }

        @Override
        public boolean isAsynExecution() {
            // TODO Auto-generated method stub
            return false;
        }

        @Override
        public void endExecution() throws SQLException {
            // TODO Auto-generated method stub
        }
    }

    // Records the name of every worker thread that executed a task; the key-set size
    // therefore equals the number of distinct pool threads that were used.
    static ConcurrentHashMap<String, Object> all = new ConcurrentHashMap<>();

    /**
     * Cross-shard request of {@code size} tasks (values 0..size-1) whose tasks record
     * their executing thread in {@link #all} and optionally sleep to hold a pool thread.
     */
    private class TestThreadPoolDalRequest extends TestDalRequest {
        TestThreadPoolDalRequest(int size) {
            super(null, null);
            values = new Integer[size];
            for(int i = 0; i < size; i++)
                values[i] = i;
        }

        // When true each task sleeps 1s, forcing the pool to spread tasks over threads.
        private boolean sleep;

        public TaskCallable<Integer> createInternalTask(final Integer k) throws SQLException {
            return new TaskCallable<Integer>() {
                private DalTaskContext dalTaskContext = new DefaultTaskContext();

                @Override
                public DalTaskContext getDalTaskContext() {
                    return this.dalTaskContext;
                }

                @Override
                public String getPreparedDbShard() {
                    return null;
                }

                public Integer call() throws Exception {
                    // Track which pool thread ran this task.
                    all.put(Thread.currentThread().getName(), 1);
                    if(sleep)
                        Thread.sleep(1000);
                    return k;
                }
            };
        }
    }

    /** Initializes the shared executor with default settings before each test. */
    @Before
    public void setUp() {
        try{
            DalRequestExecutor.init(null, null);
        }catch(Throwable e) {
            fail();
        }
    }

    /** Shuts the shared executor down after each test so pool-size tests start clean. */
    @After
    public void teardown() {
        try{
            DalRequestExecutor.shutdown();
        }catch(Throwable e) {
            fail();
        }
    }

    /** A validation failure must surface as the same SQLException instance. */
    @Test
    public void testExecuteFailByValidate() {
        DalRequestExecutor test = new DalRequestExecutor();
        SQLException ex = new SQLException("Test");
        TestDalRequest request = new TestDalRequest(ex, null);
        DalHints hints = new DalHints();
        try {
            test.execute(hints, request);
            fail();
        } catch (SQLException e) {
            assertEquals(ex, e);
        }
    }

    /**
     * In async mode execute() returns null immediately and the validation failure is
     * delivered through the Future as an ExecutionException wrapping the original.
     * NOTE(review): the unconditional cast assumes the failure always arrives as an
     * ExecutionException from result.get() — a direct SQLException would cause a CCE.
     */
    @Test
    public void testExecuteFailByValidateAsync() {
        DalRequestExecutor test = new DalRequestExecutor();
        SQLException ex = new SQLException("Test");
        TestDalRequest request = new TestDalRequest(ex, null);
        DalHints hints = new DalHints().asyncExecution();
        try {
            assertNull(test.execute(hints, request));
            Future<?> result = hints.getAsyncResult();
            result.get();
            fail();
        } catch (SQLException | InterruptedException | ExecutionException e) {
            assertEquals(ex, ((ExecutionException)e).getCause());
        }
    }

    /** Single value => single-shard path; the task's value is returned as-is. */
    @Test
    public void testExecuteNotCrossShard() {
        DalRequestExecutor test = new DalRequestExecutor();
        TestDalRequest request = new TestDalRequest(null, new Integer[]{1});
        DalHints hints = new DalHints();
        try {
            Integer result = test.execute(hints, request);
            assertEquals(1, result.intValue());
        } catch (Exception e) {
            fail();
        }
    }

    /** Two values => cross-shard path; IntSummary merges 1 + 2 = 3. */
    @Test
    public void testExecuteCrossShard() {
        DalRequestExecutor test = new DalRequestExecutor();
        TestDalRequest request = new TestDalRequest(null, new Integer[]{1, 2});
        DalHints hints = new DalHints();
        try {
            Integer result = test.execute(hints, request);
            assertEquals(3, result.intValue());
        } catch (Exception e) {
            fail();
        }
    }

    /** A null task result is allowed when nullable=true, and must fail when nullable=false. */
    @Test
    public void testExecuteNullable() {
        DalRequestExecutor test = new DalRequestExecutor();
        TestDalRequest request = new TestDalRequest(null, new Integer[]{null});
        DalHints hints = new DalHints();
        try {
            Integer result = test.execute(hints, request, true);
            assertNull(result);
        } catch (Exception e) {
            fail();
        }
        try {
            test.execute(hints, request, false);
            fail();
        } catch (Exception e) {
        }
    }

    /** With a callback the direct return is null; the value arrives via Future and callback. */
    @Test
    public void testExecuteCallback() {
        DalRequestExecutor test = new DalRequestExecutor();
        TestDalRequest request = new TestDalRequest(null, new Integer[]{1});
        DalHints hints = new DalHints();
        DefaultResultCallback callback = new DefaultResultCallback();
        try {
            Integer result = test.execute(hints.callbackWith(callback), request, true);
            assertNull(result);
            assertEquals(1, ((Integer)hints.getAsyncResult().get()).intValue());
            assertEquals(1, ((Integer)callback.getResult()).intValue());
        } catch (Exception e) {
            fail();
        }
    }

    /** The number of distinct worker threads must be capped by the configured pool size. */
    @Test
    public void testThreadPoolFeature() {
        DalRequestExecutor.shutdown();
        DalRequestExecutor.init("10", null);
        DalRequestExecutor test = new DalRequestExecutor();
        TestDalRequest request = new TestThreadPoolDalRequest(50);
        DalHints hints = new DalHints();
        try {
            all.clear();
            test.execute(hints, request, true);
            assertEquals(10, all.keySet().size());
        } catch (Exception e) {
            fail();
        }
        all.clear();
        DalRequestExecutor.shutdown();
        DalRequestExecutor.init("200", null);
        test = new DalRequestExecutor();
        request = new TestThreadPoolDalRequest(1000);
        hints = new DalHints();
        try {
            test.execute(hints, request, true);
            assertEquals(200, all.keySet().size());
        } catch (Exception e) {
            fail();
        }
    }

    /** After the configured keep-alive (10s) idle threads should be reclaimed. */
    @Test
    public void testThreadPoolFeatureCooldown() {
        DalRequestExecutor.shutdown();
        DalRequestExecutor.init("10", "10");
        DalRequestExecutor test = new DalRequestExecutor();
        System.out.println(test.getPoolSize());
        TestDalRequest request = new TestThreadPoolDalRequest(50);
        DalHints hints = new DalHints();
        try {
            all.clear();
            test.execute(hints, request, true);
            assertEquals(10, test.getPoolSize());
            assertEquals(10, all.keySet().size());
            assertEquals(10, test.getPoolSize());
        } catch (Exception e) {
            fail();
        }
        try {
            // Sample pool size around the 10s keep-alive boundary, then verify shrink.
            Thread.sleep(9*1000);
            System.out.println(test.getPoolSize());
            Thread.sleep(1*1000);
            System.out.println(test.getPoolSize());
            request = new TestThreadPoolDalRequest(5);
            test.execute(hints, request, true);
            Thread.sleep(10*1000);
            System.out.println(test.getPoolSize());
            assertTrue(test.getPoolSize() < 10);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Large burst must grow the pool to its maximum (1000) and later cool back down. */
    @Test
    public void testThreadPoolFeatureMax() {
        DalRequestExecutor.shutdown();
        DalRequestExecutor.init("1000", "10");
        DalRequestExecutor test = new DalRequestExecutor();
        System.out.println(test.getPoolSize());
        TestDalRequest request = new TestThreadPoolDalRequest(20000);
        DalHints hints = new DalHints();
        try {
            all.clear();
            System.out.println("Start");
            long start = System.currentTimeMillis();
            test.execute(hints, request, true);
            start = System.currentTimeMillis() - start;
            System.out.println(start + "ms");
            assertEquals(1000, all.keySet().size());
            assertEquals(1000, test.getPoolSize());
        } catch (Exception e) {
            fail();
        }
        try {
            Thread.sleep(9*1000);
            System.out.println("after 9s: " + test.getPoolSize());
            Thread.sleep(1*1000);
            System.out.println("after 10s: " + test.getPoolSize());
            request = new TestThreadPoolDalRequest(5);
            test.execute(hints, request, true);
            System.out.println(test.getPoolSize());
            assertTrue(test.getPoolSize() < 1000);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Throughput sanity check: batches of sleeping tasks finish within expected wall time. */
    @Test
    public void testThreadPoolCheckCapacity() {
        DalRequestExecutor.shutdown();
        DalRequestExecutor.init("1000", "1");
        DalRequestExecutor test = new DalRequestExecutor();
        testCapacity(test, 100, 1000);
        testCapacity(test, 200, 1000);
        testCapacity(test, 300, 1000);
        testCapacity(test, 500, 1000);
        testCapacity(test, 1000, 1000);
        testCapacity(test, 1500, 2000);
        testCapacity(test, 2000, 2000);
        testCapacity(test, 10000, 10000);
    }

    /**
     * Runs {@code size} 1s-sleeping tasks and asserts total wall time stays within
     * {@code cost} + 200ms slack; each task holds a pool thread for the full second.
     */
    public void testCapacity(DalRequestExecutor test, int size, int cost) {
        int delta = 200;
        TestThreadPoolDalRequest request = new TestThreadPoolDalRequest(size);
        request.sleep = true;
        DalHints hints = new DalHints();
        try {
            System.out.print("Size: " + size + " Cost: " + cost + " pool size: " + test.getPoolSize());
            all.clear();
            long start = System.currentTimeMillis();
            test.execute(hints, request, true);
            start = System.currentTimeMillis() - start;
            System.out.println(" Actual cost: " + start + "ms");
            assertTrue(start - cost < delta);
            // Let the pool settle before the next batch.
            Thread.sleep(1500);
        } catch (Exception e) {
            fail();
        }
    }
}
/*
 * Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.util;

import java.net.InetAddress;
import java.net.URI;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import com.amazonaws.internal.config.HostRegexToRegionMapping;
import com.amazonaws.internal.config.InternalConfig;
import com.amazonaws.log.InternalLogFactory;

/**
 * Utilities for extracting region and service names from AWS endpoint host
 * names, plus a safe local-hostname lookup.
 */
public class AwsHostNameUtils {

    /** Standard AWS endpoint suffix, shared by the parsing methods below. */
    private static final String AWS_DOMAIN_SUFFIX = ".amazonaws.com";

    /** Matches 'bucket.s3-[region]' / 'bucket.s3.[region]' style S3 fragments. */
    private static final Pattern S3_ENDPOINT_PATTERN =
        Pattern.compile("^(?:.+\\.)?s3[.-]([a-z0-9-]+)$");

    /** Matches 'domain.[region].cloudsearch' fragments (suffix stripped). */
    private static final Pattern STANDARD_CLOUDSEARCH_ENDPOINT_PATTERN =
        Pattern.compile("^(?:.+\\.)?([a-z0-9-]+)\\.cloudsearch$");

    /** Matches 'domain.[region].cloudsearch.[suffix]' full host names. */
    private static final Pattern EXTENDED_CLOUDSEARCH_ENDPOINT_PATTERN =
        Pattern.compile("^(?:.+\\.)?([a-z0-9-]+)\\.cloudsearch\\..+");

    /** Static-utility class; not meant to be instantiated. */
    private AwsHostNameUtils() {
    }

    /**
     * @deprecated in favor of {@link #parseRegionName(String, String)}.
     */
    @Deprecated
    public static String parseRegionName(URI endpoint) {
        return parseRegionName(endpoint.getHost(), null);
    }

    /**
     * Attempts to parse the region name from an endpoint based on conventions
     * about the endpoint format.
     *
     * @param host the hostname to parse
     * @param serviceHint an optional hint about the service for the endpoint
     * @return the region parsed from the hostname, or
     *         &quot;us-east-1&quot; if no region information
     *         could be found
     */
    public static String parseRegionName(final String host,
                                         final String serviceHint) {

        // Explicit host-regex-to-region mappings from internal config win.
        String regionNameInInternalConfig = parseRegionNameByInternalConfig(host);
        if (regionNameInInternalConfig != null) {
            return regionNameInInternalConfig;
        }

        if (host.endsWith(AWS_DOMAIN_SUFFIX)) {
            int index = host.length() - AWS_DOMAIN_SUFFIX.length();
            return parseStandardRegionName(host.substring(0, index));
        }

        if (serviceHint != null) {
            if (serviceHint.equals("cloudsearch")
                    && !host.startsWith("cloudsearch.")) {
                // CloudSearch domains use the nonstandard domain format
                // [domain].[region].cloudsearch.[suffix].
                Matcher matcher = EXTENDED_CLOUDSEARCH_ENDPOINT_PATTERN
                        .matcher(host);
                if (matcher.matches()) {
                    return matcher.group(1);
                }
            }

            // If we have a service hint, look for 'service.[region]' or
            // 'service-[region]' in the endpoint's hostname.
            Pattern pattern = Pattern.compile(
                "^(?:.+\\.)?"
                + Pattern.quote(serviceHint)
                + "[.-]([a-z0-9-]+)\\."
            );

            Matcher matcher = pattern.matcher(host);
            if (matcher.find()) {
                return matcher.group(1);
            }
        }

        // Endpoint is totally non-standard; guess us-east-1 for lack of a
        // better option.
        return "us-east-1";
    }

    /**
     * Parses the region name from a standard (*.amazonaws.com) endpoint.
     *
     * @param fragment the portion of the endpoint excluding
     *                 &quot;.amazonaws.com&quot;
     * @return the parsed region name (or &quot;us-east-1&quot; as a
     *         best guess if we can't tell for sure)
     */
    private static String parseStandardRegionName(final String fragment) {

        Matcher matcher = S3_ENDPOINT_PATTERN.matcher(fragment);
        if (matcher.matches()) {
            // host was 'bucket.s3-[region].amazonaws.com'.
            return matcher.group(1);
        }

        matcher = STANDARD_CLOUDSEARCH_ENDPOINT_PATTERN.matcher(fragment);
        if (matcher.matches()) {
            // host was 'domain.[region].cloudsearch.amazonaws.com'.
            return matcher.group(1);
        }

        int index = fragment.lastIndexOf('.');
        if (index == -1) {
            // host was 'service.amazonaws.com', guess us-east-1
            // for lack of a better option.
            return "us-east-1";
        }

        // host was 'service.[region].amazonaws.com'.
        String region = fragment.substring(index + 1);

        // Special case for iam.us-gov.amazonaws.com, which is actually
        // us-gov-west-1.
        if ("us-gov".equals(region)) {
            region = "us-gov-west-1";
        }

        return region;
    }

    /**
     * @return the configured region name if the given host name matches any of
     *         the host-to-region mappings in the internal config; otherwise
     *         return null.
     */
    private static String parseRegionNameByInternalConfig(String host) {
        InternalConfig internConfig = InternalConfig.Factory.getInternalConfig();

        for (HostRegexToRegionMapping mapping : internConfig.getHostRegexToRegionMappings()) {
            String hostNameRegex = mapping.getHostNameRegex();
            if (host.matches(hostNameRegex)) {
                return mapping.getRegionName();
            }
        }

        return null;
    }

    /**
     * Parses the service name from an endpoint. Can only handle endpoints of
     * the form 'service.[region.]amazonaws.com'.
     *
     * @param endpoint the endpoint whose host is inspected
     * @return the service name (e.g. "s3", "iam")
     * @throws IllegalArgumentException if the host does not end with
     *         ".amazonaws.com"
     */
    public static String parseServiceName(URI endpoint) {
        String host = endpoint.getHost();

        // If we don't recognize the domain, throw an exception.
        if (!host.endsWith(AWS_DOMAIN_SUFFIX)) {
            throw new IllegalArgumentException(
                "Cannot parse a service name from an unrecognized endpoint ("
                + host + ").");
        }

        // BUGFIX: use lastIndexOf so a host whose leading labels happen to
        // contain ".amazonaws.com" (e.g. an S3 bucket literally named
        // "foo.amazonaws.com") is stripped at the real suffix, not at the
        // first occurrence.
        String serviceAndRegion =
            host.substring(0, host.lastIndexOf(AWS_DOMAIN_SUFFIX));

        // Special cases for S3 endpoints with bucket names embedded.
        if (serviceAndRegion.endsWith(".s3")
            || S3_ENDPOINT_PATTERN.matcher(serviceAndRegion).matches()) {
            return "s3";
        }

        char separator = '.';

        // If we don't detect a separator between service name and region, then
        // assume that the region is not included in the hostname, and it's only
        // the service name (ex: "http://iam.amazonaws.com").
        if (serviceAndRegion.indexOf(separator) == -1) {
            return serviceAndRegion;
        }

        String service =
            serviceAndRegion.substring(0, serviceAndRegion.indexOf(separator));

        return service;
    }

    /**
     * Returns the host name for the local host. If the operation is not allowed
     * by the security check, the textual representation of the IP address of
     * the local host is returned instead. If the ip address of the local host
     * cannot be resolved or if there is any other failure, "localhost" is
     * returned as a fallback.
     */
    public static String localHostName() {
        try {
            InetAddress localhost = InetAddress.getLocalHost();
            return localhost.getHostName();
        } catch (Exception e) {
            InternalLogFactory.getLog(AwsHostNameUtils.class)
                .debug(
                    "Failed to determine the local hostname; fall back to "
                            + "use \"localhost\".", e);
            return "localhost";
        }
    }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.keymap.impl.ui;

import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.ide.actionMacro.ActionMacro;
import com.intellij.ide.plugins.IdeaPluginDescriptor;
import com.intellij.ide.plugins.PluginManagerCore;
import com.intellij.ide.ui.search.SearchUtil;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ActionManagerEx;
import com.intellij.openapi.actionSystem.ex.QuickList;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.keymap.KeyMapBundle;
import com.intellij.openapi.keymap.Keymap;
import com.intellij.openapi.keymap.KeymapExtension;
import com.intellij.openapi.keymap.ex.KeymapManagerEx;
import com.intellij.openapi.keymap.impl.ActionShortcutRestrictions;
import com.intellij.openapi.keymap.impl.KeymapImpl;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import java.util.*;

/**
 * Builds the tree of action {@code Group}s shown in the keymap settings UI:
 * editor actions, main menu, keymap extensions, macros, quick lists, plugin
 * actions, and a catch-all "other" group. All traversal works on action stubs
 * where possible to avoid instantiating actions eagerly. Static utility class.
 */
public class ActionsTreeUtil {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.keymap.impl.ui.ActionsTreeUtil");

  public static final String MAIN_MENU_TITLE = KeyMapBundle.message("main.menu.action.title");
  public static final String MAIN_TOOLBAR = KeyMapBundle.message("main.toolbar.title");
  public static final String EDITOR_POPUP = KeyMapBundle.message("editor.popup.menu.title");
  public static final String EDITOR_TAB_POPUP = KeyMapBundle.message("editor.tab.popup.menu.title");
  public static final String FAVORITES_POPUP = KeyMapBundle.message("favorites.popup.title");
  public static final String PROJECT_VIEW_POPUP = KeyMapBundle.message("project.view.popup.menu.title");
  public static final String COMMANDER_POPUP = KeyMapBundle.message("commender.view.popup.menu.title");
  public static final String J2EE_POPUP = KeyMapBundle.message("j2ee.view.popup.menu.title");

  // Editor action ids start with this prefix; the real actions are registered as "$<rest>".
  @NonNls
  private static final String EDITOR_PREFIX = "Editor";
  // External-tool actions are excluded from the tree.
  @NonNls
  private static final String TOOL_ACTION_PREFIX = "Tool_";

  private ActionsTreeUtil() {
  }

  /**
   * Builds a group per installed plugin (sorted by plugin name, "IDEA CORE" skipped),
   * then a group per registered PluginId not backed by an installed descriptor.
   * Bound actions are skipped; {@code filtered} (nullable) prunes individual actions.
   */
  private static Group createPluginsActionsGroup(Condition<AnAction> filtered) {
    Group pluginsGroup = new Group(KeyMapBundle.message("plugins.group.title"), null, null);
    final KeymapManagerEx keymapManager = KeymapManagerEx.getInstanceEx();
    ActionManagerEx managerEx = ActionManagerEx.getInstanceEx();
    final List<IdeaPluginDescriptor> plugins = new ArrayList<IdeaPluginDescriptor>();
    Collections.addAll(plugins, PluginManagerCore.getPlugins());
    Collections.sort(plugins, new Comparator<IdeaPluginDescriptor>() {
      public int compare(IdeaPluginDescriptor o1, IdeaPluginDescriptor o2) {
        return o1.getName().compareTo(o2.getName());
      }
    });
    List<PluginId> collected = new ArrayList<PluginId>();
    for (IdeaPluginDescriptor plugin : plugins) {
      collected.add(plugin.getPluginId());
      Group pluginGroup;
      if (plugin.getName().equals("IDEA CORE")) {
        continue;
      }
      else {
        pluginGroup = new Group(plugin.getName(), null, null);
      }
      final String[] pluginActions = managerEx.getPluginActions(plugin.getPluginId());
      if (pluginActions == null || pluginActions.length == 0) {
        continue;
      }
      // Sort actions by their presentation text (falling back to id).
      Arrays.sort(pluginActions, new Comparator<String>() {
        public int compare(String o1, String o2) {
          return getTextToCompare(o1).compareTo(getTextToCompare(o2));
        }
      });
      for (String pluginAction : pluginActions) {
        if (keymapManager.getBoundActions().contains(pluginAction)) continue;
        final AnAction anAction = managerEx.getActionOrStub(pluginAction);
        if (filtered == null || filtered.value(anAction)) {
          pluginGroup.addActionId(pluginAction);
        }
      }
      if (pluginGroup.getSize() > 0) {
        pluginsGroup.addGroup(pluginGroup);
      }
    }

    // Registered plugin ids without an installed descriptor (unsorted actions).
    for (PluginId pluginId : PluginId.getRegisteredIds().values()) {
      if (collected.contains(pluginId)) continue;
      Group pluginGroup = new Group(pluginId.getIdString(), null, null);
      final String[] pluginActions = managerEx.getPluginActions(pluginId);
      if (pluginActions == null || pluginActions.length == 0) {
        continue;
      }
      for (String pluginAction : pluginActions) {
        if (keymapManager.getBoundActions().contains(pluginAction)) continue;
        final AnAction anAction = managerEx.getActionOrStub(pluginAction);
        if (filtered == null || filtered.value(anAction)) {
          pluginGroup.addActionId(pluginAction);
        }
      }
      if (pluginGroup.getSize() > 0) {
        pluginsGroup.addGroup(pluginGroup);
      }
    }
    return pluginsGroup;
  }

  /** Builds the "Main menu" group, flattening non-popup subgroups into their parent. */
  private static Group createMainMenuGroup(Condition<AnAction> filtered) {
    Group group = new Group(MAIN_MENU_TITLE, IdeActions.GROUP_MAIN_MENU, AllIcons.Nodes.KeymapMainMenu);
    ActionGroup mainMenuGroup = (ActionGroup)ActionManager.getInstance().getActionOrStub(IdeActions.GROUP_MAIN_MENU);
    fillGroupIgnorePopupFlag(mainMenuGroup, group, filtered);
    return group;
  }

  /**
   * Wraps {@code filter} with keymap-level exclusions: hides alias (bound) actions
   * unless the registry flag "keymap.show.alias.actions" is set, and hides actions
   * whose shortcuts are not allowed to change.
   */
  @Nullable
  private static Condition<AnAction> wrapFilter(@Nullable final Condition<AnAction> filter,
                                                final Keymap keymap,
                                                final ActionManager actionManager) {
    final ActionShortcutRestrictions shortcutRestrictions = ActionShortcutRestrictions.getInstance();
    return new Condition<AnAction>() {
      @Override
      public boolean value(final AnAction action) {
        if (action == null) return false;
        final String id = action instanceof ActionStub ? ((ActionStub)action).getId() : actionManager.getId(action);
        if (id != null) {
          if (!Registry.is("keymap.show.alias.actions")) {
            String binding = getActionBinding(keymap, id);
            boolean bound = binding != null
                            && actionManager.getAction(binding) != null // do not hide a bound action whose 'bound-with' target is missing
                            && !hasAssociatedShortcutsInHierarchy(id, keymap); // do not hide bound actions that were redefined in this keymap chain
            if (bound) {
              return false;
            }
          }
          if (!shortcutRestrictions.getForActionId(id).allowChanging) {
            return false;
          }
        }
        return filter == null || filter.value(action);
      }
    };
  }

  /** True if any keymap in the parent chain defines its own shortcuts for {@code id}. */
  private static boolean hasAssociatedShortcutsInHierarchy(String id, Keymap keymap) {
    while (keymap != null) {
      if (((KeymapImpl)keymap).hasOwnActionId(id)) return true;
      keymap = keymap.getParent();
    }
    return false;
  }

  /** Adds every non-empty ActionGroup child of {@code actionGroup} as a subgroup, ignoring popup flags. */
  private static void fillGroupIgnorePopupFlag(ActionGroup actionGroup, Group group, Condition<AnAction> filtered) {
    AnAction[] mainMenuTopGroups = actionGroup instanceof DefaultActionGroup
                                   ? ((DefaultActionGroup)actionGroup).getChildActionsOrStubs()
                                   : actionGroup.getChildren(null);
    for (AnAction action : mainMenuTopGroups) {
      if (!(action instanceof ActionGroup)) continue;
      Group subGroup = createGroup((ActionGroup)action, false, filtered);
      if (subGroup.getSize() > 0) {
        group.addGroup(subGroup);
      }
    }
  }

  public static Group createGroup(ActionGroup actionGroup, boolean ignore, Condition<AnAction> filtered) {
    return createGroup(actionGroup, getName(actionGroup), null, null, ignore, filtered);
  }

  /**
   * Best-effort display name for an action: presentation text, then action id,
   * then (for unnamed DefaultActionGroups) a name derived from the first
   * non-separator child, finally the class name.
   */
  private static String getName(AnAction action) {
    final String name = action.getTemplatePresentation().getText();
    if (name != null && !name.isEmpty()) {
      return name;
    }
    else {
      final String id = action instanceof ActionStub ? ((ActionStub)action).getId() : ActionManager.getInstance().getId(action);
      if (id != null) {
        return id;
      }
      if (action instanceof DefaultActionGroup) {
        final DefaultActionGroup group = (DefaultActionGroup)action;
        if (group.getChildrenCount() == 0) return "Empty group";
        final AnAction[] children = group.getChildActionsOrStubs();
        for (AnAction child : children) {
          if (!(child instanceof Separator)) {
            return "group." + getName(child);
          }
        }
        return "Empty unnamed group";
      }
      return action.getClass().getName();
    }
  }

  public static Group createGroup(ActionGroup actionGroup, String groupName, Icon icon, Icon openIcon, boolean ignore,
                                  Condition<AnAction> filtered) {
    return createGroup(actionGroup, groupName, icon, openIcon, ignore, filtered, true);
  }

  /**
   * Recursively converts an {@link ActionGroup} into a UI {@link Group}.
   *
   * @param ignore              when false, non-popup subgroups are flattened into the parent
   * @param filtered            nullable per-action predicate; empty subgroups that pass it are still added
   * @param normalizeSeparators when true, collapses redundant separators at the end
   */
  public static Group createGroup(ActionGroup actionGroup, String groupName, Icon icon, Icon openIcon, boolean ignore,
                                  Condition<AnAction> filtered, boolean normalizeSeparators) {
    ActionManager actionManager = ActionManager.getInstance();
    Group group = new Group(groupName, actionManager.getId(actionGroup), icon);
    AnAction[] children = actionGroup instanceof DefaultActionGroup
                          ? ((DefaultActionGroup)actionGroup).getChildActionsOrStubs()
                          : actionGroup.getChildren(null);

    for (AnAction action : children) {
      if (action == null) {
        LOG.error(groupName + " contains null actions");
        continue;
      }
      if (action instanceof ActionGroup) {
        Group subGroup = createGroup((ActionGroup)action, getName(action), null, null, ignore, filtered, normalizeSeparators);
        if (subGroup.getSize() > 0) {
          if (!ignore && !((ActionGroup)action).isPopup()) {
            group.addAll(subGroup);
          }
          else {
            group.addGroup(subGroup);
          }
        }
        else if (filtered == null || filtered.value(action)) {
          group.addGroup(subGroup);
        }
      }
      else if (action instanceof Separator) {
        group.addSeparator();
      }
      else {
        String id = action instanceof ActionStub ? ((ActionStub)action).getId() : actionManager.getId(action);
        if (id != null) {
          if (id.startsWith(TOOL_ACTION_PREFIX)) continue;
          if (filtered == null || filtered.value(action)) {
            group.addActionId(id);
          }
        }
      }
    }
    if (normalizeSeparators) group.normalizeSeparators();
    return group;
  }

  /** Builds the "Editor actions" group: all editor action ids, flattened and sorted. */
  private static Group createEditorActionsGroup(Condition<AnAction> filtered) {
    ActionManager actionManager = ActionManager.getInstance();
    DefaultActionGroup editorGroup = (DefaultActionGroup)actionManager.getActionOrStub(IdeActions.GROUP_EDITOR);
    ArrayList<String> ids = new ArrayList<String>();
    addEditorActions(filtered, editorGroup, ids);
    Collections.sort(ids);
    Group group = new Group(KeyMapBundle.message("editor.actions.group.title"), IdeActions.GROUP_EDITOR, AllIcons.Nodes.KeymapEditor);
    for (String id : ids) {
      group.addActionId(id);
    }
    return group;
  }

  /** Resolves the 'use-shortcut-of' binding for {@code id}, checking the parent keymap too. */
  @Nullable
  private static String getActionBinding(final Keymap keymap, final String id) {
    if (keymap == null) return null;
    Keymap parent = keymap.getParent();
    String result = ((KeymapImpl)keymap).getActionBinding(id);
    if (result == null && parent != null) {
      result = ((KeymapImpl)parent).getActionBinding(id);
    }
    return result;
  }

  /** Recursively collects (filtered) editor action ids into {@code ids}. */
  private static void addEditorActions(final Condition<AnAction> filtered,
                                       final DefaultActionGroup editorGroup,
                                       final ArrayList<String> ids) {
    AnAction[] editorActions = editorGroup.getChildActionsOrStubs();
    final ActionManager actionManager = ActionManager.getInstance();
    for (AnAction editorAction : editorActions) {
      if (editorAction instanceof DefaultActionGroup) {
        addEditorActions(filtered, (DefaultActionGroup) editorAction, ids);
      }
      else {
        String actionId = editorAction instanceof ActionStub ? ((ActionStub)editorAction).getId() : actionManager.getId(editorAction);
        if (actionId == null) continue;
        if (filtered == null || filtered.value(editorAction)) {
          ids.add(actionId);
        }
      }
    }
  }

  /** Delegates to a {@link KeymapExtension} to build its own group. */
  private static Group createExtensionGroup(Condition<AnAction> filtered, final Project project, KeymapExtension provider) {
    return (Group) provider.createGroup(filtered, project);
  }

  /** Builds the "Macros" group from all recorded action-macro ids, sorted. */
  private static Group createMacrosGroup(Condition<AnAction> filtered) {
    final ActionManagerEx actionManager = ActionManagerEx.getInstanceEx();
    String[] ids = actionManager.getActionIds(ActionMacro.MACRO_ACTION_PREFIX);
    Arrays.sort(ids);
    Group group = new Group(KeyMapBundle.message("macros.group.title"), null, null);
    for (String id : ids) {
      if (filtered == null || filtered.value(actionManager.getActionOrStub(id))) {
        group.addActionId(id);
      }
    }
    return group;
  }

  /**
   * Builds the "Quick Lists" group. A quick list is included if it passes
   * {@code filtered}, or its name matches the search {@code filter}, or no
   * filtering is active at all.
   */
  private static Group createQuickListsGroup(final Condition<AnAction> filtered,
                                             final String filter,
                                             final boolean forceFiltering,
                                             final QuickList[] quickLists) {
    Arrays.sort(quickLists, new Comparator<QuickList>() {
      public int compare(QuickList l1, QuickList l2) {
        return l1.getActionId().compareTo(l2.getActionId());
      }
    });

    Group group = new Group(KeyMapBundle.message("quick.lists.group.title"), null, null);
    for (QuickList quickList : quickLists) {
      if (filtered != null && filtered.value(ActionManagerEx.getInstanceEx().getAction(quickList.getActionId()))) {
        group.addQuickList(quickList);
      }
      else if (SearchUtil.isComponentHighlighted(quickList.getName(), filter, forceFiltering, null)) {
        group.addQuickList(quickList);
      }
      else if (filtered == null && StringUtil.isEmpty(filter)) {
        group.addQuickList(quickList);
      }
    }
    return group;
  }

  /**
   * Builds the "Other" group: keymap-bound and registered action ids not already
   * present in {@code addedActions}, excluding quick lists, bound actions,
   * non-performable action groups, and a few well-known generated groups.
   * Sorted case-insensitively by display text.
   */
  private static Group createOtherGroup(Condition<AnAction> filtered, Group addedActions, final Keymap keymap) {
    addedActions.initIds();
    ArrayList<String> result = new ArrayList<String>();

    if (keymap != null) {
      String[] actionIds = keymap.getActionIds();
      for (String id : actionIds) {
        if (id.startsWith(EDITOR_PREFIX)) {
          // "EditorXxx" ids are represented by their "$Xxx" counterparts; skip if one exists.
          AnAction action = ActionManager.getInstance().getActionOrStub("$" + id.substring(6));
          if (action != null) continue;
        }
        if (!id.startsWith(QuickList.QUICK_LIST_PREFIX) && !addedActions.containsId(id)) {
          result.add(id);
        }
      }
    }

    // add all registered actions
    final ActionManagerEx actionManager = ActionManagerEx.getInstanceEx();
    final KeymapManagerEx keymapManager = KeymapManagerEx.getInstanceEx();
    String[] registeredActionIds = actionManager.getActionIds("");
    for (String id : registeredActionIds) {
      final AnAction actionOrStub = actionManager.getActionOrStub(id);
      if (actionOrStub instanceof ActionGroup && !((ActionGroup)actionOrStub).canBePerformed(DataManager.getInstance().getDataContext())) {
        continue;
      }
      if (id.startsWith(QuickList.QUICK_LIST_PREFIX) || addedActions.containsId(id) || result.contains(id)) {
        continue;
      }
      if (keymapManager.getBoundActions().contains(id)) continue;
      result.add(id);
    }

    filterOtherActionsGroup(result);

    ContainerUtil.quickSort(result, new Comparator<String>() {
      public int compare(String id1, String id2) {
        return getTextToCompare(id1).compareToIgnoreCase(getTextToCompare(id2));
      }
    });

    Group group = new Group(KeyMapBundle.message("other.group.title"), AllIcons.Nodes.KeymapOther);
    for (String id : result) {
      if (filtered == null || filtered.value(actionManager.getActionOrStub(id))) group.addActionId(id);
    }
    return group;
  }

  /** Presentation text for an action id, falling back to the id itself. */
  private static String getTextToCompare(String id) {
    AnAction action = ActionManager.getInstance().getActionOrStub(id);
    if (action == null) {
      return id;
    }
    String text = action.getTemplatePresentation().getText();
    return text != null ? text : id;
  }

  /** Removes ids belonging to generated groups (Generate/New/Change Scheme) from "Other". */
  private static void filterOtherActionsGroup(ArrayList<String> actions) {
    filterOutGroup(actions, IdeActions.GROUP_GENERATE);
    filterOutGroup(actions, IdeActions.GROUP_NEW);
    filterOutGroup(actions, IdeActions.GROUP_CHANGE_SCHEME);
  }

  /** Recursively removes all descendants of {@code groupId} from {@code actions}. */
  private static void filterOutGroup(ArrayList<String> actions, String groupId) {
    if (groupId == null) {
      throw new IllegalArgumentException();
    }
    ActionManager actionManager = ActionManager.getInstance();
    AnAction action = actionManager.getActionOrStub(groupId);
    if (action instanceof DefaultActionGroup) {
      DefaultActionGroup group = (DefaultActionGroup)action;
      AnAction[] children = group.getChildActionsOrStubs();
      for (AnAction child : children) {
        String childId = child instanceof ActionStub ? ((ActionStub)child).getId() : actionManager.getId(child);
        if (childId == null) {
          // SCR 35149
          continue;
        }
        if (child instanceof DefaultActionGroup) {
          filterOutGroup(actions, childId);
        }
        else {
          actions.remove(childId);
        }
      }
    }
  }

  /** Converts a {@link Group} tree into the swing tree-node structure used by the UI. */
  public static DefaultMutableTreeNode createNode(Group group) {
    DefaultMutableTreeNode node = new DefaultMutableTreeNode(group);
    for (Object child : group.getChildren()) {
      if (child instanceof Group) {
        DefaultMutableTreeNode childNode = createNode((Group)child);
        node.add(childNode);
      }
      else {
        node.add(new DefaultMutableTreeNode(child));
      }
    }
    return node;
  }

  public static Group createMainGroup(final Project project, final Keymap keymap, final QuickList[] quickLists) {
    return createMainGroup(project, keymap, quickLists, null, false, null);
  }

  /**
   * Assembles the complete "All Actions" tree. When a search {@code filter} or an
   * action predicate is active, empty top-level groups whose names do not match
   * the filter are removed from the result.
   */
  public static Group createMainGroup(final Project project,
                                      final Keymap keymap,
                                      final QuickList[] quickLists,
                                      final String filter,
                                      final boolean forceFiltering,
                                      final Condition<AnAction> filtered) {
    final Condition<AnAction> wrappedFilter = wrapFilter(filtered, keymap, ActionManager.getInstance());
    Group mainGroup = new Group(KeyMapBundle.message("all.actions.group.title"), null, null);
    mainGroup.addGroup(createEditorActionsGroup(wrappedFilter));
    mainGroup.addGroup(createMainMenuGroup(wrappedFilter));
    for (KeymapExtension extension : Extensions.getExtensions(KeymapExtension.EXTENSION_POINT_NAME)) {
      final Group group = createExtensionGroup(wrappedFilter, project, extension);
      if (group != null) {
        mainGroup.addGroup(group);
      }
    }
    mainGroup.addGroup(createMacrosGroup(wrappedFilter));
    mainGroup.addGroup(createQuickListsGroup(wrappedFilter, filter, forceFiltering, quickLists));
    mainGroup.addGroup(createPluginsActionsGroup(wrappedFilter));
    mainGroup.addGroup(createOtherGroup(wrappedFilter, mainGroup, keymap));
    if (!StringUtil.isEmpty(filter) || filtered != null) {
      // Prune empty groups that do not themselves match the search filter.
      final ArrayList list = mainGroup.getChildren();
      for (Iterator i = list.iterator(); i.hasNext();) {
        final Object o = i.next();
        if (o instanceof Group) {
          final Group group = (Group)o;
          if (group.getSize() == 0) {
            if (!SearchUtil.isComponentHighlighted(group.getName(), filter, forceFiltering, null)) {
              i.remove();
            }
          }
        }
      }
    }
    return mainGroup;
  }

  /**
   * Predicate matching actions whose text, description or id contains / is
   * highlighted by {@code filter} (case-insensitive). A null filter matches all.
   */
  public static Condition<AnAction> isActionFiltered(final String filter, final boolean force) {
    return new Condition<AnAction>() {
      public boolean value(final AnAction action) {
        if (filter == null) return true;
        if (action == null) return false;
        final String insensitiveFilter = filter.toLowerCase();
        for (String text : new String[]{action.getTemplatePresentation().getText(),
          action.getTemplatePresentation().getDescription(),
          action instanceof ActionStub ? ((ActionStub)action).getId() : ActionManager.getInstance().getId(action)}) {
          if (text != null) {
            final String lowerText = text.toLowerCase();
            if (SearchUtil.isComponentHighlighted(lowerText, insensitiveFilter, force, null)) {
              return true;
            }
            else if (lowerText.contains(insensitiveFilter)) {
              return true;
            }
          }
        }
        return false;
      }
    };
  }

  /**
   * Predicate matching actions whose keymap shortcuts include {@code keyboardShortcut}.
   * A null shortcut matches all.
   */
  public static Condition<AnAction> isActionFiltered(final ActionManager actionManager,
                                                     final Keymap keymap,
                                                     final KeyboardShortcut keyboardShortcut) {
    return new Condition<AnAction>() {
      public boolean value(final AnAction action) {
        if (keyboardShortcut == null) return true;
        if (action == null) return false;
        final Shortcut[] actionShortcuts =
          keymap.getShortcuts(action instanceof ActionStub ? ((ActionStub)action).getId() : actionManager.getId(action));
        for (Shortcut shortcut : actionShortcuts) {
          if (shortcut instanceof KeyboardShortcut) {
            final KeyboardShortcut keyboardActionShortcut = (KeyboardShortcut)shortcut;
            if (Comparing.equal(keyboardActionShortcut, keyboardShortcut)) {
              return true;
            }
          }
        }
        return false;
      }
    };
  }

  /**
   * Combined predicate: text filter wins when non-empty, otherwise shortcut filter,
   * otherwise null (no filtering).
   */
  public static Condition<AnAction> isActionFiltered(final ActionManager actionManager,
                                                     final Keymap keymap,
                                                     final KeyboardShortcut shortcut,
                                                     final String filter,
                                                     final boolean force) {
    return filter != null && filter.length() > 0 ? isActionFiltered(filter, force) :
           shortcut != null ? isActionFiltered(actionManager, keymap, shortcut) : null;
  }
}
package com.java_promise.tests.genericpromise;

import com.java_promise.common.RejectCallback;
import com.java_promise.genericpromise.*;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
import java.util.List;
import java.util.ArrayList;

/**
 * Tests {@code Promise.then(onFulfilled, onRejected)} against section 2.2
 * ("The then Method") of the Promises/A+ specification, adapted to this
 * library's typed Java API (ResolveCallback / RejectCallback).
 *
 * Created by Philip on 25/02/2016.
 */
public class Generic_Promise_Spec_2_2_Test {

    // Fresh, still-pending promise under test; recreated before every test.
    private Promise<Integer> testObject;

    @Before
    public void Init() {
        testObject = new Promise<>();
    }

    /*
     * promise.then(onFulfilled, onRejected)
     *
     * 2.2.1
     * Both onFulfilled and onRejected are optional arguments:
     */

    /*
     * 2.2.1.1 — a null onFulfilled must be ignored; the rejection handler
     * registered alongside it must still be accepted, and later callbacks
     * must be unaffected.
     */
    @Test
    public void if_onFulfilled_is_not_a_function_it_must_be_ignored() {
        final List<Integer> promiseResults = new ArrayList<>();

        // Register with a null resolve callback — must be silently ignored.
        testObject.then(null, new RejectCallback() {
            @Override
            public void onRejected(Exception ex) {
            }
        });

        // A real resolve callback registered afterwards must still fire.
        testObject.then(new ResolveCallback<Integer>() {
            @Override
            public void onResolved(Integer result) {
                promiseResults.add(result);
            }
        });

        testObject.resolve(1);

        assertEquals(1, promiseResults.size());
    }

    /*
     * 2.2.1.2 — a null onRejected must be ignored; a real rejection handler
     * registered afterwards must still fire.
     */
    @Test
    public void if_onRejected_is_not_a_function_it_must_be_ignored() {
        final List<Exception> promiseExceptions = new ArrayList<>();

        // Register with a null reject callback — must be silently ignored.
        testObject.then(new ResolveCallback<Integer>() {
            @Override
            public void onResolved(Integer result) {
            }
        }, null);

        testObject.handle(new RejectCallback() {
            @Override
            public void onRejected(Exception ex) {
                promiseExceptions.add(ex);
            }
        });

        testObject.reject(new Exception("Test"));

        assertEquals(1, promiseExceptions.size());
    }

    /*
     * 2.2.2
     *
     * If onFulfilled is a function:
     */

    /*
     * 2.2.2.1 — onFulfilled receives the promise's value as its first argument.
     */
    @Test
    public void onFulfilled_must_be_called_after_promise_is_fulfilled_with_promises_value_as_its_first_argument() {
        final List<Integer> promiseResults = new ArrayList<>();

        testObject.then(new ResolveCallback<Integer>() {
            @Override
            public void onResolved(Integer result) {
                promiseResults.add(result);
            }
        });

        testObject.resolve(1);

        assertEquals(1, promiseResults.size());
        assertEquals(1, promiseResults.get(0).intValue());
    }

    /*
     * 2.2.2.2 — onFulfilled must not run until the promise is resolved.
     */
    @Test
    public void onFulfilled_must_not_be_called_before_promise_is_fulfilled() {
        final List<Integer> promiseResults = new ArrayList<>();

        testObject.then(new ResolveCallback<Integer>() {
            @Override
            public void onResolved(Integer result) {
                promiseResults.add(result);
            }
        });

        // Nothing observed before resolve().
        assertEquals(0, promiseResults.size());

        testObject.resolve(1);

        assertEquals(1, promiseResults.size());
    }

    /*
     * 2.2.2.3 — a second resolve() must not re-invoke onFulfilled.
     */
    @Test
    public void onFulfilled_must_not_be_called_more_than_once() {
        final List<Integer> promiseResults = new ArrayList<>();

        testObject.then(new ResolveCallback<Integer>() {
            @Override
            public void onResolved(Integer result) {
                promiseResults.add(result);
            }
        });

        testObject.resolve(1);
        testObject.resolve(2);

        assertEquals(1, promiseResults.size());
    }

    /*
     * 2.2.3
     *
     * If onRejected is a function:
     */

    /*
     * 2.2.3.1 — onRejected receives the rejection reason as its first argument.
     */
    @Test
    public void onRejected_must_be_called_after_promise_is_rejected_with_promises_reason_as_its_first_argument() {
        final List<Exception> promiseRejections = new ArrayList<>();
        Exception testException = new Exception("Test Exception");

        testObject.handle(new RejectCallback() {
            @Override
            public void onRejected(Exception ex) {
                promiseRejections.add(ex);
            }
        });

        testObject.reject(testException);

        assertEquals(1, promiseRejections.size());
        // The very same exception instance must be delivered.
        assertEquals(testException, promiseRejections.get(0));
    }

    /*
     * 2.2.3.2 — onRejected must not run until the promise is rejected.
     */
    @Test
    public void onRejected_must_not_be_called_before_promise_is_rejected() {
        final List<Exception> promiseRejections = new ArrayList<>();

        testObject.handle(new RejectCallback() {
            @Override
            public void onRejected(Exception ex) {
                promiseRejections.add(ex);
            }
        });

        // Nothing observed before reject().
        assertEquals(0, promiseRejections.size());

        testObject.reject(new Exception("Test Exception"));

        assertEquals(1, promiseRejections.size());
    }

    /*
     * 2.2.3.3 — a second reject() must not re-invoke onRejected.
     */
    @Test
    public void onRejected_must_not_be_called_more_than_once() {
        final List<Exception> promiseRejections = new ArrayList<>();

        testObject.handle(new RejectCallback() {
            @Override
            public void onRejected(Exception ex) {
                promiseRejections.add(ex);
            }
        });

        testObject.reject(new Exception("Test Exception 1"));
        testObject.reject(new Exception("Test Exception 2"));

        assertEquals(1, promiseRejections.size());
    }

    // 2.2.4
    // onFulfilled or onRejected must not be called until the execution context stack contains only platform code. [3.1].

    // 2.2.5
    // onFulfilled and onRejected must be called as functions (i.e. with no this value). [3.2]

    /*
     * 2.2.6
     *
     * Then may be called multiple times on the same promise:
     */

    /*
     * 2.2.6.1
     *
     * If/when promise is fulfilled, all respective onFulfilled callbacks run
     * in the order their then() calls were made.
     */
    @Test
    public void all_respective_onFulfilled_callbacks_must_execute_in_the_order_of_their_originating_calls_to_then() {
        final List<Integer> testOrder = new ArrayList<>();

        testObject.then(new ResolveCallback<Integer>() {
            @Override
            public void onResolved(Integer result) {
                testOrder.add(1);
            }
        });

        testObject.then(new ResolveCallback<Integer>() {
            @Override
            public void onResolved(Integer result) {
                testOrder.add(2);
            }
        }, null);

        testObject.then(new ResolveCallback<Integer>() {
            @Override
            public void onResolved(Integer result) {
                testOrder.add(3);
            }
        });

        testObject.resolve(1);

        assertEquals(3, testOrder.size());
        assertEquals(1, testOrder.get(0).intValue());
        assertEquals(2, testOrder.get(1).intValue());
        assertEquals(3, testOrder.get(2).intValue());
    }

    /*
     * 2.2.6.2
     *
     * If/when promise is rejected, all respective onRejected callbacks run
     * in the order their originating then()/handle() calls were made.
     */
    @Test
    public void all_respective_onRejected_callbacks_must_execute_in_the_order_of_their_originating_calls_to_then() {
        final List<Integer> testOrder = new ArrayList<>();

        testObject.handle(new RejectCallback() {
            @Override
            public void onRejected(Exception ex) {
                testOrder.add(1);
            }
        });

        testObject.then(null, new RejectCallback() {
            @Override
            public void onRejected(Exception ex) {
                testOrder.add(2);
            }
        });

        testObject.handle(new RejectCallback() {
            @Override
            public void onRejected(Exception ex) {
                testOrder.add(3);
            }
        });

        testObject.reject(new Exception("Test Exception"));

        assertEquals(1, testOrder.get(0).intValue());
        assertEquals(2, testOrder.get(1).intValue());
        assertEquals(3, testOrder.get(2).intValue());
    }

    /*
     * 2.2.7
     *
     * then must return a promise [3.3].
     * promise2 = promise1.then(onFulfilled, onRejected);
     */

    /*
     * 2.2.7.1
     * If either onFulfilled or onRejected returns a value x, run the Promise Resolution Procedure
     * [[resolve]](promise2, x).
     */

    /*
     * 2.2.7.1.1 — resolving promise1 propagates its value through the promise
     * returned by then() to promise2's callbacks.
     */
    @Test
    public void onFulfilled_returns_a_value_x_run_the_Promise_Resolution_Procedure() {
        final List<Integer> promiseResults = new ArrayList<>();

        Promise<Integer> promise2 = testObject.then(new ResolveCallback<Integer>() {
            @Override
            public void onResolved(Integer result) {
                // No action required for test.
            }
        });

        promise2.then(new ResolveCallback<Integer>() {
            @Override
            public void onResolved(Integer result) {
                promiseResults.add(result);
            }
        });

        testObject.resolve(5);

        assertEquals(1, promiseResults.size());
        assertEquals(5, promiseResults.get(0).intValue());
    }

    /*
     * 2.2.7.1.2 — rejecting promise1 propagates the same reason through the
     * promise returned by then() to promise2's rejection handlers.
     */
    @Test
    public void onRejected_returns_a_value_x_run_the_Promise_Resolution_Procedure() {
        final List<Exception> promiseRejections = new ArrayList<>();
        Exception testException = new Exception("Test Exception");

        Promise<Integer> promise2 = testObject.then(new ResolveCallback<Integer>() {
            @Override
            public void onResolved(Integer result) {
                // No action required for test.
            }
        });

        promise2.handle(new RejectCallback() {
            @Override
            public void onRejected(Exception ex) {
                promiseRejections.add(ex);
            }
        });

        testObject.reject(testException);

        assertEquals(1, promiseRejections.size());
        assertEquals(testException, promiseRejections.get(0));
    }

    // 2.2.7.2 If either onFulfilled or onRejected throws an exception e, promise2 must be rejected with e as the reason.

    // 2.2.7.3 If onFulfilled is not a function and promise1 is fulfilled, promise2 must be fulfilled with the same value as promise1.

    // 2.2.7.4 If onRejected is not a function and promise1 is rejected, promise2 must be rejected with the same reason as promise1.
}
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

import alluxio.Configuration.Source;
import alluxio.PropertyKey.Template;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.hamcrest.CoreMatchers;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;

import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Unit tests for the {@link Configuration} class.
 */
public class ConfigurationTest {
  @Rule
  public final ExpectedException mThrown = ExpectedException.none();

  @Rule
  public final TemporaryFolder mFolder = new TemporaryFolder();

  /** Restores global configuration state so tests do not leak into each other. */
  @After
  public void after() {
    ConfigurationTestUtils.resetConfiguration();
  }

  /**
   * Writes {@code props} to {@code propsFile}, closing the stream deterministically.
   * (Previously each call site leaked an unclosed FileOutputStream.)
   */
  private static void storeProperties(Properties props, File propsFile, String comment)
      throws Exception {
    try (FileOutputStream out = new FileOutputStream(propsFile)) {
      props.store(out, comment);
    }
  }

  @Test
  public void defaultLoggerCorrectlyLoaded() throws Exception {
    // Avoid interference from system properties. site-properties will not be loaded during tests
    try (Closeable p =
        new SystemPropertyRule(PropertyKey.LOGGER_TYPE.toString(), null).toResource()) {
      String loggerType = Configuration.get(PropertyKey.LOGGER_TYPE);
      assertEquals("Console", loggerType);
    }
  }

  @Test
  public void alias() {
    // The deprecated alias must resolve to the canonical key.
    Configuration.merge(ImmutableMap.of("alluxio.master.worker.timeout.ms", "100"),
        Source.SYSTEM_PROPERTY);
    assertEquals(100, Configuration.getMs(PropertyKey.MASTER_WORKER_TIMEOUT_MS));
  }

  @Test
  public void getInt() {
    Configuration.set(PropertyKey.WEB_THREADS, "1");
    assertEquals(1, Configuration.getInt(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getMalformedIntThrowsException() {
    Configuration.set(PropertyKey.WEB_THREADS, "9448367483758473854738"); // bigger than MAX_INT
    mThrown.expect(RuntimeException.class);
    Configuration.getInt(PropertyKey.WEB_THREADS);
  }

  @Test
  public void getLong() {
    Configuration.set(PropertyKey.WEB_THREADS, "12345678910"); // bigger than MAX_INT
    assertEquals(12345678910L, Configuration.getLong(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getMalformedLongThrowsException() {
    Configuration.set(PropertyKey.WEB_THREADS,
        "999999999999999999999999999999999999"); // bigger than MAX_LONG
    mThrown.expect(RuntimeException.class);
    Configuration.getLong(PropertyKey.WEB_THREADS);
  }

  @Test
  public void getDouble() {
    Configuration.set(PropertyKey.WEB_THREADS, "1.1");
    assertEquals(1.1, Configuration.getDouble(PropertyKey.WEB_THREADS),
        /*tolerance=*/0.0001);
  }

  @Test
  public void getMalformedDoubleThrowsException() {
    Configuration.set(PropertyKey.WEB_THREADS, "1a");
    mThrown.expect(RuntimeException.class);
    Configuration.getDouble(PropertyKey.WEB_THREADS);
  }

  @Test
  public void getFloat() {
    Configuration.set(PropertyKey.WEB_THREADS, "1.1");
    assertEquals(1.1, Configuration.getFloat(PropertyKey.WEB_THREADS), /*tolerance=*/0.0001);
  }

  @Test
  public void getMalformedFloatThrowsException() {
    Configuration.set(PropertyKey.WEB_THREADS, "1a");
    mThrown.expect(RuntimeException.class);
    Configuration.getFloat(PropertyKey.WEB_THREADS);
  }

  @Test
  public void getTrueBoolean() {
    Configuration.set(PropertyKey.WEB_THREADS, "true");
    assertTrue(Configuration.getBoolean(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getTrueBooleanUppercase() {
    Configuration.set(PropertyKey.WEB_THREADS, "True");
    assertTrue(Configuration.getBoolean(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getTrueBooleanMixcase() {
    Configuration.set(PropertyKey.WEB_THREADS, "tRuE");
    assertTrue(Configuration.getBoolean(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getFalseBoolean() {
    Configuration.set(PropertyKey.WEB_THREADS, "false");
    assertFalse(Configuration.getBoolean(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getFalseBooleanUppercase() {
    Configuration.set(PropertyKey.WEB_THREADS, "False");
    assertFalse(Configuration.getBoolean(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getFalseBooleanMixcase() {
    Configuration.set(PropertyKey.WEB_THREADS, "fAlSe");
    assertFalse(Configuration.getBoolean(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getMalformedBooleanThrowsException() {
    Configuration.set(PropertyKey.WEB_THREADS, "x");
    mThrown.expect(RuntimeException.class);
    Configuration.getBoolean(PropertyKey.WEB_THREADS);
  }

  @Test
  public void getList() {
    Configuration.set(PropertyKey.WEB_THREADS, "a,b,c");
    assertEquals(
        Lists.newArrayList("a", "b", "c"), Configuration.getList(PropertyKey.WEB_THREADS, ","));
  }

  private enum TestEnum {
    VALUE
  }

  @Test
  public void getEnum() {
    Configuration.set(PropertyKey.WEB_THREADS, "VALUE");
    assertEquals(
        TestEnum.VALUE, Configuration.getEnum(PropertyKey.WEB_THREADS, TestEnum.class));
  }

  @Test
  public void getMalformedEnum() {
    Configuration.set(PropertyKey.WEB_THREADS, "not_a_value");
    mThrown.expect(RuntimeException.class);
    Configuration.getEnum(PropertyKey.WEB_THREADS, TestEnum.class);
  }

  @Test
  public void getBytes() {
    Configuration.set(PropertyKey.WEB_THREADS, "10b");
    assertEquals(10, Configuration.getBytes(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getBytesKb() {
    Configuration.set(PropertyKey.WEB_THREADS, "10kb");
    assertEquals(10 * Constants.KB, Configuration.getBytes(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getBytesMb() {
    Configuration.set(PropertyKey.WEB_THREADS, "10mb");
    assertEquals(10 * Constants.MB, Configuration.getBytes(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getBytesGb() {
    Configuration.set(PropertyKey.WEB_THREADS, "10gb");
    assertEquals(10 * (long) Constants.GB, Configuration.getBytes(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getBytesGbUppercase() {
    Configuration.set(PropertyKey.WEB_THREADS, "10GB");
    assertEquals(10 * (long) Constants.GB, Configuration.getBytes(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getBytesTb() {
    Configuration.set(PropertyKey.WEB_THREADS, "10tb");
    assertEquals(10 * Constants.TB, Configuration.getBytes(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getBytespT() {
    Configuration.set(PropertyKey.WEB_THREADS, "10pb");
    assertEquals(10 * Constants.PB, Configuration.getBytes(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getMalformedBytesThrowsException() {
    Configuration.set(PropertyKey.WEB_THREADS, "100a");
    mThrown.expect(RuntimeException.class);
    // Fixed copy-paste bug: this test previously called getBoolean(), so the
    // malformed-bytes parsing path was never actually exercised.
    Configuration.getBytes(PropertyKey.WEB_THREADS);
  }

  @Test
  public void getMs() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "100");
    assertEquals(100, Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsMS() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "100ms");
    assertEquals(100, Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsMillisecond() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "100millisecond");
    assertEquals(100, Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsS() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10s");
    assertEquals(10 * Constants.SECOND,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsSUppercase() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10S");
    assertEquals(10 * Constants.SECOND,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsSEC() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10sec");
    assertEquals(10 * Constants.SECOND,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsSecond() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10second");
    assertEquals(10 * Constants.SECOND,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsM() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10m");
    assertEquals(10 * Constants.MINUTE,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsMIN() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10min");
    assertEquals(10 * Constants.MINUTE,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsMinute() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10minute");
    assertEquals(10 * Constants.MINUTE,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsH() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10h");
    assertEquals(10 * Constants.HOUR,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsHR() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10hr");
    assertEquals(10 * Constants.HOUR,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsHour() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10hour");
    assertEquals(10 * Constants.HOUR,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsD() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10d");
    assertEquals(10 * Constants.DAY,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getMsDay() {
    Configuration.set(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS, "10day");
    assertEquals(10 * Constants.DAY,
        Configuration.getMs(PropertyKey.PROXY_STREAM_CACHE_TIMEOUT_MS));
  }

  @Test
  public void getNegativeSyncInterval() {
    Configuration.set(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL, "-1");
    assertEquals(-1, Configuration.getMs(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL));
  }

  @Test
  public void getNegativeSyncIntervalS() {
    Configuration.set(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL, "-1s");
    assertTrue(Configuration.getMs(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL) < 0);
  }

  @Test
  public void getZeroSyncInterval() {
    Configuration.set(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL, "0");
    assertEquals(0, Configuration.getMs(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL));
  }

  @Test
  public void getZeroSyncIntervalS() {
    Configuration.set(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL, "0s");
    assertEquals(0, Configuration.getMs(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL));
  }

  @Test
  public void getPositiveSyncInterval() {
    Configuration.set(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL, "10");
    assertEquals(10, Configuration.getMs(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL));
  }

  @Test
  public void getPosiviteSyncIntervalS() {
    Configuration.set(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL, "10s");
    assertEquals(10 * Constants.SECOND_MS,
        Configuration.getMs(PropertyKey.USER_FILE_METADATA_SYNC_INTERVAL));
  }

  @Test
  public void getNestedProperties() {
    Configuration.set(
        PropertyKey.Template.MASTER_MOUNT_TABLE_OPTION_PROPERTY.format("foo",
            PropertyKey.WEB_THREADS.toString()), "val1");
    Configuration.set(
        PropertyKey.Template.MASTER_MOUNT_TABLE_OPTION_PROPERTY.format("foo",
            "alluxio.unknown.property"), "val2");
    Map<String, String> expected = new HashMap<>();
    expected.put(PropertyKey.WEB_THREADS.toString(), "val1");
    expected.put("alluxio.unknown.property", "val2");
    assertThat(Configuration.getNestedProperties(
        PropertyKey.Template.MASTER_MOUNT_TABLE_OPTION.format("foo")),
        CoreMatchers.is(expected));
  }

  @Test
  public void getNestedPropertiesEmptyTrailingProperty() {
    Configuration.set(PropertyKey.Template.MASTER_MOUNT_TABLE_OPTION_PROPERTY
        .format("foo", ""), "val");
    Map<String, String> empty = new HashMap<>();
    assertThat(Configuration.getNestedProperties(
        PropertyKey.Template.MASTER_MOUNT_TABLE_OPTION.format("foo")),
        CoreMatchers.is(empty));
  }

  @Test
  public void getNestedPropertiesWrongPrefix() {
    Configuration.set(
        PropertyKey.Template.MASTER_MOUNT_TABLE_OPTION_PROPERTY.format("foo",
            PropertyKey.WEB_THREADS.toString()),
        "val");
    Map<String, String> empty = new HashMap<>();
    assertThat(Configuration.getNestedProperties(PropertyKey.HOME),
        CoreMatchers.is(empty));
    assertThat(Configuration.getNestedProperties(
        PropertyKey.Template.MASTER_MOUNT_TABLE_OPTION.format("bar")),
        CoreMatchers.is(empty));
  }

  @Test
  public void getClassTest() { // The name getClass is already reserved.
    Configuration.set(PropertyKey.WEB_THREADS, "java.lang.String");
    assertEquals(String.class, Configuration.getClass(PropertyKey.WEB_THREADS));
  }

  @Test
  public void getMalformedClassThrowsException() {
    Configuration.set(PropertyKey.WEB_THREADS, "java.util.not.a.class");
    mThrown.expect(RuntimeException.class);
    Configuration.getClass(PropertyKey.WEB_THREADS);
  }

  @Test
  public void variableSubstitution() {
    Configuration.merge(ImmutableMap.of(
        PropertyKey.WORK_DIR, "value",
        PropertyKey.LOGS_DIR, "${alluxio.work.dir}/logs"),
        Source.SYSTEM_PROPERTY);
    String substitution = Configuration.get(PropertyKey.LOGS_DIR);
    assertEquals("value/logs", substitution);
  }

  @Test
  public void twoVariableSubstitution() {
    Configuration.merge(ImmutableMap.of(
        PropertyKey.MASTER_HOSTNAME, "value1",
        PropertyKey.MASTER_RPC_PORT, "value2",
        PropertyKey.MASTER_JOURNAL_FOLDER, "${alluxio.master.hostname}-${alluxio.master.port}"),
        Source.SYSTEM_PROPERTY);
    String substitution = Configuration.get(PropertyKey.MASTER_JOURNAL_FOLDER);
    assertEquals("value1-value2", substitution);
  }

  @Test
  public void recursiveVariableSubstitution() {
    Configuration.merge(ImmutableMap.of(
        PropertyKey.WORK_DIR, "value",
        PropertyKey.LOGS_DIR, "${alluxio.work.dir}/logs",
        PropertyKey.SITE_CONF_DIR, "${alluxio.logs.dir}/conf"),
        Source.SYSTEM_PROPERTY);
    String substitution2 = Configuration.get(PropertyKey.SITE_CONF_DIR);
    assertEquals("value/logs/conf", substitution2);
  }

  @Test
  public void systemVariableSubstitution() throws Exception {
    try (Closeable p = new SystemPropertyRule(PropertyKey.MASTER_HOSTNAME.toString(),
        "new_master").toResource()) {
      Configuration.init();
      assertEquals("new_master", Configuration.get(PropertyKey.MASTER_HOSTNAME));
    }
  }

  @Test
  public void systemPropertySubstitution() throws Exception {
    try (Closeable p = new SystemPropertyRule("user.home", "/home").toResource()) {
      Configuration.init();
      Configuration.merge(ImmutableMap.of(PropertyKey.WORK_DIR, "${user.home}/work"),
          Source.SITE_PROPERTY);
      assertEquals("/home/work", Configuration.get(PropertyKey.WORK_DIR));
    }
  }

  @Test
  public void circularSubstitution() throws Exception {
    // A key that resolves to itself must be detected, not loop forever.
    Configuration.merge(
        ImmutableMap.of(PropertyKey.HOME, String.format("${%s}", PropertyKey.HOME.toString())),
        Source.SITE_PROPERTY);
    mThrown.expect(RuntimeException.class);
    mThrown.expectMessage(PropertyKey.HOME.toString());
    Configuration.get(PropertyKey.HOME);
  }

  @Test
  public void userFileBufferBytesOverFlowException() {
    // Integer.MAX_VALUE + 1 deliberately overflows to a negative int; validation must reject it.
    Configuration.set(PropertyKey.USER_FILE_BUFFER_BYTES,
        String.valueOf(Integer.MAX_VALUE + 1) + "B");
    mThrown.expect(IllegalStateException.class);
    Configuration.validate();
  }

  @Test
  public void setUserFileBufferBytesMaxInteger() {
    Configuration.set(PropertyKey.USER_FILE_BUFFER_BYTES,
        String.valueOf(Integer.MAX_VALUE) + "B");
    assertEquals(Integer.MAX_VALUE,
        (int) Configuration.getBytes(PropertyKey.USER_FILE_BUFFER_BYTES));
  }

  @Test
  public void setUserFileBufferBytes1GB() {
    Configuration.set(PropertyKey.USER_FILE_BUFFER_BYTES, "1GB");
    assertEquals(1073741824,
        (int) Configuration.getBytes(PropertyKey.USER_FILE_BUFFER_BYTES));
  }

  @Test
  public void unset() {
    assertFalse(Configuration.containsKey(PropertyKey.SECURITY_LOGIN_USERNAME));
    Configuration.set(PropertyKey.SECURITY_LOGIN_USERNAME, "test");
    assertTrue(Configuration.containsKey(PropertyKey.SECURITY_LOGIN_USERNAME));
    Configuration.unset(PropertyKey.SECURITY_LOGIN_USERNAME);
    assertFalse(Configuration.containsKey(PropertyKey.SECURITY_LOGIN_USERNAME));
  }

  @Test
  public void validateTieredLocality() throws Exception {
    // Pre-load the Configuration class so that the exception is thrown when we call init(), not
    // during class loading.
    Configuration.init();
    HashMap<String, String> sysProps = new HashMap<>();
    sysProps.put(Template.LOCALITY_TIER.format("unknownTier").toString(), "val");
    try (Closeable p = new SystemPropertyRule(sysProps).toResource()) {
      mThrown.expect(IllegalStateException.class);
      mThrown.expectMessage("Tier unknownTier is configured by alluxio.locality.unknownTier, but "
          + "does not exist in the tier list [node, rack] configured by alluxio.locality.order");
      Configuration.init();
    }
  }

  @Test
  public void propertyTestModeEqualsTrue() throws Exception {
    assertTrue(Configuration.getBoolean(PropertyKey.TEST_MODE));
  }

  @Test
  public void sitePropertiesNotLoadedInTest() throws Exception {
    Properties props = new Properties();
    props.setProperty(PropertyKey.LOGGER_TYPE.toString(), "TEST_LOGGER");
    File propsFile = mFolder.newFile(Constants.SITE_PROPERTIES);
    storeProperties(props, propsFile, "ignored header");

    // Avoid interference from system properties. Reset SITE_CONF_DIR to include the temp
    // site-properties file
    HashMap<String, String> sysProps = new HashMap<>();
    sysProps.put(PropertyKey.LOGGER_TYPE.toString(), null);
    sysProps.put(PropertyKey.SITE_CONF_DIR.toString(), mFolder.getRoot().getAbsolutePath());
    try (Closeable p = new SystemPropertyRule(sysProps).toResource()) {
      Configuration.init();
      assertEquals(PropertyKey.LOGGER_TYPE.getDefaultValue(),
          Configuration.get(PropertyKey.LOGGER_TYPE));
    }
  }

  @Test
  public void sitePropertiesLoadedNotInTest() throws Exception {
    Properties props = new Properties();
    props.setProperty(PropertyKey.LOGGER_TYPE.toString(), "TEST_LOGGER");
    File propsFile = mFolder.newFile(Constants.SITE_PROPERTIES);
    storeProperties(props, propsFile, "ignored header");

    // Avoid interference from system properties. Reset SITE_CONF_DIR to include the temp
    // site-properties file
    HashMap<String, String> sysProps = new HashMap<>();
    sysProps.put(PropertyKey.LOGGER_TYPE.toString(), null);
    sysProps.put(PropertyKey.SITE_CONF_DIR.toString(), mFolder.getRoot().getAbsolutePath());
    sysProps.put(PropertyKey.TEST_MODE.toString(), "false");
    try (Closeable p = new SystemPropertyRule(sysProps).toResource()) {
      Configuration.init();
      assertEquals("TEST_LOGGER", Configuration.get(PropertyKey.LOGGER_TYPE));
    }
  }

  @Test
  public void setIgnoredPropertiesInSiteProperties() throws Exception {
    // Need to initialize the configuration instance first, other wise in after
    // ConfigurationTestUtils.resetConfiguration() will fail due to failed class init.
    Configuration.init();
    Properties siteProps = new Properties();
    siteProps.setProperty(PropertyKey.LOGS_DIR.toString(), "/tmp/logs1");
    File propsFile = mFolder.newFile(Constants.SITE_PROPERTIES);
    storeProperties(siteProps, propsFile, "tmp site properties file");

    Map<String, String> sysProps = new HashMap<>();
    sysProps.put(PropertyKey.SITE_CONF_DIR.toString(), mFolder.getRoot().getAbsolutePath());
    sysProps.put(PropertyKey.TEST_MODE.toString(), "false");
    try (Closeable p = new SystemPropertyRule(sysProps).toResource()) {
      mThrown.expect(IllegalStateException.class);
      Configuration.init();
    }
  }

  @Test
  public void setIgnoredPropertiesInSystemProperties() throws Exception {
    Properties siteProps = new Properties();
    File propsFile = mFolder.newFile(Constants.SITE_PROPERTIES);
    storeProperties(siteProps, propsFile, "tmp site properties file");

    Map<String, String> sysProps = new HashMap<>();
    sysProps.put(PropertyKey.LOGS_DIR.toString(), "/tmp/logs1");
    sysProps.put(PropertyKey.SITE_CONF_DIR.toString(), mFolder.getRoot().getAbsolutePath());
    sysProps.put(PropertyKey.TEST_MODE.toString(), "false");
    try (Closeable p = new SystemPropertyRule(sysProps).toResource()) {
      Configuration.init();
      assertEquals(
          Source.SYSTEM_PROPERTY, Configuration.getSource(PropertyKey.LOGS_DIR));
      assertEquals("/tmp/logs1", Configuration.get(PropertyKey.LOGS_DIR));
    }
  }

  @Test
  public void noWhitespaceTrailingInSiteProperties() throws Exception {
    Properties siteProps = new Properties();
    siteProps.setProperty(PropertyKey.MASTER_HOSTNAME.toString(), " host-1 ");
    siteProps.setProperty(PropertyKey.WEB_THREADS.toString(), "\t123\t");
    File propsFile = mFolder.newFile(Constants.SITE_PROPERTIES);
    storeProperties(siteProps, propsFile, "tmp site properties file");

    // Avoid interference from system properties. Reset SITE_CONF_DIR to include the temp
    // site-properties file
    HashMap<String, String> sysProps = new HashMap<>();
    sysProps.put(PropertyKey.SITE_CONF_DIR.toString(), mFolder.getRoot().getAbsolutePath());
    sysProps.put(PropertyKey.TEST_MODE.toString(), "false");
    try (Closeable p = new SystemPropertyRule(sysProps).toResource()) {
      Configuration.init();
      assertEquals("host-1", Configuration.get(PropertyKey.MASTER_HOSTNAME));
      assertEquals("123", Configuration.get(PropertyKey.WEB_THREADS));
    }
  }

  @Test
  public void source() throws Exception {
    Properties siteProps = new Properties();
    File propsFile = mFolder.newFile(Constants.SITE_PROPERTIES);
    siteProps.setProperty(PropertyKey.MASTER_HOSTNAME.toString(), "host-1");
    siteProps.setProperty(PropertyKey.MASTER_WEB_PORT.toString(), "1234");
    storeProperties(siteProps, propsFile, "tmp site properties file");

    Map<String, String> sysProps = new HashMap<>();
    sysProps.put(PropertyKey.LOGS_DIR.toString(), "/tmp/logs1");
    sysProps.put(PropertyKey.MASTER_WEB_PORT.toString(), "4321");
    sysProps.put(PropertyKey.SITE_CONF_DIR.toString(), mFolder.getRoot().getAbsolutePath());
    sysProps.put(PropertyKey.TEST_MODE.toString(), "false");
    try (Closeable p = new SystemPropertyRule(sysProps).toResource()) {
      Configuration.init();
      // set only in site prop
      assertEquals(Source.SITE_PROPERTY, Configuration.getSource(PropertyKey.MASTER_HOSTNAME));
      // set both in site and system prop
      assertEquals(Source.SYSTEM_PROPERTY, Configuration.getSource(PropertyKey.MASTER_WEB_PORT));
      // set only in system prop
      assertEquals(Source.SYSTEM_PROPERTY, Configuration.getSource(PropertyKey.LOGS_DIR));
      // set neither in system prop
      assertEquals(Source.DEFAULT, Configuration.getSource(PropertyKey.MASTER_RPC_PORT));
    }
  }

  @Test
  public void getRuntimeDefault() throws Exception {
    // The default supplier is re-evaluated on every read, so later mutations
    // of the backing value must be observed.
    AtomicInteger x = new AtomicInteger(100);
    PropertyKey key = new PropertyKey.Builder("testKey")
        .setDefaultSupplier(new DefaultSupplier(() -> x.get(), "finds x"))
        .build();
    assertEquals(100, Configuration.getInt(key));
    x.set(20);
    assertEquals(20, Configuration.getInt(key));
  }
}
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.channel.local; import io.netty.channel.AbstractChannel; import io.netty.channel.Channel; import io.netty.channel.ChannelConfig; import io.netty.channel.ChannelException; import io.netty.channel.ChannelMetadata; import io.netty.channel.ChannelOutboundBuffer; import io.netty.channel.ChannelPipeline; import io.netty.channel.ChannelPromise; import io.netty.channel.DefaultChannelConfig; import io.netty.channel.EventLoop; import io.netty.channel.SingleThreadEventLoop; import io.netty.util.ReferenceCountUtil; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.SingleThreadEventExecutor; import io.netty.util.internal.EmptyArrays; import io.netty.util.internal.InternalThreadLocalMap; import io.netty.util.internal.OneTimeTask; import io.netty.util.internal.PlatformDependent; import java.net.SocketAddress; import java.nio.channels.AlreadyConnectedException; import java.nio.channels.ClosedChannelException; import java.nio.channels.ConnectionPendingException; import java.nio.channels.NotYetConnectedException; import java.util.Queue; import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; /** * A {@link Channel} for the local transport. 
*/
public class LocalChannel extends AbstractChannel {

    // Forward-only lifecycle: OPEN -> BOUND -> CONNECTED -> CLOSED.
    private enum State { OPEN, BOUND, CONNECTED, CLOSED }

    // Updater for the finishReadFuture field; assigned in the static initializer below,
    // preferring the (possibly unsafe-based) PlatformDependent variant when available.
    @SuppressWarnings({ "rawtypes" })
    private static final AtomicReferenceFieldUpdater<LocalChannel, Future> FINISH_READ_FUTURE_UPDATER;

    private static final ChannelMetadata METADATA = new ChannelMetadata(false);

    // Limit on re-entrant in-stack reads in doBeginRead(); beyond this depth the read
    // is deferred to the event loop via readTask to avoid unbounded stack growth.
    private static final int MAX_READER_STACK_DEPTH = 8;

    // Shared pre-allocated exception; its stack trace is cleared in the static initializer
    // since the trace would be meaningless for a cached instance.
    private static final ClosedChannelException CLOSED_CHANNEL_EXCEPTION = new ClosedChannelException();

    private final ChannelConfig config = new DefaultChannelConfig(this);
    // To further optimize this we could write our own SPSC queue.
    private final Queue<Object> inboundBuffer = PlatformDependent.newSpscQueue();

    // Drains inboundBuffer through the pipeline; scheduled on the event loop when the
    // reader stack depth limit is exceeded in doBeginRead().
    private final Runnable readTask = new Runnable() {
        @Override
        public void run() {
            ChannelPipeline pipeline = pipeline();
            for (;;) {
                Object m = inboundBuffer.poll();
                if (m == null) {
                    break;
                }
                pipeline.fireChannelRead(m);
            }
            pipeline.fireChannelReadComplete();
        }
    };

    // Registered with the event loop in doRegister(); closes this channel when the
    // executor shuts down. Removed again in doDeregister().
    private final Runnable shutdownHook = new Runnable() {
        @Override
        public void run() {
            unsafe().close(unsafe().voidPromise());
        }
    };

    private volatile State state;
    private volatile LocalChannel peer;
    private volatile LocalAddress localAddress;
    private volatile LocalAddress remoteAddress;
    private volatile ChannelPromise connectPromise;
    private volatile boolean readInProgress;
    private volatile boolean registerInProgress;
    private volatile boolean writeInProgress;
    private volatile Future<?> finishReadFuture;

    static {
        @SuppressWarnings({ "rawtypes" })
        AtomicReferenceFieldUpdater<LocalChannel, Future> finishReadFutureUpdater =
                PlatformDependent.newAtomicReferenceFieldUpdater(LocalChannel.class, "finishReadFuture");
        if (finishReadFutureUpdater == null) {
            // Fall back to the reflection-based JDK updater when the platform-specific one
            // is unavailable.
            finishReadFutureUpdater =
                    AtomicReferenceFieldUpdater.newUpdater(LocalChannel.class, Future.class, "finishReadFuture");
        }
        FINISH_READ_FUTURE_UPDATER = finishReadFutureUpdater;
        CLOSED_CHANNEL_EXCEPTION.setStackTrace(EmptyArrays.EMPTY_STACK_TRACE);
    }

    /** Creates an unconnected client-side channel (no parent). */
    public LocalChannel() {
        super(null);
    }

    /** Creates the server-side channel paired with {@code peer}; addresses are taken from the pair. */
    LocalChannel(LocalServerChannel parent, LocalChannel peer) {
        super(parent);
        this.peer = peer;
        localAddress = parent.localAddress();
        remoteAddress = peer.localAddress();
    }

    @Override
    public ChannelMetadata metadata() {
        return METADATA;
    }

    @Override
    public ChannelConfig config() {
        return config;
    }

    @Override
    public LocalServerChannel parent() {
        return (LocalServerChannel) super.parent();
    }

    @Override
    public LocalAddress localAddress() {
        return (LocalAddress) super.localAddress();
    }

    @Override
    public LocalAddress remoteAddress() {
        return (LocalAddress) super.remoteAddress();
    }

    @Override
    public boolean isOpen() {
        return state != State.CLOSED;
    }

    @Override
    public boolean isActive() {
        return state == State.CONNECTED;
    }

    @Override
    protected AbstractUnsafe newUnsafe() {
        return new LocalUnsafe();
    }

    @Override
    protected boolean isCompatible(EventLoop loop) {
        return loop instanceof SingleThreadEventLoop;
    }

    @Override
    protected SocketAddress localAddress0() {
        return localAddress;
    }

    @Override
    protected SocketAddress remoteAddress0() {
        return remoteAddress;
    }

    @Override
    protected void doRegister() throws Exception {
        // Check if both peer and parent are non-null because this channel was created by a LocalServerChannel.
        // This is needed as a peer may not be null also if a LocalChannel was connected before and
        // deregistered / registered later again.
        //
        // See https://github.com/netty/netty/issues/2400
        if (peer != null && parent() != null) {
            // Store the peer in a local variable as it may be set to null if doClose() is called.
            // Because of this we also set registerInProgress to true as we check for this in doClose() and make sure
            // we delay the fireChannelInactive() to be fired after the fireChannelActive() and so keep the correct
            // order of events.
            //
            // See https://github.com/netty/netty/issues/2144
            final LocalChannel peer = this.peer;
            registerInProgress = true;
            state = State.CONNECTED;
            peer.remoteAddress = parent() == null ? null : parent().localAddress();
            peer.state = State.CONNECTED;
            // Always call peer.eventLoop().execute() even if peer.eventLoop().inEventLoop() is true.
            // This ensures that if both channels are on the same event loop, the peer's channelActive
            // event is triggered *after* this channel's channelRegistered event, so that this channel's
            // pipeline is fully initialized by ChannelInitializer before any channelRead events.
            peer.eventLoop().execute(new OneTimeTask() {
                @Override
                public void run() {
                    registerInProgress = false;
                    ChannelPromise promise = peer.connectPromise;
                    // Only trigger fireChannelActive() if the promise was not null and was not completed yet.
                    // connectPromise may be set to null if doClose() was called in the meantime.
                    if (promise != null && promise.trySuccess()) {
                        peer.pipeline().fireChannelActive();
                    }
                }
            });
        }
        ((SingleThreadEventExecutor) eventLoop()).addShutdownHook(shutdownHook);
    }

    @Override
    protected void doBind(SocketAddress localAddress) throws Exception {
        // Registration may normalize/assign the actual address; store whatever the registry returns.
        this.localAddress = LocalChannelRegistry.register(this, this.localAddress, localAddress);
        state = State.BOUND;
    }

    @Override
    protected void doDisconnect() throws Exception {
        // The local transport has no half-open notion: disconnect simply closes.
        doClose();
    }

    @Override
    protected void doClose() throws Exception {
        final LocalChannel peer = this.peer;
        if (state != State.CLOSED) {
            // Update all internal state before the closeFuture is notified.
            if (localAddress != null) {
                if (parent() == null) {
                    LocalChannelRegistry.unregister(localAddress);
                }
                localAddress = null;
            }
            // State change must happen before finishPeerRead to ensure writes are released either in doWrite or
            // channelRead.
            state = State.CLOSED;
            ChannelPromise promise = connectPromise;
            if (promise != null) {
                // Use tryFailure() instead of setFailure() to avoid the race against cancel().
                promise.tryFailure(CLOSED_CHANNEL_EXCEPTION);
                connectPromise = null;
            }
            // To preserve ordering of events we must process any pending reads
            if (writeInProgress && peer != null) {
                finishPeerRead(peer);
            }
        }
        if (peer != null && peer.isActive()) {
            // Need to execute the close in the correct EventLoop (see https://github.com/netty/netty/issues/1777).
            // Also check if the registration was not done yet. In this case we submit the close to the EventLoop
            // to make sure its run after the registration completes (see https://github.com/netty/netty/issues/2144).
            if (peer.eventLoop().inEventLoop() && !registerInProgress) {
                doPeerClose(peer, peer.writeInProgress);
            } else {
                // This value may change, and so we should save it before executing the Runnable.
                final boolean peerWriteInProgress = peer.writeInProgress;
                try {
                    peer.eventLoop().execute(new OneTimeTask() {
                        @Override
                        public void run() {
                            doPeerClose(peer, peerWriteInProgress);
                        }
                    });
                } catch (RuntimeException e) {
                    // The peer close may attempt to drain this.inboundBuffers. If that fails make sure it is drained.
                    releaseInboundBuffers();
                    throw e;
                }
            }
            this.peer = null;
        }
    }

    // Flushes any read the peer still owes us (when it was mid-write) before closing it.
    private void doPeerClose(LocalChannel peer, boolean peerWriteInProgress) {
        if (peerWriteInProgress) {
            finishPeerRead0(this);
        }
        peer.unsafe().close(peer.unsafe().voidPromise());
    }

    @Override
    protected void doDeregister() throws Exception {
        // Just remove the shutdownHook as this Channel may be closed later or registered to another EventLoop
        ((SingleThreadEventExecutor) eventLoop()).removeShutdownHook(shutdownHook);
    }

    @Override
    protected void doBeginRead() throws Exception {
        if (readInProgress) {
            return;
        }
        ChannelPipeline pipeline = pipeline();
        Queue<Object> inboundBuffer = this.inboundBuffer;
        if (inboundBuffer.isEmpty()) {
            // Nothing buffered yet; mark the read pending so the next peer write delivers directly.
            readInProgress = true;
            return;
        }
        final InternalThreadLocalMap threadLocals = InternalThreadLocalMap.get();
        final Integer stackDepth = threadLocals.localChannelReaderStackDepth();
        if (stackDepth < MAX_READER_STACK_DEPTH) {
            // Read inline, tracking re-entrancy depth so deeply nested channelRead -> read()
            // chains do not overflow the stack.
            threadLocals.setLocalChannelReaderStackDepth(stackDepth + 1);
            try {
                for (;;) {
                    Object received = inboundBuffer.poll();
                    if (received == null) {
                        break;
                    }
                    pipeline.fireChannelRead(received);
                }
                pipeline.fireChannelReadComplete();
            } finally {
                threadLocals.setLocalChannelReaderStackDepth(stackDepth);
            }
        } else {
            // Too deep: defer the drain to the event loop instead.
            try {
                eventLoop().execute(readTask);
            } catch (RuntimeException e) {
                // If the task could not be scheduled, release buffered messages to avoid a leak.
                releaseInboundBuffers();
                throw e;
            }
        }
    }

    @Override
    protected void doWrite(ChannelOutboundBuffer in) throws Exception {
        switch (state) {
        case OPEN:
        case BOUND:
            throw new NotYetConnectedException();
        case CLOSED:
            throw CLOSED_CHANNEL_EXCEPTION;
        case CONNECTED:
            break;
        }
        final LocalChannel peer = this.peer;
        writeInProgress = true;
        try {
            for (;;) {
                Object msg = in.current();
                if (msg == null) {
                    break;
                }
                try {
                    // It is possible the peer could have closed while we are writing, and in this case we should
                    // simulate real socket behavior and ensure the write operation is failed.
                    if (peer.state == State.CONNECTED) {
                        peer.inboundBuffer.add(ReferenceCountUtil.retain(msg));
                        in.remove();
                    } else {
                        in.remove(CLOSED_CHANNEL_EXCEPTION);
                    }
                } catch (Throwable cause) {
                    in.remove(cause);
                }
            }
        } finally {
            // The following situation may cause trouble:
            // 1. Write (with promise X)
            // 2. promise X is completed when in.remove() is called, and a listener on this promise calls close()
            // 3. Then the close event will be executed for the peer before the write events, when the write events
            // actually happened before the close event.
            writeInProgress = false;
        }
        finishPeerRead(peer);
    }

    private void finishPeerRead(final LocalChannel peer) {
        // If the peer is also writing, then we must schedule the event on the event loop to preserve read order.
        if (peer.eventLoop() == eventLoop() && !peer.writeInProgress) {
            finishPeerRead0(peer);
        } else {
            runFinishPeerReadTask(peer);
        }
    }

    private void runFinishPeerReadTask(final LocalChannel peer) {
        // If the peer is writing, we must wait until after reads are completed for that peer before we can read. So
        // we keep track of the task, and coordinate later that our read can't happen until the peer is done.
        final Runnable finishPeerReadTask = new OneTimeTask() {
            @Override
            public void run() {
                finishPeerRead0(peer);
            }
        };
        try {
            if (peer.writeInProgress) {
                peer.finishReadFuture = peer.eventLoop().submit(finishPeerReadTask);
            } else {
                peer.eventLoop().execute(finishPeerReadTask);
            }
        } catch (RuntimeException e) {
            // Scheduling failed (e.g. executor rejected the task): release to avoid leaking retained messages.
            peer.releaseInboundBuffers();
            throw e;
        }
    }

    // Drains and releases every message still sitting in this channel's inbound buffer.
    private void releaseInboundBuffers() {
        for (;;) {
            Object o = inboundBuffer.poll();
            if (o == null) {
                break;
            }
            ReferenceCountUtil.release(o);
        }
    }

    private void finishPeerRead0(LocalChannel peer) {
        Future<?> peerFinishReadFuture = peer.finishReadFuture;
        if (peerFinishReadFuture != null) {
            if (!peerFinishReadFuture.isDone()) {
                // A previously scheduled read is still pending: reschedule behind it to keep ordering.
                runFinishPeerReadTask(peer);
                return;
            } else {
                // Lazy unset to make sure we don't prematurely unset it while scheduling a new task.
                FINISH_READ_FUTURE_UPDATER.compareAndSet(peer, peerFinishReadFuture, null);
            }
        }
        ChannelPipeline peerPipeline = peer.pipeline();
        if (peer.readInProgress) {
            peer.readInProgress = false;
            for (;;) {
                Object received = peer.inboundBuffer.poll();
                if (received == null) {
                    break;
                }
                peerPipeline.fireChannelRead(received);
            }
            peerPipeline.fireChannelReadComplete();
        }
    }

    private class LocalUnsafe extends AbstractUnsafe {

        @Override
        public void connect(final SocketAddress remoteAddress,
                SocketAddress localAddress, final ChannelPromise promise) {
            if (!promise.setUncancellable() || !ensureOpen(promise)) {
                return;
            }

            if (state == State.CONNECTED) {
                Exception cause = new AlreadyConnectedException();
                safeSetFailure(promise, cause);
                pipeline().fireExceptionCaught(cause);
                return;
            }

            if (connectPromise != null) {
                throw new ConnectionPendingException();
            }

            connectPromise = promise;

            if (state != State.BOUND) {
                // Not bound yet and no localAddress specified - get one.
                if (localAddress == null) {
                    localAddress = new LocalAddress(LocalChannel.this);
                }
            }

            if (localAddress != null) {
                try {
                    doBind(localAddress);
                } catch (Throwable t) {
                    safeSetFailure(promise, t);
                    close(voidPromise());
                    return;
                }
            }

            Channel boundChannel = LocalChannelRegistry.get(remoteAddress);
            if (!(boundChannel instanceof LocalServerChannel)) {
                // No server bound at the target address: behave like a refused connection.
                Exception cause = new ChannelException("connection refused");
                safeSetFailure(promise, cause);
                close(voidPromise());
                return;
            }

            LocalServerChannel serverChannel = (LocalServerChannel) boundChannel;
            // The server creates/links the peer; the connect promise is completed later
            // from doRegister() once both sides are wired up.
            peer = serverChannel.serve(LocalChannel.this);
        }
    }
}
package org.jgroups.protocols;

import org.jgroups.Event;
import org.jgroups.Global;
import org.jgroups.Header;
import org.jgroups.Message;
import org.jgroups.annotations.MBean;
import org.jgroups.annotations.Property;
import org.jgroups.stack.Protocol;
import org.jgroups.util.MessageBatch;
import org.jgroups.util.Util;

import java.io.DataInput;
import java.io.DataOutput;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;

/**
 * Compresses the payload of a message. Goal is to reduce the number of messages
 * sent across the wire. Should ideally be layered somewhere above a
 * fragmentation protocol (e.g. FRAG).
 *
 * @author Bela Ban
 */
@MBean(description="Compresses messages to send and uncompresses received messages")
public class COMPRESS extends Protocol {

    /* ----------------------------------------- Properties -------------------------------------------------- */

    @Property(description="Compression level (from java.util.zip.Deflater) " +
      "(0=no compression, 1=best speed, 9=best compression). Default is 9")
    protected int compression_level=Deflater.BEST_COMPRESSION; // this is 9

    @Property(description="Minimal payload size of a message (in bytes) for compression to kick in. Default is 500 bytes")
    protected long min_size=500;

    @Property(description="Number of inflaters/deflaters for concurrent processing. Default is 2")
    protected int pool_size=2;

    /* --------------------------------------------- Fields ------------------------------------------------------ */

    // Bounded pools of reusable (de)compressors; take()/offer() bracket every use so at most
    // pool_size compressions/decompressions run concurrently.
    protected BlockingQueue<Deflater> deflater_pool=null;
    protected BlockingQueue<Inflater> inflater_pool=null;

    public COMPRESS() {
    }

    /** Pre-populates both pools with {@code pool_size} instances. */
    public void init() throws Exception {
        deflater_pool=new ArrayBlockingQueue<>(pool_size);
        for(int i=0; i < pool_size; i++)
            deflater_pool.add(new Deflater(compression_level));
        inflater_pool=new ArrayBlockingQueue<>(pool_size);
        for(int i=0; i < pool_size; i++)
            inflater_pool.add(new Inflater());
    }

    /** Releases native zlib resources held by the pooled (de)compressors. */
    public void destroy() {
        for(Deflater deflater: deflater_pool)
            deflater.end();
        for(Inflater inflater: inflater_pool)
            inflater.end();
    }

    /**
     * We compress the payload if it is larger than <code>min_size</code>. In this case we add a header containing
     * the original size before compression. Otherwise we add no header.<br/>
     * Note that we compress either the entire buffer (if offset/length are not used), or a subset (if offset/length
     * are used)
     * @param evt
     */
    public Object down(Event evt) {
        if(evt.getType() == Event.MSG) {
            Message msg=(Message)evt.getArg();
            int length=msg.getLength(); // takes offset/length (if set) into account
            if(length >= min_size) {
                byte[] payload=msg.getRawBuffer(); // here we get the ref so we can avoid copying
                byte[] compressed_payload=new byte[length];
                Deflater deflater=null;
                try {
                    deflater=deflater_pool.take();
                    deflater.reset();
                    deflater.setInput(payload, msg.getOffset(), length);
                    deflater.finish();
                    deflater.deflate(compressed_payload);
                    int compressed_size=deflater.getTotalOut();
                    // Only send the compressed form when it is actually smaller than the original.
                    if(compressed_size < length ) { // JGRP-1000
                        byte[] new_payload=new byte[compressed_size];
                        System.arraycopy(compressed_payload,0,new_payload,0,compressed_size);
                        // Copy without the buffer, then attach the compressed payload and a header
                        // recording the original length (needed by the receiver to size its buffer).
                        Message copy=msg.copy(false).setBuffer(new_payload).putHeader(this.id,new CompressHeader(length));
                        if(log.isTraceEnabled())
                            log.trace("down(): compressed payload from " + length + " bytes to " + compressed_size + " bytes");
                        return down_prot.down(new Event(Event.MSG, copy));
                    }
                    else {
                        if(log.isTraceEnabled())
                            log.trace("down(): skipping compression since the compressed message (" + compressed_size +
                                        ") is not smaller than the original (" + length + ")");
                    }
                }
                catch(InterruptedException e) {
                    Thread.currentThread().interrupt(); // set interrupt flag again
                    throw new RuntimeException(e);
                }
                finally {
                    if(deflater != null)
                        deflater_pool.offer(deflater);
                }
            }
        }
        return down_prot.down(evt);
    }

    /**
     * If there is no header, we pass the message up. Otherwise we uncompress the payload to its original size.
     * @param evt
     */
    public Object up(Event evt) {
        if(evt.getType() == Event.MSG) {
            Message msg=(Message)evt.getArg();
            CompressHeader hdr=(CompressHeader)msg.getHeader(this.id);
            if(hdr != null) {
                Message uncompressed_msg=uncompress(msg, hdr.original_size);
                if(uncompressed_msg != null) {
                    if(log.isTraceEnabled())
                        log.trace("up(): uncompressed " + msg.getLength() + " bytes to " + uncompressed_msg.getLength() +
                                    " bytes");
                    return up_prot.up(new Event(Event.MSG, uncompressed_msg));
                }
                // NOTE(review): if uncompress() fails we fall through and pass the still-compressed
                // message up unchanged — confirm this best-effort behavior is intended.
            }
        }
        return up_prot.up(evt);
    }

    /** Uncompresses every message in the batch that carries a CompressHeader, replacing it in place. */
    public void up(MessageBatch batch) {
        for(Message msg: batch) {
            CompressHeader hdr=(CompressHeader)msg.getHeader(this.id);
            if(hdr != null) {
                Message uncompressed_msg=uncompress(msg, hdr.original_size);
                if(uncompressed_msg != null) {
                    if(log.isTraceEnabled())
                        log.trace("up(): uncompressed " + msg.getLength() + " bytes to " + uncompressed_msg.getLength() +
                                    " bytes");
                    batch.replace(msg, uncompressed_msg); // replace msg in batch with uncompressed_msg
                }
            }
        }
        if(!batch.isEmpty())
            up_prot.up(batch);
    }

    /** Returns a new message as a result of uncompressing msg, or null if msg couldn't be uncompressed */
    protected Message uncompress(Message msg, int original_size) {
        byte[] compressed_payload=msg.getRawBuffer();
        if(compressed_payload != null && compressed_payload.length > 0) {
            byte[] uncompressed_payload=new byte[original_size];
            Inflater inflater=null;
            try {
                inflater=inflater_pool.take();
                inflater.reset();
                inflater.setInput(compressed_payload, msg.getOffset(), msg.getLength());
                try {
                    inflater.inflate(uncompressed_payload);
                    // we need to copy: https://jira.jboss.org/jira/browse/JGRP-867
                    return msg.copy(false).setBuffer(uncompressed_payload);
                }
                catch(DataFormatException e) {
                    log.error(Util.getMessage("CompressionFailure"), e);
                }
            }
            catch(InterruptedException e) {
                Thread.currentThread().interrupt(); // set the interrupt bit again, so caller can handle it
            }
            finally {
                if(inflater != null)
                    inflater_pool.offer(inflater);
            }
        }
        return null;
    }

    /** Wire header carrying the pre-compression payload size so the receiver can allocate the exact buffer. */
    public static class CompressHeader extends Header {
        int original_size=0;

        public CompressHeader() {
            super();
        }

        public CompressHeader(int s) {
            original_size=s;
        }

        public int size() {
            return Global.INT_SIZE;
        }

        public void writeTo(DataOutput out) throws Exception {
            out.writeInt(original_size);
        }

        public void readFrom(DataInput in) throws Exception {
            original_size=in.readInt();
        }
    }
}
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.skyframe;

import com.google.common.base.Preconditions;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.skyframe.WalkableGraph.WalkableGraphFactory;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.function.Supplier;
import javax.annotation.Nullable;

/**
 * Includes options and states used by {@link MemoizingEvaluator#evaluate}, {@link
 * BuildDriver#evaluate} and {@link WalkableGraphFactory#prepareAndGet}
 */
public class EvaluationContext {
  private final int numThreads;
  @Nullable private final Supplier<ExecutorService> executorServiceSupplier;
  private final boolean keepGoing;
  private final ExtendedEventHandler eventHandler;
  private final boolean useForkJoinPool;
  private final boolean isExecutionPhase;
  private final int cpuHeavySkyKeysThreadPoolSize;
  private final int executionPhaseThreadPoolSize;

  /**
   * @param numThreads degree of parallelism; must be positive
   * @param executorServiceSupplier optional supplier of a custom executor; may be null
   * @param eventHandler receiver for evaluation events; must be non-null
   * @throws IllegalArgumentException if {@code numThreads <= 0}
   * @throws NullPointerException if {@code eventHandler} is null
   */
  protected EvaluationContext(
      int numThreads,
      @Nullable Supplier<ExecutorService> executorServiceSupplier,
      boolean keepGoing,
      ExtendedEventHandler eventHandler,
      boolean useForkJoinPool,
      boolean isExecutionPhase,
      int cpuHeavySkyKeysThreadPoolSize,
      int executionPhaseThreadPoolSize) {
    Preconditions.checkArgument(0 < numThreads, "numThreads must be positive");
    this.numThreads = numThreads;
    this.executorServiceSupplier = executorServiceSupplier;
    this.keepGoing = keepGoing;
    this.eventHandler = Preconditions.checkNotNull(eventHandler);
    this.useForkJoinPool = useForkJoinPool;
    this.isExecutionPhase = isExecutionPhase;
    this.cpuHeavySkyKeysThreadPoolSize = cpuHeavySkyKeysThreadPoolSize;
    this.executionPhaseThreadPoolSize = executionPhaseThreadPoolSize;
  }

  /** Returns the configured degree of parallelism (always positive). */
  public int getParallelism() {
    return numThreads;
  }

  /** Returns the custom executor supplier, or empty if the default executor should be used. */
  public Optional<Supplier<ExecutorService>> getExecutorServiceSupplier() {
    return Optional.ofNullable(executorServiceSupplier);
  }

  /** Whether evaluation should continue past errors. */
  public boolean getKeepGoing() {
    return keepGoing;
  }

  /** Returns the event handler; never null. */
  public ExtendedEventHandler getEventHandler() {
    return eventHandler;
  }

  /**
   * Returns a context identical to this one except for {@code keepGoing}; returns {@code this}
   * when the value is already the requested one.
   */
  public EvaluationContext getCopyWithKeepGoing(boolean keepGoing) {
    if (this.keepGoing == keepGoing) {
      return this;
    } else {
      return new EvaluationContext(
          this.numThreads,
          this.executorServiceSupplier,
          keepGoing,
          this.eventHandler,
          this.useForkJoinPool,
          this.isExecutionPhase,
          this.cpuHeavySkyKeysThreadPoolSize,
          this.executionPhaseThreadPoolSize);
    }
  }

  /** Whether a ForkJoinPool should be used for evaluation. */
  public boolean getUseForkJoinPool() {
    return useForkJoinPool;
  }

  /**
   * Returns the size of the thread pool for CPU-heavy tasks set by
   * --experimental_skyframe_cpu_heavy_skykeys_thread_pool_size.
   *
   * <p>--experimental_skyframe_cpu_heavy_skykeys_thread_pool_size is currently incompatible with
   * the execution phase, and this method will return -1.
   */
  public int getCPUHeavySkyKeysThreadPoolSize() {
    if (isExecutionPhase) {
      return -1;
    }
    return cpuHeavySkyKeysThreadPoolSize;
  }

  /**
   * Returns the size of the thread pool to be used for the execution phase. Only applicable with
   * --experimental_merged_skyframe_analysis_execution.
   */
  public int getExecutionPhaseThreadPoolSize() {
    return executionPhaseThreadPoolSize;
  }

  /** Whether this context configures an execution-phase evaluation. */
  public boolean isExecutionPhase() {
    return isExecutionPhase;
  }

  public static Builder newBuilder() {
    return new Builder();
  }

  /** Builder for {@link EvaluationContext}. */
  public static class Builder {
    private int numThreads;
    private Supplier<ExecutorService> executorServiceSupplier;
    private boolean keepGoing;
    private ExtendedEventHandler eventHandler;
    private boolean useForkJoinPool;
    private int cpuHeavySkyKeysThreadPoolSize;
    // Renamed from executionJobsThreadPoolSize for consistency with the outer class's field
    // and with setExecutionPhaseThreadPoolSize(); purely internal, no API change.
    private int executionPhaseThreadPoolSize = 0;
    private boolean isExecutionPhase = false;

    private Builder() {}

    /** Copies every setting from an existing context into this builder. */
    public Builder copyFrom(EvaluationContext evaluationContext) {
      this.numThreads = evaluationContext.numThreads;
      this.executorServiceSupplier = evaluationContext.executorServiceSupplier;
      this.keepGoing = evaluationContext.keepGoing;
      this.eventHandler = evaluationContext.eventHandler;
      this.isExecutionPhase = evaluationContext.isExecutionPhase;
      this.useForkJoinPool = evaluationContext.useForkJoinPool;
      this.executionPhaseThreadPoolSize = evaluationContext.executionPhaseThreadPoolSize;
      this.cpuHeavySkyKeysThreadPoolSize = evaluationContext.cpuHeavySkyKeysThreadPoolSize;
      return this;
    }

    public Builder setNumThreads(int numThreads) {
      this.numThreads = numThreads;
      return this;
    }

    public Builder setExecutorServiceSupplier(Supplier<ExecutorService> executorServiceSupplier) {
      this.executorServiceSupplier = executorServiceSupplier;
      return this;
    }

    public Builder setKeepGoing(boolean keepGoing) {
      this.keepGoing = keepGoing;
      return this;
    }

    public Builder setEventHandler(ExtendedEventHandler eventHandler) {
      this.eventHandler = eventHandler;
      return this;
    }

    public Builder setUseForkJoinPool(boolean useForkJoinPool) {
      this.useForkJoinPool = useForkJoinPool;
      return this;
    }

    public Builder setCPUHeavySkyKeysThreadPoolSize(int cpuHeavySkyKeysThreadPoolSize) {
      this.cpuHeavySkyKeysThreadPoolSize = cpuHeavySkyKeysThreadPoolSize;
      return this;
    }

    public Builder setExecutionPhaseThreadPoolSize(int executionJobsThreadPoolSize) {
      this.executionPhaseThreadPoolSize = executionJobsThreadPoolSize;
      return this;
    }

    public Builder setExecutionPhase() {
      isExecutionPhase = true;
      return this;
    }

    public EvaluationContext build() {
      return new EvaluationContext(
          numThreads,
          executorServiceSupplier,
          keepGoing,
          eventHandler,
          useForkJoinPool,
          isExecutionPhase,
          cpuHeavySkyKeysThreadPoolSize,
          executionPhaseThreadPoolSize);
    }
  }
}
/*
 * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.replicatedmap;

import com.hazelcast.config.Config;
import com.hazelcast.config.ReplicatedMapConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.LifecycleEvent;
import com.hazelcast.core.LifecycleListener;
import com.hazelcast.spi.merge.HigherHitsMergePolicy;
import com.hazelcast.spi.merge.LatestUpdateMergePolicy;
import com.hazelcast.spi.merge.PassThroughMergePolicy;
import com.hazelcast.spi.merge.PutIfAbsentMergePolicy;
import com.hazelcast.spi.properties.ClusterProperty;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParametrizedRunner;
import com.hazelcast.test.HazelcastSerialParametersRunnerFactory;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.NightlyTest;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runners.Parameterized.UseParametersRunnerFactory;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;

import static com.hazelcast.test.SplitBrainTestSupport.blockCommunicationBetween;
import static com.hazelcast.test.SplitBrainTestSupport.unblockCommunicationBetween;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

/**
 * Split-brain merge test for ReplicatedMap: forms a 2-node cluster, partitions it,
 * populates both halves per merge-policy scenario, then heals the partition and
 * asserts the merged map matches the policy's expected winners.
 */
@RunWith(HazelcastParametrizedRunner.class)
@UseParametersRunnerFactory(HazelcastSerialParametersRunnerFactory.class)
@Category(NightlyTest.class)
public class ReplicatedMapMergePolicyTest extends HazelcastTestSupport {

    // One scenario per built-in merge policy, plus a custom policy.
    @Parameters(name = "{0}")
    public static Collection<Object> parameters() {
        return Arrays.asList(new Object[]{
                new LatestUpdateMergePolicyTestCase(),
                new HighestHitsMergePolicyTestCase(),
                new PutIfAbsentMapMergePolicyTestCase(),
                new PassThroughMapMergePolicyTestCase(),
                new CustomMergePolicyTestCase(),
        });
    }

    @Parameter
    public ReplicatedMapMergePolicyTestCase testCase;

    private TestHazelcastInstanceFactory factory;

    @Before
    public void init() {
        factory = createHazelcastInstanceFactory(2);
    }

    @Test
    public void testMapMergePolicy() {
        final String mapName = randomMapName();
        Config config = newConfig(testCase.getMergePolicyClassName(), mapName);
        final HazelcastInstance h1 = factory.newHazelcastInstance(config);
        final HazelcastInstance h2 = factory.newHazelcastInstance(config);
        // The latch opens when h2 reports a MERGED lifecycle event after the partition heals.
        TestLifeCycleListener lifeCycleListener = new TestLifeCycleListener(1);
        h2.getLifecycleService().addLifecycleListener(lifeCycleListener);

        // wait for cluster to be formed before breaking the connection
        waitAllForSafeState(h1, h2);

        blockCommunicationBetween(h1, h2);
        closeConnectionBetween(h1, h2);

        // Each side should now see itself as a single-member cluster (split brain).
        assertClusterSizeEventually(1, h1);
        assertClusterSizeEventually(1, h2);

        ReplicatedMap<Object, Object> map1 = h1.getReplicatedMap(mapName);
        ReplicatedMap<Object, Object> map2 = h2.getReplicatedMap(mapName);
        final Map<Object, Object> expectedValues = testCase.populateMaps(map1, map2, h1);

        unblockCommunicationBetween(h1, h2);

        assertOpenEventually(lifeCycleListener.latch);
        assertClusterSizeEventually(2, h1, h2);

        // After the merge, h1's view must contain the per-policy expected winners.
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                ReplicatedMap<Object, Object> mapTest = h1.getReplicatedMap(mapName);
                for (Map.Entry<Object, Object> entry : expectedValues.entrySet()) {
                    assertEquals(entry.getValue(), mapTest.get(entry.getKey()));
                }
            }
        });
    }

    // Builds a config with a short merge delay and the scenario's merge policy for the given map.
    private Config newConfig(String mergePolicy, String mapName) {
        Config config = new Config();
        config.setProperty(ClusterProperty.MERGE_FIRST_RUN_DELAY_SECONDS.getName(), "5");
        config.setProperty(ClusterProperty.MERGE_NEXT_RUN_DELAY_SECONDS.getName(), "3");
        config.setClusterName(generateRandomString(10));
        ReplicatedMapConfig replicatedMapConfig = config.getReplicatedMapConfig(mapName);
        replicatedMapConfig.getMergePolicyConfig().setPolicy(mergePolicy);
        return config;
    }

    // Counts down its latch when the instance fires a MERGED lifecycle event.
    private class TestLifeCycleListener implements LifecycleListener {

        CountDownLatch latch;

        TestLifeCycleListener(int countdown) {
            latch = new CountDownLatch(countdown);
        }

        @Override
        public void stateChanged(LifecycleEvent event) {
            if (event.getState() == LifecycleEvent.LifecycleState.MERGED) {
                latch.countDown();
            }
        }
    }

    private interface ReplicatedMapMergePolicyTestCase {
        // Populate given maps with K-V pairs. Optional HZ instance is required by specific merge policies in order to
        // generate keys owned by the given instance.
        // return K-V pairs expected to be found in the merged map
        Map<Object, Object> populateMaps(ReplicatedMap<Object, Object> map1, ReplicatedMap<Object, Object> map2,
                                         HazelcastInstance instance);

        // return merge policy's class name
        String getMergePolicyClassName();
    }

    // LatestUpdate: the most recently written value for each key must win the merge.
    private static class LatestUpdateMergePolicyTestCase implements ReplicatedMapMergePolicyTestCase {

        @Override
        public Map<Object, Object> populateMaps(ReplicatedMap<Object, Object> map1, ReplicatedMap<Object, Object> map2,
                                                HazelcastInstance instance) {
            map1.put("key1", "value");
            // prevent updating at the same time
            sleepAtLeastSeconds(1);
            map2.put("key1", "LatestUpdatedValue");
            map2.put("key2", "value2");
            // prevent updating at the same time
            sleepAtLeastSeconds(1);
            map1.put("key2", "LatestUpdatedValue2");

            Map<Object, Object> expectedValues = new HashMap<Object, Object>();
            expectedValues.put("key1", "LatestUpdatedValue");
            expectedValues.put("key2", "LatestUpdatedValue2");
            return expectedValues;
        }

        @Override
        public String getMergePolicyClassName() {
            return LatestUpdateMergePolicy.class.getName();
        }

        @Override
        public String toString() {
            return "LatestUpdateMapMergePolicy";
        }
    }

    // HigherHits: the value with more read hits for each key must win the merge.
    private static class HighestHitsMergePolicyTestCase implements ReplicatedMapMergePolicyTestCase {

        @Override
        public Map<Object, Object> populateMaps(ReplicatedMap<Object, Object> map1, ReplicatedMap<Object, Object> map2,
                                                HazelcastInstance instance) {
            map1.put("key1", "higherHitsValue");
            map1.put("key2", "value2");
            // increase hits number
            map1.get("key1");
            map1.get("key1");

            map2.put("key1", "value1");
            map2.put("key2", "higherHitsValue2");
            // increase hits number
            map2.get("key2");
            map2.get("key2");

            Map<Object, Object> expectedValues = new HashMap<Object, Object>();
            expectedValues.put("key1", "higherHitsValue");
            expectedValues.put("key2", "higherHitsValue2");
            return expectedValues;
        }

        @Override
        public String getMergePolicyClassName() {
            return HigherHitsMergePolicy.class.getName();
        }

        @Override
        public String toString() {
            return "HigherHitsMapMergePolicy";
        }
    }

    // PutIfAbsent: a merging value only lands if the key is absent on the destination.
    private static class PutIfAbsentMapMergePolicyTestCase implements ReplicatedMapMergePolicyTestCase {

        @Override
        public Map<Object, Object> populateMaps(ReplicatedMap<Object, Object> map1, ReplicatedMap<Object, Object> map2,
                                                HazelcastInstance instance) {
            map1.put("key1", "PutIfAbsentValue1");

            map2.put("key1", "value");
            map2.put("key2", "PutIfAbsentValue2");

            Map<Object, Object> expectedValues = new HashMap<Object, Object>();
            expectedValues.put("key1", "PutIfAbsentValue1");
            expectedValues.put("key2", "PutIfAbsentValue2");
            return expectedValues;
        }

        @Override
        public String getMergePolicyClassName() {
            return PutIfAbsentMergePolicy.class.getName();
        }

        @Override
        public String toString() {
            return "PutIfAbsentMapMergePolicy";
        }
    }

    // PassThrough: the merging (smaller-cluster) value always overwrites; the key is
    // generated so it is owned by the surviving instance.
    private static class PassThroughMapMergePolicyTestCase implements ReplicatedMapMergePolicyTestCase {

        @Override
        public Map<Object, Object> populateMaps(ReplicatedMap<Object, Object> map1, ReplicatedMap<Object, Object> map2,
                                                HazelcastInstance instance) {
            assertNotNull(instance);
            String key = generateKeyOwnedBy(instance);
            map1.put(key, "value");

            map2.put(key, "passThroughValue");

            Map<Object, Object> expectedValues = new HashMap<Object, Object>();
            expectedValues.put(key, "passThroughValue");
            return expectedValues;
        }

        @Override
        public String getMergePolicyClassName() {
            return PassThroughMergePolicy.class.getName();
        }

        @Override
        public String toString() {
            return "PassThroughMergePolicy";
        }
    }

    // Custom policy defined elsewhere in the test tree (CustomReplicatedMapMergePolicy);
    // expects the Integer value to win the merge for the generated key.
    private static class CustomMergePolicyTestCase implements ReplicatedMapMergePolicyTestCase {

        @Override
        public Map<Object, Object> populateMaps(ReplicatedMap<Object, Object> map1, ReplicatedMap<Object, Object> map2,
                                                HazelcastInstance instance) {
            assertNotNull(instance);
            String key = generateKeyOwnedBy(instance);
            Integer value = 1;
            map1.put(key, "value");

            map2.put(key, value);

            Map<Object, Object> expectedValues = new HashMap<Object, Object>();
            expectedValues.put(key, value);
            return expectedValues;
        }

        @Override
        public String getMergePolicyClassName() {
            return CustomReplicatedMapMergePolicy.class.getName();
        }

        @Override
        public String toString() {
            return "CustomMergePolicy";
        }
    }
}
/*
 * Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.user.core.jdbc;

import junit.framework.TestCase;
import org.apache.commons.dbcp.BasicDataSource;
import org.wso2.carbon.user.api.RealmConfiguration;
import org.wso2.carbon.user.core.AuthorizationManager;
import org.wso2.carbon.user.core.BaseTestCase;
import org.wso2.carbon.user.core.ClaimTestUtil;
import org.wso2.carbon.user.core.Permission;
import org.wso2.carbon.user.core.UserCoreConstants;
import org.wso2.carbon.user.core.UserCoreTestConstants;
import org.wso2.carbon.user.core.UserRealm;
import org.wso2.carbon.user.core.UserStoreManager;
import org.wso2.carbon.user.core.authman.AdvancedPermissionTreeTest;
import org.wso2.carbon.user.core.authorization.JDBCAuthorizationManager;
import org.wso2.carbon.user.core.common.DefaultRealm;
import org.wso2.carbon.user.core.config.RealmConfigXMLProcessor;
import org.wso2.carbon.user.core.config.TestRealmConfigBuilder;
import org.wso2.carbon.user.core.util.DatabaseUtil;
import org.wso2.carbon.utils.dbcreator.DatabaseCreator;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;

/**
 * End-to-end test of the JDBC-backed user realm: user CRUD, role membership,
 * authorization grants/denies and claim handling, all against a throwaway
 * H2 database created fresh for each run.
 */
public class JDBCRealmTest extends BaseTestCase {

    private UserRealm realm = null;

    public static final String JDBC_TEST_USERMGT_XML = "user-mgt-test.xml";

    private static String TEST_URL = "jdbc:h2:target/BasicJDBCDatabaseTest/CARBON_TEST";

    public void setUp() throws Exception {
        super.setUp();
    }

    /**
     * Single entry point: the sub-scenarios below share the realm created by
     * {@link #initRealmStuff(String)} and therefore must run in this order.
     */
    public void testStuff() throws Exception {
        DatabaseUtil.closeDatabasePoolConnection();
        initRealmStuff(TEST_URL);
        doUserStuff();
        doUserRoleStuff();
        doAuthorizationStuff();
        doClaimStuff();
    }

    /**
     * Creates a fresh H2 registry database at {@code dbUrl} and initializes
     * {@link #realm} from the bundled test configuration.
     */
    public void initRealmStuff(String dbUrl) throws Exception {
        String dbFolder = "target/BasicJDBCDatabaseTest";
        // Start from a clean database directory so earlier runs cannot leak state.
        if ((new File(dbFolder)).exists()) {
            deleteDir(new File(dbFolder));
        }

        BasicDataSource ds = new BasicDataSource();
        ds.setDriverClassName(UserCoreTestConstants.DB_DRIVER);
        ds.setUrl(dbUrl);

        DatabaseCreator creator = new DatabaseCreator(ds);
        creator.createRegistryDatabase();

        realm = new DefaultRealm();
        InputStream inStream = this.getClass().getClassLoader().getResource(
                JDBCRealmTest.JDBC_TEST_USERMGT_XML).openStream();
        try {
            RealmConfiguration realmConfig = TestRealmConfigBuilder
                    .buildRealmConfigWithJDBCConnectionUrl(inStream, TEST_URL);
            realm.init(realmConfig, ClaimTestUtil.getClaimTestData(), ClaimTestUtil
                    .getProfileTestData(), MultitenantConstants.SUPER_TENANT_ID);
        } finally {
            // The builder does not own the stream; close it here to avoid a leak.
            inStream.close();
        }
        ds.close();
    }

    /**
     * Verifies that clearing the permission tree is transparent: the tree is
     * lazily rebuilt on the next authorization query with identical results.
     */
    public void testAuthorizationClearence() throws Exception {
        BasicDataSource ds = new BasicDataSource();
        ds.setDriverClassName(UserCoreTestConstants.DB_DRIVER);
        ds.setUrl("jdbc:h2:target/clear-resources/WSO2CARBON_DB_CLEAR");
        ds.setUsername("wso2carbon");
        ds.setPassword("wso2carbon");

        realm = new DefaultRealm();
        Map<String, Object> properties = new HashMap<String, Object>();
        properties.put(UserCoreConstants.DATA_SOURCE, ds);

        RealmConfigXMLProcessor xmlProcessor = new RealmConfigXMLProcessor();
        InputStream stream = new FileInputStream("target/clear-resources/user-mgt-clear.xml");
        RealmConfiguration configuration;
        try {
            configuration = xmlProcessor.buildRealmConfiguration(stream);
        } finally {
            stream.close();
        }

        JDBCAuthorizationManager jdbcAuthnManager =
                new JDBCAuthorizationManager(configuration, properties, null, null, realm, 0);

        String[] roles = jdbcAuthnManager.getAllowedRolesForResource("/permission/admin", "ui.execute");
        // JUnit convention: expected value first, then actual.
        assertEquals(1, roles.length);

        jdbcAuthnManager.clearPermissionTree();

        // The tree should automatically be loaded on the next call.
        roles = jdbcAuthnManager.getAllowedRolesForResource("/permission/admin", "ui.execute");
        assertEquals(1, roles.length);
    }

    /**
     * User lifecycle: add, authenticate, change credentials (self and admin),
     * list, and delete/re-add together with a role carrying permissions.
     */
    public void doUserStuff() throws Exception {
        UserStoreManager admin = realm.getUserStoreManager();

        Map<String, String> userProps = new HashMap<String, String>();
        userProps.put(ClaimTestUtil.CLAIM_URI1, "1claim1Value");
        userProps.put(ClaimTestUtil.CLAIM_URI2, "2claim2Value");

        Permission[] permissions = new Permission[2];
        permissions[0] = new Permission("high security", "read");
        permissions[1] = new Permission("low security", "write");

        // add
        admin.addUser("dimuthu", "credential", null, null, null, false);
        admin.addRole("role1", new String[] { "dimuthu" }, permissions);
        admin.addUser("vajira", "credential", new String[] { "role1" }, userProps, null, false);

        // Exercise the id-lookup APIs; values themselves are not asserted here.
        admin.getUserId("dimuthu");
        admin.getTenantId("dimuthu");

        // authenticate
        assertTrue(admin.authenticate("dimuthu", "credential"));
        admin.updateCredentialByAdmin("dimuthu", "topsecret");
        assertTrue(admin.authenticate("dimuthu", "topsecret"));
        assertTrue(admin.isExistingUser("dimuthu"));
        assertFalse(admin.isExistingUser("muhaha"));

        // update (self-service password change: new password, then old)
        admin.updateCredential("dimuthu", "password", "topsecret");
        assertFalse(admin.authenticate("dimuthu", "credential"));
        assertTrue(admin.authenticate("dimuthu", "password"));

        String[] names = admin.listUsers("*", 100);
        assertEquals(3, names.length);
        String[] roleNames = admin.getRoleNames();
        assertEquals(3, roleNames.length);

        // delete
        admin.deleteUser("vajira");
        assertFalse(admin.authenticate("vajira", "credential"));
        admin.addUser("vajira", "credential", new String[] { "role1" }, userProps, null, false);
        admin.deleteRole("role1");
        admin.addRole("role1", new String[] { "dimuthu" }, permissions);
    }

    /**
     * Role membership updates from both directions (roles-of-user and
     * users-of-role), including negative cases with unknown names.
     */
    public void doUserRoleStuff() throws Exception {
        UserStoreManager admin = realm.getUserStoreManager();

        admin.addRole("role2", null, null);
        admin.addRole("role3", null, null);
        admin.addRole("role4", null, null);
        admin.addUser("saman", "pass1", null, null, null, false);
        admin.addUser("amara", "pass2", null, null, null, false);
        admin.addUser("sunil", "pass3", null, null, null, false);

        admin.updateRoleListOfUser("saman", null, new String[] { "role2" });
        admin.updateRoleListOfUser("saman", new String[] { "role2" }, new String[] { "role4", "role3" });
        String[] rolesOfSaman = admin.getRoleListOfUser("saman");
        assertEquals(3, rolesOfSaman.length);

        // negative
        admin.updateUserListOfRole("role2", new String[] { "saman" }, null);
        admin.updateUserListOfRole("role3", null, new String[] { "amara", "sunil" });
        String[] users = admin.getUserListOfRole("role3");
        assertEquals(3, users.length);

        // negative
        try {
            admin.updateRoleListOfUser("saman", new String[] { "x" }, new String[] { "y" });
            TestCase.fail("updating with nonexistent roles should have failed");
        } catch (Exception e) {
            // expected error in negative testing
        }

        try {
            admin.updateUserListOfRole("role2", null, new String[] { "d" });
            TestCase.fail("adding a nonexistent user to a role should have failed");
        } catch (Exception e) {
            // expected error in negative testing
        }
    }

    /**
     * Authorization grants, denies and clearing, asserted for both users and
     * roles on a couple of test resources.
     */
    public void doAuthorizationStuff() throws Exception {
        AuthorizationManager authMan = realm.getAuthorizationManager();
        UserStoreManager usWriter = realm.getUserStoreManager();

        usWriter.addRole("rolex", new String[] { "saman", "amara" }, null);
        usWriter.addRole("roley", null, null);
        authMan.authorizeRole("rolex", "wall", "write");
        authMan.authorizeRole("roley", "table", "write");
        authMan.authorizeUser("sunil", "wall", "read");

        assertTrue(authMan.isUserAuthorized("saman", "wall", "write"));
        assertTrue(authMan.isUserAuthorized("sunil", "wall", "read"));
        //assertTrue(authMan.isRoleAuthorized("primary/roley", "table", "write"));
        assertTrue(authMan.isRoleAuthorized("roley", "table", "write"));
        assertFalse(authMan.isUserAuthorized("saman", "wall", "read"));
        assertFalse(authMan.isUserAuthorized("sunil", "wall", "write"));
        assertEquals(1, authMan.getAllowedRolesForResource("wall", "write").length);
        assertEquals(1, authMan.getExplicitlyAllowedUsersForResource("wall", "read").length);

        authMan.denyRole("rolex", "wall", "write");
        //assertFalse(authMan.isRoleAuthorized("primary/rolex", "wall", "write"));
        assertFalse(authMan.isRoleAuthorized("rolex", "wall", "write"));
        authMan.denyUser("saman", "wall", "read");
        assertFalse(authMan.isUserAuthorized("saman", "wall", "read"));
        assertEquals(1, authMan.getDeniedRolesForResource("wall", "write").length);
        assertEquals(1, authMan.getExplicitlyDeniedUsersForResource("wall", "read").length);

        authMan.clearUserAuthorization("sunil", "wall", "read");
        //authMan.clearRoleAuthorization("primary/roley", "table", "write");
        authMan.clearRoleAuthorization("roley", "table", "write");
        authMan.clearResourceAuthorizations("wall");
        assertFalse(authMan.isUserAuthorized("saman", "wall", "write"));
        assertFalse(authMan.isUserAuthorized("sunil", "wall", "read"));
        assertFalse(authMan.isRoleAuthorized("roley", "table", "write"));
        //assertFalse(authMan.isRoleAuthorized("primary/roley", "table", "write"));
    }

    /**
     * Claim values: set/get/update/delete for the default profile and a
     * secondary ("home") profile, singly and in bulk.
     */
    public void doClaimStuff() throws Exception {
        UserStoreManager usWriter = realm.getUserStoreManager();
        String[] allClaims = { ClaimTestUtil.CLAIM_URI1, ClaimTestUtil.CLAIM_URI2,
                ClaimTestUtil.CLAIM_URI3 };

        // add default
        usWriter.setUserClaimValue("dimuthu", ClaimTestUtil.CLAIM_URI1, "claim1default", null);
        String value = usWriter.getUserClaimValue("dimuthu", ClaimTestUtil.CLAIM_URI1, null);
        assertEquals("claim1default", value);

        // update default
        usWriter.setUserClaimValue("dimuthu", ClaimTestUtil.CLAIM_URI1, "dimzi lee", null);
        value = usWriter.getUserClaimValue("dimuthu", ClaimTestUtil.CLAIM_URI1, null);
        assertEquals("dimzi lee", value);

        // multiple additions
        Map<String, String> map = new HashMap<String, String>();
        map.put(ClaimTestUtil.CLAIM_URI1, "lee");
        map.put(ClaimTestUtil.CLAIM_URI3, "muthu");
        usWriter.setUserClaimValue("dimuthu", ClaimTestUtil.CLAIM_URI2, "claim2default", null);
        usWriter.setUserClaimValues("dimuthu", map, ClaimTestUtil.HOME_PROFILE_NAME);
        usWriter.setUserClaimValue("dimuthu", UserCoreConstants.PROFILE_CONFIGURATION,
                ClaimTestUtil.HOME_PROFILE_NAME, ClaimTestUtil.HOME_PROFILE_NAME);

        Map<String, String> obtained = usWriter.getUserClaimValues("dimuthu", allClaims,
                ClaimTestUtil.HOME_PROFILE_NAME);
        //assertNull(obtained.get(ClaimTestUtil.CLAIM_URI1)); // hidden
        //assertEquals("claim2default", obtained.get(ClaimTestUtil.CLAIM_URI2)); // overridden
        assertEquals("muthu", obtained.get(ClaimTestUtil.CLAIM_URI3)); // normal

        // update
        map.put(ClaimTestUtil.CLAIM_URI3, "muthulee");
        usWriter.setUserClaimValues("dimuthu", map, ClaimTestUtil.HOME_PROFILE_NAME);
        value = usWriter.getUserClaimValue("dimuthu", ClaimTestUtil.CLAIM_URI3,
                ClaimTestUtil.HOME_PROFILE_NAME);
        assertEquals("muthulee", value);

        // delete
        usWriter.deleteUserClaimValue("dimuthu", ClaimTestUtil.CLAIM_URI1, null);
        value = usWriter.getUserClaimValue("dimuthu", ClaimTestUtil.CLAIM_URI1, null);
        assertNull(value);

        usWriter.deleteUserClaimValues("dimuthu", allClaims, ClaimTestUtil.HOME_PROFILE_NAME);
        obtained = usWriter.getUserClaimValues("dimuthu", allClaims,
                ClaimTestUtil.HOME_PROFILE_NAME);
        assertNull(obtained.get(ClaimTestUtil.CLAIM_URI2)); // overridden
    }
}
package fr.xephi.authme.datasource;

import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

import org.bukkit.entity.Player;

import fr.xephi.authme.AuthMe;
import fr.xephi.authme.cache.auth.PlayerAuth;
import fr.xephi.authme.cache.auth.PlayerCache;

/**
 * Caching decorator around another {@link DataSource}. All reads of player
 * auth data are served from an in-memory map when possible; writes go through
 * to the wrapped source and, on success, are mirrored into the cache.
 *
 * Cache keys are ALWAYS the lowercase nickname. The original implementation
 * lowercased keys only in the constructor and {@link #getAuth(String)}, so any
 * update/remove with a mixed-case nickname left stale or duplicate entries;
 * every method now normalizes the key the same way.
 */
public class CacheDataSource implements DataSource {

    private DataSource source;
    public AuthMe plugin;
    // Keyed by lowercase nickname; ConcurrentHashMap tolerates concurrent
    // iteration + removal (used by the purge methods).
    private ConcurrentHashMap<String, PlayerAuth> cache = new ConcurrentHashMap<String, PlayerAuth>();

    public CacheDataSource(AuthMe plugin, DataSource source) {
        this.plugin = plugin;
        this.source = source;
        /*
         * We need to load all players in cache ... It will take more time to
         * load the server, but it will be much easier to check for an
         * isAuthAvailable !
         */
        for (PlayerAuth auth : source.getAllAuths()) {
            cache.put(auth.getNickname().toLowerCase(), auth);
        }
    }

    @Override
    public synchronized boolean isAuthAvailable(String user) {
        return cache.containsKey(user.toLowerCase());
    }

    @Override
    public synchronized PlayerAuth getAuth(String user) {
        String key = user.toLowerCase();
        PlayerAuth cached = cache.get(key);
        if (cached != null) {
            return cached;
        }
        // Cache miss: fall through to the backing source and remember the hit.
        PlayerAuth auth = source.getAuth(key);
        if (auth != null) {
            cache.put(key, auth);
        }
        return auth;
    }

    @Override
    public synchronized boolean saveAuth(PlayerAuth auth) {
        if (source.saveAuth(auth)) {
            cache.put(auth.getNickname().toLowerCase(), auth);
            return true;
        }
        return false;
    }

    @Override
    public synchronized boolean updatePassword(PlayerAuth auth) {
        if (source.updatePassword(auth)) {
            PlayerAuth cached = cache.get(auth.getNickname().toLowerCase());
            if (cached != null) {
                cached.setHash(auth.getHash());
            }
            return true;
        }
        return false;
    }

    @Override
    public boolean updateSession(PlayerAuth auth) {
        if (source.updateSession(auth)) {
            PlayerAuth cached = cache.get(auth.getNickname().toLowerCase());
            if (cached != null) {
                cached.setIp(auth.getIp());
                cached.setLastLogin(auth.getLastLogin());
            }
            return true;
        }
        return false;
    }

    @Override
    public boolean updateQuitLoc(PlayerAuth auth) {
        if (source.updateQuitLoc(auth)) {
            PlayerAuth cached = cache.get(auth.getNickname().toLowerCase());
            if (cached != null) {
                cached.setQuitLocX(auth.getQuitLocX());
                cached.setQuitLocY(auth.getQuitLocY());
                cached.setQuitLocZ(auth.getQuitLocZ());
                cached.setWorld(auth.getWorld());
            }
            return true;
        }
        return false;
    }

    @Override
    public int getIps(String ip) {
        return source.getIps(ip);
    }

    @Override
    public int purgeDatabase(long until) {
        int cleared = source.purgeDatabase(until);
        if (cleared > 0) {
            // Mirror the purge in the cache; ConcurrentHashMap allows removal
            // while iterating over values().
            for (PlayerAuth auth : cache.values()) {
                if (auth.getLastLogin() < until) {
                    cache.remove(auth.getNickname().toLowerCase());
                }
            }
        }
        return cleared;
    }

    @Override
    public List<String> autoPurgeDatabase(long until) {
        List<String> cleared = source.autoPurgeDatabase(until);
        if (cleared.size() > 0) {
            for (PlayerAuth auth : cache.values()) {
                if (auth.getLastLogin() < until) {
                    cache.remove(auth.getNickname().toLowerCase());
                }
            }
        }
        return cleared;
    }

    @Override
    public synchronized boolean removeAuth(String user) {
        if (source.removeAuth(user)) {
            cache.remove(user.toLowerCase());
            return true;
        }
        return false;
    }

    @Override
    public synchronized void close() {
        source.close();
    }

    @Override
    public void reload() {
        cache.clear();
        source.reload();
        for (Player player : plugin.getServer().getOnlinePlayers()) {
            String user = player.getName().toLowerCase();
            if (PlayerCache.getInstance().isAuthenticated(user)) {
                // Explicit null check instead of catching the NPE that
                // ConcurrentHashMap.put(user, null) used to throw.
                PlayerAuth auth = source.getAuth(user);
                if (auth != null) {
                    cache.put(user, auth);
                }
            }
        }
    }

    @Override
    public synchronized boolean updateEmail(PlayerAuth auth) {
        if (source.updateEmail(auth)) {
            PlayerAuth cached = cache.get(auth.getNickname().toLowerCase());
            if (cached != null) {
                cached.setEmail(auth.getEmail());
            }
            return true;
        }
        return false;
    }

    @Override
    public synchronized boolean updateSalt(PlayerAuth auth) {
        if (source.updateSalt(auth)) {
            PlayerAuth cached = cache.get(auth.getNickname().toLowerCase());
            if (cached != null) {
                cached.setSalt(auth.getSalt());
            }
            return true;
        }
        return false;
    }

    @Override
    public synchronized List<String> getAllAuthsByName(PlayerAuth auth) {
        return source.getAllAuthsByName(auth);
    }

    @Override
    public synchronized List<String> getAllAuthsByIp(String ip) {
        return source.getAllAuthsByIp(ip);
    }

    @Override
    public synchronized List<String> getAllAuthsByEmail(String email) {
        return source.getAllAuthsByEmail(email);
    }

    @Override
    public synchronized void purgeBanned(List<String> banned) {
        source.purgeBanned(banned);
        for (PlayerAuth auth : cache.values()) {
            if (banned.contains(auth.getNickname())) {
                cache.remove(auth.getNickname().toLowerCase());
            }
        }
    }

    @Override
    public DataSourceType getType() {
        return source.getType();
    }

    @Override
    public boolean isLogged(String user) {
        return source.isLogged(user.toLowerCase());
    }

    @Override
    public void setLogged(String user) {
        source.setLogged(user.toLowerCase());
    }

    @Override
    public void setUnlogged(String user) {
        source.setUnlogged(user.toLowerCase());
    }

    @Override
    public void purgeLogged() {
        source.purgeLogged();
    }

    @Override
    public int getAccountsRegistered() {
        return source.getAccountsRegistered();
    }

    @Override
    public void updateName(String oldone, String newone) {
        String oldKey = oldone.toLowerCase();
        String newKey = newone.toLowerCase();
        PlayerAuth moved = cache.get(oldKey);
        if (moved != null) {
            cache.put(newKey, moved);
            cache.remove(oldKey);
        }
        // The backing source receives the names exactly as given.
        source.updateName(oldone, newone);
    }

    @Override
    public List<PlayerAuth> getAllAuths() {
        return source.getAllAuths();
    }

    @Override
    public List<PlayerAuth> getLoggedPlayers() {
        return source.getLoggedPlayers();
    }
}