text
stringlengths
7
1.01M
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package co.edu.uniandes.csw.partyServices.ejb;

import co.edu.uniandes.csw.partyServices.entities.ClienteEntity;
import co.edu.uniandes.csw.partyServices.entities.ValoracionEntity;
import co.edu.uniandes.csw.partyServices.exceptions.BusinessLogicException;
import co.edu.uniandes.csw.partyServices.persistence.ClientePersistence;
import co.edu.uniandes.csw.partyServices.persistence.ValoracionPersistence;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.ejb.Stateless;
import javax.inject.Inject;

/**
 * Business logic for the relation between the Cliente and Valoracion
 * entities: associating ratings to a client, querying them, and detaching
 * them (anonymizing the rating) when the relation is removed.
 *
 * @author Jesús Orlando Cárcamo Posada y Elías Negrete
 */
@Stateless
public class ClienteValoracionesLogic {

    private static final Logger LOGGER = Logger.getLogger(ClienteValoracionesLogic.class.getName());

    /** Persistence access for Cliente entities (injected dependency). */
    @Inject
    private ClientePersistence clientePersistence;

    /** Persistence access for Valoracion entities (injected dependency). */
    @Inject
    private ValoracionPersistence valoracionPersistence;

    /**
     * Associates a valoracion with a client.
     *
     * @param proveedorId  ID of the provider the valoracion belongs to.
     * @param valoracionId ID of the valoracion to associate.
     * @param clientesId   ID of the client to associate the valoracion with.
     * @return the updated valoracion, now linked to the client.
     */
    public ValoracionEntity addValoracion(Long proveedorId, Long valoracionId, Long clientesId) {
        LOGGER.log(Level.INFO, "Inicia proceso de asociar la valoracion con id = {1}, que pertenece al proveedor con id = {2}, al cliente con id = {0}", new Object[]{clientesId, valoracionId, proveedorId});
        ClienteEntity clienteEntity = clientePersistence.find(clientesId);
        ValoracionEntity valoracionEntity = valoracionPersistence.find(proveedorId, valoracionId);
        // Link the rating to the client and mirror the client's user name
        // onto the rating so it is displayed as its author.
        valoracionEntity.setCliente(clienteEntity);
        valoracionEntity.setNombreUsuario(clienteEntity.getNombreUsuario());
        ValoracionEntity actualizado = valoracionPersistence.update(valoracionEntity);
        LOGGER.log(Level.INFO, "Termina proceso de asociar la valoracion con id = {1}, que pertenece al proveedor con id = {2}, al cliente con id = {0}", new Object[]{clientesId, valoracionId, proveedorId});
        return actualizado;
    }

    /**
     * Returns all valoraciones associated with a client.
     *
     * @param clientesId ID of the client to look up.
     * @return the client's list of valoraciones.
     */
    public List<ValoracionEntity> getValoraciones(Long clientesId) {
        LOGGER.log(Level.INFO, "Inicia proceso de consultar las valoraciones asociadas al cliente con id = {0}", clientesId);
        return clientePersistence.find(clientesId).getValoraciones();
    }

    /**
     * Finds one valoracion of a client, identified by the valoracion's ID and
     * the ID of the provider it belongs to.
     *
     * @param proveedorId  ID of the provider the valoracion belongs to.
     * @param valoracionId ID of the valoracion to find.
     * @param clientesId   ID of the client the valoracion must belong to.
     * @return the requested valoracion.
     * @throws BusinessLogicException if the valoracion is not associated with
     *                                the client.
     */
    public ValoracionEntity getValoracion(Long proveedorId, Long valoracionId, Long clientesId) throws BusinessLogicException {
        LOGGER.log(Level.INFO, "Inicia proceso de consultar la valoracion con id = {1}, que pertenece al proveedor con id = {2}, del cliente con id = {0}", new Object[]{clientesId, valoracionId, proveedorId});
        List<ValoracionEntity> valoraciones = clientePersistence.find(clientesId).getValoraciones();
        ValoracionEntity valoracionEntity = valoracionPersistence.find(proveedorId, valoracionId);
        // Membership test: the valoracion must appear in the client's own list.
        int index = valoraciones.indexOf(valoracionEntity);
        LOGGER.log(Level.INFO, "Termina proceso de consultar la valoracion con id = {1}, que pertenece al proveedor con id = {2}, del cliente con id = {0}", new Object[]{clientesId, valoracionId, proveedorId});
        if (index >= 0) {
            return valoraciones.get(index);
        }
        throw new BusinessLogicException("La valoracion no está asociada al cliente");
    }

    /**
     * Removes the relation between a client and all of its valoraciones,
     * anonymizing each detached rating.
     *
     * @param clientesId ID of the client whose valoraciones are detached.
     */
    public void removeValoraciones(Long clientesId) {
        LOGGER.log(Level.INFO, "Inicia proceso de remover la relación de las valoraciones con el cliente con id = {0}", clientesId);
        ClienteEntity clienteEntity = clientePersistence.find(clientesId);
        // Fix: iterate only this client's own valoraciones instead of loading
        // the entire valoracion table with findAll() and filtering by equals().
        // Detaching (setCliente(null)) does not structurally modify the list
        // being iterated, so plain iteration is safe here.
        for (ValoracionEntity valoracion : clienteEntity.getValoraciones()) {
            valoracion.setCliente(null);
            valoracion.setNombreUsuario("Anonimo");
            valoracionPersistence.update(valoracion);
        }
        LOGGER.log(Level.INFO, "Termina proceso de remover la relación de las valoraciones con el cliente con id = {0}", clientesId);
    }
}
/*
 * This file is part of the "eHealth-Demo" project, formerly known as
 * "Telematics App Mockup".
 * Copyright 2017-2018, Hauke Sommerfeld and Sarah Schulz-Mukisa
 *
 * Licensed under the MIT license.
 *
 * For more information and/or a copy of the license visit the following
 * GitHub repository: https://github.com/haukesomm/eHealth-Demo
 */

package de.haukesomm.healthdemo.data;

/**
 * Created on 25.08.18
 * <p>
 * This is a data class providing basic information about a {@link Session} such as it's ID and
 * {@link SessionType}. All fields are public and final, so instances are immutable.
 * </p>
 *
 * @author Hauke Sommerfeld
 */
public class SessionDescription {

    /**
     * This Sessions (unique) ID
     */
    public final int id;

    /**
     * Session type (e.g. running, biking, etc.)
     */
    public final SessionType type;

    /**
     * Session description
     */
    public final String description;

    /**
     * Creates a new SessionDescription from an ID and {@link SessionType}
     *
     * @param id          The Session's ID
     * @param type        The Session's Type
     * @param description The Session's free-form description text
     */
    public SessionDescription(int id, SessionType type, String description) {
        this.id = id;
        this.type = type;
        this.description = description;
    }

    // No JavaDoc
    // NOTE(review): toString() intentionally(?) omits 'description' — confirm
    // with callers before changing.
    @Override
    public String toString() {
        return "ID: " + id + ", Type: " + type;
    }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.util.keyFMap;

import com.intellij.openapi.util.Key;
import org.jetbrains.annotations.NotNull;

/**
 * Immutable {@code KeyFMap} specialization that holds exactly two key/value
 * pairs. Keys are compared by identity ({@code ==}) throughout, and the two
 * pairs are stored sorted by key hash code so that {@code plus} can build an
 * {@code ArrayBackedFMap} with its hash array already ordered.
 */
final class PairElementsFMap implements KeyFMap {
    // invariant: key1.hashCode() < key2.hashCode()
    private final @NotNull Key key1;
    private final @NotNull Key key2;
    private final @NotNull Object value1;
    private final @NotNull Object value2;

    PairElementsFMap(@NotNull Key key1, @NotNull Object value1, @NotNull Key key2, @NotNull Object value2) {
        assert key1 != key2;
        // Key hashCodes are unique and ordered
        // Swap the pairs if necessary so the stored (key1, key2) always
        // satisfies the hash-order invariant above.
        if (key1.hashCode() < key2.hashCode()) {
            this.key1 = key1;
            this.value1 = value1;
            this.key2 = key2;
            this.value2 = value2;
        }
        else {
            this.key1 = key2;
            this.value1 = value2;
            this.key2 = key1;
            this.value2 = value1;
        }
    }

    /**
     * Returns a map with {@code key} bound to {@code value}. Reuses {@code this}
     * when the binding is already present (same value by identity); replacing an
     * existing key yields a new pair map; a third key grows the map into an
     * {@code ArrayBackedFMap}, inserting the new hash in sorted position.
     */
    @NotNull
    @Override
    public <V> KeyFMap plus(@NotNull Key<V> key, @NotNull V value) {
        if (key == key1) {
            return value == value1 ? this : new PairElementsFMap(key, value, key2, value2);
        }
        if (key == key2) {
            return value == value2 ? this : new PairElementsFMap(key, value, key1, value1);
        }
        // New key: pick the insertion point that keeps the hash array sorted.
        if (key.hashCode() < key1.hashCode()) {
            return new ArrayBackedFMap(new int[]{key.hashCode(), key1.hashCode(), key2.hashCode()},
                    new Object[]{value, value1, value2});
        }
        else if (key.hashCode() < key2.hashCode()) {
            return new ArrayBackedFMap(new int[]{key1.hashCode(), key.hashCode(), key2.hashCode()},
                    new Object[]{value1, value, value2});
        }
        return new ArrayBackedFMap(new int[]{key1.hashCode(), key2.hashCode(), key.hashCode()},
                new Object[]{value1, value2, value});
    }

    /** Removing either key shrinks to a one-element map; unknown keys are a no-op. */
    @NotNull
    @Override
    public KeyFMap minus(@NotNull Key<?> key) {
        if (key == key1) return new OneElementFMap(key2, value2);
        if (key == key2) return new OneElementFMap(key1, value1);
        return this;
    }

    @Override
    public <V> V get(@NotNull Key<V> key) {
        //noinspection unchecked
        return key == key1 ? (V)value1 : key == key2 ? (V)value2 : null;
    }

    @Override
    public int size() {
        return 2;
    }

    @NotNull
    @Override
    public Key[] getKeys() {
        return new Key[]{key1, key2};
    }

    @Override
    public String toString() {
        return "{" + key1 + "=" + value1 + ", " + key2 + "=" + value2 + "}";
    }

    @Override
    public boolean isEmpty() {
        return false;
    }

    /** Hash based on value identity (not value equals) — pairs with equalsByReference. */
    @Override
    public int getValueIdentityHashCode() {
        int hash = key1.hashCode() * 31 + System.identityHashCode(value1);
        hash = (hash * 31 + key2.hashCode()) * 31 + System.identityHashCode(value2);
        return hash;
    }

    @Override
    public int hashCode() {
        return (key1.hashCode() ^ value1.hashCode()) + (key2.hashCode() ^ value2.hashCode());
    }

    /** Keys compared by identity, values by equals(). */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof PairElementsFMap)) return false;
        PairElementsFMap map = (PairElementsFMap)o;
        return key1 == map.key1 && value1.equals(map.value1) && key2 == map.key2 && value2.equals(map.value2);
    }

    /** Like equals(), but values are also compared by identity. */
    @Override
    public boolean equalsByReference(KeyFMap o) {
        if (this == o) return true;
        if (!(o instanceof PairElementsFMap)) return false;
        PairElementsFMap map = (PairElementsFMap)o;
        return key1 == map.key1 && value1 == map.value1 && key2 == map.key2 && value2 == map.value2;
    }
}
package net.researchgate.restler.service.exceptions; import javax.ws.rs.core.Response; public class ServiceException extends RuntimeException { private static final long serialVersionUID = 3403608589330157070L; private Response.Status status = Response.Status.INTERNAL_SERVER_ERROR; public ServiceException(String message) { super(message); } public ServiceException(Throwable e) { super(e); } public ServiceException(String message, Throwable e) { super(message, e); } public ServiceException(String message, Response.Status status) { super(message); this.status = status; } public ServiceException(String message, Throwable cause, Response.Status status) { super(message, cause); this.status = status; } public Response.Status getStatus() { return status; } }
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.7 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2019.07.28 at 05:34:43 PM CEST // package ch.ehi.oereb.schemas.iso19139.gco; import javax.xml.bind.JAXBElement; import javax.xml.namespace.QName; public class MemberName extends JAXBElement<MemberNameType> { protected final static QName NAME = new QName("http://www.isotc211.org/2005/gco", "MemberName"); public MemberName(MemberNameType value) { super(NAME, ((Class) MemberNameType.class), null, value); } public MemberName() { super(NAME, ((Class) MemberNameType.class), null, null); } }
/* * Copyright 2016-2019 Crown Copyright * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.gov.gchq.gaffer.operation.graph; import uk.gov.gchq.gaffer.data.element.Edge; import uk.gov.gchq.gaffer.data.element.id.DirectedType; /** * A {@code GraphFilters} is an {@link uk.gov.gchq.gaffer.operation.Operation} which * performs additional filtering on the {@link Edge}s returned. */ public interface GraphFilters extends OperationView { /** * @param edge the {@link Edge} to be validated. * @return true if the {@link Edge} is valid. Otherwise false and a reason should be logged. */ @Override default boolean validate(final Edge edge) { return null != edge && validateFlags(edge) && validatePreAggregationFilter(edge) && validatePostAggregationFilter(edge) && validatePostTransformFilter(edge); } default boolean validateFlags(final Edge edge) { final DirectedType dirType = getDirectedType(); return DirectedType.isEither(dirType) || (dirType.isDirected() && edge.isDirected()) || (dirType.isUndirected() && !edge.isDirected()); } DirectedType getDirectedType(); void setDirectedType(final DirectedType directedType); interface Builder<OP extends GraphFilters, B extends Builder<OP, ?>> extends OperationView.Builder<OP, B> { default B directedType(final DirectedType directedType) { _getOp().setDirectedType(directedType); return _self(); } } }
/*
 * Copyright 2015-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.elasticsearch.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.elasticsearch.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * ElasticsearchDomainStatus JSON Unmarshaller
 *
 * Generated code: walks the Jackson token stream and populates an
 * ElasticsearchDomainStatus field-by-field. Do not edit by hand.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ElasticsearchDomainStatusJsonUnmarshaller implements Unmarshaller<ElasticsearchDomainStatus, JsonUnmarshallerContext> {

    public ElasticsearchDomainStatus unmarshall(JsonUnmarshallerContext context) throws Exception {
        ElasticsearchDomainStatus elasticsearchDomainStatus = new ElasticsearchDomainStatus();
        // Depth bookkeeping: fields of this object live exactly one level
        // below the depth at which parsing started.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null for the whole object unmarshalls to null, not an empty instance.
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("DomainId", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setDomainId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("DomainName", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setDomainName(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("ARN", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setARN(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Created", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setCreated(context.getUnmarshaller(Boolean.class).unmarshall(context));
                }
                if (context.testExpression("Deleted", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setDeleted(context.getUnmarshaller(Boolean.class).unmarshall(context));
                }
                if (context.testExpression("Endpoint", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setEndpoint(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("Endpoints", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setEndpoints(new MapUnmarshaller<String, String>(context.getUnmarshaller(String.class), context
                            .getUnmarshaller(String.class)).unmarshall(context));
                }
                if (context.testExpression("Processing", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setProcessing(context.getUnmarshaller(Boolean.class).unmarshall(context));
                }
                if (context.testExpression("UpgradeProcessing", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setUpgradeProcessing(context.getUnmarshaller(Boolean.class).unmarshall(context));
                }
                if (context.testExpression("ElasticsearchVersion", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setElasticsearchVersion(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("ElasticsearchClusterConfig", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setElasticsearchClusterConfig(ElasticsearchClusterConfigJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("EBSOptions", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setEBSOptions(EBSOptionsJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("AccessPolicies", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setAccessPolicies(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("SnapshotOptions", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setSnapshotOptions(SnapshotOptionsJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("VPCOptions", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setVPCOptions(VPCDerivedInfoJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("CognitoOptions", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setCognitoOptions(CognitoOptionsJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("EncryptionAtRestOptions", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setEncryptionAtRestOptions(EncryptionAtRestOptionsJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("NodeToNodeEncryptionOptions", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setNodeToNodeEncryptionOptions(NodeToNodeEncryptionOptionsJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("AdvancedOptions", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setAdvancedOptions(new MapUnmarshaller<String, String>(context.getUnmarshaller(String.class), context
                            .getUnmarshaller(String.class)).unmarshall(context));
                }
                if (context.testExpression("LogPublishingOptions", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setLogPublishingOptions(new MapUnmarshaller<String, LogPublishingOption>(context.getUnmarshaller(String.class),
                            LogPublishingOptionJsonUnmarshaller.getInstance()).unmarshall(context));
                }
                if (context.testExpression("ServiceSoftwareOptions", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setServiceSoftwareOptions(ServiceSoftwareOptionsJsonUnmarshaller.getInstance().unmarshall(context));
                }
                if (context.testExpression("DomainEndpointOptions", targetDepth)) {
                    context.nextToken();
                    elasticsearchDomainStatus.setDomainEndpointOptions(DomainEndpointOptionsJsonUnmarshaller.getInstance().unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have climbed back out to the depth where this
                // object's parsing began.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return elasticsearchDomainStatus;
    }

    // Lazily-created singleton; not thread-safe to initialize, but the class
    // is stateless so duplicate instances are harmless.
    private static ElasticsearchDomainStatusJsonUnmarshaller instance;

    public static ElasticsearchDomainStatusJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new ElasticsearchDomainStatusJsonUnmarshaller();
        return instance;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.tomcat.util.descriptor.web;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;

/**
 * Representation of a handler reference for a web service, as
 * represented in a <code>&lt;handler&gt;</code> element in the
 * deployment descriptor.
 *
 * @author Fabien Carrion
 */
public class ContextHandler extends ResourceBase {

    private static final long serialVersionUID = 1L;

    // ------------------------------------------------------------- Properties

    /**
     * The Handler reference class.
     */
    private String handlerclass = null;

    public String getHandlerclass() {
        return this.handlerclass;
    }

    public void setHandlerclass(String handlerclass) {
        this.handlerclass = handlerclass;
    }

    /**
     * A list of QName specifying the SOAP Headers the handler will work on.
     * -namespace and localpart values must be found inside the WSDL.
     *
     * A service-qname is composed by a namespaceURI and a localpart.
     *
     * soapHeader[0] : namespaceURI
     * soapHeader[1] : localpart
     */
    private final HashMap<String, String> soapHeaders = new HashMap<>();

    public Iterator<String> getLocalparts() {
        return soapHeaders.keySet().iterator();
    }

    public String getNamespaceuri(String localpart) {
        return soapHeaders.get(localpart);
    }

    public void addSoapHeaders(String localpart, String namespaceuri) {
        soapHeaders.put(localpart, namespaceuri);
    }

    /**
     * Set a configured property.
     * @param name The property name
     * @param value The property value
     */
    public void setProperty(String name, String value) {
        this.setProperty(name, (Object) value);
    }

    /**
     * The soapRole.
     */
    private final ArrayList<String> soapRoles = new ArrayList<>();

    public String getSoapRole(int i) {
        return this.soapRoles.get(i);
    }

    public int getSoapRolesSize() {
        return this.soapRoles.size();
    }

    public void addSoapRole(String soapRole) {
        this.soapRoles.add(soapRole);
    }

    /**
     * The portName.
     */
    private final ArrayList<String> portNames = new ArrayList<>();

    public String getPortName(int i) {
        return this.portNames.get(i);
    }

    public int getPortNamesSize() {
        return this.portNames.size();
    }

    public void addPortName(String portName) {
        this.portNames.add(portName);
    }

    // --------------------------------------------------------- Public Methods

    /**
     * Return a String representation of this object.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("ContextHandler[");
        sb.append("name=");
        sb.append(getName());
        if (handlerclass != null) {
            sb.append(", class=");
            sb.append(handlerclass);
        }
        // Fix: removed the always-true null check on soapHeaders — the field
        // is final and initialized inline, so it can never be null.
        sb.append(", soap-headers=");
        sb.append(this.soapHeaders);
        if (this.getSoapRolesSize() > 0) {
            sb.append(", soap-roles=");
            sb.append(soapRoles);
        }
        if (this.getPortNamesSize() > 0) {
            sb.append(", port-name=");
            sb.append(portNames);
        }
        if (this.listProperties() != null) {
            sb.append(", init-param=");
            sb.append(this.listProperties());
        }
        sb.append(']');
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + ((handlerclass == null) ? 0 : handlerclass.hashCode());
        // Fix: the collections are final, inline-initialized fields and can
        // never be null, so the former "== null ? 0 :" guards were dead code.
        result = prime * result + portNames.hashCode();
        result = prime * result + soapHeaders.hashCode();
        result = prime * result + soapRoles.hashCode();
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!super.equals(obj)) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        ContextHandler other = (ContextHandler) obj;
        if (handlerclass == null) {
            if (other.handlerclass != null) {
                return false;
            }
        } else if (!handlerclass.equals(other.handlerclass)) {
            return false;
        }
        // Fix: dropped dead null branches for the final collection fields —
        // both sides are ContextHandler instances, so the fields are non-null.
        if (!portNames.equals(other.portNames)) {
            return false;
        }
        if (!soapHeaders.equals(other.soapHeaders)) {
            return false;
        }
        if (!soapRoles.equals(other.soapRoles)) {
            return false;
        }
        return true;
    }
}
/**
 * Xml2Pages.java
 *
 * Copyright 2003 by Goldin-Rudahl Associates
 *
 * Created by Kurt Rudahl 3/19/2003
 * derived from manReader/detailGenerator
 *
 * $Id: Xml2Pages.java,v 1.4 2003/06/10 12:43:07 rudahl Exp $
 * $Log: Xml2Pages.java,v $
 * Revision 1.4  2003/06/10 12:43:07  rudahl
 * allow for empty var values
 *
 * Revision 1.3  2003/06/04 01:53:45  rudahl
 * refined especially tables, colors
 *
 * Revision 1.2  2003/05/04 21:21:20  rudahl
 * polished/added paragraph line margins attrib-inheritance
 *
 * Revision 1.1  2003/03/28 12:45:56  rudahl
 * components of Xml2Pages using lowagie - itext package
 *
 */
package com.grs.util;

import com.grs.gui.ValueLimits;
import com.lowagie.text.*;
import com.lowagie.text.pdf.PdfWriter;
import com.lowagie.text.html.HtmlWriter;
import com.lowagie.text.xml.*;
import java.util.*;
import java.io.*;
import javax.xml.transform.sax.*;
import javax.xml.parsers.*;
import org.xml.sax.*;
import org.xml.sax.helpers.*;

//*************************************************************************+
/**
 * This class reads and parses an XML document describing or defining a document
 * and produces a file consisting of the specified one or more page document.
 * This is initially created to produce pdf output using the iText package
 * but can be used to produce other formats. In particular, we are considering it
 * for Dragon, possibly as a means of specifying the contents of the Viewport,
 * or for implementing the Decorate utility.
 * <p>
 * This class uses the SAX2 Api and is intended to be compiled
 * under Java 1.4, which provides an implementation and plug-ins
 * for that interface.
 *
 * @author rudahl
 */
public class Xml2Pages extends DefaultHandler {

    /** temp accumulation of field info objects as they are created */
    protected Hashtable fieldTable = new Hashtable();

    /** temp accumulation of panel info objects as
     *  they are created */
    protected Hashtable panelTable = new Hashtable();

    /**
     * XML file to read.
     */
    protected String xmlFileName = null;

    /**
     * DTD file name
     */
    protected String dtdFileName = null;

    /**
     * keep track of nested element level
     * Used in tracing
     */
    protected int nestLevel = 0;
    protected String nestString = "";

    // Verbose trace output to stdout when true; set via setTracing().
    protected boolean bTracing = false;

    /**
     * Tags corresponding to each element in xlatedText.
     * Needed for formatting after all the elements have
     * been found and translations retrieved.
     */
    protected Vector textTag = null;

    /**
     * Keys corresponding to each element in xlatedText.
     * Used for debugging.
     */
    protected Vector textKey = null;

    /**
     * Source of XML data
     */
    // protected InputSource xmlSource = null;

    /**
     * Object that actually parses the
     * XML and calls the callbacks.
     */
    protected XMLReader xmlReader = null;

    // iText output document; writers registered in the constructor listen to it.
    protected Document document = new Document();

    //==================================================================
    /**
     * Constructor - intializes input file name, dtd file name.
     * Side effects: registers a PDF or HTML writer on 'document' that will
     * stream output to sResultFile. Errors are reported to stdout only —
     * the instance is left partially configured on failure.
     * @param xmlFile full path name to XML file
     * @param dtdFile full path name to DTD file
     * @param sResultFile full path name to output file
     * @param iFileType type of desired result: 0 => PDF   1 => HTML
     */
    public Xml2Pages(String xmlFile, String dtdFile, String sResultFile, int iFileType) {
        xmlFileName = xmlFile;
        dtdFileName = dtdFile;
        document.setMargins(0,0,0,0);
        try {
            // NOTE(review): 'stream' is opened but never used or closed —
            // looks like a leftover existence check; confirm before removing.
            FileInputStream stream = new FileInputStream(xmlFileName);
            // we create a writer that listens to the document
            // and directs a XML-stream to a file
            if (iFileType == 0)
                PdfWriter.getInstance(document, new FileOutputStream(sResultFile));
            else
                HtmlWriter.getInstance(document, new FileOutputStream(sResultFile));
        } catch (FileNotFoundException fnf) {
            System.out.println("Cannot find XML file: " + xmlFileName);
        } catch (DocumentException e) {
            System.out.println("Cannot create GrsPdfWriter: " + xmlFileName);
        }
    }

    /**
     * Parse the specified XML, creating the appropriate
     * UI data structures as described in the XML.
     * Actual element handling is delegated to SAXgrsHandler; this object is
     * also registered on xmlReader as content/entity/error/DTD handler.
     * @param args hash of command line name=value pairs
     *             used for substitution in <var> tags
     */
    public void parse(HashMap args)
            throws ClassNotFoundException, IllegalAccessException, InstantiationException,
            IOException, SAXException, ParserConfigurationException {
        if (bTracing)
            System.out.println("Xml2Pages::parse (en)");
        SAXParser saxParser = SAXParserFactory.newInstance().newSAXParser();
        xmlReader = saxParser.getXMLReader();
        xmlReader.setContentHandler(this);
        xmlReader.setEntityResolver(this);
        xmlReader.setErrorHandler(this);
        xmlReader.setDTDHandler(this);
        // Tag map translates XML tag names to rendering instructions.
        TagMap tagMap = new TagMap("com/grs/util/tagmapGRS.xml");
        if (bTracing)
            System.out.println("Xml2Pages::parse about to construct handler");
        SAXgrsHandler h = new SAXgrsHandler(document, tagMap, args, bTracing);
        if (bTracing)
            System.out.println("Xml2Pages::parse about to parse " + xmlFileName);
        saxParser.parse(xmlFileName,h);
    }

    //==================================================================
    /** display the contents of the attribute list for debugging. */
    public void dumpAttributeList(Attributes atts) {
        if (atts.getLength() == 0)
            System.out.println(nestString + "  (no attributes) ");
        else {
            System.out.println(nestString + "  AttributeList: ");
            for (int i = 0; i < atts.getLength(); i++) {
                String name = atts.getQName(i);
                // String type = atts.getType(i);
                String value = atts.getValue(i);
                System.out.println(nestString + "    name=" + name + " value=" + value);
            }
        }
    }

    //=================================================================
    /**
     * Methods to handle each possible element type.
     * All are no-op hooks intended to be overridden by subclasses.
     */
    protected void startPage(Hashtable attributes) {
    }

    protected void endPage(Hashtable attributes) {
    }

    protected void processMetaSpec(Hashtable attributes) {
    }

    protected void processPlacementSpec(Hashtable attributes) {
    }

    protected void startPageElement(XMLPagesTag elementTag, Hashtable attributes)
    // generally a noop
    {
    }

    protected void endPageElement(XMLPagesTag elementTag, Hashtable attributes)
    // generally where we render it
    {
    }

    //*********************************************************************
    /**
     * Access Methods
     */
    public XMLReader getXmlReader() {
        return xmlReader;
    }

    public void setTracing(boolean flag) {
        bTracing = flag;
    }

    public boolean getTracing() {
        return bTracing;
    }

    public int getPanelCount() {
        return panelTable.size();
    }

    //*********************************************************************
    /**
     * Replace angle brackets with HTML special character
     * indicators.
     * Note: only '&lt;' is escaped; '&gt;' passes through unchanged.
     */
    protected String replaceAngleBrackets(String inString) {
        StringBuffer outString = new StringBuffer(inString.length());
        int p1 = 0;
        int p2 = 0;
        p1 = inString.indexOf("<");
        while (p1 >= 0) {
            outString.append(inString.substring(p2,p1));
            outString.append("&lt;");
            p2 = p1 + 1;
            p1 = inString.indexOf("<",p2);
        }
        if (p2 < inString.length())
            outString.append(inString.substring(p2));
        return outString.toString();
    }

    //*********************************************************************
    // driver program - invoke as  Xml2Pages srcfilename destfilename
    static public void main(String[] argv) {
        // NOTE(review): bFirstAdded is never used — likely leftover.
        boolean bFirstAdded = false;
        HashMap master = new HashMap();
        if (argv.length < 2) {
            System.out.println(
                "Usage: Xml2Pages <xmlfilename> <resultfilename> "
                + "[-trace] [-pdf|-html|-rtf] [name=val...]");
            System.exit(0);
        }
        try {
            boolean bTracing = false;
            int iFiletype = 0;
            // Command-line metadata made available for <var> substitution.
            master.put("argc",new Integer(argv.length));
            master.put("argv0",argv[0]);
            master.put("argv1",argv[1]);
            if (argv.length > 2) {
                System.out.println("Supplied "+argv.length+" args");
                for (int i=2; i<argv.length; i++) {
                    if (argv[i].compareTo("-trace") == 0) {
                        System.out.println("Tracing enabled");
                        bTracing = true;
                    }
                    else if (argv[i].compareTo("-pdf") == 0) {
                        System.out.println("Selecting PDF output");
                        iFiletype = 0;
                    }
                    else if (argv[i].compareTo("-html") == 0) {
                        System.out.println("Selecting HTML output");
                        iFiletype = 1;
                    }
                    else if (argv[i].compareTo("-rtf") == 0) {
                        // NOTE(review): -rtf sets iFiletype=2, but the
                        // constructor only distinguishes 0 (PDF) from non-zero
                        // (HTML), so RTF currently produces HTML output.
                        System.out.println("Selecting RTF output");
                        iFiletype = 2;
                    }
                    else /* any number of name=value pairs */ {
                        String[] fields = argv[i].split("=");
                        if (fields.length > 1) {
                            //System.out.println(" got '"+fields[0]+"'='"
                            //      +fields[1]+"' for arg "+i);
                            master.put(fields[0],fields[1]);
                        }
                        else {
                            //System.out.println(" got '"+fields[0]+"'=''"
                            //      +" for arg "+i);
                            master.put(fields[0],"");
                        }
                    }
                }
            }
            Xml2Pages generator = new Xml2Pages(argv[0],null,argv[1],iFiletype);
            if (bTracing)
                generator.setTracing(true);
            generator.parse(master);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
package cn.iocoder.yudao.adminserver.modules.system.convert.sms; import cn.iocoder.yudao.framework.common.pojo.PageResult; import cn.iocoder.yudao.adminserver.modules.system.controller.sms.vo.log.SysSmsLogExcelVO; import cn.iocoder.yudao.adminserver.modules.system.controller.sms.vo.log.SysSmsLogRespVO; import cn.iocoder.yudao.adminserver.modules.system.dal.dataobject.sms.SysSmsLogDO; import org.mapstruct.Mapper; import org.mapstruct.factory.Mappers; import java.util.List; /** * 短信日志 Convert * * @author 芋道源码 */ @Mapper public interface SysSmsLogConvert { SysSmsLogConvert INSTANCE = Mappers.getMapper(SysSmsLogConvert.class); SysSmsLogRespVO convert(SysSmsLogDO bean); List<SysSmsLogRespVO> convertList(List<SysSmsLogDO> list); PageResult<SysSmsLogRespVO> convertPage(PageResult<SysSmsLogDO> page); List<SysSmsLogExcelVO> convertList02(List<SysSmsLogDO> list); }
package mage.cards.o;

import mage.abilities.Ability;
import mage.abilities.condition.common.AdamantCondition;
import mage.abilities.effects.OneShotEffect;
import mage.abilities.effects.common.ExileSpellEffect;
import mage.cards.*;
import mage.constants.CardType;
import mage.constants.Outcome;
import mage.constants.Zone;
import mage.game.Game;
import mage.players.Player;
import mage.target.Target;
import mage.target.common.TargetCardInYourGraveyard;
import mage.watchers.common.ManaSpentToCastWatcher;

import java.util.UUID;

/**
 * Card implementation for "Once and Future" ({3}{G} instant).
 * Targets: tag 1 = card returned to hand, tag 2 = optional card put on
 * top of the library (or, with Adamant, also returned to hand).
 *
 * @author TheElk801
 */
public final class OnceAndFuture extends CardImpl {

    public OnceAndFuture(UUID ownerId, CardSetInfo setInfo) {
        super(ownerId, setInfo, new CardType[]{CardType.INSTANT}, "{3}{G}");

        // Return target card from your graveyard to your hand. Put up to one other target card from your graveyard on top of your library. Exile Once and Future.
        // Adamant — If at least three green mana was spent to cast this spell, instead return those cards to your hand and exile Once and Future.
        this.getSpellAbility().addEffect(new OnceAndFutureEffect());
        // first target (tag 1): mandatory card destined for the hand
        Target target = new TargetCardInYourGraveyard().withChooseHint("To put in your hand");
        target.setTargetTag(1);
        this.getSpellAbility().addTarget(target);
        // second target (tag 2): up to one card destined for the library top
        target = new TargetCardInYourGraveyard(0, 1).withChooseHint("To put on top of your library");
        target.setTargetTag(2);
        this.getSpellAbility().addTarget(target);
        // watcher needed so AdamantCondition can inspect the mana spent
        this.getSpellAbility().addWatcher(new ManaSpentToCastWatcher());
    }

    private OnceAndFuture(final OnceAndFuture card) {
        super(card);
    }

    @Override
    public OnceAndFuture copy() {
        return new OnceAndFuture(this);
    }
}

/**
 * One-shot effect carrying out the card's graveyard recursion and the
 * Adamant replacement. The spell itself is exiled on every resolution path.
 */
class OnceAndFutureEffect extends OneShotEffect {

    OnceAndFutureEffect() {
        super(Outcome.Benefit);
        staticText = "Return target card from your graveyard to your hand. " +
                "Put up to one other target card from your graveyard on top of your library. Exile {this}." +
                "<br><i>Adamant</i> &mdash; If at least three green mana was spent to cast this spell, " +
                "instead return those cards to your hand and exile {this}.";
    }

    private OnceAndFutureEffect(final OnceAndFutureEffect effect) {
        super(effect);
    }

    @Override
    public OnceAndFutureEffect copy() {
        return new OnceAndFutureEffect(this);
    }

    @Override
    public boolean apply(Game game, Ability source) {
        Player player = game.getPlayer(source.getControllerId());
        if (player == null) {
            return false;
        }
        // card1 = "to hand" target, card2 = optional "to library top" target
        Card card1 = game.getCard(source.getFirstTarget());
        Card card2 = game.getCard(source.getTargets().get(1).getFirstTarget());
        // if the first target is gone, promote the second into its slot
        if (card1 == null) {
            card1 = card2;
            card2 = null;
        }
        if (card1 == null) {
            return false;
        }
        // only one card to move: it always goes to hand, then exile the spell
        if (card2 == null) {
            player.putInHand(card1, game);
            return new ExileSpellEffect().apply(game, source);
        }
        // Adamant: >= 3 green mana spent — both cards go to hand instead
        if (AdamantCondition.GREEN.apply(game, source)) {
            Cards cards = new CardsImpl();
            cards.add(card1);
            cards.add(card2);
            player.moveCards(cards, Zone.HAND, source, game);
            return new ExileSpellEffect().apply(game, source);
        }
        // normal mode: first to hand, second on top of library (not revealed)
        player.putInHand(card1, game);
        player.putCardsOnTopOfLibrary(new CardsImpl(card2), game, source, false);
        return new ExileSpellEffect().apply(game, source);
    }
}
/* * Copyright 2018 Carlos Rodriguez. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package engine.components; import engine.core.Transform; import engine.rendering.Material; import engine.rendering.Mesh; import engine.rendering.RenderingEngine; import engine.rendering.Shader; /** * * @author Carlos Rodriguez. * @version 1.1 * @since 2018 */ public class MeshRenderer extends GameComponent { private Transform transform; private Mesh mesh; private Material material; /** * Renderer of a mesh with more than one possible material * To render. * @param mesh to render. * @param transform of the mesh. * @param material of the mesh. */ public MeshRenderer(Mesh mesh, Transform transform, Material material) { this.mesh = mesh; this.transform = transform; this.material = material; } /** * Render method of the mesh. * @param shader to render. * @param renderingEngine to call. */ public void render(Shader shader, RenderingEngine renderingEngine) { shader.bind(); shader.updateUniforms(transform, material, renderingEngine); mesh.draw(); } }
/*******************************************************************************
 *     ___                  _   ____  ____
 *    / _ \ _   _  ___  ___| |_|  _ \| __ )
 *   | | | | | | |/ _ \/ __| __| | | |  _ \
 *   | |_| | |_| |  __/\__ \ |_| |_| | |_) |
 *    \__\_\\__,_|\___||___/\__|____/|____/
 *
 *  Copyright (c) 2014-2019 Appsicle
 *  Copyright (c) 2019-2020 QuestDB
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 ******************************************************************************/

package io.questdb.griffin;

import io.questdb.cairo.ColumnType;
import io.questdb.cairo.sql.Function;
import io.questdb.cairo.sql.RecordMetadata;
import io.questdb.griffin.model.AliasTranslator;
import io.questdb.griffin.model.ExpressionNode;
import io.questdb.griffin.model.IntervalUtils;
import io.questdb.griffin.model.IntrinsicModel;
import io.questdb.std.*;
import io.questdb.std.datetime.microtime.TimestampFormatUtils;
import io.questdb.std.str.FlyweightCharSequence;

import java.util.ArrayDeque;

import static io.questdb.griffin.SqlKeywords.*;

/**
 * Extracts "intrinsic" predicates from a SQL WHERE clause expression tree:
 * designated-timestamp comparisons become interval intersections/subtractions
 * on an {@link IntrinsicModel}, and equality/IN predicates on indexed key
 * columns become key-value lists. Recognised nodes are marked
 * {@code IntrinsicModel.TRUE} and later collapsed out of the residual filter.
 *
 * Stateful and single-threaded: temp collections and node lists are reused
 * across calls to {@link #extract}.
 */
final class WhereClauseParser implements Mutable {

    // dispatch codes for operators handled intrinsically (see static block below)
    private static final int INTRINSIC_OP_IN = 1;
    private static final int INTRINSIC_OP_GREATER = 2;
    private static final int INTRINSIC_OP_GREATER_EQ = 3;
    private static final int INTRINSIC_OP_LESS = 4;
    private static final int INTRINSIC_OP_LESS_EQ = 5;
    private static final int INTRINSIC_OP_EQUAL = 6;
    private static final int INTRINSIC_OP_NOT_EQ = 7;
    private static final int INTRINSIC_OP_NOT = 8;
    private static final CharSequenceIntHashMap intrinsicOps = new CharSequenceIntHashMap();

    // traversal stack for the iterative pre-order walk in extract()
    private final ArrayDeque<ExpressionNode> stack = new ArrayDeque<>();
    // nodes already absorbed into key-value inclusions / exclusions
    private final ObjList<ExpressionNode> keyNodes = new ObjList<>();
    private final ObjList<ExpressionNode> keyExclNodes = new ObjList<>();
    private final ObjList<ExpressionNode> tempNodes = new ObjList<>();
    // TODO: configure size
    private final ObjectPool<IntrinsicModel> models = new ObjectPool<>(IntrinsicModel.FACTORY, 8);
    // scratch sets/lists for collecting key values and their source positions
    private final CharSequenceHashSet tempKeys = new CharSequenceHashSet();
    private final IntList tempPos = new IntList();
    private final CharSequenceHashSet tempK = new CharSequenceHashSet();
    private final IntList tempP = new IntList();
    // TODO: configure size
    private final ObjectPool<FlyweightCharSequence> csPool = new ObjectPool<>(FlyweightCharSequence.FACTORY, 64);
    // name of the designated timestamp column, null when the table has none
    private CharSequence timestamp;
    // column from "latest by" that should win key-column selection, if any
    private CharSequence preferredKeyColumn;

    /**
     * Resets reusable state between queries.
     * NOTE(review): tempKeys/tempPos/tempK/tempP are not cleared here — they
     * are cleared at each use site; timestamp/preferredKeyColumn are
     * reassigned by extract(). Confirm this is intentional.
     */
    @Override
    public void clear() {
        this.models.clear();
        this.stack.clear();
        this.keyNodes.clear();
        this.keyExclNodes.clear();
        this.csPool.clear();
        this.tempNodes.clear();
    }

    /** Ensures a binary operator node has both operands. */
    private static void checkNodeValid(ExpressionNode node) throws SqlException {
        if (node.lhs == null || node.rhs == null) {
            throw SqlException.$(node.position, "Argument expected");
        }
    }

    /** True when both nodes are literals/constants with identical tokens. */
    private static boolean nodesEqual(ExpressionNode left, ExpressionNode right) {
        return (left.type == ExpressionNode.LITERAL || left.type == ExpressionNode.CONSTANT)
                && (right.type == ExpressionNode.LITERAL || right.type == ExpressionNode.CONSTANT)
                && Chars.equals(left.token, right.token);
    }

    /**
     * Handles '=' by trying both operand orders (column on either side).
     * Returns true when the node was absorbed into the model.
     */
    private boolean analyzeEquals(AliasTranslator translator, IntrinsicModel model, ExpressionNode node, RecordMetadata m, FunctionParser functionParser, SqlExecutionContext executionContext) throws SqlException {
        checkNodeValid(node);
        return analyzeEquals0(translator, model, node, node.lhs, node.rhs, m, functionParser, executionContext)
                || analyzeEquals0(translator, model, node, node.rhs, node.lhs, m, functionParser, executionContext);
    }

    /**
     * Core '=' analysis with 'a' assumed to be the column side and 'b' the
     * value side. Timestamp equality becomes an interval; equality on an
     * indexed key column merges into the model's key-value set.
     */
    private boolean analyzeEquals0(AliasTranslator translator, IntrinsicModel model, ExpressionNode node, ExpressionNode a, ExpressionNode b, RecordMetadata m, FunctionParser functionParser, SqlExecutionContext executionContext) throws SqlException {
        if (nodesEqual(a, b)) {
            // tautology such as x = x: always true, drop from filter
            node.intrinsicValue = IntrinsicModel.TRUE;
            return true;
        }

        if (a.type == ExpressionNode.LITERAL && (b.type == ExpressionNode.CONSTANT || b.type == ExpressionNode.BIND_VARIABLE || b.type == ExpressionNode.FUNCTION || b.type == ExpressionNode.OPERATION)) {
            if (isTimestamp(a)) {
                if (b.type == ExpressionNode.CONSTANT) {
                    // quoted timestamp literal: strip quotes via (1, len-1)
                    model.intersectIntervals(b.token, 1, b.token.length() - 1, b.position);
                    node.intrinsicValue = IntrinsicModel.TRUE;
                    return true;
                }
                Function function = functionParser.parseFunction(b, m, executionContext);
                checkFunctionCanBeTimestamp(m, executionContext, function);
                if (function.isConstant()) {
                    long value = function.getTimestamp(null);
                    if (value == Numbers.LONG_NaN) {
                        // make it empty set
                        model.intersectEmpty();
                    } else {
                        model.intersectIntervals(value, value);
                    }
                    node.intrinsicValue = IntrinsicModel.TRUE;
                    return true;
                } else if (function.isRuntimeConstant()) {
                    // e.g. bind variable: interval is resolved at execution time
                    model.intersectEquals(function);
                    node.intrinsicValue = IntrinsicModel.TRUE;
                    return true;
                }
            } else {
                CharSequence column = translator.translateAlias(a.token);
                int index = m.getColumnIndexQuiet(column);
                if (index == -1) {
                    throw SqlException.invalidColumn(a.position, a.token);
                }
                switch (m.getColumnType(index)) {
                    case ColumnType.SYMBOL:
                    case ColumnType.STRING:
                    case ColumnType.LONG:
                    case ColumnType.INT:
                        final boolean preferred = Chars.equalsIgnoreCaseNc(preferredKeyColumn, column);
                        final boolean indexed = m.isColumnIndexed(index);
                        if (preferred || (indexed && preferredKeyColumn == null)) {
                            CharSequence value = isNullKeyword(b.token) ? null : unquote(b.token);
                            if (Chars.equalsIgnoreCaseNc(model.keyColumn, column)) {
                                // compute overlap of values
                                // if values do overlap, keep only our value
                                // otherwise invalidate entire model
                                if (model.keyValues.contains(value)) {
                                    // when we have "x in ('a,'b') and x = 'a')" the x='b' can never happen
                                    // so we have to clear all other key values
                                    if (model.keyValues.size() > 1) {
                                        model.keyValues.clear();
                                        model.keyValuePositions.clear();
                                        model.keyValues.add(value);
                                        model.keyValuePositions.add(b.position);
                                        node.intrinsicValue = IntrinsicModel.TRUE;
                                    }
                                } else {
                                    if (model.keyExcludedValues.contains(value)) {
                                        if (model.keyExcludedValues.size() > 1) {
                                            int removedIndex = model.keyExcludedValues.remove(value);
                                            if (removedIndex > -1) {
                                                // NOTE(review): removeIndex(index) uses the COLUMN index,
                                                // not removedIndex — looks like a bug; confirm against
                                                // CharSequenceHashSet.remove() semantics.
                                                model.keyExcludedValuePositions.removeIndex(index);
                                            }
                                        } else {
                                            model.keyExcludedValues.clear();
                                            model.keyExcludedValuePositions.clear();
                                        }
                                        removeNodes(b, keyExclNodes);
                                    }
                                    // value contradicts existing inclusions: model can match nothing
                                    node.intrinsicValue = IntrinsicModel.TRUE;
                                    model.intrinsicValue = IntrinsicModel.FALSE;
                                    return false;
                                }
                            } else if (model.keyColumn == null || m.getIndexValueBlockCapacity(index) > m.getIndexValueBlockCapacity(model.keyColumn)) {
                                // adopt this column as the key column (better selectivity)
                                model.keyColumn = column;
                                model.keyValues.clear();
                                model.keyValuePositions.clear();
                                model.keyExcludedValues.clear();
                                model.keyExcludedValuePositions.clear();
                                model.keyValues.add(value);
                                model.keyValuePositions.add(b.position);
                                resetNodes();
                                node.intrinsicValue = IntrinsicModel.TRUE;
                            }
                            keyNodes.add(node);
                            return true;
                        }
                        //fall through
                    default:
                        return false;
                }
            }
        }
        return false;
    }

    /**
     * Handles '>' / '>='. Only timestamp comparisons are intrinsic; the
     * column may be on either side (mirrored into a "less" analysis).
     */
    private boolean analyzeGreater(IntrinsicModel model, ExpressionNode node, boolean equalsTo, FunctionParser functionParser, RecordMetadata metadata, SqlExecutionContext executionContext) throws SqlException {
        checkNodeValid(node);

        if (nodesEqual(node.lhs, node.rhs)) {
            // x > x can never hold
            model.intrinsicValue = IntrinsicModel.FALSE;
            return false;
        }

        if (timestamp == null) {
            return false;
        }

        if (node.lhs.type == ExpressionNode.LITERAL && Chars.equals(node.lhs.token, timestamp)) {
            return analyzeTimestampGreater(model, node, equalsTo, functionParser, metadata, executionContext, node.rhs);
        } else if (node.rhs.type == ExpressionNode.LITERAL && Chars.equals(node.rhs.token, timestamp)) {
            // "value > ts" is "ts < value"
            return analyzeTimestampLess(model, node, equalsTo, functionParser, metadata, executionContext, node.lhs);
        }

        return false;
    }

    /** Intersects the model with [lo, +inf) derived from a "ts > value" comparison. */
    private boolean analyzeTimestampGreater(IntrinsicModel model, ExpressionNode node, boolean equalsTo, FunctionParser functionParser, RecordMetadata metadata, SqlExecutionContext executionContext, ExpressionNode compareWithNode) throws SqlException {
        long lo;
        if (compareWithNode.type == ExpressionNode.CONSTANT) {
            try {
                lo = parseFullOrPartialDate(equalsTo, compareWithNode, true);
            } catch (NumericException e) {
                throw SqlException.invalidDate(compareWithNode.position);
            }
            model.intersectIntervals(lo, Long.MAX_VALUE);
            node.intrinsicValue = IntrinsicModel.TRUE;
            return true;
        } else if (compareWithNode.type == ExpressionNode.FUNCTION || compareWithNode.type == ExpressionNode.BIND_VARIABLE || compareWithNode.type == ExpressionNode.OPERATION) {
            Function function = functionParser.parseFunction(compareWithNode, metadata, executionContext);
            checkFunctionCanBeTimestamp(metadata, executionContext, function);

            if (function.isConstant()) {
                lo = function.getTimestamp(null);
                if (lo == Numbers.LONG_NaN) {
                    // make it empty set
                    model.intersectEmpty();
                } else {
                    // strict '>' shifts the bound by +1 microsecond
                    model.intersectIntervals(lo + adjustComparison(equalsTo, true), Long.MAX_VALUE);
                }
                node.intrinsicValue = IntrinsicModel.TRUE;
                return true;
            } else if (function.isRuntimeConstant()) {
                model.intersectIntervals(function, Long.MAX_VALUE, adjustComparison(equalsTo, true));
                node.intrinsicValue = IntrinsicModel.TRUE;
                return true;
            }
        }
        return false;
    }

    /**
     * Validates that a function can act as a timestamp value; undefined
     * bind variables are assigned the designated timestamp column's type.
     */
    private void checkFunctionCanBeTimestamp(RecordMetadata metadata, SqlExecutionContext executionContext, Function function) throws SqlException {
        if (function.getType() == ColumnType.UNDEFINED) {
            int timestampType = metadata.getColumnType(metadata.getTimestampIndex());
            function.assignType(timestampType, executionContext.getBindVariableService());
        } else if (function.getType() != ColumnType.DATE && function.getType() != ColumnType.TIMESTAMP) {
            throw SqlException.invalidDate(function.getPosition());
        }
    }

    /**
     * Handles 'in': timestamp interval form, list-of-values form, or
     * lambda (sub-query) form, tried in that order.
     */
    private boolean analyzeIn(AliasTranslator translator, IntrinsicModel model, ExpressionNode node, RecordMetadata metadata) throws SqlException {

        if (node.paramCount < 2) {
            throw SqlException.$(node.position, "Too few arguments for 'in'");
        }

        // binary 'in' keeps column in lhs; n-ary keeps it as the last arg
        ExpressionNode col = node.paramCount < 3 ? node.lhs : node.args.getLast();

        if (col.type != ExpressionNode.LITERAL) {
            return false;
        }

        CharSequence column = translator.translateAlias(col.token);

        if (metadata.getColumnIndexQuiet(column) == -1) {
            throw SqlException.invalidColumn(col.position, col.token);
        }
        return analyzeInInterval(model, col, node, false)
                || analyzeListOfValues(model, column, metadata, node)
                || analyzeInLambda(model, column, metadata, node);
    }

    /**
     * Handles "ts in (lo, hi)" on the designated timestamp: intersects
     * (or, when negated, subtracts) the [lo, hi] interval.
     */
    private boolean analyzeInInterval(IntrinsicModel model, ExpressionNode col, ExpressionNode in, boolean isNegated) throws SqlException {
        if (!isTimestamp(col)) {
            return false;
        }

        if (in.paramCount > 3) {
            throw SqlException.$(in.args.getQuick(0).position, "Too many args");
        }

        if (in.paramCount < 3) {
            throw SqlException.$(in.position, "Too few args");
        }

        // args are stored in reverse order: last pushed is at index 0
        ExpressionNode lo = in.args.getQuick(1);
        ExpressionNode hi = in.args.getQuick(0);

        if (lo.type == ExpressionNode.CONSTANT && hi.type == ExpressionNode.CONSTANT) {
            long loMillis;
            long hiMillis;

            try {
                loMillis = TimestampFormatUtils.tryParse(lo.token, 1, lo.token.length() - 1);
            } catch (NumericException ignore) {
                throw SqlException.invalidDate(lo.position);
            }

            try {
                hiMillis = TimestampFormatUtils.tryParse(hi.token, 1, hi.token.length() - 1);
            } catch (NumericException ignore) {
                throw SqlException.invalidDate(hi.position);
            }

            if (isNegated) {
                model.subtractIntervals(loMillis, hiMillis);
            } else {
                model.intersectIntervals(loMillis, hiMillis);
            }
            in.intrinsicValue = IntrinsicModel.TRUE;
            return true;
        }
        return false;
    }

    /**
     * Handles "column in (select ...)" on an indexed key column by
     * recording the sub-query on the model.
     */
    private boolean analyzeInLambda(IntrinsicModel model, CharSequence columnName, RecordMetadata meta, ExpressionNode node) throws SqlException {
        int columnIndex = meta.getColumnIndex(columnName);
        boolean preferred = Chars.equalsIgnoreCaseNc(preferredKeyColumn, columnName);

        if (preferred || (preferredKeyColumn == null && meta.isColumnIndexed(columnIndex))) {
            if (preferredKeyColumn != null && !Chars.equalsIgnoreCase(columnName, preferredKeyColumn)) {
                return false;
            }

            if (node.rhs == null || node.rhs.type != ExpressionNode.QUERY) {
                return false;
            }

            // check if we already have indexed column and it is of worse selectivity
            if (model.keyColumn != null
                    && (!Chars.equalsIgnoreCase(model.keyColumn, columnName))
                    && meta.getIndexValueBlockCapacity(columnIndex) <= meta.getIndexValueBlockCapacity(model.keyColumn)) {
                return false;
            }

            if ((Chars.equalsIgnoreCaseNc(model.keyColumn, columnName) && model.keySubQuery != null) || node.paramCount > 2) {
                throw SqlException.$(node.position, "Multiple lambda expressions not supported");
            }

            model.keyValues.clear();
            model.keyValuePositions.clear();
            model.keyValuePositions.add(node.position);
            model.keySubQuery = node.rhs.queryModel;

            // revert previously processed nodes
            return revertProcessedNodes(keyNodes, model, columnName, node);
        }
        return false;
    }

    /** Handles '<' / '<='; mirror image of analyzeGreater. */
    private boolean analyzeLess(IntrinsicModel model, ExpressionNode node, boolean equalsTo, FunctionParser functionParser, RecordMetadata metadata, SqlExecutionContext executionContext) throws SqlException {

        checkNodeValid(node);

        if (nodesEqual(node.lhs, node.rhs)) {
            model.intrinsicValue = IntrinsicModel.FALSE;
            return false;
        }

        if (timestamp == null) {
            return false;
        }

        if (node.lhs.type == ExpressionNode.LITERAL && Chars.equals(node.lhs.token, timestamp)) {
            return analyzeTimestampLess(model, node, equalsTo, functionParser, metadata, executionContext, node.rhs);
        } else if (node.rhs.type == ExpressionNode.LITERAL && Chars.equals(node.rhs.token, timestamp)) {
            // "value < ts" is "ts > value"
            return analyzeTimestampGreater(model, node, equalsTo, functionParser, metadata, executionContext, node.lhs);
        }

        return false;
    }

    /** Intersects the model with (-inf, hi] derived from a "ts < value" comparison. */
    private boolean analyzeTimestampLess(IntrinsicModel model, ExpressionNode node, boolean equalsTo, FunctionParser functionParser, RecordMetadata metadata, SqlExecutionContext executionContext, ExpressionNode compareWithNode) throws SqlException {
        if (compareWithNode.type == ExpressionNode.CONSTANT) {
            try {
                long hi = parseFullOrPartialDate(equalsTo, compareWithNode, false);
                model.intersectIntervals(Long.MIN_VALUE, hi);
                node.intrinsicValue = IntrinsicModel.TRUE;
            } catch (NumericException e) {
                throw SqlException.invalidDate(compareWithNode.position);
            }
            return true;
        } else if (compareWithNode.type == ExpressionNode.FUNCTION || compareWithNode.type == ExpressionNode.BIND_VARIABLE || compareWithNode.type == ExpressionNode.OPERATION) {
            Function function = functionParser.parseFunction(compareWithNode, metadata, executionContext);
            checkFunctionCanBeTimestamp(metadata, executionContext, function);

            if (function.isConstant()) {
                long hi = function.getTimestamp(null);
                if (hi == Numbers.LONG_NaN) {
                    model.intersectEmpty();
                } else {
                    // strict '<' shifts the bound by -1 microsecond
                    model.intersectIntervals(Long.MIN_VALUE, hi + adjustComparison(equalsTo, false));
                }
                node.intrinsicValue = IntrinsicModel.TRUE;
                return true;
            } else if (function.isRuntimeConstant()) {
                model.intersectIntervals(Long.MIN_VALUE, function, adjustComparison(equalsTo, false));
                node.intrinsicValue = IntrinsicModel.TRUE;
                return true;
            }
        }
        return false;
    }

    /**
     * Bound adjustment for strict comparisons: 0 for inclusive (>=, <=),
     * +1 for an exclusive lower bound, -1 for an exclusive upper bound.
     */
    private static short adjustComparison(boolean equalsTo, boolean isLo) {
        return equalsTo ? 0 : isLo ? (short) 1 : (short) -1;
    }

    /**
     * Handles "column in (v1, v2, ...)" on an indexed key column by
     * merging the constant values into the model's key-value set.
     */
    private boolean analyzeListOfValues(IntrinsicModel model, CharSequence columnName, RecordMetadata meta, ExpressionNode node) {
        final int columnIndex = meta.getColumnIndex(columnName);
        boolean newColumn = true;
        boolean preferred = Chars.equalsIgnoreCaseNc(preferredKeyColumn, columnName);

        if (preferred || (preferredKeyColumn == null && meta.isColumnIndexed(columnIndex))) {

            // check if we already have indexed column and it is of worse selectivity
            // "preferred" is an unfortunate name, this column is from "latest by" clause, I should name it better
            //
            if (model.keyColumn != null
                    && (newColumn = !Chars.equals(model.keyColumn, columnName))
                    && meta.getIndexValueBlockCapacity(columnIndex) <= meta.getIndexValueBlockCapacity(model.keyColumn)) {
                return false;
            }

            int i = node.paramCount - 1;
            tempKeys.clear();
            tempPos.clear();

            // collect and analyze values of indexed field
            // if any of values is not an indexed constant - bail out
            if (i == 1) {
                if (node.rhs == null || (node.rhs.type != ExpressionNode.CONSTANT && node.rhs.type != ExpressionNode.BIND_VARIABLE)) {
                    return false;
                }
                if (tempKeys.add(unquote(node.rhs.token))) {
                    tempPos.add(node.position);
                }
            } else {
                for (i--; i > -1; i--) {
                    ExpressionNode c = node.args.getQuick(i);
                    if (c.type != ExpressionNode.CONSTANT && c.type != ExpressionNode.BIND_VARIABLE) {
                        return false;
                    }

                    if (isNullKeyword(c.token)) {
                        if (tempKeys.add(null)) {
                            tempPos.add(c.position);
                        }
                    } else {
                        if (tempKeys.add(unquote(c.token))) {
                            tempPos.add(c.position);
                        }
                    }
                }
            }

            // clear values if this is new column
            // and reset intrinsic values on nodes associated with old column
            if (newColumn) {
                model.keyValues.clear();
                model.keyValuePositions.clear();
                model.keyValues.addAll(tempKeys);
                model.keyValuePositions.addAll(tempPos);
                return revertProcessedNodes(keyNodes, model, columnName, node);
            } else {
                if (model.keyValues.size() == 0) {
                    model.keyValues.addAll(tempKeys);
                    model.keyValuePositions.addAll(tempPos);
                }
            }

            if (model.keySubQuery == null) {
                // calculate overlap of values
                replaceAllWithOverlap(model, true);

                keyNodes.add(node);
                node.intrinsicValue = IntrinsicModel.TRUE;
                return true;
            }
        }
        return false;
    }

    /** Handles '!=' by trying both operand orders. */
    private boolean analyzeNotEquals(AliasTranslator translator, IntrinsicModel model, ExpressionNode node, RecordMetadata m) throws SqlException {
        checkNodeValid(node);
        return analyzeNotEquals0(translator, model, node, node.lhs, node.rhs, m)
                || analyzeNotEquals0(translator, model, node, node.rhs, node.lhs, m);
    }

    /**
     * Core '!=' analysis: timestamp inequality subtracts an interval;
     * inequality on an indexed key column merges into the excluded-value set.
     */
    private boolean analyzeNotEquals0(AliasTranslator translator, IntrinsicModel model, ExpressionNode node, ExpressionNode a, ExpressionNode b, RecordMetadata m) throws SqlException {

        if (Chars.equals(a.token, b.token)) {
            // x != x can never hold
            model.intrinsicValue = IntrinsicModel.FALSE;
            return true;
        }

        if (a.type == ExpressionNode.LITERAL && b.type == ExpressionNode.CONSTANT) {
            if (isTimestamp(a)) {
                model.subtractIntervals(b.token, 1, b.token.length() - 1, b.position);
                node.intrinsicValue = IntrinsicModel.TRUE;
                return true;
            } else {
                CharSequence column = translator.translateAlias(a.token);
                int index = m.getColumnIndexQuiet(column);
                if (index == -1) {
                    throw SqlException.invalidColumn(a.position, a.token);
                }
                switch (m.getColumnType(index)) {
                    case ColumnType.SYMBOL:
                    case ColumnType.STRING:
                    case ColumnType.LONG:
                    case ColumnType.INT:
                        if (m.isColumnIndexed(index)) {
                            final boolean preferred = Chars.equalsIgnoreCaseNc(preferredKeyColumn, column);
                            final boolean indexed = m.isColumnIndexed(index);
                            if (indexed && preferredKeyColumn == null) {
                                CharSequence value = isNullKeyword(b.token) ? null : unquote(b.token);
                                if (Chars.equalsIgnoreCaseNc(model.keyColumn, column)) {
                                    if (model.keyExcludedValues.contains(value)) {
                                        // when we have "x not in ('a,'b') and x != 'a')" the x='b' can never happen
                                        // so we have to clear all other key values
                                        if (model.keyExcludedValues.size() > 1) {
                                            model.keyExcludedValues.clear();
                                            model.keyExcludedValuePositions.clear();
                                            model.keyExcludedValues.add(value);
                                            model.keyExcludedValuePositions.add(b.position);
                                            node.intrinsicValue = IntrinsicModel.TRUE;
                                            return true;
                                        }
                                    } else {
                                        if (model.keyValues.contains(value)) {
                                            if (model.keyValues.size() > 1) {
                                                int removedIndex = model.keyValues.remove(value);
                                                if (removedIndex > -1) {
                                                    // NOTE(review): removeIndex(index) uses the COLUMN
                                                    // index, not removedIndex — looks like a bug; and the
                                                    // following remove(b.position) also looks redundant.
                                                    // Confirm before changing.
                                                    model.keyValuePositions.removeIndex(index);
                                                }
                                                model.keyValuePositions.remove(b.position);
                                            } else {
                                                model.keyValues.clear();
                                                model.keyValuePositions.clear();
                                            }
                                            removeNodes(b, keyNodes);
                                        }
                                        node.intrinsicValue = IntrinsicModel.TRUE;
                                        model.intrinsicValue = IntrinsicModel.FALSE;
                                        return false;
                                    }
                                } else if (model.keyColumn == null || m.getIndexValueBlockCapacity(index) > m.getIndexValueBlockCapacity(model.keyColumn)) {
                                    // adopt this column as the key column via exclusion
                                    model.keyColumn = column;
                                    model.keyValues.clear();
                                    model.keyValuePositions.clear();
                                    model.keyExcludedValues.clear();
                                    model.keyExcludedValuePositions.clear();
                                    model.keyExcludedValues.add(value);
                                    model.keyExcludedValuePositions.add(b.position);
                                    resetNodes();
                                    node.intrinsicValue = IntrinsicModel.TRUE;
                                }
                                keyExclNodes.add(node);
                                return true;
                            } else if (preferred) {
                                keyExclNodes.add(node);
                                return false;
                            }
                        }
                        return false;
                    default:
                        break;
                }
            }
        }
        return false;
    }

    /** Handles "not (x in ...)": interval subtraction or excluded-value list. */
    private boolean analyzeNotIn(AliasTranslator translator, IntrinsicModel model, ExpressionNode notNode, RecordMetadata m) throws SqlException {
        ExpressionNode node = notNode.rhs;

        if (node.paramCount < 2) {
            throw SqlException.$(node.position, "Too few arguments for 'in'");
        }

        ExpressionNode col = node.paramCount < 3 ? node.lhs : node.args.getLast();

        if (col.type != ExpressionNode.LITERAL) {
            throw SqlException.$(col.position, "Column name expected");
        }

        CharSequence column = translator.translateAlias(col.token);

        if (m.getColumnIndexQuiet(column) == -1) {
            throw SqlException.invalidColumn(col.position, col.token);
        }

        boolean ok = analyzeInInterval(model, col, node, true);
        if (ok) {
            notNode.intrinsicValue = IntrinsicModel.TRUE;
        } else {
            analyzeNotListOfValues(model, column, m, node, notNode);
        }

        return ok;
    }

    /**
     * Handles "column not in (v1, v2, ...)" on an indexed key column by
     * merging the constants into the model's excluded-value set.
     */
    private void analyzeNotListOfValues(IntrinsicModel model, CharSequence columnName, RecordMetadata meta, ExpressionNode node, ExpressionNode notNode) {
        final int columnIndex = meta.getColumnIndex(columnName);
        boolean newColumn = true;
        boolean preferred = Chars.equalsIgnoreCaseNc(preferredKeyColumn, columnName);

        if (preferred || (preferredKeyColumn == null && meta.isColumnIndexed(columnIndex))) {

            if (model.keyColumn != null
                    && (newColumn = !Chars.equals(model.keyColumn, columnName))
                    && meta.getIndexValueBlockCapacity(columnIndex) <= meta.getIndexValueBlockCapacity(model.keyColumn)) {
                return;
            }

            int i = node.paramCount - 1;
            tempKeys.clear();
            tempPos.clear();

            // collect and analyze values of indexed field
            // if any of values is not an indexed constant - bail out
            if (i == 1) {
                if (node.rhs == null || node.rhs.type != ExpressionNode.CONSTANT) {
                    return;
                }
                if (tempKeys.add(unquote(node.rhs.token))) {
                    tempPos.add(node.position);
                }
            } else {
                for (i--; i > -1; i--) {
                    ExpressionNode c = node.args.getQuick(i);
                    if (c.type != ExpressionNode.CONSTANT) {
                        return;
                    }
                    if (isNullKeyword(c.token)) {
                        if (tempKeys.add(null)) {
                            tempPos.add(c.position);
                        }
                    } else {
                        if (tempKeys.add(unquote(c.token))) {
                            tempPos.add(c.position);
                        }
                    }
                }
            }

            // clear values if this is new column
            // and reset intrinsic values on nodes associated with old column
            if (newColumn) {
                model.keyExcludedValues.clear();
                model.keyExcludedValuePositions.clear();
                model.keyExcludedValues.addAll(tempKeys);
                model.keyExcludedValuePositions.addAll(tempPos);
                revertProcessedNodes(keyExclNodes, model, columnName, notNode);
                return;
            } else {
                if (model.keyExcludedValues.size() == 0) {
                    model.keyExcludedValues.addAll(tempKeys);
                    model.keyExcludedValuePositions.addAll(tempPos);
                }
            }

            if (model.keySubQuery == null) {
                // calculate overlap of values
                replaceAllWithOverlap(model, false);

                keyExclNodes.add(notNode);
                notNode.intrinsicValue = IntrinsicModel.TRUE;
            }
        }
    }

    /**
     * Applies collected exclusion nodes ('!='/'not in') against the model's
     * included key values; marks the model FALSE when all values are excluded.
     * Always clears keyExclNodes on exit.
     */
    private void applyKeyExclusions(AliasTranslator translator, IntrinsicModel model) {
        if (model.keyColumn != null && model.keyValues.size() > 0 && keyExclNodes.size() > 0) {
            OUT:
            for (int i = 0, n = keyExclNodes.size(); i < n; i++) {
                ExpressionNode parent = keyExclNodes.getQuick(i);

                ExpressionNode node = isNotKeyword(parent.token) ? parent.rhs : parent;
                // this could either be '=' or 'in'
                if (node.paramCount == 2) {
                    ExpressionNode col;
                    ExpressionNode val;

                    if (node.lhs.type == ExpressionNode.LITERAL) {
                        col = node.lhs;
                        val = node.rhs;
                    } else {
                        col = node.rhs;
                        val = node.lhs;
                    }

                    final CharSequence column = translator.translateAlias(col.token);
                    if (Chars.equals(column, model.keyColumn)) {
                        model.excludeValue(val);
                        parent.intrinsicValue = IntrinsicModel.TRUE;
                        if (model.intrinsicValue == IntrinsicModel.FALSE) {
                            break;
                        }
                    }
                }

                if (node.paramCount > 2) {
                    // n-ary 'in': column literal is the last argument
                    ExpressionNode col = node.args.getQuick(node.paramCount - 1);
                    final CharSequence column = translator.translateAlias(col.token);
                    if (Chars.equals(column, model.keyColumn)) {
                        for (int j = node.paramCount - 2; j > -1; j--) {
                            ExpressionNode val = node.args.getQuick(j);
                            model.excludeValue(val);
                            if (model.intrinsicValue == IntrinsicModel.FALSE) {
                                break OUT;
                            }
                        }
                        parent.intrinsicValue = IntrinsicModel.TRUE;
                    }
                }
            }
        }
        keyExclNodes.clear();
    }

    /** Recursively removes subtrees marked intrinsic (TRUE) from the filter. */
    private ExpressionNode collapseIntrinsicNodes(ExpressionNode node) {
        if (node == null || node.intrinsicValue == IntrinsicModel.TRUE) {
            return null;
        }
        node.lhs = collapseIntrinsicNodes(collapseNulls0(node.lhs));
        node.rhs = collapseIntrinsicNodes(collapseNulls0(node.rhs));
        return collapseNulls0(node);
    }

    /** Simplifies an 'and' node whose child was removed or is always true. */
    private ExpressionNode collapseNulls0(ExpressionNode node) {
        if (node == null || node.intrinsicValue == IntrinsicModel.TRUE) {
            return null;
        }
        if (node.queryModel == null && isAndKeyword(node.token)) {
            if (node.lhs == null || node.lhs.intrinsicValue == IntrinsicModel.TRUE) {
                return node.rhs;
            }
            if (node.rhs == null || node.rhs.intrinsicValue == IntrinsicModel.TRUE) {
                return node.lhs;
            }
        }
        return node;
    }

    /**
     * Entry point: walks the WHERE clause tree, absorbing intrinsic
     * predicates into a pooled IntrinsicModel, and leaves the residual
     * (non-intrinsic) expression in {@code model.filter}.
     *
     * @param translator         resolves column aliases to canonical names
     * @param node               root of the WHERE expression tree
     * @param m                  metadata used for column lookups
     * @param preferredKeyColumn key column preferred by "latest by", or null
     * @param timestampIndex     designated timestamp column index, or negative
     * @throws SqlException on invalid columns, dates or argument counts
     */
    IntrinsicModel extract(
            AliasTranslator translator,
            ExpressionNode node,
            RecordMetadata m,
            CharSequence preferredKeyColumn,
            int timestampIndex,
            FunctionParser functionParser,
            RecordMetadata metadata,
            SqlExecutionContext executionContext
    ) throws SqlException {
        this.timestamp = timestampIndex < 0 ? null : m.getColumnName(timestampIndex);
        this.preferredKeyColumn = preferredKeyColumn;

        IntrinsicModel model = models.next();

        // pre-order iterative tree traversal
        // see: http://en.wikipedia.org/wiki/Tree_traversal
        if (removeAndIntrinsics(translator, model, node, m, functionParser, metadata, executionContext)) {
            return model;
        }
        ExpressionNode root = node;

        while (!stack.isEmpty() || node != null) {
            if (node != null) {
                if (isAndKeyword(node.token)) {
                    // only descend through AND conjunctions; anything else is opaque
                    if (!removeAndIntrinsics(translator, model, node.rhs, m, functionParser, metadata, executionContext)) {
                        stack.push(node.rhs);
                    }
                    node = removeAndIntrinsics(translator, model, node.lhs, m, functionParser, metadata, executionContext) ? null : node.lhs;
                } else {
                    node = stack.poll();
                }
            } else {
                node = stack.poll();
            }
        }
        applyKeyExclusions(translator, model);
        model.filter = collapseIntrinsicNodes(root);
        return model;
    }

    /** True when the node's token names the designated timestamp column. */
    private boolean isTimestamp(ExpressionNode n) {
        return Chars.equalsNc(n.token, timestamp);
    }

    /**
     * Parses a quoted timestamp literal; short tokens are treated as partial
     * dates (floor/ceiling depending on bound side and strictness), full-length
     * tokens as exact timestamps adjusted by +/-1 for strict comparisons.
     */
    private long parseFullOrPartialDate(boolean equalsTo, ExpressionNode node, boolean isLo) throws NumericException {
        long ts;
        final int len = node.token.length();
        // len - 2 excludes the surrounding quotes
        if (len - 2 < 20) {
            if (isLo) {
                if (equalsTo) {
                    ts = IntervalUtils.parseFloorPartialDate(node.token, 1, len - 1);
                } else {
                    ts = IntervalUtils.parseCCPartialDate(node.token, 1, len - 1);
                }
            } else {
                if (equalsTo) {
                    ts = IntervalUtils.parseCCPartialDate(node.token, 1, len - 1) - 1;
                } else {
                    ts = IntervalUtils.parseFloorPartialDate(node.token, 1, len - 1) - 1;
                }
            }
        } else {
            long inc = equalsTo ? 0 : isLo ? 1 : -1;
            ts = TimestampFormatUtils.tryParse(node.token, 1, node.token.length() - 1) + inc;
        }
        return ts;
    }

    /**
     * Dispatches a node to the matching analyze* routine based on its
     * operator token. Returns true when the node became intrinsic.
     */
    private boolean removeAndIntrinsics(AliasTranslator translator, IntrinsicModel model, ExpressionNode node, RecordMetadata m, FunctionParser functionParser, RecordMetadata metadata, SqlExecutionContext executionContext) throws SqlException {
        switch (intrinsicOps.get(node.token)) {
            case INTRINSIC_OP_IN:
                return analyzeIn(translator, model, node, m);
            case INTRINSIC_OP_GREATER:
                return analyzeGreater(model, node, false, functionParser, metadata, executionContext);
            case INTRINSIC_OP_GREATER_EQ:
                return analyzeGreater(model, node, true, functionParser, metadata, executionContext);
            case INTRINSIC_OP_LESS:
                return analyzeLess(model, node, false, functionParser, metadata, executionContext);
            case INTRINSIC_OP_LESS_EQ:
                return analyzeLess(model, node, true, functionParser, metadata, executionContext);
            case INTRINSIC_OP_EQUAL:
                return analyzeEquals(translator, model, node, m, functionParser, executionContext);
            case INTRINSIC_OP_NOT_EQ:
                return analyzeNotEquals(translator, model, node, m);
            case INTRINSIC_OP_NOT:
                // only "not in" is intrinsic; any other "not ..." stays in the filter
                return isInKeyword(node.rhs.token) && analyzeNotIn(translator, model, node, m);
            default:
                return false;
        }
    }

    /**
     * Marks TRUE and removes from 'nodes' every node whose lhs or rhs token
     * equals b's token (used when a value is dropped from the model).
     */
    private void removeNodes(ExpressionNode b, ObjList<ExpressionNode> nodes) {
        tempNodes.clear();
        for (int i = 0, size = nodes.size(); i < size; i++) {
            ExpressionNode expressionNode = nodes.get(i);
            if ((expressionNode.lhs != null && Chars.equals(expressionNode.lhs.token, b.token))
                    || (expressionNode.rhs != null && Chars.equals(expressionNode.rhs.token, b.token))) {
                expressionNode.intrinsicValue = IntrinsicModel.TRUE;
                tempNodes.add(expressionNode);
            }
        }
        for (int i = 0, size = tempNodes.size(); i < size; i++) {
            nodes.remove(tempNodes.get(i));
        }
    }

    /**
     * Replaces the model's included (or excluded) key values with their
     * intersection with tempKeys; an empty intersection of inclusions means
     * the model can match nothing.
     */
    private void replaceAllWithOverlap(IntrinsicModel model, boolean includedValues) {
        CharSequenceHashSet values;
        IntList positions;
        if (includedValues) {
            values = model.keyValues;
            positions = model.keyValuePositions;
        } else {
            values = model.keyExcludedValues;
            positions = model.keyExcludedValuePositions;
        }
        tempK.clear();
        tempP.clear();
        for (int i = 0, k = tempKeys.size(); i < k; i++) {
            if (values.contains(tempKeys.get(i)) && tempK.add(tempKeys.get(i))) {
                tempP.add(tempPos.get(i));
            }
        }

        if (tempK.size() > 0) {
            values.clear();
            positions.clear();
            values.addAll(tempK);
            positions.addAll(tempP);
        } else {
            model.intrinsicValue = IntrinsicModel.FALSE;
        }
    }

    /** Un-marks all tracked key nodes and clears both node lists. */
    private void resetNodes() {
        for (int n = 0, k = keyNodes.size(); n < k; n++) {
            keyNodes.getQuick(n).intrinsicValue = IntrinsicModel.UNDEFINED;
        }
        keyNodes.clear();
        for (int n = 0, k = keyExclNodes.size(); n < k; n++) {
            keyExclNodes.getQuick(n).intrinsicValue = IntrinsicModel.UNDEFINED;
        }
        keyExclNodes.clear();
    }

    /**
     * Un-marks previously processed nodes, installs 'columnName' as the key
     * column and makes 'node' the sole tracked (intrinsic) node.
     */
    private boolean revertProcessedNodes(ObjList<ExpressionNode> nodes, IntrinsicModel model, CharSequence columnName, ExpressionNode node) {
        for (int n = 0, k = nodes.size(); n < k; n++) {
            nodes.getQuick(n).intrinsicValue = IntrinsicModel.UNDEFINED;
        }
        nodes.clear();
        model.keyColumn = columnName;
        nodes.add(node);
        node.intrinsicValue = IntrinsicModel.TRUE;
        return true;
    }

    /**
     * Removes quotes and creates immutable char sequence. When value is not quoted it is returned verbatim.
     *
     * @param value immutable character sequence.
     * @return immutable character sequence without surrounding quote marks.
     */
    private CharSequence unquote(CharSequence value) {
        if (Chars.isQuoted(value)) {
            // flyweight view over the interior of the quoted token
            return csPool.next().of(value, 1, value.length() - 2);
        }
        return value;
    }

    static {
        intrinsicOps.put("in", INTRINSIC_OP_IN);
        intrinsicOps.put(">", INTRINSIC_OP_GREATER);
        intrinsicOps.put(">=", INTRINSIC_OP_GREATER_EQ);
        intrinsicOps.put("<", INTRINSIC_OP_LESS);
        intrinsicOps.put("<=", INTRINSIC_OP_LESS_EQ);
        intrinsicOps.put("=", INTRINSIC_OP_EQUAL);
        intrinsicOps.put("!=", INTRINSIC_OP_NOT_EQ);
        intrinsicOps.put("not", INTRINSIC_OP_NOT);
        // "between" is rewritten upstream into the interval form of "in"
        intrinsicOps.put("between", INTRINSIC_OP_IN);
    }
}
package saga.microprofile.flightservice.exceptionHandler; import saga.microprofile.flightservice.error.ErrorMessage; import saga.microprofile.flightservice.error.ErrorType; import saga.microprofile.flightservice.error.UnsupportedStateTransition; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.ext.ExceptionMapper; import javax.ws.rs.ext.Provider; @Provider public class UnsupportedStateTransitionExceptionMapper implements ExceptionMapper<UnsupportedStateTransition> { @Override public Response toResponse(UnsupportedStateTransition exception) { ErrorMessage errorMessage = new ErrorMessage(ErrorType.NON_ALLOWED_STATE_TRANSITION, exception.getMessage()); return Response.status(Status.FORBIDDEN).entity(errorMessage).build(); } }
/*-
 *
 *  * Copyright 2015 Skymind,Inc.
 *  *
 *  *    Licensed under the Apache License, Version 2.0 (the "License");
 *  *    you may not use this file except in compliance with the License.
 *  *    You may obtain a copy of the License at
 *  *
 *  *        http://www.apache.org/licenses/LICENSE-2.0
 *  *
 *  *    Unless required by applicable law or agreed to in writing, software
 *  *    distributed under the License is distributed on an "AS IS" BASIS,
 *  *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  *    See the License for the specific language governing permissions and
 *  *    limitations under the License.
 *
 */

package org.deeplearning4j.clustering.algorithm;

import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ArrayUtils;
import org.deeplearning4j.clustering.cluster.Cluster;
import org.deeplearning4j.clustering.cluster.ClusterSet;
import org.deeplearning4j.clustering.cluster.ClusterUtils;
import org.deeplearning4j.clustering.cluster.Point;
import org.deeplearning4j.clustering.info.ClusterSetInfo;
import org.deeplearning4j.clustering.iteration.IterationHistory;
import org.deeplearning4j.clustering.iteration.IterationInfo;
import org.deeplearning4j.clustering.strategy.ClusteringStrategy;
import org.deeplearning4j.clustering.strategy.ClusteringStrategyType;
import org.deeplearning4j.clustering.strategy.OptimisationStrategy;
import org.deeplearning4j.clustering.util.MultiThreadUtils;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Executor;

/**
 * Iterative clustering driver (k-means style) adapted to ndarray matrices.
 * The concrete behavior (distance function, termination, optimization) is
 * delegated to the injected {@link ClusteringStrategy}.
 *
 * @author Adam Gibson
 * @author Julien Roch
 *
 */
@Slf4j
@NoArgsConstructor(access = AccessLevel.PROTECTED)
public class BaseClusteringAlgorithm implements ClusteringAlgorithm, Serializable {

    private static final long serialVersionUID = 338231277453149972L;

    // Pluggable policy object: distance function, termination condition, optimization.
    private ClusteringStrategy clusteringStrategy;
    // Per-run record of ClusterSetInfo snapshots, keyed by iteration number.
    private IterationHistory iterationHistory;
    private int currentIteration = 0;
    private ClusterSet clusterSet;
    // The full input point set; re-classified from scratch each iteration.
    private List<Point> initialPoints;
    // Executor for parallel classify/refresh work; transient, so it is null after
    // deserialization — NOTE(review): a deserialized instance would need exec re-created.
    private transient Executor exec;

    protected BaseClusteringAlgorithm(ClusteringStrategy clusteringStrategy) {
        this.clusteringStrategy = clusteringStrategy;
        this.exec = MultiThreadUtils.newExecutorService();
    }

    /**
     * Factory method wrapping the protected constructor.
     *
     * @param clusteringStrategy strategy controlling the clustering run
     * @return a new algorithm instance
     */
    public static BaseClusteringAlgorithm setup(ClusteringStrategy clusteringStrategy) {
        return new BaseClusteringAlgorithm(clusteringStrategy);
    }

    /**
     * Runs the full clustering pipeline on the given points.
     *
     * @param points input points (not modified; copied where needed)
     * @return the resulting cluster set
     */
    public ClusterSet applyTo(List<Point> points) {
        resetState(points);
        initClusters();
        iterations();
        return clusterSet;
    }

    // Clears all per-run state so the instance can be reused for a fresh input set.
    private void resetState(List<Point> points) {
        this.iterationHistory = new IterationHistory();
        this.currentIteration = 0;
        this.clusterSet = null;
        this.initialPoints = points;
    }

    /** Run clustering iterations until a termination condition is hit.
     * This is done by first classifying all points, and then updating cluster centers based on
     * those classified points
     */
    private void iterations() {
        int iterationCount = 0;
        // Loop while the termination condition is unsatisfied, OR while the previous
        // iteration applied a strategy change (split/removal) that must be re-classified.
        while ((clusteringStrategy.getTerminationCondition() != null
                        && !clusteringStrategy.getTerminationCondition().isSatisfied(iterationHistory))
                        || iterationHistory.getMostRecentIterationInfo().isStrategyApplied()) {
            currentIteration++;
            removePoints();
            classifyPoints();
            applyClusteringStrategy();
            log.info("Completed clustering iteration {}", ++iterationCount);
        }
    }

    protected void classifyPoints() {
        //Classify points. This also adds each point to the ClusterSet
        ClusterSetInfo clusterSetInfo = ClusterUtils.classifyPoints(clusterSet, initialPoints, exec);
        //Update the cluster centers, based on the points within each cluster
        ClusterUtils.refreshClustersCenters(clusterSet, clusterSetInfo, exec);
        // Record this iteration's snapshot so termination conditions can inspect it.
        iterationHistory.getIterationsInfos().put(currentIteration,
                        new IterationInfo(currentIteration, clusterSetInfo));
    }

    /**
     * Initialize the cluster centers at random, k-means++-style: subsequent centers
     * are sampled with probability proportional to distance from existing centers.
     */
    protected void initClusters() {
        log.info("Generating initial clusters");
        List<Point> points = new ArrayList<>(initialPoints);

        //Initialize the ClusterSet with a single cluster center (based on position of one of the points chosen randomly)
        Random random = new Random();
        clusterSet = new ClusterSet(clusteringStrategy.getDistanceFunction(),
                        clusteringStrategy.inverseDistanceCalculation());
        clusterSet.addNewClusterWithCenter(points.remove(random.nextInt(points.size())));

        int initialClusterCount = clusteringStrategy.getInitialClusterCount();

        //dxs: distances between each point and nearest cluster to that point
        INDArray dxs = Nd4j.create(points.size());
        // Seed with +/-MAX_VALUE so the first distance computation always overwrites.
        dxs.addi(clusteringStrategy.inverseDistanceCalculation() ? -Double.MAX_VALUE : Double.MAX_VALUE);

        //Generate the initial cluster centers, by randomly selecting a point between 0 and max distance
        //Thus, we are more likely to select (as a new cluster center) a point that is far from an existing cluster
        while (clusterSet.getClusterCount() < initialClusterCount && !points.isEmpty()) {
            dxs = ClusterUtils.computeSquareDistancesFromNearestCluster(clusterSet, points, dxs, exec);
            double r = random.nextFloat() * dxs.maxNumber().doubleValue();
            for (int i = 0; i < dxs.length(); i++) {
                // First point whose distance clears the random threshold becomes a new center.
                if (dxs.getDouble(i) >= r) {
                    clusterSet.addNewClusterWithCenter(points.remove(i));
                    // Shrink dxs in lockstep with the points list so indices stay aligned.
                    dxs = Nd4j.create(ArrayUtils.remove(dxs.data().asDouble(), i));
                    break;
                }
            }
        }

        ClusterSetInfo initialClusterSetInfo = ClusterUtils.computeClusterSetInfo(clusterSet);
        iterationHistory.getIterationsInfos().put(currentIteration,
                        new IterationInfo(currentIteration, initialClusterSetInfo));
    }

    /**
     * Applies post-iteration structural adjustments: removes empty clusters (re-splitting
     * to restore a fixed cluster count when required) and runs the optimization strategy.
     */
    protected void applyClusteringStrategy() {
        if (!isStrategyApplicableNow())
            return;

        ClusterSetInfo clusterSetInfo = iterationHistory.getMostRecentClusterSetInfo();
        if (!clusteringStrategy.isAllowEmptyClusters()) {
            int removedCount = removeEmptyClusters(clusterSetInfo);
            if (removedCount > 0) {
                iterationHistory.getMostRecentIterationInfo().setStrategyApplied(true);

                if (clusteringStrategy.isStrategyOfType(ClusteringStrategyType.FIXED_CLUSTER_COUNT)
                                && clusterSet.getClusterCount() < clusteringStrategy.getInitialClusterCount()) {
                    // Restore the fixed cluster count by splitting the most spread-out clusters.
                    int splitCount = ClusterUtils.splitMostSpreadOutClusters(clusterSet, clusterSetInfo,
                                    clusteringStrategy.getInitialClusterCount() - clusterSet.getClusterCount(), exec);
                    if (splitCount > 0)
                        iterationHistory.getMostRecentIterationInfo().setStrategyApplied(true);
                }
            }
        }
        if (clusteringStrategy.isStrategyOfType(ClusteringStrategyType.OPTIMIZATION))
            optimize();
    }

    // Delegates to the optimization strategy; cast is safe only when the strategy
    // is of OPTIMIZATION type (callers check via isStrategyOfType / isOptimizationDefined).
    protected void optimize() {
        ClusterSetInfo clusterSetInfo = iterationHistory.getMostRecentClusterSetInfo();
        OptimisationStrategy optimization = (OptimisationStrategy) clusteringStrategy;
        boolean applied = ClusterUtils.applyOptimization(optimization, clusterSet, clusterSetInfo, exec);
        iterationHistory.getMostRecentIterationInfo().setStrategyApplied(applied);
    }

    // Strategy adjustments are skipped on iteration 0 and when no optimization is defined.
    private boolean isStrategyApplicableNow() {
        return clusteringStrategy.isOptimizationDefined() && iterationHistory.getIterationCount() != 0
                        && clusteringStrategy.isOptimizationApplicableNow(iterationHistory);
    }

    /**
     * Removes clusters that contain no points and drops their info records.
     *
     * @return the number of clusters removed
     */
    protected int removeEmptyClusters(ClusterSetInfo clusterSetInfo) {
        List<Cluster> removedClusters = clusterSet.removeEmptyClusters();
        clusterSetInfo.removeClusterInfos(removedClusters);
        return removedClusters.size();
    }

    // Detaches all points from their clusters so the next iteration re-classifies from scratch.
    protected void removePoints() {
        clusterSet.removePoints();
    }
}
/** * Copyright (c) 2012 Andrew Eells * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. */ package com.aeells.hibernate; public interface Updateable { /** * Defines update behaviour. * * @return <code>true</code> if the object can be updated; <code>false</code> otherwise. */ boolean isUpdateAllowed(); }
// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser; import android.view.View; /** * An interface for pages that will be shown in a tab using Android views instead of html. */ public interface NativePage { /** * @return The View to display the page. This is always non-null. */ View getView(); /** * @return The title of the page. */ String getTitle(); /** * @return The URL of the page. */ String getUrl(); /** * @return The hostname for this page, e.g. "newtab" or "bookmarks". */ String getHost(); /** * @return The background color of the page. */ int getBackgroundColor(); /** * @return The theme color of the page. */ int getThemeColor(); /** * @return True if the native page needs the toolbar shadow to be drawn. */ boolean needsToolbarShadow(); /** * Updates the native page based on the given url. */ void updateForUrl(String url); /** * Called after a page has been removed from the view hierarchy and will no longer be used. */ void destroy(); }
package ru.job4j.collection.set;

import org.junit.Test;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;

/**
 * Tests for the simple set: adding duplicates must not create duplicate elements,
 * and iteration must yield the distinct values in insertion order.
 *
 * @author Bruki mammad.
 * @version $1.0$
 * @since 13.09.2020
 */
public class SimpleSetTest {

    @Test
    public void whenAddElementsThenNoDuplicates() {
        SimpleSet<Integer> set = new SimpleSet<>();
        // Insert with duplicates; the set must keep only 1, 2, 3.
        for (int value : new int[] {1, 1, 2, 3, 3}) {
            set.add(value);
        }
        var iterator = set.iterator();
        for (int expected : new int[] {1, 2, 3}) {
            assertThat(iterator.hasNext(), is(true));
            assertThat(iterator.next(), is(expected));
        }
        assertThat(iterator.hasNext(), is(false));
    }
}
package com.devplant.snippets.jpa.model;

import java.util.ArrayList;
import java.util.List;

import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.Table;

import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;

/**
 * JPA entity for a trainer, mapped to the "trainer" table.
 * Lombok generates getters/setters/equals/hashCode ({@code @Data}), a builder,
 * and both constructors. {@code students} is excluded from toString to avoid
 * triggering lazy loading (and possible LazyInitializationException) when
 * printing a detached entity.
 */
@Data
@Entity
@Builder
@Table(name = "trainer")
@NoArgsConstructor
@AllArgsConstructor
@ToString(exclude = "students")
public class Trainer {

    // Auto-generated primary key.
    @Id
    @GeneratedValue
    private int id;

    private String name;

    // Inverse side of the relationship: Student.trainer owns the foreign key.
    // Lazily fetched; access outside a persistence context may fail.
    @OneToMany(fetch = FetchType.LAZY, mappedBy = "trainer")
    private List<Student> students = new ArrayList<>();
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cassandra.utils.btree;

import java.util.Comparator;

import io.netty.util.Recycler;

import static org.apache.cassandra.utils.btree.BTree.EMPTY_LEAF;
import static org.apache.cassandra.utils.btree.BTree.FAN_SHIFT;
import static org.apache.cassandra.utils.btree.BTree.POSITIVE_INFINITY;

/**
 * A class for constructing a new BTree, either from an existing one and some set of modifications
 * or a new tree from a sorted collection of items.
 * <p/>
 * This is a fairly heavy-weight object, so a Recycled instance is created for making modifications to a tree
 */
final class TreeBuilder {

    // Pool of builders; each public entry point recycles `this` back into the pool on exit,
    // so a TreeBuilder must not be reused after update()/build() returns.
    private final static Recycler<TreeBuilder> builderRecycler = new Recycler<TreeBuilder>() {
        protected TreeBuilder newObject(Handle handle) {
            return new TreeBuilder(handle);
        }
    };

    /** Obtains a (possibly recycled) builder instance from the pool. */
    public static TreeBuilder newInstance() {
        return builderRecycler.get();
    }

    private final Recycler.Handle recycleHandle;
    private final NodeBuilder rootBuilder = new NodeBuilder();

    private TreeBuilder(Recycler.Handle handle) {
        this.recycleHandle = handle;
    }

    /**
     * At the highest level, we adhere to the classic b-tree insertion algorithm:
     *
     * 1. Add to the appropriate leaf
     * 2. Split the leaf if necessary, add the median to the parent
     * 3. Split the parent if necessary, etc.
     *
     * There is one important difference: we don't actually modify the original tree, but copy each node that we
     * modify.  Note that every node on the path to the key being inserted or updated will be modified; this
     * implies that at a minimum, the root node will be modified for every update, so every root is a "snapshot"
     * of a tree that can be iterated or sliced without fear of concurrent modifications.
     *
     * The NodeBuilder class handles the details of buffering the copied contents of the original tree and
     * adding in our changes. Since NodeBuilder maintains parent/child references, it also handles parent-splitting
     * (easy enough, since any node affected by the split will already be copied into a NodeBuilder).
     *
     * One other difference from the simple algorithm is that we perform modifications in bulk;
     * we assume @param source has been sorted, e.g. by BTree.update, so the update of each key resumes where
     * the previous left off.
     */
    public <C, K extends C, V extends C> Object[] update(Object[] btree, Comparator<C> comparator, Iterable<K> source, UpdateFunction<K, V> updateF) {
        assert updateF != null;

        NodeBuilder current = rootBuilder;
        current.reset(btree, POSITIVE_INFINITY, updateF, comparator);

        for (K key : source) {
            while (true) {
                if (updateF.abortEarly()) {
                    // Caller requested abort: discard buffered state and signal failure with null.
                    rootBuilder.clear();
                    return null;
                }
                NodeBuilder next = current.update(key);
                if (next == null)
                    break;
                // we were in a subtree from a previous key that didn't contain this new key;
                // retry against the correct subtree
                current = next;
            }
        }

        // finish copying any remaining keys from the original btree
        while (true) {
            NodeBuilder next = current.finish();
            if (next == null)
                break;
            current = next;
        }

        // updating with POSITIVE_INFINITY means that current should be back to the root
        assert current.isRoot();

        Object[] r = current.toNode();
        current.clear();

        // Return this builder to the pool; it must not be touched again by the caller.
        builderRecycler.recycle(this, recycleHandle);

        return r;
    }

    /**
     * Builds a fresh tree of {@code size} keys from an already-sorted source.
     * Descends FAN_SHIFT levels up front so the builder chain matches the final
     * tree depth, avoiding wasted buffer memory.
     */
    public <C, K extends C, V extends C> Object[] build(Iterable<K> source, UpdateFunction<K, V> updateF, int size) {
        assert updateF != null;

        NodeBuilder current = rootBuilder;
        // we descend only to avoid wasting memory; in update() we will often descend into existing trees
        // so here we want to descend also, so we don't have lg max(N) depth in both directions
        while ((size >>= FAN_SHIFT) > 0)
            current = current.ensureChild();

        current.reset(EMPTY_LEAF, POSITIVE_INFINITY, updateF, null);
        for (K key : source)
            current.addNewKey(key);

        current = current.ascendToRoot();

        Object[] r = current.toNode();
        current.clear();

        builderRecycler.recycle(this, recycleHandle);

        return r;
    }

    /**
     * Array overload of {@link #build(Iterable, UpdateFunction, int)}: same algorithm,
     * iterating the first {@code size} elements of {@code source} by index.
     */
    public <C, K extends C, V extends C> Object[] build(K [] source, UpdateFunction<K, V> updateF, int size) {
        assert updateF != null;
        // size is consumed by the descent loop below, so remember the element count first.
        int origSize = size;

        NodeBuilder current = rootBuilder;
        // we descend only to avoid wasting memory; in update() we will often descend into existing trees
        // so here we want to descend also, so we don't have lg max(N) depth in both directions
        while ((size >>= FAN_SHIFT) > 0)
            current = current.ensureChild();

        current.reset(EMPTY_LEAF, POSITIVE_INFINITY, updateF, null);
        for (int i = 0; i < origSize; i++)
            current.addNewKey(source[i]);

        current = current.ascendToRoot();

        Object[] r = current.toNode();
        current.clear();

        builderRecycler.recycle(this, recycleHandle);

        return r;
    }
}
package com.electropeyk.squenda.fragments;

import android.annotation.SuppressLint;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;

import androidx.appcompat.widget.AppCompatImageView;
import androidx.fragment.app.Fragment;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import androidx.recyclerview.widget.SimpleItemAnimator;

import com.electropeyk.squenda.R;
import com.electropeyk.squenda.activities.DisplayBrightnessMelodyActivity;
import com.electropeyk.squenda.activities.MediaActivity;
import com.electropeyk.squenda.activities.MyHomeActivity;
import com.electropeyk.squenda.activities.SettingActivity;
import com.electropeyk.squenda.adpter.AudioAdapterDoor2;
import com.electropeyk.squenda.utils.Common;
import com.example.jean.jcplayer.model.JcAudio;
import com.example.jean.jcplayer.view.JcPlayerView;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;

import static android.os.Looper.getMainLooper;

/**
 * Fragment showing the melody (ringtone) selection screen for door 2: a clock/date
 * header, a list of bundled audio files, and navigation buttons to other activities.
 */
public class MelodyDoor2Fragment extends Fragment implements View.OnClickListener {

    private JcPlayerView player;
    private RecyclerView recyclerView;
    private AudioAdapterDoor2 audioAdapter;
    private AppCompatImageView btn_profile_melody, img_back_melody, btn_home_melody, btn_setting_melody;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        View rootView = inflater.inflate(R.layout.fragment_melody, container, false);
        recyclerView = (RecyclerView) rootView.findViewById(R.id.recycler_melody);
        player = rootView.findViewById(R.id.jcplayer);
        btn_profile_melody = rootView.findViewById(R.id.btn_profile_melody);
        btn_profile_melody.setOnClickListener(this);
        btn_home_melody = rootView.findViewById(R.id.btn_home_melody);
        btn_home_melody.setOnClickListener(this);
        btn_setting_melody = rootView.findViewById(R.id.btn_setting_melody);
        btn_setting_melody.setOnClickListener(this);
        img_back_melody = rootView.findViewById(R.id.img_back_melody);
        img_back_melody.setOnClickListener(this);

        TextView txt_date_melody = rootView.findViewById(R.id.txt_date_melody);
        final TextView txt_time_melody = rootView.findViewById(R.id.txt_time_melody);
        txt_time_melody.setText(new SimpleDateFormat("HH:mm", Locale.US).format(new Date()));

        // Self-rescheduling clock tick (every 1000 ms after an initial 10 ms delay).
        // NOTE(review): the callback is never removed (no removeCallbacks in onDestroyView),
        // so it keeps running after the view is destroyed, and a new SimpleDateFormat is
        // allocated on every tick — confirm and clean up if fragment lifecycle matters here.
        final Handler someHandler = new Handler(getMainLooper());
        someHandler.postDelayed(new Runnable() {
            @Override
            public void run() {
                txt_time_melody.setText(new SimpleDateFormat("HH:mm", Locale.US).format(new Date()));
                someHandler.postDelayed(this, 1000);
            }
        }, 10);

        // Build the "Day,Month dayOfMonth" header from Common's day/month name tables.
        int dayOfMonth = Calendar.getInstance().get(Calendar.DAY_OF_MONTH);
        String day = Common.days[Calendar.getInstance().get(Calendar.DAY_OF_WEEK) - 1];
        String month = Common.months[Calendar.getInstance().get(Calendar.MONTH)];
        String date = day + "," + month + " " + dayOfMonth;
        txt_date_melody.setText(date);

        // Playlist backed by the five bundled asset files.
        ArrayList<JcAudio> jcAudios = new ArrayList<>();
        jcAudios.add(JcAudio.createFromAssets("audio 1", "sound1.wav"));
        jcAudios.add(JcAudio.createFromAssets("audio 2", "sound2.wav"));
        jcAudios.add(JcAudio.createFromAssets("audio 3", "sound3.wav"));
        jcAudios.add(JcAudio.createFromAssets("audio 4", "sound4.wav"));
        jcAudios.add(JcAudio.createFromAssets("audio 5", "sound5.wav"));
        player.initPlaylist(jcAudios, null);

        adapterSetup();
        return rootView;
    }

    // Wires the audio list adapter to the RecyclerView; a tap plays the tapped entry
    // via Common.startSound rather than through the JcPlayer UI.
    private void adapterSetup() {
        audioAdapter = new AudioAdapterDoor2(getActivity(), player.getMyPlaylist());
        audioAdapter.setOnItemClickListener(new AudioAdapterDoor2.OnItemClickListener() {
            @Override
            public void onItemClick(int position) {
                Common.startSound(getActivity(), player.getMyPlaylist().get(position).getPath());
            }
        });
        @SuppressLint("WrongConstant")
        LinearLayoutManager layoutManager = new LinearLayoutManager(getActivity(), LinearLayoutManager.VERTICAL, false);
        recyclerView.setLayoutManager(layoutManager);
        recyclerView.setAdapter(audioAdapter);
        // Disable change animations to avoid item flicker on rebinds.
        ((SimpleItemAnimator) recyclerView.getItemAnimator()).setSupportsChangeAnimations(false);
    }

    /**
     * Navigation handler: each button starts its target activity and finishes the
     * hosting activity, so every navigation replaces the current screen.
     */
    @Override
    public void onClick(View view) {
        Intent intent;
        switch (view.getId()) {
            case R.id.btn_profile_melody:
                intent = new Intent(getActivity(), MediaActivity.class);
                startActivity(intent);
                getActivity().finish();
                break;
            case R.id.btn_home_melody:
                intent = new Intent(getActivity(), MyHomeActivity.class);
                startActivity(intent);
                getActivity().finish();
                break;
            case R.id.btn_setting_melody:
                intent = new Intent(getActivity(), SettingActivity.class);
                startActivity(intent);
                getActivity().finish();
                break;
            case R.id.img_back_melody:
                intent = new Intent(getActivity(), DisplayBrightnessMelodyActivity.class);
                startActivity(intent);
                getActivity().finish();
                break;
        }
    }
}
package org.jrpq.rlci.core.graphs;

import org.jrpq.rlci.core.RlcIndex;
import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import org.jgrapht.alg.util.Pair;
import org.jgrapht.graph.AbstractGraph;
import org.jgrapht.opt.graph.sparse.SparseIntDirectedGraph;

import java.util.*;

/**
 * JGraphT-backed implementation of {@link EdgeLabeledGraph} with int vertices and
 * int edge ids; edge labels are kept in side maps (edge id -> encoded label, and
 * label string -> encoding).
 */
@Deprecated
public class EdgeLabeledJGraphT implements EdgeLabeledGraph<Integer, Integer> {

    AbstractGraph<Integer, Integer> graph;
    // Edge id -> encoded label.
    Int2IntOpenHashMap edgeToLabel;
    // Label string -> encoded label value.
    Object2IntOpenHashMap<String> edgeLabelEncoder;
    String graphName;

    public EdgeLabeledJGraphT(AbstractGraph<Integer, Integer> graph, Int2IntOpenHashMap edgeToLabel, Object2IntOpenHashMap<String> edgeLabelEncoder) {
        this.graph = graph;
        this.edgeToLabel = edgeToLabel;
        this.edgeLabelEncoder = edgeLabelEncoder;
    }

    public EdgeLabeledJGraphT(AbstractGraph<Integer, Integer> graph, Int2IntOpenHashMap edgeToLabel, Object2IntOpenHashMap<String> edgeLabelEncoder, String graphName) {
        this.graph = graph;
        this.edgeToLabel = edgeToLabel;
        this.edgeLabelEncoder = edgeLabelEncoder;
        this.graphName = graphName;
    }

    /**
     * Builds the small hand-coded example graph (labels l1..l3, vertices 0..6, 11 edges).
     */
    public static EdgeLabeledGraph<Integer, Integer> getASimpleInstance2() {
        // this method returns the example in Fig. 2 in the paper.
        List<Pair<Integer, Integer>> edgeList = new ArrayList<>();
        Int2IntOpenHashMap edge2Label = new Int2IntOpenHashMap();
        Object2IntOpenHashMap<String> edgeLabelEncoder = new Object2IntOpenHashMap<>();
        int[] labels = new int[]{1, 2, 3};
        for (int i = 1; i <= 3; i++)
            edgeLabelEncoder.put("l" + i, labels[i - 1]);
        // Edge ids are assigned in insertion order; each edge gets its label by id.
        int eId = 0;
        edgeList.add(Pair.of(1, 2));
        edge2Label.put(eId++, edgeLabelEncoder.getInt("l1"));
        edgeList.add(Pair.of(2, 5));
        edge2Label.put(eId++, edgeLabelEncoder.getInt("l1"));
        edgeList.add(Pair.of(1, 3));
        edge2Label.put(eId++, edgeLabelEncoder.getInt("l2")); // change
        edgeList.add(Pair.of(3, 2));
        edge2Label.put(eId++, edgeLabelEncoder.getInt("l1")); // change
        edgeList.add(Pair.of(3, 4));
        edge2Label.put(eId++, edgeLabelEncoder.getInt("l2"));
        edgeList.add(Pair.of(4, 1));
        edge2Label.put(eId++, edgeLabelEncoder.getInt("l1")); // change
        edgeList.add(Pair.of(4, 6));
        edge2Label.put(eId++, edgeLabelEncoder.getInt("l3"));
        edgeList.add(Pair.of(5, 1));
        edge2Label.put(eId++, edgeLabelEncoder.getInt("l1"));
        //new edge
        edgeList.add(Pair.of(3, 6));
        edge2Label.put(eId++, edgeLabelEncoder.getInt("l1"));
        //new edge
        edgeList.add(Pair.of(3, 1));
        edge2Label.put(eId++, edgeLabelEncoder.getInt("l2"));
        //new edge
        edgeList.add(Pair.of(2, 5));
        edge2Label.put(eId, edgeLabelEncoder.getInt("l2"));
        SparseIntDirectedGraph sparseIntDirectedGraph = new SparseIntDirectedGraph(7, edgeList); // vid starts from 0, so there are 7 vertices
        return new EdgeLabeledJGraphT(sparseIntDirectedGraph, edge2Label, edgeLabelEncoder);
    }

    // Demo entry point: builds the example graph and prints its RLC index.
    public static void main(String[] args) {
        RlcIndex<Integer, Integer> rlcIndex = new RlcIndex<>(getASimpleInstance2(), 2);
        rlcIndex.build();
        System.out.println(rlcIndex);
    }

    @Override
    public int getEncodedEdgeLabel(Integer edge) {
        return edgeToLabel.get(edge.intValue());
    }

    @Override
    public int getNumberOfVertices() {
        return graph.vertexSet().size();
    }

    @Override
    public int getNumberOfEdges() {
        return edgeToLabel.size();
    }

    @Override
    public int encodeEdgeLabel(String label) {
        return edgeLabelEncoder.getInt(label);
    }

    // Linear reverse lookup over the encoder map; returns the literal string "null"
    // (not Java null) when the encoding is unknown.
    @Override
    public String decodeEdgeLabel(int encode) {
        for (Object2IntMap.Entry<String> stringEntry : edgeLabelEncoder.object2IntEntrySet()){
            if (stringEntry.getIntValue() == encode)
                return stringEntry.getKey();
        }
        return "null";
    }

    @Override
    public Iterator<Integer> outEdgesIterator(Integer v) {
        return graph.outgoingEdgesOf(v).iterator();
    }

    @Override
    public Iterator<Integer> inEdgesIterator(Integer v) {
        return graph.incomingEdgesOf(v).iterator();
    }

    @Override
    public Integer getSourceOf(Integer e) {
        return graph.getEdgeSource(e);
    }

    @Override
    public Integer getTargetOf(Integer e) {
        return graph.getEdgeTarget(e);
    }

    // Vertices are their own ids in this representation.
    @Override
    public int getVertexId(Integer v) {
        return v;
    }

    @Override
    public Iterator<Integer> getVertices() {
        return graph.vertexSet().iterator();
    }

    @Override
    public Iterator<Integer> getEdges() {
        return graph.edgeSet().iterator();
    }

    @Override
    public Iterator<Integer> sortVerticesBasedOnDegree() {
        // Descending order of vertex degrees
        return graph
                .vertexSet()
                .stream()
//                .sorted((o1, o2) -> (graph.inDegreeOf(o2) + 1) * (graph.outDegreeOf(o2) + 1) - (graph.inDegreeOf(o1) + 1) * (graph.outDegreeOf(o1) + 1))
                // Compare via Integer.compareTo to avoid subtraction overflow.
                .sorted((o1, o2) -> {
                    Integer integer1 = (graph.inDegreeOf(o1) + 1) * (graph.outDegreeOf(o1) + 1);
                    Integer integer2 = (graph.inDegreeOf(o2) + 1) * (graph.outDegreeOf(o2) + 1);
                    return integer2.compareTo(integer1);})
                .iterator();
    }

    @Override
    public Set<String> getLabelSet() {
        return edgeLabelEncoder.keySet();
    }

    @Override
    public Integer getVertex(int vId) {
        return vId;
    }

    @Override
    public String getGraphName() {
        return graphName;
    }
}
/*
 * Copyright 2016 LINE Corporation
 *
 * LINE Corporation licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.linecorp.bot.model.event;

import java.time.Instant;

import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder;
import com.linecorp.bot.model.event.beacon.BeaconContent;
import com.linecorp.bot.model.event.source.Source;

import lombok.Builder;
import lombok.Value;

/**
 * Event object for when a user detects a LINE Beacon. You can reply to beacon events.
 *
 * <p>Immutable value type ({@code @Value}); instances are deserialized from webhook
 * JSON via the Lombok-generated builder (withPrefix "" matches bare JSON field names).
 */
@JsonTypeName("beacon")
@Value
@Builder(toBuilder = true)
@JsonDeserialize(builder = BeaconEvent.BeaconEventBuilder.class)
public class BeaconEvent implements Event, ReplyEvent {
    @JsonPOJOBuilder(withPrefix = "")
    public static class BeaconEventBuilder {
        // Providing builder instead of public constructor. Class body is filled by lombok.
    }

    /**
     * Token for replying to this event.
     */
    String replyToken;

    /**
     * JSON object which contains the source of the event.
     */
    Source source;

    /**
     * Content of the beacon event.
     */
    BeaconContent beacon;

    /**
     * Time of the event.
     */
    Instant timestamp;

    /**
     * Channel state.
     * <dl>
     * <dt>active</dt>
     * <dd>The channel is active. You can send a reply message or push message from the bot server that received
     * this webhook event.</dd>
     * <dt>standby (under development)</dt>
     * <dd>The channel is waiting. The bot server that received this webhook event shouldn't send any messages.
     * </dd>
     * </dl>
     */
    EventMode mode;
}
package com.github.rochedo098.libradioactive.api;

import net.minecraft.entity.LivingEntity;
import net.minecraft.util.math.ChunkPos;
import net.minecraft.world.World;

/**
 * Base class for a kind of radiation.
 *
 * <p>Subclasses supply the effect logic: {@link #affectedEntity(LivingEntity)} describes
 * what happens to a living entity exposed to this radiation, and
 * {@link #affectedWorld(World, ChunkPos)} describes the effect on a world chunk.
 * The spread rate passed to the constructor controls how fast the radiation propagates
 * and is exposed through {@link #getRadiationSpreadRate()}.
 */
public abstract class RadiationType {

    /** Speed at which this radiation spreads; set once at construction. */
    protected int spreadRate;

    /**
     * @param spreadRate the speed at which the radiation spreads
     */
    public RadiationType(int spreadRate) {
        this.spreadRate = spreadRate;
    }

    /** Returns the configured spread speed of this radiation type. */
    public int getRadiationSpreadRate() {
        return this.spreadRate;
    }

    /** Applies this radiation's effect to the given entity. */
    public abstract void affectedEntity(LivingEntity entity);

    /** Applies this radiation's effect to the given chunk of the given world. */
    public abstract void affectedWorld(World world, ChunkPos pos);
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.reef.examples.group.utils.math;

import org.apache.reef.io.Tuple;

/**
 * Represents an immutable vector: read-only element access plus the
 * standard aggregate operations (dot product, sum, L2 norms, minimum).
 */
public interface ImmutableVector {
  /**
   * Access the value of the Vector at dimension i.
   *
   * @param i index
   * @return the value at index i
   */
  double get(int i);

  /**
   * The size (dimensionality) of the Vector.
   *
   * @return the size of the Vector.
   */
  int size();

  /**
   * Computes the inner product with another Vector.
   *
   * @param that the other Vector; assumed to have the same dimensionality — TODO confirm with implementations
   * @return the inner product between two Vectors.
   */
  double dot(Vector that);

  /**
   * Computes the computeSum of all entries in the Vector.
   *
   * @return the computeSum of all entries in this Vector
   */
  double sum();

  /**
   * Computes the L2 norm of this Vector.
   *
   * @return the L2 norm of this Vector.
   */
  double norm2();

  /**
   * Computes the square of the L2 norm of this Vector.
   *
   * @return the square of the L2 norm of this Vector.
   */
  double norm2Sqr();

  /**
   * Computes the min of all entries in the Vector.
   *
   * @return a Tuple of the min entry's index and its value — presumably (index, value); verify against implementations
   */
  Tuple<Integer, Double> min();
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.fineract.portfolio.collectionsheet.api;

import java.util.List;

import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.UriInfo;

import org.apache.commons.lang.StringUtils;
import org.apache.fineract.commands.domain.CommandWrapper;
import org.apache.fineract.commands.service.CommandWrapperBuilder;
import org.apache.fineract.commands.service.PortfolioCommandSourceWritePlatformService;
import org.apache.fineract.infrastructure.core.api.ApiRequestParameterHelper;
import org.apache.fineract.infrastructure.core.api.JsonQuery;
import org.apache.fineract.infrastructure.core.data.CommandProcessingResult;
import org.apache.fineract.infrastructure.core.serialization.ApiRequestJsonSerializationSettings;
import org.apache.fineract.infrastructure.core.serialization.FromJsonHelper;
import org.apache.fineract.infrastructure.core.serialization.ToApiJsonSerializer;
import org.apache.fineract.infrastructure.core.service.AdvanceSearchParameters;
import org.apache.fineract.infrastructure.core.service.Page;
import org.apache.fineract.infrastructure.security.service.PlatformSecurityContext;
import org.apache.fineract.portfolio.client.api.ClientApiConstants;
import org.apache.fineract.portfolio.client.data.ClientData;
import org.apache.fineract.portfolio.collectionsheet.CollectionSheetConstants;
import org.apache.fineract.portfolio.collectionsheet.data.BulkReminderData;
import org.apache.fineract.portfolio.collectionsheet.data.IndividualCollectionSheetData;
import org.apache.fineract.portfolio.collectionsheet.service.CollectionSheetReadPlatformService;
import org.apache.fineract.portfolio.collectionsheet.service.CollectionSheetWritePlatformService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

import com.google.gson.JsonElement;

/**
 * REST resource for collection sheets: generating and saving individual
 * collection sheets, sending bulk SMS reminders, and querying SMS reminder data.
 *
 * <p>All write operations are routed through the command-source service so they
 * are logged and processed via the standard Fineract command pipeline.
 */
@Path("/collectionsheet")
@Component
@Scope("singleton")
public class CollectionSheetApiResourse {

    private final CollectionSheetReadPlatformService collectionSheetReadPlatformService;
    private final ToApiJsonSerializer<Object> toApiJsonSerializer;
    private final FromJsonHelper fromJsonHelper;
    private final ApiRequestParameterHelper apiRequestPrameterHelper;
    private final PortfolioCommandSourceWritePlatformService commandsSourceWritePlatformService;
    private final PlatformSecurityContext context;
    private final CollectionSheetWritePlatformService collectionSheetWritePlatformService;

    @Autowired
    public CollectionSheetApiResourse(final CollectionSheetReadPlatformService collectionSheetReadPlatformService,
            final ToApiJsonSerializer<Object> toApiJsonSerializer, final FromJsonHelper fromJsonHelper,
            final ApiRequestParameterHelper apiRequestPrameterHelper,
            final PortfolioCommandSourceWritePlatformService commandsSourceWritePlatformService,
            final PlatformSecurityContext context,
            final CollectionSheetWritePlatformService collectionSheetWritePlatformService) {
        this.collectionSheetReadPlatformService = collectionSheetReadPlatformService;
        this.toApiJsonSerializer = toApiJsonSerializer;
        this.fromJsonHelper = fromJsonHelper;
        this.apiRequestPrameterHelper = apiRequestPrameterHelper;
        this.commandsSourceWritePlatformService = commandsSourceWritePlatformService;
        this.context = context;
        this.collectionSheetWritePlatformService = collectionSheetWritePlatformService;
    }

    /**
     * Generates ({@code ?command=generateCollectionSheet}) or saves
     * ({@code ?command=saveCollectionSheet}) an individual collection sheet.
     *
     * @param commandParam selects the operation; any other value yields {@code null}
     * @param apiRequestBodyAsJson JSON request body (search criteria or sheet data)
     * @param uriInfo used to derive serialization settings from query parameters
     * @return the serialized collection sheet or command result, or {@code null}
     *         when {@code commandParam} is unrecognized
     */
    @POST
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    public String generateCollectionSheet(@QueryParam("command") final String commandParam,
            final String apiRequestBodyAsJson, @Context final UriInfo uriInfo) {

        final CommandWrapperBuilder builder = new CommandWrapperBuilder().withJson(apiRequestBodyAsJson);
        CommandProcessingResult result = null;
        if (is(commandParam, "generateCollectionSheet")) {
            this.context.authenticatedUser().validateHasReadPermission(CollectionSheetConstants.COLLECTIONSHEET_RESOURCE_NAME);
            final JsonElement parsedQuery = this.fromJsonHelper.parse(apiRequestBodyAsJson);
            final JsonQuery query = JsonQuery.from(apiRequestBodyAsJson, parsedQuery, this.fromJsonHelper);
            final IndividualCollectionSheetData collectionSheet = this.collectionSheetReadPlatformService
                    .generateIndividualCollectionSheet(query);
            final ApiRequestJsonSerializationSettings settings = this.apiRequestPrameterHelper.process(uriInfo.getQueryParameters());
            return this.toApiJsonSerializer.serialize(settings, collectionSheet);
        } else if (is(commandParam, "saveCollectionSheet")) {
            final CommandWrapper commandRequest = builder.saveIndividualCollectionSheet().build();
            result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
            return this.toApiJsonSerializer.serialize(result);
        }
        return null;
    }

    /**
     * Queues a bulk SMS reminder command.
     *
     * @param commandParam currently unused; kept for API compatibility
     * @param apiRequestBodyAsJson JSON payload for the bulk reminder command
     * @param uriInfo injected JAX-RS context (unused)
     * @return the serialized command-processing result
     */
    @POST
    @Path("/reminder")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    public String bulkReminder(@QueryParam("command") final String commandParam,
            final String apiRequestBodyAsJson, @Context final UriInfo uriInfo) {

        final CommandWrapperBuilder builder = new CommandWrapperBuilder().withJson(apiRequestBodyAsJson);
        final CommandWrapper commandRequest = builder.bulkReminderSms().build();
        final CommandProcessingResult result = this.commandsSourceWritePlatformService.logCommandSource(commandRequest);
        return this.toApiJsonSerializer.serialize(result);
    }

    /**
     * Retrieves the client data used to build SMS reminders, filtered by the
     * given office/center/group/loan-officer and date range.
     *
     * @return serialized list of {@link BulkReminderData}
     */
    @GET
    @Path("smsReminderData")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    public String advanceRetrieveAll(@Context final UriInfo uriInfo, @QueryParam("officeId") final String officeId,
            @QueryParam("centerId") final String centerId, @QueryParam("groupId") final String groupId,
            @QueryParam("loanOfficerId") final String loanOfficerId,
            @QueryParam("startDate") final String startDate, @QueryParam("endDate") final String endDate) {

        this.context.authenticatedUser().validateHasReadPermission("smsReminder");

        final ApiRequestJsonSerializationSettings settings = this.apiRequestPrameterHelper.process(uriInfo.getQueryParameters());
        final List<BulkReminderData> clientData = this.collectionSheetReadPlatformService.smsReminderData(officeId,
                centerId, groupId, loanOfficerId, startDate, endDate);
        return this.toApiJsonSerializer.serialize(settings, clientData,
                BulkReminderApiConstants.BULK_REMINDER_RESPONSE_DATA_PARAMETERS);
    }

    /**
     * Triggers sending of a sample SMS; fire-and-forget, no response body.
     */
    @POST
    @Path("/sampleSms")
    @Consumes({ MediaType.APPLICATION_JSON })
    @Produces({ MediaType.APPLICATION_JSON })
    public void sampleSms(@Context final UriInfo uriInfo) {
        this.collectionSheetWritePlatformService.sampleSms();
    }

    /** Case-insensitive match of the {@code command} query parameter. */
    private boolean is(final String commandParam, final String commandValue) {
        return StringUtils.isNotBlank(commandParam) && commandParam.trim().equalsIgnoreCase(commandValue);
    }
}
package cn.hp.hp.model;

import org.litepal.crud.DataSupport;

/**
 * Persistent menu-module entry (LitePal-backed): a named module with a menu id,
 * display position, icon resource and an enabled flag. Supports cloning.
 *
 * Created by liyu on 2018/3/2.
 */
public class Module extends DataSupport implements Cloneable {

    private String name;
    private int index;
    private boolean enable;
    private int menuId;
    private int resIcon;

    public Module(String name, int resIcon, int menuId, int index, boolean enable) {
        this.name = name;
        this.index = index;
        this.enable = enable;
        this.resIcon = resIcon;
        this.menuId = menuId;
    }

    public int getMenuId() {
        return this.menuId;
    }

    public void setMenuId(int menuId) {
        this.menuId = menuId;
    }

    public int getResIcon() {
        return this.resIcon;
    }

    public void setResIcon(int resIcon) {
        this.resIcon = resIcon;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getIndex() {
        return this.index;
    }

    public void setIndex(int index) {
        this.index = index;
    }

    public boolean isEnable() {
        return this.enable;
    }

    public void setEnable(boolean enable) {
        this.enable = enable;
    }

    /** Two modules are equal when every field (including name, null-safely) matches. */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Module other = (Module) o;
        boolean scalarsMatch = index == other.index
                && enable == other.enable
                && menuId == other.menuId
                && resIcon == other.resIcon;
        if (!scalarsMatch) {
            return false;
        }
        return name == null ? other.name == null : name.equals(other.name);
    }

    /** Classic 31-based hash over the same fields used by equals(). */
    @Override
    public int hashCode() {
        int h = (name != null) ? name.hashCode() : 0;
        h = 31 * h + index;
        h = 31 * h + (enable ? 1 : 0);
        h = 31 * h + menuId;
        h = 31 * h + resIcon;
        return h;
    }

    /** Shallow copy via Object.clone(); all fields are primitives or immutable. */
    @Override
    public Object clone() throws CloneNotSupportedException {
        return super.clone();
    }
}
package bataille;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

/**
 * A playing card: numeric value, display value, suit (enseigne) and the path
 * to its image file. Provides a compact binary wire format via
 * {@link #serialise()} / {@link #initialise(byte[])} (DataOutput/DataInput
 * framing: int value followed by four UTF strings, in that order).
 */
public class Carte {

    private int valeur;
    private String valeurString;
    private String enseigne;
    private String enseigneString;
    private String pathImgFile;

    /** Used only by {@link #initialise(byte[])} to build an instance field by field. */
    private Carte() {
    }

    public Carte(int valeur, String valeurString, String enseigne, String enseigneString, String pathImgFile) {
        this.valeur = valeur;
        this.valeurString = valeurString;
        this.enseigne = enseigne;
        this.enseigneString = enseigneString;
        this.pathImgFile = pathImgFile;
    }

    /**
     * Reconstructs a card from bytes produced by {@link #serialise()}.
     *
     * <p>Best-effort: on a malformed/truncated buffer the IOException is printed
     * and a partially-initialized card is returned (preserves historical behavior).
     *
     * @param data bytes in the serialise() wire format
     * @return the decoded card
     */
    public static Carte initialise(byte[] data) {
        Carte c = new Carte();
        // try-with-resources closes the streams (no-ops for byte-array streams, but idiomatic)
        try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(data))) {
            c.setValeur(dis.readInt());
            c.setValeurString(dis.readUTF());
            c.setEnseigne(dis.readUTF());
            c.setEnseigneString(dis.readUTF());
            c.setPathImgFile(dis.readUTF());
        } catch (IOException e) {
            e.printStackTrace();
        }
        return c;
    }

    /**
     * Encodes this card as: int valeur, then UTF valeurString, enseigne,
     * enseigneString, pathImgFile.
     *
     * @return the encoded bytes, or {@code null} if encoding failed
     *         (e.g. a UTF string longer than 65535 bytes)
     */
    public byte[] serialise() {
        byte[] data = null;
        try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
                DataOutputStream dos = new DataOutputStream(bos)) {
            dos.writeInt(this.getValeur());
            dos.writeUTF(this.getValeurString());
            dos.writeUTF(this.getEnseigne());
            dos.writeUTF(this.getEnseigneString());
            dos.writeUTF(this.getPathImgFile());
            data = bos.toByteArray();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return data;
    }

    /**
     * @return the valeur
     */
    public int getValeur() {
        return valeur;
    }

    /**
     * @param valeur the valeur to set
     */
    public void setValeur(int valeur) {
        this.valeur = valeur;
    }

    public String getValeurString() {
        return valeurString;
    }

    public void setValeurString(String valeurString) {
        this.valeurString = valeurString;
    }

    /**
     * @return the enseigne
     */
    public String getEnseigne() {
        return enseigne;
    }

    /**
     * @param enseigne the enseigne to set
     */
    public void setEnseigne(String enseigne) {
        this.enseigne = enseigne;
    }

    public String getEnseigneString() {
        return enseigneString;
    }

    public void setEnseigneString(String enseigneString) {
        this.enseigneString = enseigneString;
    }

    /**
     * @return the pathImgFile
     */
    public String getPathImgFile() {
        return pathImgFile;
    }

    /**
     * @param pathImgFile the pathImgFile to set
     */
    public void setPathImgFile(String pathImgFile) {
        this.pathImgFile = pathImgFile;
    }

    /**
     * Override toString()
     *
     * @see java.lang.Object#toString()
     */
    public String toString() {
        return "Carte " + this.valeurString + " de " + this.enseigne;
    }
}
package au.gov.qld.fire.jms.web.module.file;

import java.util.ArrayList;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Predicate;
import org.apache.commons.lang.math.NumberUtils;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;

import au.gov.qld.fire.domain.refdata.SupplierTypeEnum;
import au.gov.qld.fire.domain.security.User;
import au.gov.qld.fire.domain.supplier.Supplier;
import au.gov.qld.fire.jms.domain.ase.AseChange;
import au.gov.qld.fire.jms.domain.ase.AseChangeSearchCriteria;
import au.gov.qld.fire.jms.domain.ase.AseChangeSupplier;
import au.gov.qld.fire.jms.domain.ase.AseFile;
import au.gov.qld.fire.jms.domain.file.File;
import au.gov.qld.fire.jms.web.module.AbstractDispatchAction;
import au.gov.qld.fire.util.DateUtils;
import au.gov.qld.fire.util.ThreadLocalUtils;
import au.gov.qld.fire.web.SessionConstants;
import au.gov.qld.fire.web.WebUtils;

/**
 * Struts dispatch action managing ASE (alarm signalling equipment) change jobs on a file:
 * searching for candidate files, adding/removing/completing supplier entries
 * (ASE-installation vs telco), and saving the change with its supplier list.
 *
 * @author Valeri SHIBAEV (mailto:shibaevv@apollosoft.net)
 */
public class AseChangeAction extends AbstractDispatchAction {

    /* (non-Javadoc)
     * @see au.gov.qld.fire.jms.web.module.AbstractDispatchAction#populateRequest(org.apache.struts.action.ActionForm, javax.servlet.http.HttpServletRequest)
     */
    @Override
    protected void populateRequest(ActionForm form, HttpServletRequest request) throws Exception {
        //set references: active connection types plus the two supplier lists used by the JSP drop-downs
        request.setAttribute(SessionConstants.ASE_CONN_TYPES, getEntityService().findAseConnTypesActive());
        request.setAttribute(SessionConstants.ASE_INSTALLATION_SUPPLIERS, getSupplierService()
                .findSuppliers(SupplierTypeEnum.ASE_INSTALLATION));
        request.setAttribute(SessionConstants.TELCO_SUPPLIERS, getSupplierService().findSuppliers(
                SupplierTypeEnum.TELCO));
    }

    /* (non-Javadoc)
     * @see au.gov.qld.fire.jms.web.module.AbstractDispatchAction#populateForm(org.apache.struts.action.ActionMapping, org.apache.struts.action.ActionForm, javax.servlet.http.HttpServletRequest)
     */
    @Override
    protected void populateForm(ActionMapping mapping, ActionForm form, HttpServletRequest request)
            throws Exception {
        FileForm myform = (FileForm) form;
        setEntity(myform, request);
        File entity = myform.getEntity();
        //aseChangeSuppliers: re-partition the entity's suppliers into the form's two lists
        List<AseChangeSupplier> aseChangeSuppliers = entity.getAseFile().getAseChange()
                .getAseChangeSuppliers();
        myform.getAseInstallationAseChangeSuppliers().clear();
        myform.getTelcoAseChangeSuppliers().clear();
        for (AseChangeSupplier aseChangeSupplier : aseChangeSuppliers) {
            Supplier supplier = aseChangeSupplier.getSupplier();
            if (supplier.isAseInstallation()) {
                myform.getAseInstallationAseChangeSuppliers().add(aseChangeSupplier);
            } else //if (supplier.isTelco())
            {
                // anything that is not an ASE installation is treated as telco here
                myform.getTelcoAseChangeSuppliers().add(aseChangeSupplier);
            }
        }
    }

    /**
     * Search by FCA, FileNo or Building Name (%)
     * Exclude completed ASE Changes
     * Show only ASE Installation or Carrier Installations or Both that are incomplete
     * Show Only FCA/Files/Buildings that no ASE change job times have been created
     * Limit Search by Supplier Name
     *
     * @param mapping
     * @param form
     * @param request
     * @param response
     * @return input forward with matching files in the ENTITIES request attribute
     * @throws Exception
     */
    public ActionForward find(ActionMapping mapping, ActionForm form, HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        LOG.debug("INSIDE find()..");
        try {
            //Search by FCA, FileNo or Building Name (%)
            String fileId = request.getParameter(SessionConstants.FILE_ID);
            String fcaId = request.getParameter(SessionConstants.FCA_ID);
            String buildingId = request.getParameter(SessionConstants.BUILDING_ID);
            String buildingName = request.getParameter(SessionConstants.BUILDING_NAME);
            //Limit Search by Supplier Name: a supplier-bound user only sees their own files
            User user = getUserService().findUserById(ThreadLocalUtils.getUser().getId());
            Supplier supplier = user.getSupplier();
            String supplierName = supplier == null ? null : supplier.getName();
            AseChangeSearchCriteria criteria = new AseChangeSearchCriteria();
            criteria.setFileNo(fileId);
            criteria.setFcaNo(fcaId);
            criteria.setBuildingId(NumberUtils.toLong(buildingId, 0L)); // 0L when absent/non-numeric
            criteria.setBuildingName(buildingName);
            criteria.setSupplierName(supplierName);
            request.setAttribute(SessionConstants.ENTITIES, getFileService().findAseChangeFiles(criteria));
            return mapping.getInputForward();
        } catch (Exception e) {
            saveErrors(request, response, toActionErrors(e));
            return mapping.getInputForward();
        }
    }

    /**
     * Adds a new (unsaved) AseChangeSupplier row to either the ASE-installation
     * or the telco list of the form, depending on the "aseInstallation" request parameter.
     *
     * @param mapping
     * @param form
     * @param request
     * @param response
     * @return input forward (same page, refreshed references)
     * @throws Exception
     */
    public ActionForward addAseChangeSupplier(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response) throws Exception {
        LOG.debug("INSIDE addAseChangeSupplier()..");
        try {
            //set form data
            FileForm myform = (FileForm) form;
            AseFile aseFile = myform.getEntity().getAseFile();
            AseChange aseChange = aseFile.getAseChange();
            //
            AseChangeSupplier aseChangeSupplier = new AseChangeSupplier();
            //set parent
            aseChangeSupplier.setAseChange(aseChange);
            Supplier supplier = aseChangeSupplier.getSupplier();
            if (Boolean.parseBoolean(request.getParameter("aseInstallation"))) {
                supplier.getSupplierType().setSupplierTypeId(
                        SupplierTypeEnum.ASE_INSTALLATION.getId());
                myform.getAseInstallationAseChangeSuppliers().add(aseChangeSupplier);
            } else //if (Boolean.parseBoolean(request.getParameter("telco")))
            {
                supplier.getSupplierType().setSupplierTypeId(SupplierTypeEnum.TELCO.getId());
                myform.getTelcoAseChangeSuppliers().add(aseChangeSupplier);
            }
            //
            populateRequest(form, request);
            return mapping.getInputForward();
        } catch (Exception e) {
            saveErrors(request, response, toActionErrors(e));
            populateRequest(form, request);
            return mapping.getInputForward();
        }
    }

    /**
     * Removes the supplier row at the request's index from the appropriate list:
     * unsaved rows are dropped outright, persisted rows are flagged logically deleted.
     *
     * @param mapping
     * @param form
     * @param request
     * @param response
     * @return input forward
     * @throws Exception
     */
    public ActionForward removeAseChangeSupplier(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response) throws Exception {
        LOG.debug("INSIDE removeAseChangeSupplier()..");
        try {
            //get index to remove
            int index = WebUtils.getIndex(request);
            //set form data
            FileForm myform = (FileForm) form;
            //get aseInstallation/telco aseChangeSuppliers
            List<AseChangeSupplier> aseChangeSuppliers = null;
            if (Boolean.parseBoolean(request.getParameter("aseInstallation"))) {
                aseChangeSuppliers = myform.getAseInstallationAseChangeSuppliers();
            } else //if (Boolean.parseBoolean(request.getParameter("telco")))
            {
                aseChangeSuppliers = myform.getTelcoAseChangeSuppliers();
            }
            AseChangeSupplier aseChangeSupplier = aseChangeSuppliers.get(index);
            if (aseChangeSupplier.getId() == null) {
                //was never saved
                aseChangeSuppliers.remove(aseChangeSupplier);
            } else {
                aseChangeSupplier.setLogicallyDeleted(Boolean.TRUE);
            }
            //
            populateRequest(form, request);
            return mapping.getInputForward();
        } catch (Exception e) {
            saveErrors(request, response, toActionErrors(e));
            populateRequest(form, request);
            return mapping.getInputForward();
        }
    }

    /**
     * Marks the supplier row matching the request's id as completed
     * (stamps the current date/time on it).
     *
     * @param mapping
     * @param form
     * @param request
     * @param response
     * @return input forward
     * @throws Exception
     */
    public ActionForward completeAseChangeSupplier(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response) throws Exception {
        LOG.debug("INSIDE completeAseChangeSupplier()..");
        try {
            Long aseChangeSupplierId = WebUtils.getLongId(request);
            if (aseChangeSupplierId != null) {
                //set form data
                FileForm myform = (FileForm) form;
                //
                List<AseChangeSupplier> aseChangeSuppliers;
                if (Boolean.parseBoolean(request.getParameter("aseInstallation"))) {
                    aseChangeSuppliers = myform.getAseInstallationAseChangeSuppliers();
                } else //if (Boolean.parseBoolean(request.getParameter("telco")))
                {
                    aseChangeSuppliers = myform.getTelcoAseChangeSuppliers();
                }
                for (AseChangeSupplier aseChangeSupplier : aseChangeSuppliers) {
                    if (aseChangeSupplierId.equals(aseChangeSupplier.getId())) {
                        aseChangeSupplier.setDateCompleted(DateUtils.getCurrentDateTime());
                        break;
                    }
                }
            }
            //
            populateRequest(form, request);
            return mapping.getInputForward();
        } catch (Exception e) {
            saveErrors(request, response, toActionErrors(e));
            populateRequest(form, request);
            return mapping.getInputForward();
        }
    }

    /**
     * Present edit entity form.
     * @param mapping
     * @param form
     * @param request
     * @param response
     * @return input forward (read-only view: references are not loaded)
     * @throws Exception
     */
    public ActionForward view(ActionMapping mapping, ActionForm form, HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        LOG.debug("INSIDE view()..");
        try {
            populateForm(mapping, form, request);
            return mapping.getInputForward();
        } catch (Exception e) {
            saveErrors(request, response, toActionErrors(e));
            return mapping.getInputForward();
        }
    }

    /**
     * Present edit entity form.
     * @param mapping
     * @param form
     * @param request
     * @param response
     * @return input forward with form and reference data populated
     * @throws Exception
     */
    public ActionForward edit(ActionMapping mapping, ActionForm form, HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        LOG.debug("INSIDE edit()..");
        try {
            populateForm(mapping, form, request);
            populateRequest(form, request);
            return mapping.getInputForward();
        } catch (Exception e) {
            saveErrors(request, response, toActionErrors(e));
            populateRequest(form, request);
            return mapping.getInputForward();
        }
    }

    /**
     * Update/Insert new user.
     * @param mapping
     * @param form
     * @param request
     * @param response
     * @return success forward on save, error forward on validation/save failure
     * @throws Exception
     */
    public ActionForward save(ActionMapping mapping, ActionForm form, HttpServletRequest request,
            HttpServletResponse response) throws Exception {
        LOG.debug("INSIDE save()..");
        try {
            ActionErrors errors = form.validate(mapping, request);
            if (!errors.isEmpty()) {
                saveErrors(request, response, errors);
                populateRequest(form, request);
                return findForwardError(mapping);
            }
            FileForm myform = (FileForm) form;
            AseFile aseFile = myform.getEntity().getAseFile();
            AseChange entity = aseFile.getAseChange();
            //update date change: combine the form's separate date and time strings
            String dateString = myform.getAseDateChange();
            String timeString = myform.getAseTimeChange();
            entity.setDateChange(DateUtils.parse(dateString, timeString, DateUtils.D_M_YYYY_H_mm));
            //remove AseInstallation and Telco: keep only rows of other types, then re-add the edited lists
            List<AseChangeSupplier> aseChangeSuppliers = new ArrayList<AseChangeSupplier>();
            CollectionUtils.select(entity.getAseChangeSuppliers(), new Predicate() {
                /* (non-Javadoc)
                 * @see org.apache.commons.collections.Predicate#evaluate(java.lang.Object)
                 */
                public boolean evaluate(Object obj) {
                    AseChangeSupplier aseChangeSupplier = (AseChangeSupplier) obj;
                    Supplier supplier = aseChangeSupplier.getSupplier();
                    return !supplier.isAseInstallation() && !supplier.isTelco();
                }
            }, aseChangeSuppliers);
            //add modified items
            aseChangeSuppliers.addAll(myform.getAseInstallationAseChangeSuppliers());
            aseChangeSuppliers.addAll(myform.getTelcoAseChangeSuppliers());
            entity.setAseChangeSuppliers(aseChangeSuppliers);
            //save changes (if any)
            getFileService().saveAseChange(entity);
            return findForwardSuccess(mapping);
        } catch (Exception e) {
            saveErrors(request, response, toActionErrors(e));
            populateRequest(form, request);
            return findForwardError(mapping);
        }
    }
}
package com.example.administrator.coolweather.model; /** * Created by Administrator on 2016-08-25. */ public class County { private int id; private String countyName; private String countyCode; private int cityId; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getCountyName() { return countyName; } public void setCountyName(String countyName) { this.countyName = countyName; } public String getCountyCode() { return countyCode; } public void setCountyCode(String countyCode) { this.countyCode = countyCode; } public int getCityId() { return cityId; } public void setCityId(int cityId) { this.cityId = cityId; } }
// Copyright 2021 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.engine.rendering.nui.layers.mainMenu;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.gestalt.assets.ResourceUrn;
import org.terasology.engine.config.Config;
import org.terasology.engine.i18n.TranslationSystem;
import org.terasology.engine.rendering.nui.animation.MenuAnimationSystems;
import org.terasology.nui.WidgetUtil;
import org.terasology.nui.widgets.UIScrollingText;
import org.terasology.engine.registry.In;
import org.terasology.engine.rendering.nui.CoreScreenLayer;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;

/**
 * Main-menu screen that loads the bundled {@code Credits.md} resource,
 * strips its Markdown link syntax, and shows it as auto-scrolling text.
 */
public class CreditsScreen extends CoreScreenLayer {

    public static final ResourceUrn ASSET_URI = new ResourceUrn("engine:creditsScreen");

    private static final Logger LOGGER = LoggerFactory.getLogger(CreditsScreen.class);

    // Matches a Markdown link "[text](url)"; capture group 1 is the link text.
    // Compiled once instead of re-compiling inside the per-line replaceAll call.
    private static final Pattern MARKDOWN_LINK = Pattern.compile("\\[([^]]*)]\\(([^)]+)\\)");

    @In
    private Config config;

    @In
    private TranslationSystem translationSystem;

    private UIScrollingText creditsScroll;

    /**
     * Wires up the back button, reads Credits.md from the classpath and feeds
     * the reformatted text into the scrolling widget.
     */
    @Override
    @SuppressWarnings("unchecked")
    public void initialise() {
        setAnimationSystem(MenuAnimationSystems.createDefaultSwipeAnimation());
        WidgetUtil.trySubscribe(this, "back", button -> triggerBackAnimation());

        creditsScroll = find("creditsScroll", UIScrollingText.class);
        if (creditsScroll != null) {
            StringBuilder credits = new StringBuilder();
            ClassLoader classloader = getClass().getClassLoader();
            InputStream is = classloader.getResourceAsStream("Credits.md");
            if (is == null) {
                // Resource missing from the jar — show a translated error instead.
                credits.append(translationSystem.translate("${engine:menu#error-credits-not-found}"));
            } else {
                try (BufferedReader br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {
                    String line;
                    while ((line = br.readLine()) != null) {
                        // Replace Markdown links with their bare text, then trim.
                        line = MARKDOWN_LINK.matcher(line).replaceAll("$1").trim();
                        if (line.startsWith("* ")) {
                            if (line.endsWith(":")) {
                                // A list item ending in ":" is a section heading:
                                // drop the trailing colon and surround with blank lines.
                                credits.append(System.lineSeparator());
                                credits.append(line, 2, line.length() - 1);
                                credits.append(System.lineSeparator());
                                credits.append(System.lineSeparator());
                            } else {
                                // Ordinary list item: drop the "* " bullet prefix.
                                credits.append(line, 2, line.length());
                                credits.append(System.lineSeparator());
                            }
                        } else {
                            credits.append(line);
                            credits.append(System.lineSeparator());
                        }
                    }
                } catch (IOException e) {
                    // Log with the cause instead of swallowing the stack trace at info level.
                    LOGGER.error("Could not open Credits file", e);
                    // BUGFIX: the translation key was missing its closing '}'
                    // ("${engine:menu#error-credits-open"), so the i18n lookup
                    // could never resolve; compare the well-formed key above.
                    credits = new StringBuilder(translationSystem.translate("${engine:menu#error-credits-open}"));
                }
            }
            creditsScroll.setText(credits.toString());
            creditsScroll.setAutoReset(false);
            creditsScroll.setScrollingSpeed(1);
            creditsScroll.startScrolling();
        }
    }

    /** Restarts the scroll from the top each time the screen is (re)opened. */
    @Override
    public void onOpened() {
        super.onOpened();
        if (creditsScroll != null) {
            creditsScroll.resetScrolling();
        }
    }

    /** The credits screen fully covers whatever is underneath it. */
    @Override
    public boolean isLowerLayerVisible() {
        return false;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.azkfw.biz.graphics.chart.entity;

import java.util.List;

/**
 * Holder for the information of a polar-area (rose) chart: the data series
 * plus its primary and secondary axis settings.
 *
 * @since 1.1.0
 * @version 1.1.0 2014/06/13
 * @author Kawakicchi
 */
public final class PolarAreaChart extends AbstractChart {

    /** Data series of the chart. */
    private List<PolarAreaChartData> datas;

    /** Primary axis settings. */
    private PolarAreaChartAxis axis;

    /** Secondary axis settings. */
    private PolarAreaChartSubAxis subAxis;

    /**
     * Stores the data series of this chart.
     *
     * @param dataList data series to hold
     */
    public void setDatas(final List<PolarAreaChartData> dataList) {
        datas = dataList;
    }

    /**
     * Returns the data series of this chart.
     *
     * @return data series, or {@code null} if none was set
     */
    public List<PolarAreaChartData> getDatas() {
        return datas;
    }

    /**
     * Stores the primary axis settings.
     *
     * @param chartAxis axis settings to hold
     */
    public void setAxis(final PolarAreaChartAxis chartAxis) {
        axis = chartAxis;
    }

    /**
     * Returns the primary axis settings.
     *
     * @return axis settings, or {@code null} if none was set
     */
    public PolarAreaChartAxis getAxis() {
        return axis;
    }

    /**
     * Stores the secondary axis settings.
     *
     * @param chartSubAxis secondary axis settings to hold
     */
    public void setSubAxis(final PolarAreaChartSubAxis chartSubAxis) {
        subAxis = chartSubAxis;
    }

    /**
     * Returns the secondary axis settings.
     *
     * @return secondary axis settings, or {@code null} if none was set
     */
    public PolarAreaChartSubAxis getSubAxis() {
        return subAxis;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.streaming.connectors.kinesis.util; import org.apache.flink.streaming.connectors.kinesis.config.AWSConfigConstants; import org.apache.flink.streaming.connectors.kinesis.config.ConsumerConfigConstants; import org.apache.flink.streaming.connectors.kinesis.config.ProducerConfigConstants; import com.amazonaws.services.kinesis.producer.KinesisProducerConfiguration; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import java.util.Properties; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; /** * Tests for KinesisConfigUtil. 
*/ @RunWith(PowerMockRunner.class) @PrepareForTest(KinesisConfigUtil.class) public class KinesisConfigUtilTest { @Rule private ExpectedException exception = ExpectedException.none(); // ---------------------------------------------------------------------- // getValidatedProducerConfiguration() tests // ---------------------------------------------------------------------- @Test public void testUnparsableLongForProducerConfiguration() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Error trying to set field RateLimit with the value 'unparsableLong'"); Properties testConfig = new Properties(); testConfig.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1"); testConfig.setProperty("RateLimit", "unparsableLong"); KinesisConfigUtil.getValidatedProducerConfiguration(testConfig); } @Test public void testRateLimitInProducerConfiguration() { Properties testConfig = new Properties(); testConfig.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1"); KinesisProducerConfiguration kpc = KinesisConfigUtil.getValidatedProducerConfiguration(testConfig); assertEquals(100, kpc.getRateLimit()); testConfig.setProperty(KinesisConfigUtil.RATE_LIMIT, "150"); kpc = KinesisConfigUtil.getValidatedProducerConfiguration(testConfig); assertEquals(150, kpc.getRateLimit()); } @Test public void testThreadingModelInProducerConfiguration() { Properties testConfig = new Properties(); testConfig.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1"); KinesisProducerConfiguration kpc = KinesisConfigUtil.getValidatedProducerConfiguration(testConfig); assertEquals(KinesisProducerConfiguration.ThreadingModel.POOLED, kpc.getThreadingModel()); testConfig.setProperty(KinesisConfigUtil.THREADING_MODEL, "PER_REQUEST"); kpc = KinesisConfigUtil.getValidatedProducerConfiguration(testConfig); assertEquals(KinesisProducerConfiguration.ThreadingModel.PER_REQUEST, kpc.getThreadingModel()); } @Test public void testThreadPoolSizeInProducerConfiguration() { Properties testConfig = new 
Properties(); testConfig.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1"); KinesisProducerConfiguration kpc = KinesisConfigUtil.getValidatedProducerConfiguration(testConfig); assertEquals(10, kpc.getThreadPoolSize()); testConfig.setProperty(KinesisConfigUtil.THREAD_POOL_SIZE, "12"); kpc = KinesisConfigUtil.getValidatedProducerConfiguration(testConfig); assertEquals(12, kpc.getThreadPoolSize()); } @Test public void testReplaceDeprecatedKeys() { Properties testConfig = new Properties(); testConfig.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1"); // these deprecated keys should be replaced testConfig.setProperty(ProducerConfigConstants.AGGREGATION_MAX_COUNT, "1"); testConfig.setProperty(ProducerConfigConstants.COLLECTION_MAX_COUNT, "2"); Properties replacedConfig = KinesisConfigUtil.replaceDeprecatedProducerKeys(testConfig); assertEquals("1", replacedConfig.getProperty(KinesisConfigUtil.AGGREGATION_MAX_COUNT)); assertEquals("2", replacedConfig.getProperty(KinesisConfigUtil.COLLECTION_MAX_COUNT)); } // ---------------------------------------------------------------------- // validateAwsConfiguration() tests // ---------------------------------------------------------------------- @Test public void testMissingAwsRegionInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("The AWS region ('" + AWSConfigConstants.AWS_REGION + "') must be set in the config."); Properties testConfig = new Properties(); testConfig.setProperty(AWSConfigConstants.AWS_ACCESS_KEY_ID, "accessKey"); testConfig.setProperty(AWSConfigConstants.AWS_SECRET_ACCESS_KEY, "secretKey"); KinesisConfigUtil.validateAwsConfiguration(testConfig); } @Test public void testUnrecognizableAwsRegionInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid AWS region"); Properties testConfig = new Properties(); testConfig.setProperty(AWSConfigConstants.AWS_REGION, "wrongRegionId"); 
testConfig.setProperty(AWSConfigConstants.AWS_ACCESS_KEY_ID, "accessKeyId"); testConfig.setProperty(AWSConfigConstants.AWS_SECRET_ACCESS_KEY, "secretKey"); KinesisConfigUtil.validateAwsConfiguration(testConfig); } @Test public void testCredentialProviderTypeSetToBasicButNoCredentialSetInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Please set values for AWS Access Key ID ('" + AWSConfigConstants.AWS_ACCESS_KEY_ID + "') " + "and Secret Key ('" + AWSConfigConstants.AWS_SECRET_ACCESS_KEY + "') when using the BASIC AWS credential provider type."); Properties testConfig = new Properties(); testConfig.setProperty(AWSConfigConstants.AWS_REGION, "us-east-1"); testConfig.setProperty(AWSConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC"); KinesisConfigUtil.validateAwsConfiguration(testConfig); } @Test public void testUnrecognizableCredentialProviderTypeInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid AWS Credential Provider Type"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(AWSConfigConstants.AWS_CREDENTIALS_PROVIDER, "wrongProviderType"); KinesisConfigUtil.validateAwsConfiguration(testConfig); } // ---------------------------------------------------------------------- // validateConsumerConfiguration() tests // ---------------------------------------------------------------------- @Test public void testUnrecognizableStreamInitPositionTypeInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid initial position in stream"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "wrongInitPosition"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testStreamInitPositionTypeSetToAtTimestampButNoInitTimestampSetInConfig() { 
exception.expect(IllegalArgumentException.class); exception.expectMessage("Please set value for initial timestamp ('" + ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP + "') when using AT_TIMESTAMP initial position."); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "AT_TIMESTAMP"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableDateForInitialTimestampInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for initial timestamp for AT_TIMESTAMP initial position in stream."); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "AT_TIMESTAMP"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP, "unparsableDate"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testIllegalValueForInitialTimestampInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for initial timestamp for AT_TIMESTAMP initial position in stream."); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "AT_TIMESTAMP"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP, "-1.0"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testDateStringForValidateOptionDateProperty() { String timestamp = "2016-04-04T19:58:46.480-00:00"; Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC"); 
testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "AT_TIMESTAMP"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP, timestamp); try { KinesisConfigUtil.validateConsumerConfiguration(testConfig); } catch (Exception e) { e.printStackTrace(); fail(); } } @Test public void testUnixTimestampForValidateOptionDateProperty() { String unixTimestamp = "1459799926.480"; Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "AT_TIMESTAMP"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP, unixTimestamp); try { KinesisConfigUtil.validateConsumerConfiguration(testConfig); } catch (Exception e) { e.printStackTrace(); fail(); } } @Test public void testInvalidPatternForInitialTimestampInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for initial timestamp for AT_TIMESTAMP initial position in stream."); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "AT_TIMESTAMP"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP, "2016-03-14"); testConfig.setProperty(ConsumerConfigConstants.STREAM_TIMESTAMP_DATE_FORMAT, "InvalidPattern"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableDateForUserDefinedDateFormatForInitialTimestampInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for initial timestamp for AT_TIMESTAMP initial position in stream."); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC"); 
testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "AT_TIMESTAMP"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP, "stillUnparsable"); testConfig.setProperty(ConsumerConfigConstants.STREAM_TIMESTAMP_DATE_FORMAT, "yyyy-MM-dd"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testDateStringForUserDefinedDateFormatForValidateOptionDateProperty() { String unixTimestamp = "2016-04-04"; String pattern = "yyyy-MM-dd"; Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.AWS_CREDENTIALS_PROVIDER, "BASIC"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "AT_TIMESTAMP"); testConfig.setProperty(ConsumerConfigConstants.STREAM_INITIAL_TIMESTAMP, unixTimestamp); testConfig.setProperty(ConsumerConfigConstants.STREAM_TIMESTAMP_DATE_FORMAT, pattern); try { KinesisConfigUtil.validateConsumerConfiguration(testConfig); } catch (Exception e) { e.printStackTrace(); fail(); } } @Test public void testUnparsableLongForDescribeStreamBackoffBaseMillisInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for describe stream operation base backoff milliseconds"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_BASE, "unparsableLong"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableLongForDescribeStreamBackoffMaxMillisInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for describe stream operation max backoff milliseconds"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_MAX, "unparsableLong"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void 
testUnparsableDoubleForDescribeStreamBackoffExponentialConstantInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for describe stream operation backoff exponential constant"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.STREAM_DESCRIBE_BACKOFF_EXPONENTIAL_CONSTANT, "unparsableDouble"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableIntForGetRecordsRetriesInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for maximum retry attempts for getRecords shard operation"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.SHARD_GETRECORDS_RETRIES, "unparsableInt"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableIntForGetRecordsMaxCountInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for maximum records per getRecords shard operation"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.SHARD_GETRECORDS_MAX, "unparsableInt"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableLongForGetRecordsBackoffBaseMillisInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for get records operation base backoff milliseconds"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_BASE, "unparsableLong"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableLongForGetRecordsBackoffMaxMillisInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for get records operation max backoff 
milliseconds"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_MAX, "unparsableLong"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableDoubleForGetRecordsBackoffExponentialConstantInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for get records operation backoff exponential constant"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.SHARD_GETRECORDS_BACKOFF_EXPONENTIAL_CONSTANT, "unparsableDouble"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableLongForGetRecordsIntervalMillisInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for getRecords sleep interval in milliseconds"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.SHARD_GETRECORDS_INTERVAL_MILLIS, "unparsableLong"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableIntForGetShardIteratorRetriesInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for maximum retry attempts for getShardIterator shard operation"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.SHARD_GETITERATOR_RETRIES, "unparsableInt"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableLongForGetShardIteratorBackoffBaseMillisInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for get shard iterator operation base backoff milliseconds"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_BASE, 
"unparsableLong"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableLongForGetShardIteratorBackoffMaxMillisInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for get shard iterator operation max backoff milliseconds"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_MAX, "unparsableLong"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableDoubleForGetShardIteratorBackoffExponentialConstantInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for get shard iterator operation backoff exponential constant"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.SHARD_GETITERATOR_BACKOFF_EXPONENTIAL_CONSTANT, "unparsableDouble"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } @Test public void testUnparsableLongForShardDiscoveryIntervalMillisInConfig() { exception.expect(IllegalArgumentException.class); exception.expectMessage("Invalid value given for shard discovery sleep interval in milliseconds"); Properties testConfig = getPropertiesWithRequiredFields(); testConfig.setProperty(ConsumerConfigConstants.SHARD_DISCOVERY_INTERVAL_MILLIS, "unparsableLong"); KinesisConfigUtil.validateConsumerConfiguration(testConfig); } private Properties getPropertiesWithRequiredFields() { Properties config = new Properties(); config.setProperty(ConsumerConfigConstants.AWS_REGION, "us-east-1"); config.setProperty(ConsumerConfigConstants.AWS_ACCESS_KEY_ID, "accessKeyId"); config.setProperty(ConsumerConfigConstants.AWS_SECRET_ACCESS_KEY, "secretKey"); return config; } }
/**
 * Copyright 2018 人人开源 http://www.renren.io
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.ly.lyadmin.shiro;

import org.apache.shiro.SecurityUtils;
import org.apache.shiro.subject.Subject;
import org.springframework.stereotype.Component;

/**
 * Spring bean exposing a Shiro permission check (e.g. for use from templates).
 *
 * @author SLIGHTLEE
 * @since 2019/10/15
 */
@Component
public class ShiroTag {

    /**
     * Checks whether the currently executing subject holds the given permission.
     *
     * @param permission permission identifier to test
     * @return {@code true} if a subject exists and is permitted, {@code false} otherwise
     */
    public boolean hasPermission(String permission) {
        final Subject currentUser = SecurityUtils.getSubject();
        if (currentUser == null) {
            return false;
        }
        return currentUser.isPermitted(permission);
    }
}
/**
 * Package for exception types used by the InListExt application.
 * (Presumably custom exceptions live alongside this file — verify against
 * the rest of the package.)
 *
 * @author Lonnie
 */
package net.lharbour.inlistext.exceptions;
package com.eltechs.axs.xserver.impl.drawables;

import android.util.Log;
import java.util.Collection;
import java.util.Collections;

/**
 * Base implementation of {@link DrawablesFactory} that stores the supported
 * visuals and image formats and resolves visuals by id or depth, always
 * preferring the configured "preferred" visual.
 */
public abstract class DrawablesFactoryImplBase implements DrawablesFactory {

    private final Visual preferredVisual;
    private final Collection<ImageFormat> supportedImageFormats;
    private final Collection<Visual> supportedVisuals;

    /**
     * @param visuals      visuals this factory can produce drawables for
     * @param imageFormats image formats this factory supports
     * @param preferred    visual to favour in all lookups
     */
    public DrawablesFactoryImplBase(Collection<Visual> visuals, Collection<ImageFormat> imageFormats, Visual preferred) {
        this.supportedVisuals = visuals;
        this.supportedImageFormats = imageFormats;
        this.preferredVisual = preferred;
    }

    /** Read-only view of the supported visuals. */
    public final Collection<Visual> getSupportedVisuals() {
        return Collections.unmodifiableCollection(this.supportedVisuals);
    }

    /** Read-only view of the supported image formats. */
    public final Collection<ImageFormat> getSupportedImageFormats() {
        return Collections.unmodifiableCollection(this.supportedImageFormats);
    }

    /**
     * Resolves a visual by id. The preferred visual is returned without a
     * displayability check; any other supported visual must also be displayable.
     *
     * @return the matching visual, or {@code null} if none matches
     */
    public final Visual getVisual(int visualId) {
        if (visualId == this.preferredVisual.getId()) {
            return this.preferredVisual;
        }
        for (Visual candidate : this.supportedVisuals) {
            if (candidate.getId() == visualId && candidate.isDisplayable()) {
                return candidate;
            }
        }
        return null;
    }

    public final Visual getPreferredVisual() {
        return this.preferredVisual;
    }

    /**
     * Resolves a visual by colour depth, favouring the preferred visual.
     *
     * @return the first visual with the requested depth, or {@code null}
     */
    @Override
    public final Visual getPreferredVisualForDepth(int depth) {
        Log.d("Exagear", "Checking " + depth + " == " + this.preferredVisual.getDepth());
        if (depth == this.preferredVisual.getDepth()) {
            return this.preferredVisual;
        }
        for (Visual candidate : this.supportedVisuals) {
            Log.d("Exagear", "Checking " + depth + " == " + candidate.getDepth());
            if (depth == candidate.getDepth()) {
                return candidate;
            }
        }
        return null;
    }
}
/*
 * Copyright 2012 Achim Nierbeck.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.opennms.vaadin.extender;

import org.osgi.framework.Bundle;

/**
 * Service for registering and unregistering the Vaadin resources contributed
 * by an OSGi bundle (invoked by the extender as bundles come and go —
 * verify against the implementations).
 */
public interface VaadinResourceService {

    /**
     * Registers the resources provided by the given bundle.
     *
     * @param bundle the contributing OSGi bundle
     */
    void addResources(Bundle bundle);

    /**
     * Removes the resources previously registered for the given bundle.
     *
     * @param bundle the contributing OSGi bundle
     */
    void removeResources(Bundle bundle);
}
/* * Copyright 2009, Andrej Viepraŭ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package rascal; import java.io.InputStream; public interface ConfigurationInputStreamAware { void setConfigurationInputStream(InputStream stream); }
/* file: InputDataCollection.java */ /******************************************************************************* * Copyright 2014-2018 Intel Corporation * All Rights Reserved. * * If this software was obtained under the Intel Simplified Software License, * the following terms apply: * * The source code, information and material ("Material") contained herein is * owned by Intel Corporation or its suppliers or licensors, and title to such * Material remains with Intel Corporation or its suppliers or licensors. The * Material contains proprietary information of Intel or its suppliers and * licensors. The Material is protected by worldwide copyright laws and treaty * provisions. No part of the Material may be used, copied, reproduced, * modified, published, uploaded, posted, transmitted, distributed or disclosed * in any way without Intel's prior express written permission. No license under * any patent, copyright or other intellectual property rights in the Material * is granted to or conferred upon you, either expressly, by implication, * inducement, estoppel or otherwise. Any license under such intellectual * property rights must be express and approved by Intel in writing. * * Unless otherwise agreed by Intel in writing, you may not remove or alter this * notice or any other notice embedded in Materials by Intel or Intel's * suppliers or licensors in any way. * * * If this software was obtained under the Apache License, Version 2.0 (the * "License"), the following terms apply: * * You may not use this file except in compliance with the License. You may * obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. 
*******************************************************************************/

/**
 * @defgroup svm_quality_metric_set Quality Metrics
 * @brief Contains classes to check the quality of the model trained with the SVM algorithm
 * @ingroup svm
 * @{
 */
package com.intel.daal.algorithms.svm.quality_metric_set;

import com.intel.daal.algorithms.ComputeMode;
import com.intel.daal.algorithms.classifier.quality_metric.binary_confusion_matrix.BinaryConfusionMatrixInput;
import com.intel.daal.services.DaalContext;

/**
 * <a name="DAAL-CLASS-ALGORITHMS__SVM__QUALITY_METRIC_SET__INPUTDATACOLLECTION"></a>
 * @brief Class that implements functionality of the collection of input objects for the quality metrics algorithm
 */
public class InputDataCollection extends com.intel.daal.algorithms.quality_metric_set.InputDataCollection {
    /** @private */
    static {
        /* Load the native library backing this API before any native calls are made. */
        System.loadLibrary("JavaAPI");
    }

    /**
     * Constructs the collection of input objects for the quality metrics algorithm.
     *
     * @param context    Context to manage the collection
     * @param cAlgorithm Handle to the underlying algorithm object (used by the
     *                   native layer — see {@code cGetInput} below)
     * @param cmode      Computation mode of the algorithm
     */
    public InputDataCollection(DaalContext context, long cAlgorithm, ComputeMode cmode) {
        super(context, cAlgorithm, cmode);
    }

    /**
     * Returns the element that matches the identifier
     * @param id Identifier of the quality metric
     * @return Input object
     * @throws IllegalArgumentException if {@code id} is not
     *         {@code QualityMetricId.confusionMatrix}, the only metric this
     *         collection supports
     */
    public BinaryConfusionMatrixInput getInput(QualityMetricId id) {
        if (id != QualityMetricId.confusionMatrix) {
            throw new IllegalArgumentException("id unsupported");
        }
        return new BinaryConfusionMatrixInput(getContext(), cGetInput(getCObject(), id.getValue()));
    }
}
/** @} */
package com.example.administrator.coolweather.model;
package pl.grzeslowski.jsupla.protocol.api.structs.sc;

import pl.grzeslowski.jsupla.protocol.api.structs.StructTest;

/**
 * Struct test specialization for {@link SuplaRegisterClientResult}; all test
 * cases are inherited from the generic {@link StructTest} base class.
 */
public class SuplaRegisterClientResultTest extends StructTest<SuplaRegisterClientResult> {

    /** Tells the base harness which struct class is under test. */
    @Override
    public Class<SuplaRegisterClientResult> getTestClass() {
        return SuplaRegisterClientResult.class;
    }
}
//========================================= // KYLE RUSSELL // AUT UNIVERSITY 2016 // https://github.com/denkers/collector-app //========================================= package com.kyleruss.collector.ejb.entity; import java.io.Serializable; import javax.persistence.Basic; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.Table; @Entity @Table(name = "deck_cards") public class DeckCards implements Serializable { @Id @GeneratedValue(strategy = GenerationType.IDENTITY) @Basic(optional = false) @Column(name = "id") private Integer id; @Column(name = "deck_pos") private Integer deckPos; @JoinColumn(name = "card_id", referencedColumnName = "id") @ManyToOne(optional = false, fetch = FetchType.LAZY) private Cards cards; @JoinColumn(name = "deck_id", referencedColumnName = "id") @ManyToOne(optional = false, fetch = FetchType.LAZY) private Decks decks; public DeckCards() {} public DeckCards(Integer id) { this.id = id; } public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public Integer getDeckPos() { return deckPos; } public void setDeckPos(Integer deckPos) { this.deckPos = deckPos; } public Cards getCards() { return cards; } public void setCards(Cards cards) { this.cards = cards; } public Decks getDecks() { return decks; } public void setDecks(Decks decks) { this.decks = decks; } @Override public int hashCode() { int hash = 0; hash += (id != null ? id.hashCode() : 0); return hash; } @Override public boolean equals(Object object) { if (!(object instanceof DeckCards)) return false; DeckCards other = (DeckCards) object; return this.id.equals(other.id); } @Override public String toString() { return "com.kyleruss.collector.ejb.entity.DeckCards[ id=" + id + " ]"; } }
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.master.meta; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import alluxio.ConfigurationRule; import alluxio.conf.PropertyKey; import alluxio.conf.Configuration; import alluxio.grpc.BackupPStatus; import alluxio.grpc.BackupState; import alluxio.master.BackupManager; import alluxio.resource.CloseableResource; import alluxio.underfs.UfsFileStatus; import alluxio.underfs.UfsManager; import alluxio.underfs.UfsStatus; import alluxio.underfs.UnderFileSystem; import alluxio.util.CommonUtils; import alluxio.util.executor.ControllableScheduler; import alluxio.util.io.PathUtils; import alluxio.wire.BackupStatus; import com.google.common.collect.ImmutableMap; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import java.io.Closeable; import java.time.Instant; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.Random; import java.util.UUID; import java.util.concurrent.TimeUnit; /** * Tests the {@link DailyMetadataBackup}. 
*/ public class DailyMetadataBackupTest { private MetaMaster mMetaMaster; private ControllableScheduler mScheduler; private UnderFileSystem mUfs; private UfsManager mUfsManager; private UfsManager.UfsClient mUfsClient; private Random mRandom; private String mBackupDir; @Before public void before() throws Exception { mRandom = new Random(); mBackupDir = "/tmp/test/alluxio_backups"; mMetaMaster = Mockito.mock(MetaMaster.class); when(mMetaMaster.backup(any(), any())) .thenReturn(BackupStatus.fromProto(BackupPStatus.newBuilder() .setBackupId(UUID.randomUUID().toString()).setBackupState(BackupState.Completed) .setBackupUri(PathUtils.concatPath(mBackupDir, generateBackupFileName())) .setBackupHost("localhost").build())); mUfs = Mockito.mock(UnderFileSystem.class); when(mUfs.getUnderFSType()).thenReturn("local"); when(mUfs.deleteFile(any())).thenReturn(true); mUfsClient = Mockito.mock(UfsManager.UfsClient.class); when(mUfsClient.acquireUfsResource()).thenReturn(new CloseableResource<UnderFileSystem>(mUfs) { @Override public void closeResource() { // Noop } }); mUfsManager = Mockito.mock(UfsManager.class); when(mUfsManager.getRoot()).thenReturn(mUfsClient); mScheduler = new ControllableScheduler(); } @Test public void test() throws Exception { int fileToRetain = 1; try (Closeable c = new ConfigurationRule(ImmutableMap.of( PropertyKey.MASTER_BACKUP_DIRECTORY, mBackupDir, PropertyKey.MASTER_DAILY_BACKUP_ENABLED, true, PropertyKey.MASTER_DAILY_BACKUP_FILES_RETAINED, fileToRetain), Configuration.modifiableGlobal()).toResource()) { DailyMetadataBackup dailyBackup = new DailyMetadataBackup(mMetaMaster, mScheduler, mUfsManager); dailyBackup.start(); int backUpFileNum = 0; when(mUfs.listStatus(mBackupDir)).thenReturn(generateUfsStatuses(++backUpFileNum)); mScheduler.jumpAndExecute(1, TimeUnit.DAYS); verify(mMetaMaster, times(backUpFileNum)).backup(any(), any()); int deleteFileNum = getNumOfDeleteFile(backUpFileNum, fileToRetain); verify(mUfs, 
times(deleteFileNum)).deleteFile(any()); when(mUfs.listStatus(mBackupDir)).thenReturn(generateUfsStatuses(++backUpFileNum)); mScheduler.jumpAndExecute(1, TimeUnit.DAYS); verify(mMetaMaster, times(backUpFileNum)).backup(any(), any()); deleteFileNum += getNumOfDeleteFile(backUpFileNum, fileToRetain); verify(mUfs, times(deleteFileNum)).deleteExistingFile(any()); when(mUfs.listStatus(mBackupDir)).thenReturn(generateUfsStatuses(++backUpFileNum)); mScheduler.jumpAndExecute(1, TimeUnit.DAYS); verify(mMetaMaster, times(backUpFileNum)).backup(any(), any()); deleteFileNum += getNumOfDeleteFile(backUpFileNum, fileToRetain); verify(mUfs, times(deleteFileNum)).deleteExistingFile(any()); } } /** * Generates ufs statues. * * @param num the number of backup files that exist in the returned statuses * @return the random generated ufs file statuses */ private UfsStatus[] generateUfsStatuses(int num) { UfsStatus[] statuses = new UfsFileStatus[num]; for (int i = 0; i < statuses.length; i++) { statuses[i] = new UfsFileStatus(generateBackupFileName(), CommonUtils.randomAlphaNumString(10), mRandom.nextLong(), mRandom.nextLong(), CommonUtils.randomAlphaNumString(10), CommonUtils.randomAlphaNumString(10), (short) mRandom.nextInt(), mRandom.nextLong()); } return statuses; } /** * Generates a backup file name used a time that includes some randomness. * * @return a backup file name */ private String generateBackupFileName() { Instant time = Instant.now().minusMillis(mRandom.nextInt()); return String.format(BackupManager.BACKUP_FILE_FORMAT, DateTimeFormatter.ISO_LOCAL_DATE.withZone(ZoneId.of("UTC")).format(time), time.toEpochMilli()); } /** * Gets the number of files that should be deleted. * * @param total the total number of files * @param retain the number of files that should be retained * @return the number of files that should be deleted */ private int getNumOfDeleteFile(int total, int retain) { int diff = total - retain; return diff >= 0 ? diff : 0; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.skywalking.apm.plugin.kafka;

import java.util.Collection;
import java.util.List;
import org.apache.skywalking.apm.util.StringUtil;

/**
 * Carrier for the Kafka consumer metadata the tracing plugin needs: the broker
 * list and subscribed topics (each flattened into a single semicolon-separated
 * string), the consumer group id, and a start timestamp.
 */
public class ConsumerEnhanceRequiredInfo {
    /** Bootstrap broker addresses, joined with {@code ';'}. */
    private String brokerServers;
    /** Subscribed topic names, joined with {@code ';'}. */
    private String topics;
    private String groupId;
    private long startTime;

    public void setBrokerServers(List<String> brokerServers) {
        String[] serverArray = brokerServers.toArray(new String[brokerServers.size()]);
        this.brokerServers = StringUtil.join(';', serverArray);
    }

    public String getBrokerServers() {
        return brokerServers;
    }

    public void setTopics(Collection<String> topics) {
        String[] topicArray = topics.toArray(new String[topics.size()]);
        this.topics = StringUtil.join(';', topicArray);
    }

    public String getTopics() {
        return topics;
    }

    public void setGroupId(String groupId) {
        this.groupId = groupId;
    }

    public String getGroupId() {
        return groupId;
    }

    public void setStartTime(long startTime) {
        this.startTime = startTime;
    }

    public long getStartTime() {
        return startTime;
    }
}
/**
 * BSD-style license; for more info see http://pmd.sourceforge.net/license.html
 */

package net.sourceforge.pmd.lang.rule.properties;

import static net.sourceforge.pmd.lang.rule.properties.ValueParser.BOOLEAN_PARSER;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import net.sourceforge.pmd.PropertyDescriptorFactory;
import net.sourceforge.pmd.PropertyDescriptorField;
import net.sourceforge.pmd.lang.rule.properties.ValueParser.Companion;

/**
 * Defines a property type that supports multiple Boolean values.
 *
 * @author Brian Remedios
 */
public final class BooleanMultiProperty extends AbstractMultiValueProperty<Boolean> {
    /**
     * Factory. Builds a descriptor from its serialized field map: the defaults
     * string is split on the configured delimiter and each piece parsed with
     * {@code BOOLEAN_PARSER}. UI order is fixed at 0f for externally defined
     * properties.
     */
    public static final PropertyDescriptorFactory<List<Boolean>> FACTORY // @formatter:off
        = new MultiValuePropertyDescriptorFactory<Boolean>(Boolean.class) {
            @Override
            public BooleanMultiProperty createWith(Map<PropertyDescriptorField, String> valuesById, boolean isDefinedExternally) {
                char delimiter = delimiterIn(valuesById);
                return new BooleanMultiProperty(nameIn(valuesById),
                                                descriptionIn(valuesById),
                                                Companion.parsePrimitives(defaultValueIn(valuesById), delimiter, BOOLEAN_PARSER),
                                                0f,
                                                isDefinedExternally);
            }
        }; // @formatter:on


    /**
     * Constructor using an array of defaults.
     *
     * @param theName        Name
     * @param theDescription Description
     * @param defaultValues  List of defaults
     * @param theUIOrder     UI order
     */
    public BooleanMultiProperty(String theName, String theDescription, Boolean[] defaultValues, float theUIOrder) {
        // Delegates to the master constructor; isDefinedExternally=false for
        // programmatically created properties.
        this(theName, theDescription, Arrays.asList(defaultValues), theUIOrder, false);
    }


    /** Master constructor. */
    private BooleanMultiProperty(String theName, String theDescription, List<Boolean> defaultValues,
                                 float theUIOrder, boolean isDefinedExternally) {
        super(theName, theDescription, defaultValues, theUIOrder, isDefinedExternally);
    }


    /**
     * Constructor using a list of defaults.
     *
     * @param theName        Name
     * @param theDescription Description
     * @param defaultValues  List of defaults
     * @param theUIOrder     UI order
     */
    public BooleanMultiProperty(String theName, String theDescription, List<Boolean> defaultValues, float theUIOrder) {
        this(theName, theDescription, defaultValues, theUIOrder, false);
    }


    /**
     * Parses a single serialized Boolean value.
     *
     * @param toParse string form of one value
     * @return the parsed Boolean
     */
    @Override
    protected Boolean createFrom(String toParse) {
        return BOOLEAN_PARSER.valueOf(toParse);
    }


    /** @return the element type of this multi-value property, always {@code Boolean.class} */
    @Override
    public Class<Boolean> type() {
        return Boolean.class;
    }
}
package com.mypurecloud.sdk.v2.model;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.mypurecloud.sdk.v2.model.SurveyAggregateDataContainer;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;

import java.io.Serializable;
/**
 * SurveyAggregateQueryResponse
 *
 * Response wrapper for a survey-aggregate query: a single list of result
 * containers, serialized under the JSON key {@code results}.
 */

public class SurveyAggregateQueryResponse  implements Serializable {
  
  private List<SurveyAggregateDataContainer> results = new ArrayList<SurveyAggregateDataContainer>();

  /**
   * Fluent setter; replaces the results list and returns {@code this} for chaining.
   **/
  public SurveyAggregateQueryResponse results(List<SurveyAggregateDataContainer> results) {
    this.results = results;
    return this;
  }
  
  @ApiModelProperty(example = "null", value = "")
  @JsonProperty("results")
  public List<SurveyAggregateDataContainer> getResults() {
    return results;
  }
  public void setResults(List<SurveyAggregateDataContainer> results) {
    this.results = results;
  }

  
  @Override
  public boolean equals(java.lang.Object o) {
    if (o == this) {
      return true;
    }
    // Exact-class comparison (not instanceof) to match the generated contract.
    if (o == null || !getClass().equals(o.getClass())) {
      return false;
    }
    SurveyAggregateQueryResponse that = (SurveyAggregateQueryResponse) o;
    return Objects.equals(this.results, that.results);
  }

  @Override
  public int hashCode() {
    return Objects.hash(results);
  }

  @Override
  public String toString() {
    StringBuilder out = new StringBuilder();
    out.append("class SurveyAggregateQueryResponse {\n");
    out.append("    results: ").append(toIndentedString(results)).append("\n");
    out.append("}");
    return out.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    return o == null ? "null" : o.toString().replace("\n", "\n    ");
  }
}
/* * MIT License * * Copyright (c) 2021 IceyLeagons and Contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package net.iceyleagons.icicle.serialization; import lombok.EqualsAndHashCode; import lombok.Getter; import java.lang.reflect.Field; import java.util.Collection; import java.util.Map; import static net.iceyleagons.icicle.utilities.StringUtils.containsIgnoresCase; /** * @author TOTHTOMI * @version 1.0.0 * @since Nov. 
21, 2021
 */
@Getter
@EqualsAndHashCode
public class ObjectValue {

    // Declared Java type of the field this value was read from.
    private final Class<?> javaType;
    // The reflective field itself.
    private final Field field;
    // Serialization key, resolved from the field's mapped name.
    private final String key; //name
    // The actual value held by the field (may be null).
    private final Object value;

    /**
     * Wraps one field/value pair for serialization.
     *
     * @param javaType declared type of the field
     * @param field    the reflective field
     * @param value    the field's current value
     */
    public ObjectValue(Class<?> javaType, Field field, Object value) {
        this.javaType = javaType;
        this.field = field;
        this.key = ObjectMapper.getName(field);
        this.value = value;
    }

    /** @return true if the type is a Java primitive or {@link String} */
    public static boolean isValuePrimitiveOrString(Class<?> type) {
        return type.isPrimitive() || type.equals(String.class);
    }

    /** @return true if the type is an array type */
    public static boolean isArray(Class<?> type) {
        return type.isArray();
    }

    /** @return true if the type implements {@link Collection} */
    public static boolean isCollection(Class<?> type) {
        return Collection.class.isAssignableFrom(type);
    }

    /** @return true if the type implements {@link Map} */
    public static boolean isMap(Class<?> type) {
        return Map.class.isAssignableFrom(type);
    }

    /**
     * Decides whether the type must be serialized as a nested object rather
     * than a plain value. {@code MappedObject} is always a sub-object; any
     * other type counts as a sub-object only if its type name contains none of
     * the primitive/String keywords below.
     */
    public static boolean isSubObject(Class<?> type) {
        if (type.equals(MappedObject.class)) return true;

        // We check for types like this due to arrays. We could check with conventional stuff (#isArray(), etc.), but because primitives and objects can also be used
        // (int[], Integer[]), we rather do it this way to save space in code, and make the code more readable.
        // NOTE(review): this substring matching also rejects any type whose
        // fully-qualified name merely CONTAINS one of these words — e.g.
        // "java.awt.Point" and "java.math.BigInteger" both contain "int" —
        // which would misclassify such types as non-sub-objects. Confirm this
        // is acceptable for the set of types the serializer supports.
        String typeName = type.getTypeName();
        return !containsIgnoresCase(typeName, "string") && !containsIgnoresCase(typeName, "int") && !containsIgnoresCase(typeName, "boolean") &&
                !containsIgnoresCase(typeName, "long") && !containsIgnoresCase(typeName, "float") && !containsIgnoresCase(typeName, "double") &&
                !containsIgnoresCase(typeName, "short") && !containsIgnoresCase(typeName, "byte") && !containsIgnoresCase(typeName, "char");
    }

    /** Instance shortcut for {@link #isValuePrimitiveOrString(Class)} on {@link #javaType}. */
    public boolean isValuePrimitiveOrString() {
        return isValuePrimitiveOrString(this.javaType);
    }

    /** Instance shortcut for {@link #isArray(Class)} on {@link #javaType}. */
    public boolean isArray() {
        return isArray(this.javaType);
    }

    /** Instance shortcut for {@link #isCollection(Class)} on {@link #javaType}. */
    public boolean isCollection() {
        return isCollection(this.javaType);
    }

    /** Instance shortcut for {@link #isMap(Class)} on {@link #javaType}. */
    public boolean isMap() {
        return isMap(this.javaType);
    }

    /** Instance shortcut for {@link #isSubObject(Class)} on {@link #javaType}. */
    public boolean isSubObject() {
        return isSubObject(this.javaType);
    }
}
package br.com.ivanfsilva.estrategia3;

import br.com.ivanfsilva.entidades.Conta;
import br.com.ivanfsilva.entidades.Usuario;
import br.com.ivanfsilva.service.ContaService;
import br.com.ivanfsilva.service.UsuarioService;
import com.github.javafaker.Faker;
import org.openqa.selenium.By;
import org.openqa.selenium.chrome.ChromeDriver;

import java.sql.SQLException;
import java.util.concurrent.TimeUnit;

/**
 * Generates test-data "massas": accounts created either through the SeuBarriga
 * web UI (Selenium) or directly through the service layer, with the generated
 * record name persisted via {@code MassaDAOImpl} under a well-known key.
 */
public class GeradorMassas {

    public static final String CHAVE_CONTA_SB = "CONTA_SB";
    public static final String CHAVE_CONTA = "CONTA";

    /**
     * Creates an account through the SeuBarriga web UI and records its generated
     * name under {@link #CHAVE_CONTA_SB}.
     *
     * @throws SQLException if persisting the generated name fails
     * @throws ClassNotFoundException if the DAO's JDBC driver cannot be loaded
     */
    public void gerarContaSeuBarriga() throws SQLException, ClassNotFoundException {
        System.setProperty("webdriver.chrome.driver", "D:\\Users\\ivanf\\drivers-selenium\\chromedriver.exe");
        ChromeDriver driver = new ChromeDriver();
        String registro;
        try {
            driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS);
            driver.get("https://seubarriga.wcaquino.me");
            driver.findElement(By.id("email")).sendKeys("a@a");
            driver.findElement(By.id("senha")).sendKeys("a");
            driver.findElement(By.tagName("button")).click();

            Faker faker = new Faker();
            registro = faker.gameOfThrones().character() + " " + faker.gameOfThrones().dragon();

            driver.findElementByLinkText("Contas").click();
            driver.findElementByLinkText("Adicionar").click();
            driver.findElement(By.id("nome")).sendKeys(registro);
            driver.findElement(By.tagName("button")).click();
        } finally {
            // Fix: always release the browser. The original called quit() only on the
            // happy path, leaking the ChromeDriver process whenever any step threw.
            driver.quit();
        }

        // Record the name only after the UI flow completed successfully.
        new MassaDAOImpl().inserirMassa(CHAVE_CONTA_SB, registro);
    }

    /**
     * Creates a user and an account directly through the service layer and records
     * the account name under {@link #CHAVE_CONTA}.
     *
     * @throws Exception propagated from the service layer or the DAO
     */
    public void gerarConta() throws Exception {
        Faker faker = new Faker();
        ContaService service = new ContaService();
        UsuarioService usuarioService = new UsuarioService();

        Usuario usuarioGlobal = new Usuario(faker.name().fullName(),
                faker.internet().emailAddress(), faker.internet().password());
        Usuario usuarioSalvo = usuarioService.salvar(usuarioGlobal);

        Conta conta = new Conta(faker.superhero().name(), usuarioSalvo);
        service.salvar(conta);

        new MassaDAOImpl().inserirMassa(CHAVE_CONTA, conta.getNome());
    }

    public static void main(String[] args) throws Exception {
        GeradorMassas gerador = new GeradorMassas();
        // Seed ten service-layer accounts.
        for (int i = 0; i < 10; i++) {
            gerador.gerarConta();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jena.tdb.junit;

import junit.framework.Test ;
import junit.framework.TestCase ;
import junit.framework.TestSuite ;
import org.apache.jena.rdf.model.Resource ;
import org.apache.jena.sparql.junit.EarlReport ;
import org.apache.jena.sparql.junit.SurpressedTest ;
import org.apache.jena.sparql.junit.TestItem ;
import org.apache.jena.sparql.vocabulary.TestManifestX ;
import org.apache.jena.util.junit.TestFactoryManifest ;

/**
 * Builds JUnit test suites for TDB from SPARQL test manifest files. Each
 * manifest entry becomes a {@link QueryTestTDB} (or a {@link SurpressedTest}
 * when so marked), optionally prefixed with a root name.
 */
public class TestFactoryTDB extends TestFactoryManifest
{
    // Optional EARL report shared by all generated tests; null disables reporting.
    public static EarlReport report = null ;

    /**
     * Builds a suite from the manifest and adds it to {@code ts}.
     *
     * @param ts           suite to add to
     * @param manifestFile manifest to process
     * @param testRootName optional prefix for generated test names (may be null)
     */
    public static void make(TestSuite ts, String manifestFile, String testRootName)
    {
        // for each graph type do
        TestSuite ts2 = makeSuite(manifestFile, testRootName) ;
        ts.addTest(ts2) ;
    }

    /**
     * Processes the manifest into a suite; when a root name is given it is
     * prepended to the suite's own name as well as to each test name.
     */
    public static TestSuite makeSuite(String manifestFile, String testRootName)
    {
        TestFactoryTDB f = new TestFactoryTDB(testRootName) ;
        TestSuite ts = f.process(manifestFile) ;
        if ( testRootName != null )
            ts.setName(testRootName+ts.getName()) ;
        return ts ;
    }

    // Factory
    // Prefix applied to every generated test name; null means no prefix.
    public String testRootName ;

    public TestFactoryTDB(String testRootName)
    {
        this.testRootName = testRootName ;
    }

    /**
     * Creates one JUnit test for a manifest entry. Recognized types map to
     * {@code QueryTestTDB} / {@code SurpressedTest}; unknown types are logged
     * and — like entries with no declared type — fall back to a QueryTestTDB.
     */
    @Override
    protected Test makeTest(Resource manifest, Resource entry, String testName, Resource action, Resource result)
    {
        if ( testRootName != null )
            testName = testRootName+testName ;

        TestItem testItem = TestItem.create(entry, null) ;

        TestCase test = null ;

        if ( testItem.getTestType() != null )
        {
            if ( testItem.getTestType().equals(TestManifestX.TestQuery) )
                test = new QueryTestTDB(testName, report, testItem) ;

            if ( testItem.getTestType().equals(TestManifestX.TestSurpressed) )
                test = new SurpressedTest(testName, report, testItem) ;

            if ( test == null )
                System.err.println("Unrecognized test type: "+testItem.getTestType()) ;
        }
        // Default: treat anything unresolved above as a plain query test.
        if ( test == null )
            test = new QueryTestTDB(testName, report, testItem) ;

        return test ;
    }
}
/*
 * Copyright 2014-2022 TNG Technology Consulting GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.tngtech.archunit.core.domain;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import com.tngtech.archunit.base.Optional;

/**
 * Inverted dependency index: for a given target (class, field, method, constructor)
 * answers "who depends on it". Populated once via {@link Creation} while classes are
 * imported, then shared immutably by every imported {@link JavaClass}.
 */
final class ReverseDependencies {

    // Accesses/calls are resolved lazily per member (see ResolvingAccessLoader /
    // ConstructorCallLoader below), hence caches rather than plain multimaps.
    private final LoadingCache<JavaField, Set<JavaFieldAccess>> accessToFieldCache;
    private final LoadingCache<JavaMethod, Set<JavaMethodCall>> callToMethodCache;
    private final LoadingCache<JavaConstructor, Set<JavaConstructorCall>> callToConstructorCache;
    // The remaining indexes are fully materialized at construction time,
    // keyed by the target JavaClass.
    private final SetMultimap<JavaClass, JavaField> fieldTypeDependencies;
    private final SetMultimap<JavaClass, JavaMethod> methodParameterTypeDependencies;
    private final SetMultimap<JavaClass, JavaMethod> methodReturnTypeDependencies;
    private final SetMultimap<JavaClass, ThrowsDeclaration<JavaMethod>> methodsThrowsDeclarationDependencies;
    private final SetMultimap<JavaClass, JavaConstructor> constructorParameterTypeDependencies;
    private final SetMultimap<JavaClass, ThrowsDeclaration<JavaConstructor>> constructorThrowsDeclarationDependencies;
    private final SetMultimap<JavaClass, JavaAnnotation<?>> annotationTypeDependencies;
    private final SetMultimap<JavaClass, JavaAnnotation<?>> annotationParameterTypeDependencies;
    private final SetMultimap<JavaClass, InstanceofCheck> instanceofCheckDependencies;
    // Computed lazily (memoized) because it has to walk every class's dependencies.
    private final Supplier<SetMultimap<JavaClass, Dependency>> directDependenciesToClass;

    private ReverseDependencies(ReverseDependencies.Creation creation) {
        accessToFieldCache = CacheBuilder.newBuilder().build(new ResolvingAccessLoader<>(creation.fieldAccessDependencies.build()));
        callToMethodCache = CacheBuilder.newBuilder().build(new ResolvingAccessLoader<>(creation.methodCallDependencies.build()));
        callToConstructorCache = CacheBuilder.newBuilder().build(new ConstructorCallLoader(creation.constructorCallDependencies.build()));
        this.fieldTypeDependencies = creation.fieldTypeDependencies.build();
        this.methodParameterTypeDependencies = creation.methodParameterTypeDependencies.build();
        this.methodReturnTypeDependencies = creation.methodReturnTypeDependencies.build();
        this.methodsThrowsDeclarationDependencies = creation.methodsThrowsDeclarationDependencies.build();
        this.constructorParameterTypeDependencies = creation.constructorParameterTypeDependencies.build();
        this.constructorThrowsDeclarationDependencies = creation.constructorThrowsDeclarationDependencies.build();
        this.annotationTypeDependencies = creation.annotationTypeDependencies.build();
        this.annotationParameterTypeDependencies = creation.annotationParameterTypeDependencies.build();
        this.instanceofCheckDependencies = creation.instanceofCheckDependencies.build();
        this.directDependenciesToClass = createDirectDependenciesToClassSupplier(creation.allDependencies);
    }

    // Inverts each class's outgoing dependencies into a target-class -> dependency map.
    // Memoized so the (potentially large) inversion happens at most once.
    private static Supplier<SetMultimap<JavaClass, Dependency>> createDirectDependenciesToClassSupplier(final List<JavaClassDependencies> allDependencies) {
        return Suppliers.memoize(new Supplier<SetMultimap<JavaClass, Dependency>>() {
            @Override
            public SetMultimap<JavaClass, Dependency> get() {
                ImmutableSetMultimap.Builder<JavaClass, Dependency> result = ImmutableSetMultimap.builder();
                for (JavaClassDependencies dependencies : allDependencies) {
                    for (Dependency dependency : dependencies.getDirectDependenciesFromClass()) {
                        result.put(dependency.getTargetClass(), dependency);
                    }
                }
                return result.build();
            }
        });
    }

    /** All field accesses whose resolved target is the given field. */
    Set<JavaFieldAccess> getAccessesTo(JavaField field) {
        return accessToFieldCache.getUnchecked(field);
    }

    /** All method calls whose resolved target is the given method. */
    Set<JavaMethodCall> getCallsTo(JavaMethod method) {
        return callToMethodCache.getUnchecked(method);
    }

    /** All constructor calls targeting the given constructor (matched by full name). */
    Set<JavaConstructorCall> getCallsTo(JavaConstructor constructor) {
        return callToConstructorCache.getUnchecked(constructor);
    }

    Set<JavaField> getFieldsWithTypeOf(JavaClass clazz) {
        return fieldTypeDependencies.get(clazz);
    }

    Set<JavaMethod> getMethodsWithParameterTypeOf(JavaClass clazz) {
        return methodParameterTypeDependencies.get(clazz);
    }

    Set<JavaMethod> getMethodsWithReturnTypeOf(JavaClass clazz) {
        return methodReturnTypeDependencies.get(clazz);
    }

    Set<ThrowsDeclaration<JavaMethod>> getMethodThrowsDeclarationsWithTypeOf(JavaClass clazz) {
        return methodsThrowsDeclarationDependencies.get(clazz);
    }

    Set<JavaConstructor> getConstructorsWithParameterTypeOf(JavaClass clazz) {
        return constructorParameterTypeDependencies.get(clazz);
    }

    Set<ThrowsDeclaration<JavaConstructor>> getConstructorsWithThrowsDeclarationTypeOf(JavaClass clazz) {
        return constructorThrowsDeclarationDependencies.get(clazz);
    }

    Set<JavaAnnotation<?>> getAnnotationsWithTypeOf(JavaClass clazz) {
        return annotationTypeDependencies.get(clazz);
    }

    Set<JavaAnnotation<?>> getAnnotationsWithParameterTypeOf(JavaClass clazz) {
        return annotationParameterTypeDependencies.get(clazz);
    }

    Set<InstanceofCheck> getInstanceofChecksWithTypeOf(JavaClass clazz) {
        return instanceofCheckDependencies.get(clazz);
    }

    Set<Dependency> getDirectDependenciesTo(JavaClass clazz) {
        return directDependenciesToClass.get().get(clazz);
    }

    // Shared empty instance for imports with no registered dependencies.
    static final ReverseDependencies EMPTY = new ReverseDependencies(new Creation());

    /**
     * Mutable accumulator used during import. Collects per-class dependency
     * information via {@link #registerDependenciesOf(JavaClass, JavaClassDependencies)}
     * and finally freezes everything into an immutable {@link ReverseDependencies}
     * shared by all imported classes ({@link #finish(Iterable)}).
     */
    static class Creation {
        private final ImmutableSetMultimap.Builder<JavaClass, JavaFieldAccess> fieldAccessDependencies = ImmutableSetMultimap.builder();
        private final ImmutableSetMultimap.Builder<JavaClass, JavaMethodCall> methodCallDependencies = ImmutableSetMultimap.builder();
        // Constructor calls are keyed by the target's full name (String) instead of
        // the owner class; see ConstructorCallLoader.
        private final ImmutableSetMultimap.Builder<String, JavaConstructorCall> constructorCallDependencies = ImmutableSetMultimap.builder();
        private final ImmutableSetMultimap.Builder<JavaClass, JavaField> fieldTypeDependencies = ImmutableSetMultimap.builder();
        private final ImmutableSetMultimap.Builder<JavaClass, JavaMethod> methodParameterTypeDependencies = ImmutableSetMultimap.builder();
        private final ImmutableSetMultimap.Builder<JavaClass, JavaMethod> methodReturnTypeDependencies = ImmutableSetMultimap.builder();
        private final ImmutableSetMultimap.Builder<JavaClass, ThrowsDeclaration<JavaMethod>> methodsThrowsDeclarationDependencies = ImmutableSetMultimap.builder();
        private final ImmutableSetMultimap.Builder<JavaClass, JavaConstructor> constructorParameterTypeDependencies = ImmutableSetMultimap.builder();
        private final ImmutableSetMultimap.Builder<JavaClass, ThrowsDeclaration<JavaConstructor>> constructorThrowsDeclarationDependencies = ImmutableSetMultimap.builder();
        private final ImmutableSetMultimap.Builder<JavaClass, JavaAnnotation<?>> annotationTypeDependencies = ImmutableSetMultimap.builder();
        private final ImmutableSetMultimap.Builder<JavaClass, JavaAnnotation<?>> annotationParameterTypeDependencies = ImmutableSetMultimap.builder();
        private final ImmutableSetMultimap.Builder<JavaClass, InstanceofCheck> instanceofCheckDependencies = ImmutableSetMultimap.builder();
        private final List<JavaClassDependencies> allDependencies = new ArrayList<>();

        /**
         * Records every reverse-dependency contribution of one imported class:
         * its accesses, member types, annotations, and static initializer checks.
         */
        public void registerDependenciesOf(JavaClass clazz, JavaClassDependencies classDependencies) {
            registerAccesses(clazz);
            registerFields(clazz);
            registerMethods(clazz);
            registerConstructors(clazz);
            registerAnnotations(clazz);
            registerStaticInitializer(clazz);
            allDependencies.add(classDependencies);
        }

        private void registerAccesses(JavaClass clazz) {
            for (JavaFieldAccess access : clazz.getFieldAccessesFromSelf()) {
                fieldAccessDependencies.put(access.getTargetOwner(), access);
            }
            for (JavaMethodCall call : clazz.getMethodCallsFromSelf()) {
                methodCallDependencies.put(call.getTargetOwner(), call);
            }
            for (JavaConstructorCall call : clazz.getConstructorCallsFromSelf()) {
                // Keyed by full name so lookup does not need the owner class resolved.
                constructorCallDependencies.put(call.getTarget().getFullName(), call);
            }
        }

        private void registerFields(JavaClass clazz) {
            for (JavaField field : clazz.getFields()) {
                fieldTypeDependencies.put(field.getRawType(), field);
            }
        }

        private void registerMethods(JavaClass clazz) {
            for (JavaMethod method : clazz.getMethods()) {
                for (JavaClass parameter : method.getRawParameterTypes()) {
                    methodParameterTypeDependencies.put(parameter, method);
                }
                methodReturnTypeDependencies.put(method.getRawReturnType(), method);
                for (ThrowsDeclaration<JavaMethod> throwsDeclaration : method.getThrowsClause()) {
                    methodsThrowsDeclarationDependencies.put(throwsDeclaration.getRawType(), throwsDeclaration);
                }
                for (InstanceofCheck instanceofCheck : method.getInstanceofChecks()) {
                    instanceofCheckDependencies.put(instanceofCheck.getRawType(), instanceofCheck);
                }
            }
        }

        private void registerConstructors(JavaClass clazz) {
            for (JavaConstructor constructor : clazz.getConstructors()) {
                for (JavaClass parameter : constructor.getRawParameterTypes()) {
                    constructorParameterTypeDependencies.put(parameter, constructor);
                }
                for (ThrowsDeclaration<JavaConstructor> throwsDeclaration : constructor.getThrowsClause()) {
                    constructorThrowsDeclarationDependencies.put(throwsDeclaration.getRawType(), throwsDeclaration);
                }
                for (InstanceofCheck instanceofCheck : constructor.getInstanceofChecks()) {
                    instanceofCheckDependencies.put(instanceofCheck.getRawType(), instanceofCheck);
                }
            }
        }

        private void registerAnnotations(JavaClass clazz) {
            for (final JavaAnnotation<?> annotation : findAnnotations(clazz)) {
                annotationTypeDependencies.put(annotation.getRawType(), annotation);
                // Visit annotation parameters recursively so class/enum/nested-annotation
                // parameter values are indexed against the outermost annotation.
                annotation.accept(new JavaAnnotation.DefaultParameterVisitor() {
                    @Override
                    public void visitClass(String propertyName, JavaClass javaClass) {
                        annotationParameterTypeDependencies.put(javaClass, annotation);
                    }

                    @Override
                    public void visitEnumConstant(String propertyName, JavaEnumConstant enumConstant) {
                        annotationParameterTypeDependencies.put(enumConstant.getDeclaringClass(), annotation);
                    }

                    @Override
                    public void visitAnnotation(String propertyName, JavaAnnotation<?> memberAnnotation) {
                        annotationParameterTypeDependencies.put(memberAnnotation.getRawType(), annotation);
                        memberAnnotation.accept(this);
                    }
                });
            }
        }

        // Class-level annotations plus those of every member (fields, methods, constructors).
        private Set<JavaAnnotation<?>> findAnnotations(JavaClass clazz) {
            Set<JavaAnnotation<?>> result = Sets.<JavaAnnotation<?>>newHashSet(clazz.getAnnotations());
            for (JavaMember member : clazz.getMembers()) {
                result.addAll(member.getAnnotations());
            }
            return result;
        }

        private void registerStaticInitializer(JavaClass clazz) {
            if (clazz.getStaticInitializer().isPresent()) {
                for (InstanceofCheck instanceofCheck : clazz.getStaticInitializer().get().getInstanceofChecks()) {
                    instanceofCheckDependencies.put(instanceofCheck.getRawType(), instanceofCheck);
                }
            }
        }

        /** Freezes the accumulated state and wires the result into every imported class. */
        void finish(Iterable<JavaClass> classes) {
            ReverseDependencies reverseDependencies = new ReverseDependencies(this);
            for (JavaClass clazz : classes) {
                clazz.setReverseDependencies(reverseDependencies);
            }
        }
    }

    /**
     * Resolves the accesses targeting a given member lazily: candidate accesses are
     * indexed by target-owner class, so lookup scans the owner and all its subclasses
     * and keeps only accesses that resolve to exactly this member.
     */
    private static class ResolvingAccessLoader<MEMBER extends JavaMember, ACCESS extends JavaAccess<?>> extends CacheLoader<MEMBER, Set<ACCESS>> {
        private final SetMultimap<JavaClass, ACCESS> accessesToSelf;

        private ResolvingAccessLoader(SetMultimap<JavaClass, ACCESS> accessesToSelf) {
            this.accessesToSelf = accessesToSelf;
        }

        @Override
        public Set<ACCESS> load(MEMBER member) {
            ImmutableSet.Builder<ACCESS> result = ImmutableSet.builder();
            for (final JavaClass javaClass : getPossibleTargetClassesForAccess(member.getOwner())) {
                for (ACCESS access : this.accessesToSelf.get(javaClass)) {
                    Optional<? extends JavaMember> target = access.getTarget().resolveMember();
                    if (target.isPresent() && target.get().equals(member)) {
                        result.add(access);
                    }
                }
            }
            return result.build();
        }

        // An access whose declared target is a subclass may still resolve to this
        // owner's member, so subclasses must be scanned as well.
        private Set<JavaClass> getPossibleTargetClassesForAccess(JavaClass owner) {
            return ImmutableSet.<JavaClass>builder()
                    .add(owner)
                    .addAll(owner.getAllSubclasses())
                    .build();
        }
    }

    /**
     * Loads constructor calls by the target constructor's full name — constructors
     * are not inherited, so no subclass resolution is needed here.
     */
    private static class ConstructorCallLoader extends CacheLoader<JavaConstructor, Set<JavaConstructorCall>> {
        private final SetMultimap<String, JavaConstructorCall> accessesToSelf;

        private ConstructorCallLoader(SetMultimap<String, JavaConstructorCall> accessesToSelf) {
            this.accessesToSelf = accessesToSelf;
        }

        @Override
        public Set<JavaConstructorCall> load(JavaConstructor member) {
            ImmutableSet.Builder<JavaConstructorCall> result = ImmutableSet.builder();
            result.addAll(accessesToSelf.get(member.getFullName()));
            return result.build();
        }
    }
}
/*
 * Copyright 2010 Ning, Inc.
 *
 * Ning licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package org.asynchttpclient.async;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.fail;

import org.asynchttpclient.AsyncCompletionHandler;
import org.asynchttpclient.AsyncCompletionHandlerBase;
import org.asynchttpclient.AsyncHttpClient;
import org.asynchttpclient.AsyncHttpClientConfig;
import org.asynchttpclient.Response;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.Test;

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Tests for the connection-pooling behaviour of {@link AsyncHttpClient}:
 * total-connection limits, keep-alive reuse across a server restart, and
 * error propagation through {@link AsyncCompletionHandler} callbacks.
 */
public abstract class ConnectionPoolTest extends AbstractBasicTest {

    // Was LoggerFactory.getLogger(AbstractBasicTest.class) — a copy-paste bug that
    // attributed every log line to the base class. getClass() names the concrete
    // provider-specific subclass actually running.
    protected final Logger log = LoggerFactory.getLogger(getClass());

    /**
     * With pooling enabled and a total limit of 1, sequential blocking GETs must all
     * succeed because each request releases its connection back to the pool.
     */
    @Test(groups = { "standalone", "default_provider" })
    public void testMaxTotalConnections() {
        AsyncHttpClient client = getAsyncHttpClient(new AsyncHttpClientConfig.Builder().setAllowPoolingConnection(true).setMaximumConnectionsTotal(1).build());
        try {
            String url = getTargetUrl();
            int i;
            Exception exception = null;
            for (i = 0; i < 3; i++) {
                try {
                    log.info("{} requesting url [{}]...", i, url);
                    Response response = client.prepareGet(url).execute().get();
                    log.info("{} response [{}].", i, response);
                } catch (Exception ex) {
                    exception = ex;
                }
            }
            assertNull(exception);
        } finally {
            client.close();
        }
    }

    /**
     * Firing requests without waiting for completion must eventually exceed the
     * single-connection limit and surface "Too many connections 1".
     */
    @Test(groups = { "standalone", "default_provider" })
    public void testMaxTotalConnectionsException() {
        AsyncHttpClient client = getAsyncHttpClient(new AsyncHttpClientConfig.Builder().setAllowPoolingConnection(true).setMaximumConnectionsTotal(1).build());
        try {
            String url = getTargetUrl();
            int i;
            Exception exception = null;
            for (i = 0; i < 20; i++) {
                try {
                    log.info("{} requesting url [{}]...", i, url);
                    if (i < 5) {
                        // First few requests block until done, so the pooled connection is reused.
                        client.prepareGet(url).execute().get();
                    } else {
                        // Fire-and-forget: these pile up and must trip the limit.
                        client.prepareGet(url).execute();
                    }
                } catch (Exception ex) {
                    exception = ex;
                    break;
                }
            }
            assertNotNull(exception);
            assertNotNull(exception.getMessage());
            assertEquals(exception.getMessage(), "Too many connections 1");
        } finally {
            client.close();
        }
    }

    /**
     * A server restart between two keep-alive requests must not fail the second
     * request; both responses should come from distinct connections.
     */
    @Test(groups = { "standalone", "default_provider", "async" }, enabled = true, invocationCount = 10, alwaysRun = true)
    public void asyncDoGetKeepAliveHandlerTest_channelClosedDoesNotFail() throws Exception {
        AsyncHttpClient client = getAsyncHttpClient(null);
        try {
            // Latch lets the test time out instead of hanging if a callback never fires.
            final CountDownLatch l = new CountDownLatch(2);
            final Map<String, Boolean> remoteAddresses = new ConcurrentHashMap<String, Boolean>();

            AsyncCompletionHandler<Response> handler = new AsyncCompletionHandlerAdapter() {
                @Override
                public Response onCompleted(Response response) throws Exception {
                    System.out.println("ON COMPLETED INVOKED " + response.getHeader("X-KEEP-ALIVE"));
                    try {
                        assertEquals(response.getStatusCode(), 200);
                        remoteAddresses.put(response.getHeader("X-KEEP-ALIVE"), true);
                    } finally {
                        l.countDown();
                    }
                    return response;
                }
            };

            client.prepareGet(getTargetUrl()).execute(handler).get();
            server.stop();
            server.start();
            client.prepareGet(getTargetUrl()).execute(handler);

            if (!l.await(TIMEOUT, TimeUnit.SECONDS)) {
                fail("Timed out");
            }
            assertEquals(remoteAddresses.size(), 2);
        } finally {
            client.close();
        }
    }

    /**
     * With a total limit of 1 and a short connection timeout, opening a second
     * connection to a different host/port must fail with "Too many connections 1".
     */
    @Test(groups = { "standalone", "default_provider" })
    public void multipleMaxConnectionOpenTest() throws Exception {
        AsyncHttpClientConfig cg = new AsyncHttpClientConfig.Builder().setAllowPoolingConnection(true).setConnectionTimeoutInMs(5000).setMaximumConnectionsTotal(1).build();
        AsyncHttpClient c = getAsyncHttpClient(cg);
        try {
            String body = "hello there";

            // once
            Response response = c.preparePost(getTargetUrl()).setBody(body).execute().get(TIMEOUT, TimeUnit.SECONDS);
            assertEquals(response.getResponseBody(), body);

            // twice — different target, so the pooled connection cannot be reused.
            Exception exception = null;
            try {
                c.preparePost(String.format("http://127.0.0.1:%d/foo/test", port2)).setBody(body).execute().get(TIMEOUT, TimeUnit.SECONDS);
                fail("Should throw exception. Too many connections issued.");
            } catch (Exception ex) {
                ex.printStackTrace();
                exception = ex;
            }
            assertNotNull(exception);
            assertEquals(exception.getMessage(), "Too many connections 1");
        } finally {
            c.close();
        }
    }

    /**
     * Same single-connection setup, but the second request goes to the same host,
     * so the pooled connection is reused and the request must succeed.
     */
    @Test(groups = { "standalone", "default_provider" })
    public void multipleMaxConnectionOpenTestWithQuery() throws Exception {
        AsyncHttpClientConfig cg = new AsyncHttpClientConfig.Builder().setAllowPoolingConnection(true).setConnectionTimeoutInMs(5000).setMaximumConnectionsTotal(1).build();
        AsyncHttpClient c = getAsyncHttpClient(cg);
        try {
            String body = "hello there";

            // once
            Response response = c.preparePost(getTargetUrl() + "?foo=bar").setBody(body).execute().get(TIMEOUT, TimeUnit.SECONDS);
            assertEquals(response.getResponseBody(), "foo_" + body);

            // twice
            Exception exception = null;
            try {
                response = c.preparePost(getTargetUrl()).setBody(body).execute().get(TIMEOUT, TimeUnit.SECONDS);
            } catch (Exception ex) {
                ex.printStackTrace();
                exception = ex;
            }
            assertNull(exception);
            assertNotNull(response);
            assertEquals(response.getStatusCode(), 200);
        } finally {
            c.close();
        }
    }

    /**
     * This test just make sure the hack used to catch disconnected channel under win7 doesn't throw any exception. The onComplete method must be only called once.
     *
     * @throws Exception
     *             if something wrong happens.
     */
    @Test(groups = { "standalone", "default_provider" })
    public void win7DisconnectTest() throws Exception {
        final AtomicInteger count = new AtomicInteger(0);

        AsyncHttpClient client = getAsyncHttpClient(null);
        try {
            AsyncCompletionHandler<Response> handler = new AsyncCompletionHandlerAdapter() {
                @Override
                public Response onCompleted(Response response) throws Exception {
                    count.incrementAndGet();
                    // Forge the stack trace the win7 workaround keys on.
                    StackTraceElement e = new StackTraceElement("sun.nio.ch.SocketDispatcher", "read0", null, -1);
                    IOException t = new IOException();
                    t.setStackTrace(new StackTraceElement[] { e });
                    throw t;
                }
            };

            try {
                client.prepareGet(getTargetUrl()).execute(handler).get();
                fail("Must have received an exception");
            } catch (ExecutionException ex) {
                assertNotNull(ex);
                assertNotNull(ex.getCause());
                assertEquals(ex.getCause().getClass(), IOException.class);
                // The handler must have run exactly once despite the forged disconnect.
                assertEquals(count.get(), 1);
            }
        } finally {
            client.close();
        }
    }

    /**
     * An exception thrown by one handler's onCompleted must never be delivered to a
     * different handler's onThrowable.
     */
    @Test(groups = { "standalone", "default_provider" })
    public void asyncHandlerOnThrowableTest() throws Exception {
        AsyncHttpClient client = getAsyncHttpClient(null);
        try {
            final AtomicInteger count = new AtomicInteger();
            final String THIS_IS_NOT_FOR_YOU = "This is not for you";
            final CountDownLatch latch = new CountDownLatch(16);
            for (int i = 0; i < 16; i++) {
                client.prepareGet(getTargetUrl()).execute(new AsyncCompletionHandlerBase() {
                    @Override
                    public Response onCompleted(Response response) throws Exception {
                        throw new Exception(THIS_IS_NOT_FOR_YOU);
                    }
                });

                client.prepareGet(getTargetUrl()).execute(new AsyncCompletionHandlerBase() {

                    @Override
                    public void onThrowable(Throwable t) {
                        if (t.getMessage() != null && t.getMessage().equalsIgnoreCase(THIS_IS_NOT_FOR_YOU)) {
                            count.incrementAndGet();
                        }
                    }

                    @Override
                    public Response onCompleted(Response response) throws Exception {
                        latch.countDown();
                        return response;
                    }
                });
            }
            // Was ignored before: a timed-out await must fail the test, matching the
            // keep-alive test above, instead of passing vacuously.
            if (!latch.await(TIMEOUT, TimeUnit.SECONDS)) {
                fail("Timed out");
            }
            assertEquals(count.get(), 0);
        } finally {
            client.close();
        }
    }
}
package com.github.scribejava.core.extractors;

import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.github.scribejava.core.exceptions.OAuthException;
import com.github.scribejava.core.model.OAuth2AccessToken;
import com.github.scribejava.core.utils.OAuthEncoder;
import com.github.scribejava.core.utils.Preconditions;

/**
 * Custom implementation of {@link TokenExtractor} for OAuth 2.0
 */
public class OAuth2AccessTokenExtractor implements TokenExtractor<OAuth2AccessToken> {

    // Patterns are compiled once and cached; the previous code re-ran
    // Pattern.compile(...) on every extraction, which is wasted work
    // (java.util.regex best practice: compile once as static final).
    private static final Pattern ACCESS_TOKEN_PATTERN = Pattern.compile("access_token=([^&]+)");
    private static final Pattern TOKEN_TYPE_PATTERN = Pattern.compile("token_type=([^&]+)");
    private static final Pattern EXPIRES_IN_PATTERN = Pattern.compile("expires_in=([^&]+)");
    private static final Pattern REFRESH_TOKEN_PATTERN = Pattern.compile("refresh_token=([^&]+)");
    private static final Pattern SCOPE_PATTERN = Pattern.compile("scope=([^&]+)");

    protected OAuth2AccessTokenExtractor() {
    }

    // Lazy-init singleton holder (thread-safe without synchronization).
    private static class InstanceHolder {
        private static final OAuth2AccessTokenExtractor INSTANCE = new OAuth2AccessTokenExtractor();
    }

    public static OAuth2AccessTokenExtractor instance() {
        return InstanceHolder.INSTANCE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public OAuth2AccessToken extract(String response) {
        Preconditions.checkEmptyString(response, "Response body is incorrect. Can't extract a token from an empty string");

        final String accessToken = extractParameter(response, ACCESS_TOKEN_PATTERN, true);
        final String tokenType = extractParameter(response, TOKEN_TYPE_PATTERN, false);
        final String expiresInString = extractParameter(response, EXPIRES_IN_PATTERN, false);
        Integer expiresIn;
        try {
            expiresIn = expiresInString == null ? null : Integer.valueOf(expiresInString);
        } catch (NumberFormatException nfe) {
            // A malformed expires_in is tolerated and treated as absent.
            expiresIn = null;
        }
        final String refreshToken = extractParameter(response, REFRESH_TOKEN_PATTERN, false);
        final String scope = extractParameter(response, SCOPE_PATTERN, false);

        return new OAuth2AccessToken(accessToken, tokenType, expiresIn, refreshToken, scope, response);
    }

    /**
     * Pulls the first capture group of {@code pattern} out of {@code response},
     * URL-decoding it on the way out.
     *
     * @param response raw response body to search
     * @param pattern precompiled parameter pattern with one capture group
     * @param required whether a missing match is an error
     * @return the decoded parameter value, or {@code null} if absent and not required
     * @throws OAuthException if {@code required} and no match is found; the message
     *         embeds the pattern text exactly as the previous regex-string version did
     */
    private static String extractParameter(String response, Pattern pattern, boolean required)
            throws OAuthException {
        final Matcher matcher = pattern.matcher(response);
        if (matcher.find()) {
            return OAuthEncoder.decode(matcher.group(1));
        } else if (required) {
            throw new OAuthException("Response body is incorrect. Can't extract a '" + pattern.pattern()
                    + "' from this: '" + response + "'", null);
        } else {
            return null;
        }
    }
}
/* * * * ****************************************************************************** * * * Copyright (c) 2015-2019 Skymind Inc. * * * Copyright (c) 2019 Konduit AI. * * * * * * This program and the accompanying materials are made available under the * * * terms of the Apache License, Version 2.0 which is available at * * * https://www.apache.org/licenses/LICENSE-2.0. * * * * * * Unless required by applicable law or agreed to in writing, software * * * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * * * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * * * License for the specific language governing permissions and limitations * * * under the License. * * * * * * SPDX-License-Identifier: Apache-2.0 * * ***************************************************************************** * * */ package ai.konduit.serving.pipeline.step; import ai.konduit.serving.config.Input.DataFormat; import ai.konduit.serving.config.Output; import ai.konduit.serving.config.Output.PredictionType; import ai.konduit.serving.config.SchemaType; import ai.konduit.serving.model.PythonConfig; import ai.konduit.serving.pipeline.BasePipelineStep; import ai.konduit.serving.pipeline.PipelineStep; import org.datavec.python.PythonVariables; import lombok.*; import lombok.experimental.SuperBuilder; import lombok.extern.slf4j.Slf4j; import org.datavec.api.transform.schema.Schema; import java.util.Arrays; import java.util.HashMap; import java.util.Map; /** * PythonStep defines a custom Python {@link PipelineStep} * from a {@link PythonConfig}. 
*/ @SuperBuilder @AllArgsConstructor @NoArgsConstructor @Slf4j public class PythonStep extends BasePipelineStep<PythonStep> { @Getter @Setter @Singular private Map<String, PythonConfig> pythonConfigs; public PythonStep(PythonConfig pythonConfig) throws Exception { this.step(pythonConfig); } /** * Create a PythonConfig Step with default input and output names * from column names, schema types and the actual PythonConfig * * @param pythonConfig Konduit PythonConfig * @param inputColumnNames input column names * @param inputTypes input schema types * @param outputColumnNames output column names * @param outputTypes output schema types * @throws Exception key error */ public PythonStep(PythonConfig pythonConfig, String[] inputColumnNames, SchemaType[] inputTypes, String[] outputColumnNames, SchemaType[] outputTypes) throws Exception { String defaultName = "default"; this.setInput(defaultName, inputColumnNames, inputTypes); this.setOutput(defaultName, outputColumnNames, outputTypes); this.pythonConfig(defaultName, pythonConfig); } /** * Create a PythonConfig Step with default input and output names * from input column names, input schema types and the actual PythonConfig * * @param pythonConfig {@link PythonConfig} * @param inputColumnNames input column names * @param inputTypes input schema types * @throws Exception key error */ public PythonStep(PythonConfig pythonConfig, String[] inputColumnNames, SchemaType[] inputTypes) throws Exception { this(pythonConfig, inputColumnNames, inputTypes, new String[]{}, new SchemaType[]{}); } /** * Create a PythonConfig Step with default input and output names * just from input/output schema and the actual PythonConfig * * @param inputSchema {@link Schema} for data input * @param outputSchema {@link Schema} for data output * @param pythonConfig {@link PythonConfig} * @throws Exception key error */ public PythonStep(Schema inputSchema, Schema outputSchema, PythonConfig pythonConfig) throws Exception { String defaultName = "default"; 
this.setInput(defaultName, inputSchema); this.setOutput(defaultName, outputSchema); this.pythonConfig(defaultName, pythonConfig); } private static SchemaType[] pythonToDataVecVarTypes(String[] pythonVarTypes) { return Arrays.stream(pythonVarTypes) .map(type -> pythonToDataVecVarTypes(PythonVariables.Type.valueOf(type))) .toArray(SchemaType[]::new); } private static SchemaType pythonToDataVecVarTypes(PythonVariables.Type pythonVarType) { try { switch (pythonVarType) { case BOOL: return ai.konduit.serving.config.SchemaType.Boolean; case STR: return ai.konduit.serving.config.SchemaType.String; case INT: return ai.konduit.serving.config.SchemaType.Integer; case FLOAT: return ai.konduit.serving.config.SchemaType.Float; case NDARRAY: return ai.konduit.serving.config.SchemaType.NDArray; case LIST: case FILE: case DICT: default: throw new IllegalArgumentException(String.format("Can't convert (%s) to (%s) enum", pythonVarType.name(), ai.konduit.serving.config.SchemaType.class.getName())); } } catch (Exception e) { log.error("Unable to convert type " + pythonVarType + ". Error was",e); } return null; } @Override public PredictionType[] validPredictionTypes() { return new PredictionType[] { PredictionType.RAW }; } @Override public DataFormat[] validInputTypes() { return new DataFormat[] { DataFormat.ARROW, DataFormat.NUMPY, DataFormat.JSON, DataFormat.IMAGE }; } @Override public Output.DataFormat[] validOutputTypes() { return new Output.DataFormat[] { Output.DataFormat.ARROW, Output.DataFormat.ND4J, Output.DataFormat.NUMPY, Output.DataFormat.JSON }; } /** * Define a single, named step for a Python pipeline. * * @param pythonConfig {@link PythonConfig} * @throws Exception key error */ public PythonStep step(PythonConfig pythonConfig) throws Exception { this.step("default", pythonConfig); return this; } /** * Define a single, named step for a Python pipeline. 
* * @param pythonConfig {@link PythonConfig} * @param inputSchema {@link Schema} for data input * @param outputSchema {@link Schema} for data output * @return this python step * @throws Exception key error */ public PythonStep step(PythonConfig pythonConfig, Schema inputSchema, Schema outputSchema) throws Exception { return this.step("default", pythonConfig, inputSchema, outputSchema); } /** * Define a single, named step for a Python pipeline. * * @param stepName input and output name for this step * @param pythonConfig {@link PythonConfig} * @param inputSchema {@link Schema} for data input * @param outputSchema {@link Schema} for data output * @return this python step * @throws Exception key error */ public PythonStep step(String stepName, PythonConfig pythonConfig, Schema inputSchema, Schema outputSchema) throws Exception { this.setInput(stepName, inputSchema); this.setOutput(stepName, outputSchema); this.pythonConfig(stepName, pythonConfig); return this; } /** * Define a single, named step for a Python pipeline. * * @param stepName input and output name for this step * @param pythonConfig {@link PythonConfig} * @param inputColumnNames input column names * @param inputTypes input schema types * @param outputColumnNames output column names * @param outputTypes output schema types * @throws Exception key error */ public PythonStep step(String stepName, PythonConfig pythonConfig, String[] inputColumnNames, SchemaType[] inputTypes, String[] outputColumnNames, SchemaType[] outputTypes) throws Exception { this.setInput(stepName, inputColumnNames, inputTypes); this.setOutput(stepName, outputColumnNames, outputTypes); this.pythonConfig(stepName, pythonConfig); return this; } /** * Define a single, named step for a Python pipeline. 
* * @param stepName input and output name for this step * @param pythonConfig Konduit {@link PythonConfig} * @param inputColumnNames input column names * @param inputTypes input schema types * @throws Exception key error */ public PythonStep step(String stepName, PythonConfig pythonConfig, String[] inputColumnNames, SchemaType[] inputTypes) throws Exception { this.setInput(stepName, inputColumnNames, inputTypes); this.setOutput(stepName, new String[]{}, new SchemaType[]{}); this.pythonConfig(stepName, pythonConfig); return this; } /** * Define a Python config for this step. * * @param pythonConfig Konduit {@link PythonConfig} * @return this Python step */ public PythonStep pythonConfig(PythonConfig pythonConfig) { if (pythonConfigs == null) { pythonConfigs = new HashMap<>(); } pythonConfigs.put("default", pythonConfig); return this; } /** * Define a Python config for this step. * * @param inputName input name * @param pythonConfig Konduit PythonConfig * @return this Python step */ public PythonStep pythonConfig(String inputName, PythonConfig pythonConfig) { if (pythonConfigs == null) { pythonConfigs = new HashMap<>(); } pythonConfigs.put(inputName, pythonConfig); return this; } /** * Define a single, named step for a Python pipeline. 
* * @param stepName input and output name for this step * @param pythonConfig {@link PythonConfig} * @throws Exception key error */ public PythonStep step(String stepName, PythonConfig pythonConfig) throws Exception { Map<String, String> pythonInputs = pythonConfig.getPythonInputs(), pythonOutputs = pythonConfig.getPythonOutputs(); this.step(stepName, pythonConfig, pythonInputs.keySet().toArray(new String[0]), pythonToDataVecVarTypes(pythonInputs.values().toArray(new String[pythonInputs.size()])), pythonOutputs.keySet().toArray(new String[0]), pythonToDataVecVarTypes(pythonOutputs.values().toArray(new String[pythonOutputs.size()]))); return this; } @Override public String pipelineStepClazz() { return "ai.konduit.serving.pipeline.steps.PythonStepRunner"; } }
package com.king.frame.mvvmframe.base; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.view.Gravity; import com.king.frame.mvvmframe.R; import androidx.annotation.NonNull; /** * @author <a href="mailto:jenly1314@gmail.com">Jenly</a> */ public class BaseProgressDialog extends Dialog { public static BaseProgressDialog newInstance(Context context) { return new BaseProgressDialog(context); } public BaseProgressDialog(@NonNull Context context) { this(context, R.style.mvvmframe_progress_dialog); } public BaseProgressDialog(@NonNull Context context, int themeResId) { super(context, themeResId); initUI(); } public BaseProgressDialog(@NonNull Context context, boolean cancelable, DialogInterface.OnCancelListener cancelListener) { super(context, cancelable, cancelListener); initUI(); } private void initUI() { getWindow().getAttributes().gravity = Gravity.CENTER; setCanceledOnTouchOutside(false); } }
package biz.braham.rssreader.services; import biz.braham.rssreader.exceptions.ReadFeedException; import biz.braham.rssreader.models.Feed; import biz.braham.rssreader.models.NewsItem; import com.sun.syndication.feed.synd.SyndEntryImpl; import com.sun.syndication.feed.synd.SyndFeed; import com.sun.syndication.io.SyndFeedInput; import com.sun.syndication.io.XmlReader; import org.jsoup.Jsoup; import org.springframework.stereotype.Service; import java.net.URL; import java.util.ArrayList; import java.util.Date; import java.util.List; /** * Implementation of the RSS reader interface based on the rome reader */ @Service public class RomeBasedReader implements RssReaderService { @Override public ArrayList<NewsItem> readFeed(Feed feed) throws ReadFeedException { return readFeed(feed.getUrl()); } @Override public ArrayList<NewsItem> readFeed(URL feedUrl) throws ReadFeedException { ArrayList<NewsItem> newsItems = new ArrayList<NewsItem>(); try { SyndFeedInput input = new SyndFeedInput(); SyndFeed rssFeed = input.build(new XmlReader(feedUrl)); ((List<SyndEntryImpl>) rssFeed.getEntries()).forEach((SyndEntryImpl newsItem) -> { String itemTitle = newsItem.getTitle(); //convert item description to plaintext using jsoup String itemDescription = Jsoup.parse(newsItem.getDescription().getValue()).text(); Date itemDate = newsItem.getPublishedDate(); newsItems.add(new NewsItem(itemTitle, itemDescription, itemDate)); }); return newsItems; } catch (Exception e) { throw new ReadFeedException(); } } }
/* * Copyright (C) 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.example.wordlistsql; import android.content.Intent; import android.os.Bundle; import android.support.design.widget.FloatingActionButton; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.text.TextUtils; import android.view.View; import android.widget.Toast; /** Mengimplementasikan RecyclerView yang menampilkan daftar kata dari database SQL. * - Mengeklik tombol keren membuka aktivitas kedua untuk menambahkan kata ke database. * - Mengklik tombol Edit akan membuka aktivitas untuk mengedit kata yang ada di database. * - Mengklik tombol Delete akan menghapus kata yang ada dari database. */ public class MainActivity extends AppCompatActivity { private static final String TAG = MainActivity.class.getSimpleName(); public static final int WORD_EDIT = 1; public static final int WORD_ADD = -1; private WordListOpenHelper mDB; private RecyclerView mRecyclerView; private WordListAdapter mAdapter; private int mLastPsition; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); mDB = new WordListOpenHelper(this); // Membuat Recycle View. mRecyclerView = (RecyclerView) findViewById(R.id.recyclerview); // Buat mAdapter dan berikan data yang akan ditampilkan. 
mAdapter = new WordListAdapter(this, mDB); // Sambungkan mAdapter dengan tampilan pendaur ulang. mRecyclerView.setAdapter(mAdapter); // Beri tampilan pendaur ulang sebagai pengelola tata letak default. mRecyclerView.setLayoutManager(new LinearLayoutManager(this)); // Tambahkan pengendali klik tindakan terapung untuk membuat entri baru. FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab); fab.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { // Mulai aktivitas edit kosong. Intent intent = new Intent(getBaseContext(), EditWordActivity.class); startActivityForResult(intent, WORD_EDIT); } }); } public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); // Tambahkan kode untuk menambahkan database. if (requestCode == WORD_EDIT){ if (resultCode == RESULT_OK){ String word = data.getStringExtra(EditWordActivity.EXTRA_REPLY); // Update Database if (!TextUtils.isEmpty(word)){ int id = data.getIntExtra(WordListAdapter.EXTRA_ID, -99); if (id == WORD_ADD){ mDB.insert(word); } else if (id>=0){ mDB.update(id, word); } // Update UI mAdapter.notifyDataSetChanged(); }else { Toast.makeText( getApplicationContext(), R.string.empty_not_saved, Toast.LENGTH_LONG).show(); } } } } }
package main;

import java.awt.AlphaComposite;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;

import javax.imageio.ImageIO;

import annotations.*;

/**
 * The {@code Main} class of the {@code Minecraft_Texture_Resizer} program.
 * <p>Options that the user may choose to modify are:
 * <ul>
 * <li>{@link #size} : The size that textures will be resized to. Default: 16, range 1..32767.</li>
 * <li>{@link #useScale} : Whether a scaling factor (16/size) should be applied instead of a fixed size.</li>
 * <li>{@link #block}, {@link #effect}, {@link #entity}, {@link #environment}, {@link #font},
 *     {@link #gui}, {@link #item}, {@link #map}, {@link #misc}, {@link #mob_effect},
 *     {@link #models}, {@link #painting}, {@link #particle} : per-category resize toggles.</li>
 * <li>{@link #name_out} : Name of the output resourcepack. If blank, follows "Resized Textures NxN".</li>
 * <li>{@link #pack} : Name of the input resourcepack under {@code src/resources}.</li>
 * <li>{@link #pack_format} : The pack.mcmeta format version; read its description before modifying.</li>
 * </ul>
 * </p>
 */
public class Main {

    /**
     * The target size textures are resized to (square, size x size) when {@link #useScale} is false.
     * <p>Default: 16, min: 1, max: 32767. Larger numbers run slower and may cause crashes.</p>
     */
    @ShortRangeDefaultValue(value = 16, minimum = 1, maximum = 32767)
    public static final short size = 16;

    /**
     * If {@code true}, images are resized to {@code (w/scale, h/scale)} so non-square
     * textures keep their aspect ratio; if {@code false}, all images become (size, size).
     */
    @BooleanDefaultValue(true)
    public static final boolean useScale = true;

    /** Resize images under {@code textures/block} (e.g. Dirt, Stone, Bedrock). */
    @BooleanDefaultValue(true)
    public static final boolean block = true;

    /** Resize images under {@code textures/effect} (only "dither", an unused texture). */
    @BooleanDefaultValue(false)
    public static final boolean effect = false;

    /** Resize images under {@code textures/entity} (e.g. Pig, Creeper, tile entities such as bells and banners). */
    @BooleanDefaultValue(false)
    public static final boolean entity = false;

    /** Resize images under {@code textures/environment} (e.g. Clouds, Rain, Sun, Moon). */
    @BooleanDefaultValue(false)
    public static final boolean environment = false;

    /**
     * Resize images under {@code textures/font}.
     * <p><strong>NOTE:</strong> Resizes <em>all unicode characters</em>, often rendering text <em>unreadable</em>.</p>
     */
    @BooleanDefaultValue(false)
    public static final boolean font = false;

    /**
     * Resize images under {@code textures/gui} (e.g. Inventory, pause menu, title screen).
     * <p><strong>NOTE:</strong> Often makes playing difficult, as gui elements become hard to see/use.</p>
     */
    @BooleanDefaultValue(false)
    public static final boolean gui = false;

    /** Resize images under {@code textures/item} (e.g. Swords, Food). */
    @BooleanDefaultValue(true)
    public static final boolean item = true;

    /** Resize images under {@code textures/map} (map background texture and map icons). */
    @BooleanDefaultValue(false)
    public static final boolean map = false;

    /** Resize images under {@code textures/misc} (e.g. underwater texture, pumpkin overlay, vignette). */
    @BooleanDefaultValue(false)
    public static final boolean misc = false;

    /** Resize images under {@code textures/mob_effect} (status-effect icons, e.g. Regeneration, Slowness, Haste). */
    @BooleanDefaultValue(false)
    public static final boolean mob_effect = false;

    /** Resize images under {@code textures/models} (worn armor models, e.g. Leather, Iron, Chainmail). */
    @BooleanDefaultValue(false)
    public static final boolean models = false; // i.e. armor

    /** Resize images under {@code textures/painting} (e.g. Wanderer, Wither, Burning Skull). */
    @BooleanDefaultValue(false)
    public static final boolean painting = false;

    /** Resize images under {@code textures/particle} (e.g. sword sweep, enchanting table, water drip). */
    @BooleanDefaultValue(false)
    public static final boolean particle = false;

    /**
     * Name of the output folder containing the resized textures.
     * <p>If empty ("") or null, the output folder is named "Resized Textures NxN" where N is {@link #size}.</p>
     * <p><strong>Naming conventions:</strong> avoid characters illegal in file names:
     * {@code % & * < > \ : ? / | "}.</p>
     */
    @StringDefaultValue("")
    public static final String name_out = "";

    /** Name of the input resourcepack, taken from the {@code src/resources} folder. */
    @StringDefaultValue("Default")
    public static final String pack = "Default";

    /**
     * <em>Advanced users only:</em> the "pack_format" value written to pack.mcmeta.
     * <ul>
     * <li>1 : Java Editions 1.6 - 1.8</li>
     * <li>2 : Java Editions 1.9 - 1.10</li>
     * <li>3 : Java Editions 1.11 - 1.12</li>
     * <li>4 : Java Editions 1.13 - 1.14</li>
     * <li>5 : Java Edition 1.15</li>
     * </ul>
     */
    @ShortRangeDefaultValue(value = 4, minimum = 1, maximum = 5)
    public static final short pack_format = 4;

    //// ====-====-====-====-====-====-====-====-====-====-====-====-====-====-====-==== ////
    // OTHER REQUISITES, DO NOT TOUCH!

    /**
     * <strong>DO NOT MODIFY</strong>
     * <p>Scaling divisor applied to width/height when {@link #useScale} is {@code true}.</p>
     */
    public static final double scale = 16.0 / size;

    /**
     * <strong>DO NOT MODIFY</strong>
     * <p>Program start time in millis; used to report total runtime on completion.</p>
     */
    public static final long timeInit = System.currentTimeMillis();

    /**
     * <strong>DO NOT MODIFY</strong>
     * <p>Absolute path of this project's working directory, with forward slashes;
     * used to locate the input resourcepack and the output folder.</p>
     */
    public static final String loc = new File("").getAbsolutePath().replace("\\", "/");

    /**
     * <strong>DO NOT MODIFY:</strong> must stay {@code false} due to occasional game crashes
     * when {@code textures/colormap} images are resized.
     */
    public static final boolean colormap = false;

    /**
     * Entry point: scans the input pack, resizes every selected texture, copies
     * {@code .png.mcmeta} animation files, and writes a fresh {@code pack.mcmeta}.
     *
     * @param args unused
     * @throws IOException if an output file cannot be written
     * @throws IllegalArgumentException if {@link #size} is not positive
     */
    @SuppressWarnings("unused")
    public static void main(String[] args) throws IOException {
        if (size <= 0)
            throw new IllegalArgumentException("Value for size " + size
                    + " is invalid. size must be an integer greater than 0. Values above 16 will not look very different.");

        // Find the output location.
        String locOut;
        if (name_out != null && !name_out.equals("")) {
            locOut = loc + "\\src\\out\\" + name_out + "\\";
        } else {
            locOut = loc + "\\src\\out\\Resized Textures " + size + "x" + size + "\\";
        }
        // FIX: String.replace returns a new String; the original discarded the result.
        locOut = locOut.replace('\\', '/');

        if (!new File(locOut).exists()) { // if the output folder doesn't exist yet,
            new File(locOut).mkdirs();   // create it.
        }

        // Work lists: pending files/dirs, image paths, decoded images, and non-image files (e.g. mcmeta).
        ArrayList<File> source_files = new ArrayList<File>();
        ArrayList<String> imgs_loc = new ArrayList<String>();
        ArrayList<BufferedImage> imgs_in = new ArrayList<BufferedImage>();
        ArrayList<File> unused_files = new ArrayList<File>();

        // Seed the scan with the input pack's root directory.
        source_files.add(new File(loc + "/src/resources/" + pack));
        System.out.println(loc + "/src/resources/" + pack);

        // Iteratively flatten the directory tree, collecting images and skipping excluded categories.
        for (int i = 0; i < source_files.size(); i++) {
            File f = source_files.get(i);
            if (f.isDirectory()) {
                // FIX: match the directory name exactly instead of endsWith on the path.
                // endsWith("effect") also matched "mob_effect", and endsWith("map")
                // also matched "colormap", mislabeling (and order-coupling) removals.
                String excluded = excludedCategory(f.getName());
                if (excluded != null) {
                    source_files.remove(i);
                    i--; // account for list shortening
                    System.out.println("Removed " + excluded + ".");
                    continue;
                }
                // Replace this directory entry with its contents.
                appendFileArrayToArrayList(source_files.remove(i).listFiles(), source_files);
                i--; // account for list shortening
                continue;
            }
            // Not a directory: try to decode it as an image.
            try {
                BufferedImage img = ImageIO.read(f);
                if (img != null) {
                    imgs_in.add(img);                   // keep the decoded image
                    imgs_loc.add(f.getAbsolutePath());  // and its path, at the matching index
                } else {
                    unused_files.add(f);                // not an image; keep for the mcmeta pass
                }
            } catch (IOException e) {
                // FIX: the original decremented i without removing the file here,
                // which retried the same unreadable file forever (infinite loop).
                e.printStackTrace();
            }
            source_files.remove(i); // done with this file either way
            i--;                    // account for list shortening
        }

        // Total of all collected files, used for progress percentages.
        int count = imgs_in.size() + unused_files.size();
        System.out.println("Number of images to resize: " + count);

        // Resize every collected image and write it to the mirrored output path.
        for (int i = 0; i < imgs_in.size(); i++) {
            BufferedImage img_in = imgs_in.get(i);
            BufferedImage img_out;

            if (useScale) {
                // Preserve aspect ratio: divide both dimensions by the scale factor,
                // clamping to at least 1 pixel.
                int w = (int) (img_in.getWidth() / scale);
                int h = (int) (img_in.getHeight() / scale);
                img_out = resizeImage(img_in, (w <= 0 ? 1 : w), (h <= 0 ? 1 : h));
            } else {
                img_out = resizeImage(img_in, size, size);
            }

            // Path of this image relative to the pack root, mirrored under locOut.
            String relative = relativeToPack(imgs_loc.get(i));
            File outputfile = new File(locOut + relative);
            ensureParentExists(outputfile);
            ImageIO.write(img_out, "png", outputfile);

            System.out.printf("%.2f", 100.0 * i / count);          // progress percentage
            System.out.println("% : " + relative.replace('\\', '/')); // which file was written
        }

        // Copy .png.mcmeta animation descriptors alongside their textures.
        for (int i = 0; i < unused_files.size(); i++) {
            File f = unused_files.get(i);
            if (!f.getAbsolutePath().endsWith(".png.mcmeta")) {
                continue; // other non-image files are not copied
            }

            // Read the whole file line by line; close the reader even on failure.
            ArrayList<String> content = new ArrayList<String>();
            BufferedReader r = new BufferedReader(new FileReader(f));
            try {
                String s = r.readLine();
                while (s != null) {
                    content.add(s);
                    s = r.readLine();
                }
            } finally {
                r.close();
            }

            String relative = relativeToPack(f.getAbsolutePath());
            File outputfile = new File(locOut + relative);
            ensureParentExists(outputfile);

            BufferedWriter outputWriter = new BufferedWriter(new FileWriter(outputfile));
            try {
                for (int j = 0; j < content.size(); j++) {
                    outputWriter.write(content.get(j));
                    // FIX: the original dropped all line breaks, collapsing the copy
                    // onto one line. (Still valid JSON, but not a faithful copy.)
                    outputWriter.newLine();
                }
                outputWriter.flush();
            } finally {
                outputWriter.close();
            }

            unused_files.remove(i);
            i--; // account for list shortening

            System.out.printf("%.2f", 100.0 * (count - unused_files.size()) / count);
            System.out.println("% : " + relative.replace('\\', '/'));
        }

        // Write a fresh pack.mcmeta describing the output pack.
        String[] content = new String[]
                {"{",
                 "\t\"pack\": {",
                 "\t\t\"pack_format\": " + pack_format + ",",
                 "\t\t\"description\": \"" + ("A Resized Resourcepack! Texture Resizer " + size + "x" + size
                         + ", and did" + (useScale ? " " : " not ") + "use scaling.") + "\"",
                 "\t}",
                 "}"};
        BufferedWriter outputWriter = new BufferedWriter(new FileWriter(new File(locOut + "pack.mcmeta")));
        try {
            for (int i = 0; i < content.length; i++) {
                outputWriter.write(content[i] + "\r\n");
            }
            outputWriter.flush();
        } finally {
            outputWriter.close();
        }

        // Inform the user of the completion of writing [pack.mcmeta].
        System.out.println("100.00% : \\pack.mcmeta");

        // Inform the user of the completion of the program.
        System.out.printf("Operation completed; took %.2f seconds.",
                ((System.currentTimeMillis() - timeInit) / 1000.0));
    }

    /**
     * Returns the category name if a directory with this exact name is excluded
     * by the corresponding toggle, or {@code null} if it should be traversed.
     * Replaces fourteen copy-pasted if-blocks from the original scan loop.
     */
    private static String excludedCategory(String name) {
        if (!block && name.equals("block")) return "block";
        if (!colormap && name.equals("colormap")) return "colormap";
        if (!effect && name.equals("effect")) return "effect";
        if (!entity && name.equals("entity")) return "entity";
        if (!environment && name.equals("environment")) return "environment";
        if (!font && name.equals("font")) return "font";
        if (!gui && name.equals("gui")) return "gui";
        if (!item && name.equals("item")) return "item";
        if (!map && name.equals("map")) return "map";
        if (!misc && name.equals("misc")) return "misc";
        if (!mob_effect && name.equals("mob_effect")) return "mob_effect";
        if (!models && name.equals("models")) return "models";
        if (!painting && name.equals("painting")) return "painting";
        if (!particle && name.equals("particle")) return "particle";
        return null;
    }

    /**
     * Returns {@code absolutePath} relative to the last occurrence of the pack name,
     * i.e. the path suffix that is mirrored under the output folder.
     */
    private static String relativeToPack(String absolutePath) {
        return absolutePath.substring(absolutePath.lastIndexOf(pack) + pack.length());
    }

    /** Creates the parent directory of {@code file} if it does not exist yet. */
    private static void ensureParentExists(File file) {
        File parent = file.getParentFile();
        if (parent != null && !parent.exists()) {
            parent.mkdirs();
        }
    }

    /**
     * Resizes {@code image} to (width, height) with bilinear interpolation,
     * quality rendering, and antialiasing, preserving the alpha channel (ARGB).
     *
     * @param image  source image
     * @param width  target width in pixels
     * @param height target height in pixels
     * @return a new {@code BufferedImage} of the requested size
     */
    public static BufferedImage resizeImage(final Image image, int width, int height) {
        final BufferedImage bufferedImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
        final Graphics2D graphics2D = bufferedImage.createGraphics();
        graphics2D.setComposite(AlphaComposite.Src);
        graphics2D.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
        graphics2D.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
        graphics2D.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        graphics2D.drawImage(image, 0, 0, width, height, null);
        graphics2D.dispose();
        return bufferedImage;
    }

    /**
     * Appends every element of {@code a} to {@code al}.
     *
     * @param a  source array; may be null (e.g. {@code File.listFiles()} on an I/O error),
     *           in which case nothing is appended. FIX: the original NPE'd on null.
     * @param al destination list
     */
    public static void appendFileArrayToArrayList(File[] a, ArrayList<File> al) {
        if (a == null) {
            return;
        }
        for (int i = 0; i < a.length; i++) {
            al.add(a[i]);
        }
    }
}
/**
 * Copyright © 2016-2021 The Thingsboard Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.thingsboard.server.common.transport.config.ssl;

import org.thingsboard.server.common.data.StringUtils;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManagerFactory;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.KeyStore.PrivateKeyEntry;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.UnrecoverableEntryException;
import java.security.UnrecoverableKeyException;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Set;

/**
 * Base implementation of {@code SslCredentials} backed by a {@link KeyStore}.
 * Subclasses supply the keystore itself (see {@link #loadKeyStore}); this class
 * extracts the private key, its certificate chain, and the trusted certificates,
 * and builds key/trust manager factories from them.
 */
public abstract class AbstractSslCredentials implements SslCredentials {

    // Key password as a char[]; an empty array when no password is configured.
    private char[] keyPasswordArray;
    private KeyStore keyStore;
    private PrivateKey privateKey;
    private PublicKey publicKey;
    // Certificate chain of the private key entry; chain[0] is the end-entity cert.
    private X509Certificate[] chain;
    // All trusted X.509 certificates found in the keystore.
    private X509Certificate[] trusts;

    /**
     * Loads the keystore and populates the credential fields.
     *
     * @param trustsOnly when {@code true}, only trusted certificates are loaded and
     *                   no private key is required (e.g. a trust-store-only config)
     * @throws IOException              if the keystore cannot be read
     * @throws GeneralSecurityException if the keystore cannot be parsed
     * @throws IllegalArgumentException if a private key is required but none is found
     */
    @Override
    public void init(boolean trustsOnly) throws IOException, GeneralSecurityException {
        String keyPassword = getKeyPassword();
        if (StringUtils.isEmpty(keyPassword)) {
            this.keyPasswordArray = new char[0];
        } else {
            this.keyPasswordArray = keyPassword.toCharArray();
        }
        this.keyStore = this.loadKeyStore(trustsOnly, this.keyPasswordArray);
        Set<X509Certificate> trustedCerts = getTrustedCerts(this.keyStore);
        this.trusts = trustedCerts.toArray(new X509Certificate[0]);
        if (!trustsOnly) {
            PrivateKeyEntry privateKeyEntry = null;
            String keyAlias = this.getKeyAlias();
            if (!StringUtils.isEmpty(keyAlias)) {
                // An explicit alias is configured: look up that entry only.
                privateKeyEntry = tryGetPrivateKeyEntry(this.keyStore, keyAlias, this.keyPasswordArray);
            } else {
                // No alias configured: probe every alias and keep the first private key entry,
                // remembering which alias it was under.
                for (Enumeration<String> e = this.keyStore.aliases(); e.hasMoreElements(); ) {
                    String alias = e.nextElement();
                    privateKeyEntry = tryGetPrivateKeyEntry(this.keyStore, alias, this.keyPasswordArray);
                    if (privateKeyEntry != null) {
                        this.updateKeyAlias(alias);
                        break;
                    }
                }
            }
            if (privateKeyEntry == null) {
                throw new IllegalArgumentException("Failed to get private key from the keystore or pem files. " +
                        "Please check if the private key exists in the keystore or pem files and if the provided private key password is valid.");
            }
            this.chain = asX509Certificates(privateKeyEntry.getCertificateChain());
            this.privateKey = privateKeyEntry.getPrivateKey();
            if (this.chain.length > 0) {
                // Public key is taken from the leaf (first) certificate of the chain.
                this.publicKey = this.chain[0].getPublicKey();
            }
        }
    }

    @Override
    public KeyStore getKeyStore() {
        return this.keyStore;
    }

    @Override
    public PrivateKey getPrivateKey() {
        return this.privateKey;
    }

    @Override
    public PublicKey getPublicKey() {
        return this.publicKey;
    }

    @Override
    public X509Certificate[] getCertificateChain() {
        return this.chain;
    }

    @Override
    public X509Certificate[] getTrustedCertificates() {
        return this.trusts;
    }

    /** Builds a {@link TrustManagerFactory} initialized with this keystore's trusted certs. */
    @Override
    public TrustManagerFactory createTrustManagerFactory() throws NoSuchAlgorithmException, KeyStoreException {
        TrustManagerFactory tmFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmFactory.init(this.keyStore);
        return tmFactory;
    }

    /** Builds a {@link KeyManagerFactory} initialized with this keystore and the key password. */
    @Override
    public KeyManagerFactory createKeyManagerFactory() throws NoSuchAlgorithmException, UnrecoverableKeyException, KeyStoreException {
        KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
        kmf.init(this.keyStore, this.keyPasswordArray);
        return kmf;
    }

    // Whether this credentials source is configured/usable; decided by the subclass.
    protected abstract boolean canUse();

    // Loads the underlying keystore; isPrivateKeyRequired is false for trust-only use.
    protected abstract KeyStore loadKeyStore(boolean isPrivateKeyRequired, char[] keyPasswordArray) throws IOException, GeneralSecurityException;

    // Notifies the subclass which alias the private key was found under (auto-discovery case).
    protected abstract void updateKeyAlias(String keyAlias);

    /**
     * Narrows a generic {@link Certificate} array to {@link X509Certificate}[],
     * rejecting null/empty arrays, null elements, and non-X.509 certificates.
     */
    private static X509Certificate[] asX509Certificates(Certificate[] certificates) {
        if (null == certificates || 0 == certificates.length) {
            throw new IllegalArgumentException("certificates missing!");
        }
        X509Certificate[] x509Certificates = new X509Certificate[certificates.length];
        for (int index = 0; certificates.length > index; ++index) {
            if (null == certificates[index]) {
                throw new IllegalArgumentException("[" + index + "] is null!");
            }
            try {
                x509Certificates[index] = (X509Certificate) certificates[index];
            } catch (ClassCastException e) {
                throw new IllegalArgumentException("[" + index + "] is not a x509 certificate! Instead it's a "
                        + certificates[index].getClass().getName());
            }
        }
        return x509Certificates;
    }

    /**
     * Attempts to read the private key entry stored under {@code alias}.
     * Falls back to {@code getKey}/{@code getCertificateChain} when the keystore
     * does not support {@code getEntry} (UnsupportedOperationException).
     * Returns {@code null} if the alias is not a private key entry or extraction fails;
     * keystore exceptions are deliberately swallowed so alias probing can continue.
     */
    private static PrivateKeyEntry tryGetPrivateKeyEntry(KeyStore keyStore, String alias, char[] pwd) {
        PrivateKeyEntry entry = null;
        try {
            if (keyStore.entryInstanceOf(alias, KeyStore.PrivateKeyEntry.class)) {
                try {
                    entry = (KeyStore.PrivateKeyEntry) keyStore
                            .getEntry(alias, new KeyStore.PasswordProtection(pwd));
                } catch (UnsupportedOperationException e) {
                    PrivateKey key = (PrivateKey) keyStore.getKey(alias, pwd);
                    Certificate[] certs = keyStore.getCertificateChain(alias);
                    entry = new KeyStore.PrivateKeyEntry(key, certs);
                }
            }
        } catch (KeyStoreException | UnrecoverableEntryException | NoSuchAlgorithmException ignored) {}
        return entry;
    }

    /**
     * Collects every X.509 certificate trusted by the keystore: certificate entries
     * directly, plus the leaf certificate of each key entry's chain.
     * Returns an unmodifiable set; keystore errors yield whatever was collected so far.
     */
    private static Set<X509Certificate> getTrustedCerts(KeyStore ks) {
        Set<X509Certificate> set = new HashSet<>();
        try {
            for (Enumeration<String> e = ks.aliases(); e.hasMoreElements(); ) {
                String alias = e.nextElement();
                if (ks.isCertificateEntry(alias)) {
                    Certificate cert = ks.getCertificate(alias);
                    if (cert instanceof X509Certificate) {
                        set.add((X509Certificate) cert);
                    }
                } else if (ks.isKeyEntry(alias)) {
                    Certificate[] certs = ks.getCertificateChain(alias);
                    if ((certs != null) && (certs.length > 0) &&
                            (certs[0] instanceof X509Certificate)) {
                        set.add((X509Certificate) certs[0]);
                    }
                }
            }
        } catch (KeyStoreException ignored) {}
        return Collections.unmodifiableSet(set);
    }
}
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.example.android.sunshine.sync;

import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.support.annotation.NonNull;

import com.example.android.sunshine.data.WeatherContract;
import com.firebase.jobdispatcher.Constraint;
import com.firebase.jobdispatcher.Driver;
import com.firebase.jobdispatcher.FirebaseJobDispatcher;
import com.firebase.jobdispatcher.GooglePlayDriver;
import com.firebase.jobdispatcher.Job;
import com.firebase.jobdispatcher.Lifetime;
import com.firebase.jobdispatcher.Trigger;

import java.util.concurrent.TimeUnit;

/**
 * Utility class responsible for scheduling Sunshine's periodic background weather sync and for
 * triggering an immediate sync when the local weather database has no displayable data.
 */
public class SunshineSyncUtils {

    /*
     * Sunshine is synced every 3 hours, with a flex window of one third of the interval,
     * i.e. the sync fires somewhere between 3 and 4 hours after the previous one.
     */
    private static final int SYNC_INTERVAL_HOURS = 3;
    private static final int SYNC_INTERVAL_SECONDS = (int) TimeUnit.HOURS.toSeconds(SYNC_INTERVAL_HOURS);
    private static final int SYNC_FLEXTIME_SECONDS = SYNC_INTERVAL_SECONDS / 3;

    /* Guards initialize() so scheduling and the empty-database check run once per app lifetime. */
    private static boolean sInitialized;

    /* UNIQUE tag used to identify Sunshine's sync Job with FirebaseJobDispatcher. */
    private static final String SUNSHINE_SYNC_TAG = "sunshine-sync";

    /**
     * Schedules a recurring sync of Sunshine's weather data using FirebaseJobDispatcher.
     *
     * @param context Context used to create the GooglePlayDriver that powers the dispatcher
     */
    static void scheduleFirebaseJobDispatcherSync(@NonNull final Context context) {

        Driver driver = new GooglePlayDriver(context);
        FirebaseJobDispatcher dispatcher = new FirebaseJobDispatcher(driver);

        /* Create the Job to periodically sync Sunshine */
        Job syncSunshineJob = dispatcher.newJobBuilder()
                /* The Service that will be used to sync Sunshine's data */
                .setService(SunshineFirebaseJobService.class)
                /* Set the UNIQUE tag used to identify this Job */
                .setTag(SUNSHINE_SYNC_TAG)
                /*
                 * Network constraints on which this Job should run. We choose to run on any
                 * network, but you can also choose to run only on un-metered networks or when the
                 * device is charging. It might be a good idea to include a preference for this,
                 * as some users may not want to download any data on their mobile plan. ($$$)
                 */
                .setConstraints(Constraint.ON_ANY_NETWORK)
                /*
                 * setLifetime sets how long this job should persist. The options are to keep the
                 * Job "forever" or to have it die the next time the device boots up.
                 */
                .setLifetime(Lifetime.FOREVER)
                /*
                 * We want Sunshine's weather data to stay up to date, so we tell this Job to recur.
                 */
                .setRecurring(true)
                /*
                 * We want the weather data to be synced every 3 to 4 hours. The first argument for
                 * Trigger's static executionWindow method is the start of the time frame when the
                 * sync should be performed. The second argument is the latest point in time at
                 * which the data should be synced. Please note that this end time is not
                 * guaranteed, but is more of a guideline for FirebaseJobDispatcher to go off of.
                 */
                .setTrigger(Trigger.executionWindow(
                        SYNC_INTERVAL_SECONDS,
                        SYNC_INTERVAL_SECONDS + SYNC_FLEXTIME_SECONDS))
                /*
                 * If a Job with the provided tag already exists, this new job will replace
                 * the old one.
                 */
                .setReplaceCurrent(true)
                /* Once the Job is ready, call the builder's build method to return the Job */
                .build();

        /* Schedule the Job with the dispatcher */
        dispatcher.schedule(syncSunshineJob);
    }

    /**
     * Creates periodic sync tasks and checks to see if an immediate sync is required. If an
     * immediate sync is required, this method will take care of making sure that sync occurs.
     *
     * @param context Context that will be passed to other methods and used to access the
     *                ContentResolver
     */
    synchronized public static void initialize(@NonNull final Context context) {

        /*
         * Only perform initialization once per app lifetime. If initialization has already been
         * performed, we have nothing to do in this method.
         */
        if (sInitialized) return;

        sInitialized = true;

        /* Schedule the periodic weather sync. */
        scheduleFirebaseJobDispatcherSync(context);

        /*
         * We need to check to see if our ContentProvider has data to display in our forecast
         * list. However, performing a query on the main thread is a bad idea as this may
         * cause our UI to lag. Therefore, we create a thread in which we will run the query
         * to check the contents of our ContentProvider.
         */
        Thread checkForEmpty = new Thread(new Runnable() {
            @Override
            public void run() {

                /* URI for every row of weather data in our weather table */
                Uri forecastQueryUri = WeatherContract.WeatherEntry.CONTENT_URI;

                /*
                 * Since this query is going to be used only as a check to see if we have any
                 * data (rather than to display data), we just need to PROJECT the ID of each
                 * row.
                 */
                String[] projectionColumns = {WeatherContract.WeatherEntry._ID};
                String selectionStatement = WeatherContract.WeatherEntry
                        .getSqlSelectForTodayOnwards();

                /* Here, we perform the query to check to see if we have any weather data */
                Cursor cursor = context.getContentResolver().query(
                        forecastQueryUri,
                        projectionColumns,
                        selectionStatement,
                        null,
                        null);

                /*
                 * A Cursor object can be null for various reasons: an invalid URI, a
                 * ContentProvider whose query method returns null, or a RemoteException.
                 * If the Cursor was null OR if it was empty, we need to sync immediately to
                 * be able to display data to the user.
                 */
                if (null == cursor || cursor.getCount() == 0) {
                    startImmediateSync(context);
                }

                /*
                 * BUG FIX: the original called cursor.close() unconditionally, which throws a
                 * NullPointerException whenever the ContentResolver returned a null Cursor
                 * (exactly the case handled above). Only close a Cursor that exists.
                 */
                if (cursor != null) {
                    cursor.close();
                }
            }
        });

        /* Finally, once the thread is prepared, fire it off to perform our checks. */
        checkForEmpty.start();
    }

    /**
     * Helper method to perform a sync immediately using an IntentService for asynchronous
     * execution.
     *
     * @param context The Context used to start the IntentService for the sync.
     */
    public static void startImmediateSync(@NonNull final Context context) {
        Intent intentToSyncImmediately = new Intent(context, SunshineSyncIntentService.class);
        context.startService(intentToSyncImmediately);
    }
}
package com.hedera.services.records;

/*-
 * ‌
 * Hedera Services Node
 * ​
 * Copyright (C) 2018 - 2020 Hedera Hashgraph, LLC
 * ​
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ‍
 */

import com.hedera.services.context.TransactionContext;
import com.hedera.services.ledger.HederaLedger;
import com.hedera.services.state.EntityCreator;
import com.hedera.services.state.expiry.ExpiryManager;
import com.hedera.services.state.merkle.MerkleAccount;
import com.hedera.services.state.merkle.MerkleEntityId;
import com.hederahashgraph.api.proto.java.TransactionRecord;
import com.swirlds.fcmap.FCMap;

import java.util.Optional;
import java.util.function.Supplier;

/**
 * Provides a {@link AccountRecordsHistorian} using the natural collaborators.
 *
 * @author Michael Tinker
 */
public class TxnAwareRecordsHistorian implements AccountRecordsHistorian {
	// Mutable collaborators injected post-construction via setters.
	private HederaLedger ledger;
	private TransactionRecord lastCreatedRecord;
	private EntityCreator creator;

	// Immutable collaborators supplied at construction time.
	private final RecordCache recordCache;
	private final ExpiryManager expiries;
	private final TransactionContext txnCtx;
	private final Supplier<FCMap<MerkleEntityId, MerkleAccount>> accounts;

	public TxnAwareRecordsHistorian(
			RecordCache recordCache,
			TransactionContext txnCtx,
			Supplier<FCMap<MerkleEntityId, MerkleAccount>> accounts,
			ExpiryManager expiries
	) {
		this.recordCache = recordCache;
		this.txnCtx = txnCtx;
		this.accounts = accounts;
		this.expiries = expiries;
	}

	@Override
	public Optional<TransactionRecord> lastCreatedRecord() {
		// Empty until addNewRecords() has run at least once.
		return Optional.ofNullable(lastCreatedRecord);
	}

	@Override
	public void setCreator(EntityCreator creator) {
		this.creator = creator;
	}

	@Override
	public void setLedger(HederaLedger ledger) {
		this.ledger = ledger;
	}

	@Override
	public void addNewRecords() {
		// Snapshot the record of the active transaction, then hand an expiring
		// copy to the payer account and publish the receipt to the record cache.
		lastCreatedRecord = txnCtx.recordSoFar();

		final long consensusSecond = txnCtx.consensusTime().getEpochSecond();
		final long memberId = txnCtx.submittingSwirldsMember();
		final var txnAccessor = txnCtx.accessor();
		final var expiringPayerRecord = creator.createExpiringRecord(
				txnCtx.effectivePayer(),
				lastCreatedRecord,
				consensusSecond,
				memberId);
		recordCache.setPostConsensus(
				txnAccessor.getTxnId(),
				lastCreatedRecord.getReceipt().getStatus(),
				expiringPayerRecord);
	}

	@Override
	public void purgeExpiredRecords() {
		// Drop any records whose expiry precedes the current consensus second.
		expiries.purgeExpiredRecordsAt(txnCtx.consensusTime().getEpochSecond(), ledger);
	}

	@Override
	public void reviewExistingRecords() {
		// Rebuild expiry tracking from the current account state (e.g. after restart).
		expiries.restartTrackingFrom(accounts.get());
	}
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.test.spring.boot;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.tuple;

import org.flowable.cmmn.rest.service.api.repository.CaseDefinitionResponse;
import org.flowable.common.rest.api.DataResponse;
import org.flowable.content.rest.service.api.content.ContentItemResponse;
import org.flowable.dmn.rest.service.api.repository.DmnDeploymentResponse;
import org.flowable.rest.service.api.identity.GroupResponse;
import org.flowable.rest.service.api.repository.FormDefinitionResponse;
import org.flowable.rest.service.api.repository.ProcessDefinitionResponse;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.client.AutoConfigureWebClient;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.json.BasicJsonTester;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.context.junit4.SpringRunner;

import flowable.Application;

/**
 * Integration smoke test for the Flowable REST application: boots the full Spring Boot app on a
 * random port and exercises one representative endpoint of each REST module (process, CMMN,
 * content, DMN, form, IDM) over HTTP.
 *
 * @author Filip Hrisafov
 */
@RunWith(SpringRunner.class)
// Boot the real application on a random port so tests go through the actual HTTP stack.
@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@AutoConfigureWebClient(registerRestTemplate = true)
public class RestApiApplicationTest {

    // Pre-configured client that targets the locally started server.
    @Autowired
    private TestRestTemplate restTemplate;

    // The random port the embedded server bound to; used to build request URLs.
    @LocalServerPort
    private int serverPort;

    /** BPMN repository API: expects exactly the one auto-deployed "dogeProcess" definition. */
    @Test
    public void testRestApiIntegration() {
        String processDefinitionsUrl = "http://localhost:" + serverPort + "/process-api/repository/process-definitions";

        // ParameterizedTypeReference is needed so the generic DataResponse payload deserializes.
        ResponseEntity<DataResponse<ProcessDefinitionResponse>> response = restTemplate
            .exchange(processDefinitionsUrl, HttpMethod.GET, null, new ParameterizedTypeReference<DataResponse<ProcessDefinitionResponse>>() {

            });

        assertThat(response.getStatusCode())
            .as("Status code")
            .isEqualTo(HttpStatus.OK);
        DataResponse<ProcessDefinitionResponse> processDefinitions = response.getBody();
        assertThat(processDefinitions).isNotNull();
        assertThat(processDefinitions.getTotal()).isEqualTo(1);
        assertThat(processDefinitions.getData())
            .extracting(ProcessDefinitionResponse::getKey, ProcessDefinitionResponse::getUrl)
            .containsExactlyInAnyOrder(
                tuple("dogeProcess", "http://localhost:" + serverPort + "/process-api/repository/process-definitions/dogeProcess:1:7")
            );
    }

    /** CMMN repository API: expects exactly the one auto-deployed "case1" case definition. */
    @Test
    public void testCmmnRestApiIntegration() {
        String processDefinitionsUrl = "http://localhost:" + serverPort + "/cmmn-api/cmmn-repository/case-definitions";

        ResponseEntity<DataResponse<CaseDefinitionResponse>> response = restTemplate
            .exchange(processDefinitionsUrl, HttpMethod.GET, null, new ParameterizedTypeReference<DataResponse<CaseDefinitionResponse>>() {

            });

        assertThat(response.getStatusCode())
            .as("Status code")
            .isEqualTo(HttpStatus.OK);
        DataResponse<CaseDefinitionResponse> caseDefinitions = response.getBody();
        assertThat(caseDefinitions).isNotNull();
        assertThat(caseDefinitions.getData())
            .extracting(CaseDefinitionResponse::getKey, CaseDefinitionResponse::getUrl)
            .containsExactlyInAnyOrder(
                tuple("case1", "http://localhost:" + serverPort + "/cmmn-api/cmmn-repository/case-definitions/3")
            );
        assertThat(caseDefinitions.getTotal()).isEqualTo(1);
    }

    /** CMMN repository API error path: an unknown id must yield 404 with the standard error body. */
    @Test
    public void testCmmnRestApiIntegrationNotFound() {
        String processDefinitionsUrl = "http://localhost:" + serverPort + "/cmmn-api/cmmn-repository/case-definitions/does-not-exist";

        ResponseEntity<String> response = restTemplate.getForEntity(processDefinitionsUrl, String.class);

        // Compare the error payload structurally (field order / whitespace insensitive).
        BasicJsonTester jsonTester = new BasicJsonTester(getClass());
        assertThat(jsonTester.from(response.getBody())).isEqualToJson("{"
            + "\"message\": \"Not found\","
            + "\"exception\": \"no deployed case definition found with id 'does-not-exist'\""
            + "}");
        assertThat(response.getStatusCode())
            .as("Status code")
            .isEqualTo(HttpStatus.NOT_FOUND);
    }

    /** Content API: reachable, but no content items exist in a fresh application. */
    @Test
    public void testContentRestApiIntegration() {
        String processDefinitionsUrl = "http://localhost:" + serverPort + "/content-api/content-service/content-items";

        ResponseEntity<DataResponse<ContentItemResponse>> response = restTemplate
            .exchange(processDefinitionsUrl, HttpMethod.GET, null, new ParameterizedTypeReference<DataResponse<ContentItemResponse>>() {

            });

        assertThat(response.getStatusCode())
            .as("Status code")
            .isEqualTo(HttpStatus.OK);
        DataResponse<ContentItemResponse> contentItems = response.getBody();
        assertThat(contentItems).isNotNull();
        assertThat(contentItems.getData())
            .isEmpty();
        assertThat(contentItems.getTotal()).isEqualTo(0);
    }

    /** DMN API: reachable, but no DMN deployments exist in a fresh application. */
    @Test
    public void testDmnRestApiIntegration() {
        String processDefinitionsUrl = "http://localhost:" + serverPort + "/dmn-api/dmn-repository/deployments";

        ResponseEntity<DataResponse<DmnDeploymentResponse>> response = restTemplate
            .exchange(processDefinitionsUrl, HttpMethod.GET, null, new ParameterizedTypeReference<DataResponse<DmnDeploymentResponse>>() {

            });

        assertThat(response.getStatusCode())
            .as("Status code")
            .isEqualTo(HttpStatus.OK);
        DataResponse<DmnDeploymentResponse> deployments = response.getBody();
        assertThat(deployments).isNotNull();
        assertThat(deployments.getData())
            .isEmpty();
        assertThat(deployments.getTotal()).isEqualTo(0);
    }

    /** Form API: reachable, but no form definitions exist in a fresh application. */
    @Test
    public void testFormRestApiIntegration() {
        String processDefinitionsUrl = "http://localhost:" + serverPort + "/form-api/form-repository/form-definitions";

        ResponseEntity<DataResponse<FormDefinitionResponse>> response = restTemplate
            .exchange(processDefinitionsUrl, HttpMethod.GET, null, new ParameterizedTypeReference<DataResponse<FormDefinitionResponse>>() {

            });

        assertThat(response.getStatusCode())
            .as("Status code")
            .isEqualTo(HttpStatus.OK);
        DataResponse<FormDefinitionResponse> formDefinitions = response.getBody();
        assertThat(formDefinitions).isNotNull();
        assertThat(formDefinitions.getData())
            .isEmpty();
        assertThat(formDefinitions.getTotal()).isEqualTo(0);
    }

    /** IDM API: expects the single bootstrapped "user" security-role group. */
    @Test
    public void testIdmRestApiIntegration() {
        String processDefinitionsUrl = "http://localhost:" + serverPort + "/idm-api/groups";

        ResponseEntity<DataResponse<GroupResponse>> response = restTemplate
            .exchange(processDefinitionsUrl, HttpMethod.GET, null, new ParameterizedTypeReference<DataResponse<GroupResponse>>() {

            });

        assertThat(response.getStatusCode())
            .as("Status code")
            .isEqualTo(HttpStatus.OK);
        DataResponse<GroupResponse> groups = response.getBody();
        assertThat(groups).isNotNull();
        assertThat(groups.getData())
            .extracting(GroupResponse::getId, GroupResponse::getType, GroupResponse::getName, GroupResponse::getUrl)
            .containsExactlyInAnyOrder(
                tuple("user", "security-role", "users", null)
            );
        assertThat(groups.getTotal()).isEqualTo(1);
    }
}
/*
 * Copyright 2011 Google Inc.
 * Copyright 2014 Andreas Schildbach
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.bitcoinj.core;

import com.google.common.base.Objects;
import org.bitcoinj.script.*;
import org.bitcoinj.wallet.Wallet;
import org.slf4j.*;

import javax.annotation.*;
import java.io.*;
import java.util.*;

import static com.google.common.base.Preconditions.*;

/**
 * <p>A TransactionOutput message contains a scriptPubKey that controls who is able to spend its value. It is a sub-part
 * of the Transaction message.</p>
 *
 * <p>Instances of this class are not safe for use by multiple threads.</p>
 */
public class TransactionOutput extends ChildMessage {
    private static final Logger log = LoggerFactory.getLogger(TransactionOutput.class);

    // The output's value is kept as a native type in order to save class instances.
    private long value;

    // A transaction output has a script used for authenticating that the redeemer is allowed to spend
    // this output.
    private byte[] scriptBytes;

    // The script bytes are parsed and turned into a Script on demand.
    private Script scriptPubKey;

    // These fields are not Bitcoin serialized. They are used for tracking purposes in our wallet
    // only. If set to true, this output is counted towards our balance. If false and spentBy is null the tx output
    // was owned by us and was sent to somebody else. If false and spentBy is set it means this output was owned by
    // us and used in one of our own transactions (eg, because it is a change output).
    private boolean availableForSpending;
    @Nullable private TransactionInput spentBy;

    private int scriptLen;

    /**
     * Deserializes a transaction output message. This is usually part of a transaction message.
     */
    public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, byte[] payload,
                             int offset) throws ProtocolException {
        super(params, payload, offset);
        setParent(parent);
        availableForSpending = true;
    }

    /**
     * Deserializes a transaction output message. This is usually part of a transaction message.
     *
     * @param params NetworkParameters object.
     * @param payload Bitcoin protocol formatted byte array containing message content.
     * @param offset The location of the first payload byte within the array.
     * @param serializer the serializer to use for this message.
     * @throws ProtocolException
     */
    public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, byte[] payload, int offset, MessageSerializer serializer) throws ProtocolException {
        super(params, payload, offset, parent, serializer, UNKNOWN_LENGTH);
        availableForSpending = true;
    }

    /**
     * Creates an output that sends 'value' to the given address (public key hash). The amount should be created with
     * something like {@link Coin#valueOf(int, int)}. Typically you would use
     * {@link Transaction#addOutput(Coin, Address)} instead of creating a TransactionOutput directly.
     */
    public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, Coin value, Address to) {
        this(params, parent, value, ScriptBuilder.createOutputScript(to).getProgram());
    }

    /**
     * Creates an output that sends 'value' to the given public key using a simple CHECKSIG script (no addresses). The
     * amount should be created with something like {@link Coin#valueOf(int, int)}. Typically you would use
     * {@link Transaction#addOutput(Coin, ECKey)} instead of creating an output directly.
     */
    public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, Coin value, ECKey to) {
        this(params, parent, value, ScriptBuilder.createOutputScript(to).getProgram());
    }

    public TransactionOutput(NetworkParameters params, @Nullable Transaction parent, Coin value, byte[] scriptBytes) {
        super(params);
        // Negative values obviously make no sense, except for -1 which is used as a sentinel value when calculating
        // SIGHASH_SINGLE signatures, so unfortunately we have to allow that here.
        checkArgument(value.signum() >= 0 || value.equals(Coin.NEGATIVE_SATOSHI), "Negative values not allowed");
        checkArgument(!params.hasMaxMoney() || value.compareTo(params.getMaxMoney()) <= 0, "Values larger than MAX_MONEY not allowed");
        this.value = value.value;
        this.scriptBytes = scriptBytes;
        setParent(parent);
        availableForSpending = true;
        // 8 bytes value + varint script length + the script itself.
        length = 8 + VarInt.sizeOf(scriptBytes.length) + scriptBytes.length;
    }

    /** Returns the script controlling who may spend this output, parsing it lazily on first access. */
    public Script getScriptPubKey() throws ScriptException {
        if (scriptPubKey == null) {
            scriptPubKey = new Script(scriptBytes);
        }
        return scriptPubKey;
    }

    /**
     * <p>If the output script pays to an address as in <a href="https://bitcoin.org/en/developer-guide#term-p2pkh">
     * P2PKH</a>, return the address of the receiver, i.e., a base58 encoded hash of the public key in the script.</p>
     *
     * @param networkParameters needed to specify an address
     * @return null, if the output script is not the form <i>OP_DUP OP_HASH160 &lt;PubkeyHash&gt; OP_EQUALVERIFY OP_CHECKSIG</i>,
     * i.e., not P2PKH
     * @return an address made out of the public key hash
     */
    @Nullable
    public Address getAddressFromP2PKHScript(NetworkParameters networkParameters) throws ScriptException {
        if (getScriptPubKey().isSentToAddress())
            return getScriptPubKey().getToAddress(networkParameters);

        return null;
    }

    /**
     * <p>If the output script pays to a redeem script, return the address of the redeem script as described by,
     * i.e., a base58 encoding of [one-byte version][20-byte hash][4-byte checksum], where the 20-byte hash refers to
     * the redeem script.</p>
     *
     * <p>P2SH is described by <a href="https://github.com/bitcoin/bips/blob/master/bip-0016.mediawiki">BIP 16</a> and
     * <a href="https://bitcoin.org/en/developer-guide#p2sh-scripts">documented in the Bitcoin Developer Guide</a>.</p>
     *
     * @param networkParameters needed to specify an address
     * @return null if the output script does not pay to a script hash
     * @return an address that belongs to the redeem script
     */
    @Nullable
    public Address getAddressFromP2SH(NetworkParameters networkParameters) throws ScriptException {
        if (getScriptPubKey().isPayToScriptHash())
            return getScriptPubKey().getToAddress(networkParameters);

        return null;
    }

    @Override
    protected void parse() throws ProtocolException {
        value = readInt64();
        scriptLen = (int) readVarInt();
        length = cursor - offset + scriptLen;
        scriptBytes = readBytes(scriptLen);
    }

    @Override
    protected void bitcoinSerializeToStream(OutputStream stream) throws IOException {
        checkNotNull(scriptBytes);
        Utils.int64ToByteStreamLE(value, stream);
        // TODO: Move script serialization into the Script class, where it belongs.
        stream.write(new VarInt(scriptBytes.length).encode());
        stream.write(scriptBytes);
    }

    /**
     * Returns the value of this output. This is the amount of currency that the destination address
     * receives.
     */
    public Coin getValue() {
        try {
            return Coin.valueOf(value);
        } catch (IllegalArgumentException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
    }

    /**
     * Sets the value of this output.
     */
    public void setValue(Coin value) {
        checkNotNull(value);
        unCache();
        this.value = value.value;
    }

    /**
     * Gets the index of this output in the parent transaction, or throws if this output is free standing. Iterates
     * over the parents list to discover this.
     */
    public int getIndex() {
        List<TransactionOutput> outputs = getParentTransaction().getOutputs();
        for (int i = 0; i < outputs.size(); i++) {
            if (outputs.get(i) == this)
                return i;
        }
        throw new IllegalStateException("Output linked to wrong parent transaction?");
    }

    /**
     * Will this transaction be relayable and mined by default miners?
     */
    public boolean isDust() {
        // Transactions that are OP_RETURN can't be dust regardless of their value.
        if (getScriptPubKey().isOpReturn())
            return false;
        return getValue().isLessThan(getMinNonDustValue());
    }

    /**
     * <p>Gets the minimum value for a txout of this size to be considered non-dust by Bitcoin Core
     * (and thus relayed). See: CTxOut::IsDust() in Bitcoin Core. The assumption is that any output that would
     * consume more than a third of its value in fees is not something the Bitcoin system wants to deal with right now,
     * so we call them "dust outputs" and they're made non standard. The choice of one third is somewhat arbitrary and
     * may change in future.</p>
     *
     * <p>You probably should use {@link org.bitcoinj.core.TransactionOutput#getMinNonDustValue()} which uses
     * a safe fee-per-kb by default.</p>
     *
     * @param feePerKb The fee required per kilobyte. Note that this is the same as Bitcoin Core's -minrelaytxfee * 3
     */
    public Coin getMinNonDustValue(Coin feePerKb) {
        // A typical output is 33 bytes (pubkey hash + opcodes) and requires an input of 148 bytes to spend so we add
        // that together to find out the total amount of data used to transfer this amount of value. Note that this
        // formula is wrong for anything that's not a pay-to-address output, unfortunately, we must follow Bitcoin Core's
        // wrongness in order to ensure we're considered standard. A better formula would either estimate the
        // size of data needed to satisfy all different script types, or just hard code 33 below.
        final long size = this.unsafeBitcoinSerialize().length + 148;
        return feePerKb.multiply(size).divide(1000);
    }

    /**
     * Returns the minimum value for this output to be considered "not dust", i.e. the transaction will be relayable
     * and mined by default miners. For normal pay to address outputs, this is 2730 satoshis, the same as
     * {@link Transaction#MIN_NONDUST_OUTPUT}.
     */
    public Coin getMinNonDustValue() {
        return getMinNonDustValue(Transaction.REFERENCE_DEFAULT_MIN_TX_FEE.multiply(3));
    }

    /**
     * Sets this objects availableForSpending flag to false and the spentBy pointer to the given input.
     * If the input is null, it means this output was signed over to somebody else rather than one of our own keys.
     * @throws IllegalStateException if the transaction was already marked as spent.
     */
    public void markAsSpent(TransactionInput input) {
        checkState(availableForSpending);
        availableForSpending = false;
        spentBy = input;
        // BUG FIX: the previous un-braced form suffered from the dangling-else problem — the
        // "else" bound to the inner "if (log.isDebugEnabled())", so the floating-output message
        // was unreachable and nothing was logged when parent == null. Braces restore the intent.
        if (parent != null) {
            if (log.isDebugEnabled()) log.debug("Marked {}:{} as spent by {}", getParentTransactionHash(), getIndex(), input);
        } else {
            if (log.isDebugEnabled()) log.debug("Marked floating output as spent by {}", input);
        }
    }

    /**
     * Resets the spent pointer / availableForSpending flag to null.
     */
    public void markAsUnspent() {
        // BUG FIX: same dangling-else problem as markAsSpent(); see the comment there.
        if (parent != null) {
            if (log.isDebugEnabled()) log.debug("Un-marked {}:{} as spent by {}", getParentTransactionHash(), getIndex(), spentBy);
        } else {
            if (log.isDebugEnabled()) log.debug("Un-marked floating output as spent by {}", spentBy);
        }
        availableForSpending = true;
        spentBy = null;
    }

    /**
     * Returns whether {@link TransactionOutput#markAsSpent(TransactionInput)} has been called on this class. A
     * {@link Wallet} will mark a transaction output as spent once it sees a transaction input that is connected to it.
     * Note that this flag can be false when an output has in fact been spent according to the rest of the network if
     * the spending transaction wasn't downloaded yet, and it can be marked as spent when in reality the rest of the
     * network believes it to be unspent if the signature or script connecting to it was not actually valid.
     */
    public boolean isAvailableForSpending() {
        return availableForSpending;
    }

    /**
     * The backing script bytes which can be turned into a Script object.
     * @return the scriptBytes
     */
    public byte[] getScriptBytes() {
        return scriptBytes;
    }

    /**
     * Returns true if this output is to a key in the wallet or to an address/script we are watching.
     */
    public boolean isMineOrWatched(TransactionBag transactionBag) {
        return isMine(transactionBag) || isWatched(transactionBag);
    }

    /**
     * Returns true if this output is to an address or script we are watching in the given bag.
     */
    public boolean isWatched(TransactionBag transactionBag) {
        try {
            Script script = getScriptPubKey();
            return transactionBag.isWatchedScript(script);
        } catch (ScriptException e) {
            // Just means we didn't understand the output of this transaction: ignore it.
            log.debug("Could not parse tx output script: {}", e.toString());
            return false;
        }
    }

    /**
     * Returns true if this output is to a key, or an address we have the keys for, in the wallet.
     */
    public boolean isMine(TransactionBag transactionBag) {
        try {
            Script script = getScriptPubKey();
            if (script.isSentToRawPubKey()) {
                byte[] pubkey = script.getPubKey();
                return transactionBag.isPubKeyMine(pubkey);
            } if (script.isPayToScriptHash()) {
                return transactionBag.isPayToScriptHashMine(script.getPubKeyHash());
            } else {
                byte[] pubkeyHash = script.getPubKeyHash();
                return transactionBag.isPubKeyHashMine(pubkeyHash);
            }
        } catch (ScriptException e) {
            // Just means we didn't understand the output of this transaction: ignore it.
            log.debug("Could not parse tx {} output script: {}",
                    parent != null ? parent.getHash() : "(no parent)", e.toString());
            return false;
        }
    }

    /**
     * Returns a human readable debug string.
     */
    @Override
    public String toString() {
        try {
            Script script = getScriptPubKey();
            StringBuilder buf = new StringBuilder("TxOut of ");
            buf.append(Coin.valueOf(value).toFriendlyString());
            if (script.isSentToAddress() || script.isPayToScriptHash())
                buf.append(" to ").append(script.getToAddress(params));
            else if (script.isSentToRawPubKey())
                buf.append(" to pubkey ").append(Utils.HEX.encode(script.getPubKey()));
            else if (script.isSentToMultiSig())
                buf.append(" to multisig");
            else
                buf.append(" (unknown type)");
            buf.append(" script:").append(script);
            return buf.toString();
        } catch (ScriptException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the connected input.
     */
    @Nullable
    public TransactionInput getSpentBy() {
        return spentBy;
    }

    /**
     * Returns the transaction that owns this output.
     */
    @Nullable
    public Transaction getParentTransaction() {
        return (Transaction) parent;
    }

    /**
     * Returns the transaction hash that owns this output.
     */
    @Nullable
    public Sha256Hash getParentTransactionHash() {
        return parent == null ? null : parent.getHash();
    }

    /**
     * Returns the depth in blocks of the parent tx.
     *
     * <p>If the transaction appears in the top block, the depth is one. If it's anything else (pending, dead, unknown)
     * then -1.</p>
     * @return The tx depth or -1.
     */
    public int getParentTransactionDepthInBlocks() {
        if (getParentTransaction() != null) {
            TransactionConfidence confidence = getParentTransaction().getConfidence();
            if (confidence.getConfidenceType() == TransactionConfidence.ConfidenceType.BUILDING) {
                return confidence.getDepthInBlocks();
            }
        }
        return -1;
    }

    /**
     * Returns a new {@link TransactionOutPoint}, which is essentially a structure pointing to this output.
     * Requires that this output is not detached.
     */
    public TransactionOutPoint getOutPointFor() {
        return new TransactionOutPoint(params, getIndex(), getParentTransaction());
    }

    /** Returns a copy of the output detached from its containing transaction, if need be. */
    public TransactionOutput duplicateDetached() {
        return new TransactionOutput(params, null, Coin.valueOf(value), org.bouncycastle.util.Arrays.clone(scriptBytes));
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        TransactionOutput other = (TransactionOutput) o;
        return value == other.value && (parent == null || (parent == other.parent && getIndex() == other.getIndex()))
                && Arrays.equals(scriptBytes, other.scriptBytes);
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(value, parent, Arrays.hashCode(scriptBytes));
    }

    /** Returns true if this output's script is an OP_RETURN (data-carrier) script. */
    public boolean isOpReturn() {
        return getScriptPubKey() != null && getScriptPubKey().isOpReturn();
    }

    /**
     * Returns the data payload of an OP_RETURN output, or null if this output is not an OP_RETURN
     * or carries no data push.
     */
    @Nullable
    public byte[] getOpReturnData() {
        if (isOpReturn()) {
            List<ScriptChunk> chunks = getScriptPubKey().getChunks();
            // ROBUSTNESS FIX: a bare "OP_RETURN" script has a single chunk; the original
            // chunks.get(1) would throw IndexOutOfBoundsException instead of signalling
            // "no data" like the rest of this method's contract does.
            if (chunks.size() > 1) {
                return chunks.get(1).data;
            }
        }
        return null;
    }
}
package com.yinglan.scg.orderreceiving; import android.content.Intent; import android.view.View; import android.widget.LinearLayout; import android.widget.TextView; import com.common.cklibrary.common.BaseActivity; import com.common.cklibrary.common.BindView; import com.common.cklibrary.common.ViewInject; import com.common.cklibrary.utils.ActivityTitleUtils; import com.common.cklibrary.utils.DataUtil; import com.common.cklibrary.utils.JsonUtil; import com.common.cklibrary.utils.myview.WebViewLayout; import com.common.cklibrary.utils.rx.MsgEvent; import com.common.cklibrary.utils.rx.RxBus; import com.kymjs.common.StringUtils; import com.yinglan.scg.R; import com.yinglan.scg.entity.orderreceiving.CharterDetailsBean; import com.yinglan.scg.entity.orderreceiving.TransferDetailsBean.DataBean.ModelListBean; import com.yinglan.scg.loginregister.LoginActivity; import com.yinglan.scg.orderreceiving.dialog.OrderReceivingDialog; import com.yinglan.scg.orderreceiving.dialog.SelectVehicleDialog; import com.yinglan.scg.orderreceiving.dialog.UnwillingnessTakeOrdersDialog; import java.util.List; import cn.bingoogolapple.titlebar.BGATitleBar; /** * 包车订单详情 */ public class CharterDetailsActivity extends BaseActivity implements CharterDetailsContract.View { @BindView(id = R.id.tv_title) private TextView tv_title; @BindView(id = R.id.tv_orderPrice) private TextView tv_orderPrice; @BindView(id = R.id.tv_demand) private TextView tv_demand; @BindView(id = R.id.tv_time) private TextView tv_time; @BindView(id = R.id.tv_serviceTime) private TextView tv_serviceTime; @BindView(id = R.id.tv_placeDeparture) private TextView tv_placeDeparture; @BindView(id = R.id.tv_deliveredAirport) private TextView tv_deliveredAirport; @BindView(id = R.id.tv_reserveRequirements) private TextView tv_reserveRequirements; @BindView(id = R.id.tv_orderNumber) private TextView tv_orderNumber; @BindView(id = R.id.web_dueThat) private WebViewLayout web_dueThat; // @BindView(id = R.id.clv_income) // private ChildListView 
clv_income; @BindView(id = R.id.tv_orderIncome) private TextView tv_orderIncome; @BindView(id = R.id.tv_aggregate) private TextView tv_aggregate; @BindView(id = R.id.web_descriptionThat) private WebViewLayout web_descriptionThat; @BindView(id = R.id.tv_licensePlateNumber) private TextView tv_licensePlateNumber; @BindView(id = R.id.tv_models) private TextView tv_models; @BindView(id = R.id.tv_selectVehicle, click = true) private TextView tv_selectVehicle; @BindView(id = R.id.tv_quickOrder, click = true) private TextView tv_quickOrder; @BindView(id = R.id.tv_endTheOrder) private TextView tv_endTheOrder; @BindView(id = R.id.ll_bottom) private LinearLayout ll_bottom; private String order_number; private int model_id = 0; private SelectVehicleDialog selectVehicleDialog = null; private UnwillingnessTakeOrdersDialog unwillingnessTakeOrdersDialog = null; private OrderReceivingDialog orderReceivingDialog = null; @Override public void setRootView() { setContentView(R.layout.activity_charterdetails); } @Override public void initData() { super.initData(); mPresenter = new CharterDetailsPresenter(this); order_number = getIntent().getStringExtra("order_number"); initDialog(); initDialog1(); showLoadingDialog(getString(R.string.dataLoad)); ((CharterDetailsContract.Presenter) mPresenter).getTravelOrderDetails(order_number); } private void initDialog1() { orderReceivingDialog = new OrderReceivingDialog(this) { @Override public void toDetermine() { quickOrder(); } }; } private void quickOrder() { showLoadingDialog(getString(R.string.submissionLoad)); ((CharterDetailsContract.Presenter) mPresenter).postGuideSubmitOrder(model_id, order_number); } private void initDialog() { unwillingnessTakeOrdersDialog = new UnwillingnessTakeOrdersDialog(this, order_number); } @Override public void initWidget() { super.initWidget(); BGATitleBar.SimpleDelegate simpleDelegate = new BGATitleBar.SimpleDelegate() { @Override public void onClickLeftCtv() { super.onClickLeftCtv(); if 
(getIntent().getIntExtra("type", 0) == 1) { finish(); return; } if (unwillingnessTakeOrdersDialog == null) { initDialog(); } if (unwillingnessTakeOrdersDialog != null && !unwillingnessTakeOrdersDialog.isShowing()) { unwillingnessTakeOrdersDialog.show(); } } }; ActivityTitleUtils.initToolbar(aty, "", "", R.id.titlebar, simpleDelegate); web_dueThat.setTitleVisibility(false); web_dueThat.getWebView().setLayoutParams(new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT)); web_descriptionThat.setTitleVisibility(false); web_descriptionThat.getWebView().setLayoutParams(new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT)); tv_quickOrder.setVisibility(View.VISIBLE); tv_endTheOrder.setVisibility(View.GONE); if (getIntent().getIntExtra("type", 0) == 1) { ll_bottom.setVisibility(View.GONE); } } @Override public void widgetClick(View v) { super.widgetClick(v); switch (v.getId()) { case R.id.tv_selectVehicle: selectVehicleDialog.show(); break; case R.id.tv_quickOrder: if (orderReceivingDialog == null) { initDialog(); } if (orderReceivingDialog != null && !orderReceivingDialog.isShowing()) { orderReceivingDialog.show(); } break; } } @Override public void setPresenter(CharterDetailsContract.Presenter presenter) { mPresenter = presenter; } @Override public void getSuccess(String success, int flag) { dismissLoadingDialog(); if (flag == 0) { CharterDetailsBean charterDetailsBean = (CharterDetailsBean) JsonUtil.getInstance().json2Obj(success, CharterDetailsBean.class); if (charterDetailsBean == null || charterDetailsBean.getData() == null) { errorMsg(getString(R.string.serverError), 0); return; } tv_title.setText(charterDetailsBean.getData().getTitle()); tv_orderPrice.setText(getString(R.string.renminbi) + charterDetailsBean.getData().getOrder_price()); tv_demand.setText(charterDetailsBean.getData().getSubtitle()); 
tv_time.setText(DataUtil.formatData(StringUtils.toLong(charterDetailsBean.getData().getStart_time()), "yyyy-MM-dd E")); tv_serviceTime.setText(DataUtil.formatData(StringUtils.toLong(charterDetailsBean.getData().getStart_time()), "yyyy-MM-dd E")); tv_placeDeparture.setText(charterDetailsBean.getData().getOrigin_name()); tv_deliveredAirport.setText(charterDetailsBean.getData().getDestination_name()); tv_reserveRequirements.setText(charterDetailsBean.getData().getBooking_request()); tv_orderNumber.setText(charterDetailsBean.getData().getOrder_number()); tv_orderIncome.setText(getString(R.string.rmb) + " " + charterDetailsBean.getData().getOrder_price()); tv_aggregate.setText(getString(R.string.rmb) + " " + charterDetailsBean.getData().getOrder_price()); if (StringUtils.isEmpty(charterDetailsBean.getData().getBook_comment())) { charterDetailsBean.getData().setBook_comment(getString(R.string.dueThat1)); } String book_comment = "<!DOCTYPE html><html lang=\"zh\"><head>\t<meta charset=\"UTF-8\"/><meta name=\"viewport\" content=\"width=device-width,initial-scale=1,minimum-scale=1,maximum-scale=1,user-scalable=no\" /><title></title></head><body>" + charterDetailsBean.getData().getBook_comment() + "</body></html>"; web_dueThat.loadDataWithBaseURL("", book_comment, "text/html", "utf-8", null); if (StringUtils.isEmpty(charterDetailsBean.getData().getPrice_comment())) { charterDetailsBean.getData().setPrice_comment(getString(R.string.descriptionThat1)); } String price_description = "<!DOCTYPE html><html lang=\"zh\"><head>\t<meta charset=\"UTF-8\"/><meta name=\"viewport\" content=\"width=device-width,initial-scale=1,minimum-scale=1,maximum-scale=1,user-scalable=no\" /><title></title></head><body>" + charterDetailsBean.getData().getPrice_comment() + "</body></html>"; web_descriptionThat.loadDataWithBaseURL("", price_description, "text/html", "utf-8", null); if (charterDetailsBean.getData() != null && charterDetailsBean.getData().getModel_list() != null && 
charterDetailsBean.getData().getModel_list().size() == 1) { tv_licensePlateNumber.setText(charterDetailsBean.getData().getModel_list().get(0).getLicense_plate()); tv_models.setText(charterDetailsBean.getData().getModel_list().get(0).getModel_name()); model_id = charterDetailsBean.getData().getModel_list().get(0).getId(); tv_selectVehicle.setVisibility(View.GONE); } if (charterDetailsBean.getData() != null && charterDetailsBean.getData().getModel_list() != null && charterDetailsBean.getData().getModel_list().size() > 1) { tv_selectVehicle.setVisibility(View.VISIBLE); setDialog(charterDetailsBean.getData().getModel_list()); } } else if (flag == 1) { /** * 发送消息 */ RxBus.getInstance().post(new MsgEvent<String>("RxBusOrderReceivingEvent")); ViewInject.toast(getString(R.string.orderReceivedSuccessfully)); Intent intent = new Intent(); // 获取内容 setResult(RESULT_OK, intent); finish(); } } private void setDialog(List<ModelListBean> list) { for (int i = 0; i < list.size(); i++) { if (list.get(i).getIs_default() == 1) { tv_licensePlateNumber.setText(list.get(i).getLicense_plate()); tv_models.setText(list.get(i).getModel_name()); model_id = list.get(i).getId(); break; } } selectVehicleDialog = new SelectVehicleDialog(this, list) { @Override public void getModel(ModelListBean bean) { model_id = bean.getId(); tv_licensePlateNumber.setText(bean.getLicense_plate()); tv_models.setText(bean.getModel_name()); } }; // selectVehicleDialog.setList(list); } @Override public void errorMsg(String msg, int flag) { dismissLoadingDialog(); if (isLogin(msg)) { showActivity(aty, LoginActivity.class); if (flag == 0) { finish(); } return; } ViewInject.toast(msg); } @Override protected void onDestroy() { super.onDestroy(); if (selectVehicleDialog != null) { selectVehicleDialog.cancel(); } selectVehicleDialog = null; if (unwillingnessTakeOrdersDialog != null) { unwillingnessTakeOrdersDialog.cancel(); } unwillingnessTakeOrdersDialog = null; if (orderReceivingDialog != null) { 
orderReceivingDialog.cancel(); } orderReceivingDialog = null; } }
package com.chenenyu.router.chain; import android.content.Intent; import android.support.annotation.NonNull; import com.chenenyu.router.AptHub; import com.chenenyu.router.MatcherRegistry; import com.chenenyu.router.RealInterceptorChain; import com.chenenyu.router.RouteInterceptor; import com.chenenyu.router.RouteRequest; import com.chenenyu.router.RouteResponse; import com.chenenyu.router.RouteStatus; import com.chenenyu.router.matcher.AbsImplicitMatcher; import com.chenenyu.router.matcher.AbsMatcher; import com.chenenyu.router.util.RLog; import java.util.List; import java.util.Map; import java.util.Set; /** * Created by chenenyu on 2018/6/15. */ public class IntentProcessor implements RouteInterceptor { @NonNull @Override public RouteResponse intercept(Chain chain) { RealInterceptorChain realChain = (RealInterceptorChain) chain; RouteRequest request = chain.getRequest(); List<AbsMatcher> matcherList = MatcherRegistry.getMatcher(); List<AbsImplicitMatcher> implicitMatcherList = MatcherRegistry.getImplicitMatcher(); Set<Map.Entry<String, Class<?>>> entries = AptHub.routeTable.entrySet(); boolean hasIntent = false; if (AptHub.routeTable.isEmpty()) { for (AbsImplicitMatcher implicitMatcher : implicitMatcherList) { if (implicitMatcher.match(chain.getContext(), request.getUri(), null, request)) { RLog.i(String.format("{uri=%s, matcher=%s}", chain.getRequest().getUri(), implicitMatcher.getClass().getCanonicalName())); realChain.setTargetClass(null); Object result = implicitMatcher.generate(chain.getContext(), request.getUri(), null); if (result instanceof Intent) { hasIntent = true; realChain.setTargetObject(result); } else { return RouteResponse.assemble(RouteStatus.FAILED, String.format( "The matcher can't generate an intent for uri: %s", request.getUri().toString())); } break; } } } else { MATCHER: for (AbsMatcher matcher : matcherList) { boolean isImplicit = matcher instanceof AbsImplicitMatcher; if (isImplicit) { if (matcher.match(chain.getContext(), 
request.getUri(), null, request)) { RLog.i(String.format("{uri=%s, matcher=%s}", chain.getRequest().getUri(), matcher.getClass().getCanonicalName())); realChain.setTargetClass(null); Object result = matcher.generate(chain.getContext(), request.getUri(), null); if (result instanceof Intent) { hasIntent = true; realChain.setTargetObject(result); } else { return RouteResponse.assemble(RouteStatus.FAILED, String.format( "The matcher can't generate an intent for uri: %s", request.getUri().toString())); } break; } } else { for (Map.Entry<String, Class<?>> entry : entries) { if (matcher.match(chain.getContext(), request.getUri(), entry.getKey(), request)) { RLog.i(String.format("{uri=%s, matcher=%s}", chain.getRequest().getUri(), matcher.getClass().getCanonicalName())); realChain.setTargetClass(entry.getValue()); Object result = matcher.generate(chain.getContext(), request.getUri(), entry.getValue()); if (result instanceof Intent) { hasIntent = true; realChain.setTargetObject(result); } else { return RouteResponse.assemble(RouteStatus.FAILED, String.format( "The matcher can't generate an intent for uri: %s", request.getUri().toString())); } break MATCHER; } } } } } if (!hasIntent) { return RouteResponse.assemble(RouteStatus.NOT_FOUND, String.format( "Can't find an activity that matches the given uri: %s", request.getUri().toString())); } return chain.process(); } }
package ohi.andre.consolelauncher.platform; import android.content.Context; import androidx.test.platform.app.InstrumentationRegistry; import androidx.test.ext.junit.runners.AndroidJUnit4; import org.junit.Test; import org.junit.runner.RunWith; import static org.junit.Assert.*; /** * Instrumented test, which will execute on an Android device. * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ @RunWith(AndroidJUnit4.class) public class ExampleInstrumentedTest { @Test public void useAppContext() { // Context of the app under test. Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext(); assertEquals("ohi.andre.consolelauncher.platform.test", appContext.getPackageName()); } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.indices.store;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
import org.elasticsearch.action.support.nodes.BaseNodesRequest;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.action.support.nodes.TransportNodesAction;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.gateway.AsyncShardFetch;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.store.IndexStoreModule;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.StoreFileMetaData;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReferenceArray;

/**
 * Nodes-level transport action that lists the on-disk store metadata (file names,
 * checksums, sync id) of a single shard on a set of nodes. Used by the gateway
 * allocator / shard-fetch machinery to decide where shard copies already exist.
 */
public class TransportNodesListShardStoreMetaData extends TransportNodesAction<TransportNodesListShardStoreMetaData.Request, TransportNodesListShardStoreMetaData.NodesStoreFilesMetaData, TransportNodesListShardStoreMetaData.NodeRequest, TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData>
        implements AsyncShardFetch.List<TransportNodesListShardStoreMetaData.NodesStoreFilesMetaData, TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> {

    public static final String ACTION_NAME = "internal:cluster/nodes/indices/shard/store";

    private final IndicesService indicesService;

    private final NodeEnvironment nodeEnv;

    @Inject
    public TransportNodesListShardStoreMetaData(Settings settings, ClusterName clusterName, ThreadPool threadPool,
                                                ClusterService clusterService, TransportService transportService,
                                                IndicesService indicesService, NodeEnvironment nodeEnv, ActionFilters actionFilters,
                                                IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, ACTION_NAME, clusterName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
                Request.class, NodeRequest.class, ThreadPool.Names.FETCH_SHARD_STORE);
        this.indicesService = indicesService;
        this.nodeEnv = nodeEnv;
    }

    /**
     * Entry point used by {@link AsyncShardFetch}: fetches store metadata for
     * {@code shardId} from the given nodes and reports via {@code listener}.
     */
    @Override
    public void list(ShardId shardId, IndexMetaData indexMetaData, String[] nodesIds, ActionListener<NodesStoreFilesMetaData> listener) {
        execute(new Request(shardId, false, nodesIds), listener);
    }

    @Override
    protected String[] resolveNodes(Request request, ClusterState clusterState) {
        // default implementation may filter out non existent nodes. it's important to keep exactly the ids
        // we were given for accounting on the caller
        return request.nodesIds();
    }

    @Override
    protected NodeRequest newNodeRequest(String nodeId, Request request) {
        return new NodeRequest(nodeId, request);
    }

    @Override
    protected NodeStoreFilesMetaData newNodeResponse() {
        return new NodeStoreFilesMetaData();
    }

    /**
     * Aggregates the per-node replies (or failures) into a single
     * {@link NodesStoreFilesMetaData} response.
     */
    @Override
    protected NodesStoreFilesMetaData newResponse(Request request, AtomicReferenceArray responses) {
        final List<NodeStoreFilesMetaData> nodeStoreFilesMetaDatas = new ArrayList<>();
        final List<FailedNodeException> failures = new ArrayList<>();
        for (int i = 0; i < responses.length(); i++) {
            Object resp = responses.get(i);
            if (resp instanceof NodeStoreFilesMetaData) { // will also filter out null response for unallocated ones
                nodeStoreFilesMetaDatas.add((NodeStoreFilesMetaData) resp);
            } else if (resp instanceof FailedNodeException) {
                failures.add((FailedNodeException) resp);
            } else {
                logger.warn("unknown response type [{}], expected NodeStoreFilesMetaData or FailedNodeException", resp);
            }
        }
        return new NodesStoreFilesMetaData(clusterName, nodeStoreFilesMetaDatas.toArray(new NodeStoreFilesMetaData[nodeStoreFilesMetaDatas.size()]),
                failures.toArray(new FailedNodeException[failures.size()]));
    }

    /**
     * Executed on each target node. When the request asks only for unallocated
     * copies, nodes that actively hold the shard answer with a null payload.
     */
    @Override
    protected NodeStoreFilesMetaData nodeOperation(NodeRequest request) {
        if (request.unallocated) {
            // Only report stores that are NOT currently allocated on this node.
            IndexService indexService = indicesService.indexService(request.shardId.index().name());
            if (indexService == null) {
                return new NodeStoreFilesMetaData(clusterService.localNode(), null);
            }
            if (!indexService.hasShard(request.shardId.id())) {
                return new NodeStoreFilesMetaData(clusterService.localNode(), null);
            }
        }
        IndexMetaData metaData = clusterService.state().metaData().index(request.shardId.index().name());
        if (metaData == null) {
            // Index no longer exists in the cluster state: nothing to report.
            return new NodeStoreFilesMetaData(clusterService.localNode(), null);
        }
        try {
            return new NodeStoreFilesMetaData(clusterService.localNode(), listStoreMetaData(request.shardId));
        } catch (IOException e) {
            throw new ElasticsearchException("Failed to list store metadata for shard [" + request.shardId + "]", e);
        }
    }

    /**
     * Reads the shard's store metadata, preferring the live (allocated) shard's
     * in-memory snapshot and falling back to reading the files on disk for an
     * unallocated copy.
     */
    private StoreFilesMetaData listStoreMetaData(ShardId shardId) throws IOException {
        logger.trace("listing store meta data for {}", shardId);
        long startTimeNS = System.nanoTime();
        boolean exists = false;
        try {
            IndexService indexService = indicesService.indexService(shardId.index().name());
            if (indexService != null) {
                IndexShard indexShard = indexService.shard(shardId.id());
                if (indexShard != null) {
                    // Live shard: incRef keeps the store from being closed while we snapshot it.
                    final Store store = indexShard.store();
                    store.incRef();
                    try {
                        exists = true;
                        return new StoreFilesMetaData(true, shardId, store.getMetadataOrEmpty());
                    } finally {
                        store.decRef();
                    }
                }
            }
            // try and see if we can list an unallocated copy straight from disk
            IndexMetaData metaData = clusterService.state().metaData().index(shardId.index().name());
            if (metaData == null) {
                return new StoreFilesMetaData(false, shardId, Store.MetadataSnapshot.EMPTY);
            }
            String storeType = metaData.getSettings().get(IndexStoreModule.STORE_TYPE, "fs");
            if (!storeType.contains("fs")) {
                // Non-filesystem stores can't be inspected on disk.
                return new StoreFilesMetaData(false, shardId, Store.MetadataSnapshot.EMPTY);
            }
            final ShardPath shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, metaData.getSettings());
            if (shardPath == null) {
                return new StoreFilesMetaData(false, shardId, Store.MetadataSnapshot.EMPTY);
            }
            return new StoreFilesMetaData(false, shardId, Store.readMetadataSnapshot(shardPath.resolveIndex(), logger));
        } finally {
            TimeValue took = new TimeValue(System.nanoTime() - startTimeNS, TimeUnit.NANOSECONDS);
            if (exists) {
                logger.debug("{} loaded store meta data (took [{}])", shardId, took);
            } else {
                logger.trace("{} didn't find any store meta data to load (took [{}])", shardId, took);
            }
        }
    }

    @Override
    protected boolean accumulateExceptions() {
        // Node failures are collected into the response rather than aborting the whole fetch.
        return true;
    }

    /**
     * Per-node payload: whether the shard is allocated on that node plus a
     * snapshot of its store files (names, lengths, checksums, sync id).
     */
    public static class StoreFilesMetaData implements Iterable<StoreFileMetaData>, Streamable {
        // note: the snapshot also transmits the sync id; without it, recovery would not
        // be able to use sync-id based fast recovery via the gateway allocator
        private boolean allocated;
        private ShardId shardId;
        Store.MetadataSnapshot metadataSnapshot;

        StoreFilesMetaData() {
        }

        public StoreFilesMetaData(boolean allocated, ShardId shardId, Store.MetadataSnapshot metadataSnapshot) {
            this.allocated = allocated;
            this.shardId = shardId;
            this.metadataSnapshot = metadataSnapshot;
        }

        public boolean allocated() {
            return allocated;
        }

        public ShardId shardId() {
            return this.shardId;
        }

        @Override
        public Iterator<StoreFileMetaData> iterator() {
            return metadataSnapshot.iterator();
        }

        public boolean fileExists(String name) {
            return metadataSnapshot.asMap().containsKey(name);
        }

        public StoreFileMetaData file(String name) {
            return metadataSnapshot.asMap().get(name);
        }

        public static StoreFilesMetaData readStoreFilesMetaData(StreamInput in) throws IOException {
            StoreFilesMetaData md = new StoreFilesMetaData();
            md.readFrom(in);
            return md;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            // Wire order must mirror writeTo(): allocated, shardId, snapshot.
            allocated = in.readBoolean();
            shardId = ShardId.readShardId(in);
            this.metadataSnapshot = new Store.MetadataSnapshot(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeBoolean(allocated);
            shardId.writeTo(out);
            metadataSnapshot.writeTo(out);
        }

        /**
         * @return commit sync id if exists, else null
         */
        public String syncId() {
            return metadataSnapshot.getSyncId();
        }
    }

    /** Top-level request: which shard to inspect, on which nodes. */
    public static class Request extends BaseNodesRequest<Request> {

        private ShardId shardId;

        // When true, nodes currently holding the shard reply with a null payload.
        private boolean unallocated;

        public Request() {
        }

        public Request(ShardId shardId, boolean unallocated, Set<String> nodesIds) {
            super(nodesIds.toArray(new String[nodesIds.size()]));
            this.shardId = shardId;
            this.unallocated = unallocated;
        }

        public Request(ShardId shardId, boolean unallocated, String... nodesIds) {
            super(nodesIds);
            this.shardId = shardId;
            this.unallocated = unallocated;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            shardId = ShardId.readShardId(in);
            unallocated = in.readBoolean();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            shardId.writeTo(out);
            out.writeBoolean(unallocated);
        }
    }

    /** Aggregated response: one {@link NodeStoreFilesMetaData} per node plus node failures. */
    public static class NodesStoreFilesMetaData extends BaseNodesResponse<NodeStoreFilesMetaData> {

        private FailedNodeException[] failures;

        NodesStoreFilesMetaData() {
        }

        public NodesStoreFilesMetaData(ClusterName clusterName, NodeStoreFilesMetaData[] nodes, FailedNodeException[] failures) {
            super(clusterName, nodes);
            this.failures = failures;
        }

        @Override
        public FailedNodeException[] failures() {
            return failures;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            nodes = new NodeStoreFilesMetaData[in.readVInt()];
            for (int i = 0; i < nodes.length; i++) {
                nodes[i] = NodeStoreFilesMetaData.readListShardStoreNodeOperationResponse(in);
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeVInt(nodes.length);
            for (NodeStoreFilesMetaData response : nodes) {
                response.writeTo(out);
            }
        }
    }

    /** Per-node request, carrying the shard id and the unallocated flag. */
    public static class NodeRequest extends BaseNodeRequest {

        private ShardId shardId;

        private boolean unallocated;

        public NodeRequest() {
        }

        NodeRequest(String nodeId, TransportNodesListShardStoreMetaData.Request request) {
            super(request, nodeId);
            this.shardId = request.shardId;
            this.unallocated = request.unallocated;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            shardId = ShardId.readShardId(in);
            unallocated = in.readBoolean();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            shardId.writeTo(out);
            out.writeBoolean(unallocated);
        }
    }

    /** Per-node response wrapper; the payload may be null (shard not present / unallocated filter). */
    public static class NodeStoreFilesMetaData extends BaseNodeResponse {

        private StoreFilesMetaData storeFilesMetaData;

        NodeStoreFilesMetaData() {
        }

        public NodeStoreFilesMetaData(DiscoveryNode node, StoreFilesMetaData storeFilesMetaData) {
            super(node);
            this.storeFilesMetaData = storeFilesMetaData;
        }

        public StoreFilesMetaData storeFilesMetaData() {
            return storeFilesMetaData;
        }

        public static NodeStoreFilesMetaData readListShardStoreNodeOperationResponse(StreamInput in) throws IOException {
            NodeStoreFilesMetaData resp = new NodeStoreFilesMetaData();
            resp.readFrom(in);
            return resp;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            // A leading boolean marks whether a payload follows (see writeTo).
            if (in.readBoolean()) {
                storeFilesMetaData = StoreFilesMetaData.readStoreFilesMetaData(in);
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            if (storeFilesMetaData == null) {
                out.writeBoolean(false);
            } else {
                out.writeBoolean(true);
                storeFilesMetaData.writeTo(out);
            }
        }
    }
}
package net.minestom.server.network.packet.client.play; import net.minestom.server.network.packet.client.ClientPlayPacket; import net.minestom.server.utils.BlockPosition; import net.minestom.server.utils.binary.BinaryReader; import net.minestom.server.utils.binary.BinaryWriter; import org.jetbrains.annotations.NotNull; public class ClientQueryBlockNbtPacket extends ClientPlayPacket { public int transactionId; public BlockPosition blockPosition = new BlockPosition(0,0,0); @Override public void read(@NotNull BinaryReader reader) { this.transactionId = reader.readVarInt(); this.blockPosition = reader.readBlockPosition(); } @Override public void write(@NotNull BinaryWriter writer) { writer.writeVarInt(transactionId); writer.writeBlockPosition(blockPosition); } }
/** * Copyright (C) 2009-2014 Dell, Inc. * See annotations for authorship information * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ==================================================================== */ package org.dasein.cloud.platform; /** * Description * <p>Created by stas: 18/07/2014 10:47</p> * * @author Stas Maksimov * @version 2014.08 initial version * @since 2014.08 */ public enum DatabaseLicenseModel { GENERAL_PUBLIC_LICENSE, LICENSE_INCLUDED, BRING_YOUR_OWN_LICENSE, POSTGRESQL_LICENSE, }
package com.gentics.mesh.core.data.schema.impl;

import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_CHANGE;
import static com.gentics.mesh.core.data.relationship.GraphRelationships.HAS_SCHEMA_CONTAINER;
import static com.gentics.mesh.core.rest.error.Errors.error;
import static com.gentics.mesh.core.rest.schema.change.impl.SchemaChangeModel.ELASTICSEARCH_KEY;
import static io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import com.gentics.mesh.context.BulkActionContext;
import com.gentics.mesh.core.data.generic.MeshVertexImpl;
import com.gentics.mesh.core.data.schema.GraphFieldSchemaContainerVersion;
import com.gentics.mesh.core.data.schema.SchemaChange;
import com.gentics.mesh.core.rest.schema.FieldSchemaContainer;
import com.gentics.mesh.core.rest.schema.change.impl.SchemaChangeModel;
import com.gentics.mesh.core.rest.schema.change.impl.SchemaChangeOperation;

import io.vertx.core.json.JsonObject;

/**
 * Base graph vertex for a single schema migration change. Changes form a linked
 * list via HAS_CHANGE edges (in = previous, out = next) and are attached to the
 * schema container versions they migrate between via HAS_SCHEMA_CONTAINER edges.
 *
 * @see SchemaChange
 */
public abstract class AbstractSchemaChange<T extends FieldSchemaContainer> extends MeshVertexImpl implements SchemaChange<T> {

    // NOTE(review): appears unused within this class — possibly kept for legacy data;
    // confirm before removal.
    private static String MIGRATION_SCRIPT_PROPERTY_KEY = "migrationScript";

    // Vertex property prefix separating REST-supplied change properties from
    // internal graph properties.
    public static final String REST_PROPERTY_PREFIX_KEY = "fieldProperty_";

    @Override
    public SchemaChange<?> getNextChange() {
        // Outgoing HAS_CHANGE edge points to the following change in the chain.
        return (SchemaChange) out(HAS_CHANGE).nextOrDefault(null);
    }

    @Override
    public SchemaChange<T> setNextChange(SchemaChange<?> change) {
        setUniqueLinkOutTo(change, HAS_CHANGE);
        return this;
    }

    @Override
    public SchemaChange<?> getPreviousChange() {
        // Incoming HAS_CHANGE edge points back to the preceding change.
        return (SchemaChange) in(HAS_CHANGE).nextOrDefault(null);
    }

    @Override
    public SchemaChange<T> setPreviousChange(SchemaChange<?> change) {
        setUniqueLinkInTo(change, HAS_CHANGE);
        return this;
    }

    @Override
    abstract public SchemaChangeOperation getOperation();

    @Override
    public <R extends GraphFieldSchemaContainerVersion<?, ?, ?, ?, ?>> R getPreviousContainerVersion() {
        return (R) in(HAS_SCHEMA_CONTAINER).nextOrDefault(null);
    }

    @Override
    public SchemaChange<T> setPreviousContainerVersion(GraphFieldSchemaContainerVersion<?, ?, ?, ?, ?> containerVersion) {
        setSingleLinkInTo(containerVersion, HAS_SCHEMA_CONTAINER);
        return this;
    }

    @Override
    public <R extends GraphFieldSchemaContainerVersion<?, ?, ?, ?, ?>> R getNextContainerVersion() {
        return (R) out(HAS_SCHEMA_CONTAINER).nextOrDefault(null);
    }

    @Override
    public SchemaChange<T> setNextSchemaContainerVersion(GraphFieldSchemaContainerVersion<?, ?, ?, ?, ?> containerVersion) {
        setSingleLinkOutTo(containerVersion, HAS_SCHEMA_CONTAINER);
        return this;
    }

    @Override
    public void setRestProperty(String key, Object value) {
        // Lists and JsonObjects cannot be stored directly as vertex properties;
        // convert to array / encoded string first.
        if (value instanceof List) {
            value = ((List) value).toArray();
        }
        if (value instanceof JsonObject) {
            value = ((JsonObject) value).encode();
        }
        property(REST_PROPERTY_PREFIX_KEY + key, value);
    }

    @Override
    public <R> R getRestProperty(String key) {
        return property(REST_PROPERTY_PREFIX_KEY + key);
    }

    @Override
    public <R> Map<String, R> getRestProperties() {
        return getProperties(REST_PROPERTY_PREFIX_KEY);
    }

    @Override
    public JsonObject getIndexOptions() {
        // The ES options may have been stored either encoded (String) or raw
        // (JsonObject) depending on how setRestProperty was invoked.
        Object obj = getRestProperty(ELASTICSEARCH_KEY);
        if (obj != null) {
            if (obj instanceof String) {
                return new JsonObject((String) obj);
            } else if (obj instanceof JsonObject) {
                return (JsonObject) obj;
            } else {
                throw error(INTERNAL_SERVER_ERROR, "Type was not expected {" + obj.getClass().getName() + "}");
            }
        }
        return null;
    }

    @Override
    public void setIndexOptions(JsonObject options) {
        setRestProperty(ELASTICSEARCH_KEY, options.encode());
    }

    @Override
    public void updateFromRest(SchemaChangeModel restChange) {
        // Copy every REST-model property onto this vertex (prefixed).
        for (String key : restChange.getProperties().keySet()) {
            setRestProperty(key, restChange.getProperties().get(key));
        }
    }

    @Override
    public SchemaChangeModel transformToRest() throws IOException {
        SchemaChangeModel model = new SchemaChangeModel();
        // Strip away the prefix
        for (String key : getRestProperties().keySet()) {
            Object value = getRestProperties().get(key);
            key = key.replace(REST_PROPERTY_PREFIX_KEY, "");
            model.getProperties().put(key, value);
        }
        model.setOperation(getOperation());
        model.setUuid(getUuid());
        return model;
    }

    @Override
    public void delete(BulkActionContext bc) {
        // Recursively delete the rest of the chain before removing this vertex.
        SchemaChange<?> next = getNextChange();
        if (next != null) {
            next.delete(bc);
        }
        getElement().remove();
    }
}
/** * Copyright (c) 2015-present, Horcrux. * All rights reserved. * * This source code is licensed under the MIT-style license found in the * LICENSE file in the root directory of this source tree. */ package com.horcrux.svg; import javax.annotation.Nullable; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Point; import android.util.Log; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import com.facebook.infer.annotation.Assertions; import com.facebook.react.common.SystemClock; import com.facebook.react.touch.OnInterceptTouchEventListener; import com.facebook.react.touch.ReactInterceptingViewGroup; import com.facebook.react.uimanager.ThemedReactContext; import com.facebook.react.uimanager.TouchTargetHelper; import com.facebook.react.uimanager.UIManagerModule; import com.facebook.react.uimanager.events.TouchEvent; import com.facebook.react.uimanager.events.TouchEventType; import com.facebook.react.views.view.ReactClippingViewGroup; import com.facebook.react.uimanager.events.EventDispatcher; import com.facebook.react.uimanager.events.NativeGestureUtil; // NativeGestureUtil.notifyNativeGestureStarted /** * Custom {@link View} implementation that draws an RNSVGSvg React view and its \children. 
*/ public class RNSVGSvgView extends ViewGroup { private @Nullable Bitmap mBitmap; private RNSVGSvgViewShadowNode mSvgViewShadowNode; private int mTargetTag; public RNSVGSvgView(Context context, RNSVGSvgViewShadowNode shadowNode) { super(context); mSvgViewShadowNode = shadowNode; } public RNSVGSvgView(Context context) { super(context); } public void setBitmap(Bitmap bitmap) { if (mBitmap != null) { mBitmap.recycle(); } mBitmap = bitmap; invalidate(); } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); if (mBitmap != null) { canvas.drawBitmap(mBitmap, 0, 0, null); } } @Override public boolean dispatchTouchEvent(MotionEvent event) { mTargetTag = mSvgViewShadowNode.hitTest(new Point((int) event.getX(), (int) event.getY()), this); if (mTargetTag != -1) { EventDispatcher eventDispatcher = ((ThemedReactContext) this.getContext()).getNativeModule(UIManagerModule.class).getEventDispatcher(); handleTouchEvent(event, eventDispatcher); return true; } return super.dispatchTouchEvent(event); } @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { } public void handleTouchEvent(MotionEvent ev, EventDispatcher eventDispatcher) { int action = ev.getAction() & MotionEvent.ACTION_MASK; if (action == MotionEvent.ACTION_DOWN) { eventDispatcher.dispatchEvent( TouchEvent.obtain( mTargetTag, SystemClock.nanoTime(), TouchEventType.START, ev, ev.getX(), ev.getX())); } else if (mTargetTag == -1) { // All the subsequent action types are expected to be called after ACTION_DOWN thus target // is supposed to be set for them. Log.e( "error", "Unexpected state: received touch event but didn't get starting ACTION_DOWN for this " + "gesture before"); } else if (action == MotionEvent.ACTION_UP) { // End of the gesture. We reset target tag to -1 and expect no further event associated with // this gesture. 
eventDispatcher.dispatchEvent( TouchEvent.obtain( mTargetTag, SystemClock.nanoTime(), TouchEventType.END, ev, ev.getX(), ev.getY())); mTargetTag = -1; } else if (action == MotionEvent.ACTION_MOVE) { // Update pointer position for current gesture eventDispatcher.dispatchEvent( TouchEvent.obtain( mTargetTag, SystemClock.nanoTime(), TouchEventType.MOVE, ev, ev.getX(), ev.getY())); } else if (action == MotionEvent.ACTION_POINTER_DOWN) { // New pointer goes down, this can only happen after ACTION_DOWN is sent for the first pointer eventDispatcher.dispatchEvent( TouchEvent.obtain( mTargetTag, SystemClock.nanoTime(), TouchEventType.START, ev, ev.getX(), ev.getY())); } else if (action == MotionEvent.ACTION_POINTER_UP) { // Exactly onw of the pointers goes up eventDispatcher.dispatchEvent( TouchEvent.obtain( mTargetTag, SystemClock.nanoTime(), TouchEventType.END, ev, ev.getX(), ev.getY())); } else if (action == MotionEvent.ACTION_CANCEL) { dispatchCancelEvent(ev, eventDispatcher); mTargetTag = -1; } else { Log.w( "IGNORE", "Warning : touch event was ignored. Action=" + action + " Target=" + mTargetTag); } } private void dispatchCancelEvent(MotionEvent androidEvent, EventDispatcher eventDispatcher) { // This means the gesture has already ended, via some other CANCEL or UP event. This is not // expected to happen very often as it would mean some child View has decided to intercept the // touch stream and start a native gesture only upon receiving the UP/CANCEL event. if (mTargetTag == -1) { Log.w( "error", "Can't cancel already finished gesture. Is a child View trying to start a gesture from " + "an UP/CANCEL event?"); return; } Assertions.assertNotNull(eventDispatcher).dispatchEvent( TouchEvent.obtain( mTargetTag, SystemClock.nanoTime(), TouchEventType.CANCEL, androidEvent, androidEvent.getX(), androidEvent.getY())); } }
package other;

/**
 * Service-type identifiers for discovered agents.
 */
public class ServiceType {

    /** Identifier string advertised by ambient agents. */
    public static final String AMBIENT_AGENT = "ambient-agent";

    /** Identifier string advertised by preference agents. */
    public static final String PREFERENCE_AGENT = "preference-agent";
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.pamarin.game24;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import org.apache.commons.lang3.StringUtils;

/**
 * Brute-force solver for "24-game"-style puzzles: given a target answer, a
 * string of digits and a set of operators, enumerates every digit permutation,
 * operator assignment and bracketing, evaluates each candidate expression via
 * the JSR-223 JavaScript engine, and returns the equations that hit the answer.
 *
 * Fluent usage: {@code Equations.withAnswer(24).andNumbers("1234").get()}.
 *
 * NOTE(review): relies on {@code getEngineByName("js")} (Nashorn), which was
 * removed from the JDK in Java 15 — on modern runtimes this returns null and
 * getEngine()/execute() will NPE. Confirm the target JDK.
 *
 * @author jittagornp
 */
public class Equations {

    // Target value an equation must evaluate to.
    private final Integer answer;
    // Digit string, one operand per character (e.g. "1234").
    private String number;
    // Lazily created JS engine used to evaluate candidate expressions.
    private ScriptEngine engine;
    // Candidate operators; defaults to + - * / when not supplied.
    private List<String> operators;

    private Equations(int answer) {
        this.answer = answer;
    }

    /** Entry point of the fluent builder. */
    public static Equations withAnswer(int answer) {
        return new Equations(answer);
    }

    /** Sets the digit string (one operand per character). */
    public Equations andNumbers(String number) {
        this.number = number;
        return this;
    }

    /** Overrides the default operator set. */
    public Equations andOperators(String... operators) {
        this.operators = Arrays.asList(operators);
        return this;
    }

    private List<String> getOperators() {
        if (operators == null) {
            operators = Arrays.asList(
                    "+", "-", "*", "/"
            );
        }
        return operators;
    }

    private ScriptEngine getEngine() {
        if (engine == null) {
            ScriptEngineManager manager = new ScriptEngineManager();
            engine = manager.getEngineByName("js");
        }
        return engine;
    }

    // Splits a string into a list of its single-character strings.
    private List<String> toList(String str) {
        List<String> results = new ArrayList<>();
        for (int i = 0; i < str.length(); i++) {
            results.add(str.charAt(i) + "");
        }
        return results;
    }

    /**
     * Instantiates a bracket template by substituting operands (A-D, from the
     * ":"-separated {@code number}) and operators (x/y/z, from the
     * ":"-separated {@code operator}).
     */
    private String toEquation(String number, String operator, String bracket) {
        String[] numbers = StringUtils.split(number, ":");
        String[] opers = StringUtils.split(operator, ":");
        return bracket
                .replace("A", numbers[0])
                .replace("B", numbers[1])
                .replace("C", numbers[2])
                .replace("D", numbers[3])
                .replace("x", opers[0])
                .replace("y", opers[1])
                .replace("z", opers[2]);
    }

    // Evaluates the expression with the JS engine; returns null when evaluation
    // fails or the result is not an Integer (e.g. a fractional division).
    // NOTE(review): the ScriptException is only printed, not propagated — a
    // deliberate best-effort skip, presumably; confirm this is intended.
    private Integer execute(String equation) {
        try {
            Object result = getEngine().eval(equation);
            if (result instanceof Integer) {
                return (Integer) result;
            }
        } catch (ScriptException ex) {
            ex.printStackTrace();
        }
        return null;
    }

    /**
     * Enumerates digit permutations x operator assignments x the five distinct
     * bracketings of four operands, returning every equation equal to the answer.
     */
    public List<String> get() {
        List<String> uniqueNumbers = Probability
                .ofElements(toList(number))
                .distinct()
                .find();

        List<String> opers = Probability
                .ofElements(getOperators())
                .size(number.length())
                .find();

        List<String> brackets = Arrays.asList(
                "((AxB)yC)zD",
                "(AxB)y(CzD)",
                "(Ax(ByC))zD",
                "Ax((ByC)zD)",
                "Ax(By(CzD))"
        );

        List<String> results = new ArrayList<>();
        for (String numb : uniqueNumbers) {
            for (String operator : opers) {
                for (String bracket : brackets) {
                    String equation = toEquation(numb, operator, bracket);
                    if (Objects.equals(execute(equation), answer)) {
                        results.add(equation);
                    }
                }
            }
        }
        return results;
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.dmn.feel.runtime.functions;

import java.math.BigDecimal;
import java.time.Duration;
import java.time.Period;

import org.kie.dmn.api.feel.runtime.events.FEELEvent.Severity;
import org.kie.dmn.feel.runtime.events.InvalidParametersEvent;

/**
 * FEEL {@code abs(n)} built-in. Overloaded for numbers, year-month durations
 * ({@link Period}) and day-time durations ({@link Duration}); each overload
 * rejects a null argument with an invalid-parameters error.
 */
public class AbsFunction extends BaseFEELFunction {

    public static final AbsFunction INSTANCE = new AbsFunction();

    AbsFunction() {
        super("abs");
    }

    /** abs of a FEEL number. */
    public FEELFnResult<BigDecimal> invoke(@ParameterName("n") BigDecimal number) {
        if (number == null) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "number", "cannot be null"));
        }
        return FEELFnResult.ofResult(number.abs());
    }

    /** abs of a year-month duration. */
    public FEELFnResult<Period> invoke(@ParameterName("n") Period duration) {
        if (duration == null) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "duration", "cannot be null"));
        }
        // Period has no abs(); negate when the total month count is negative.
        Period magnitude = duration.toTotalMonths() < 0 ? duration.negated() : duration;
        return FEELFnResult.ofResult(magnitude);
    }

    /** abs of a day-time duration. */
    public FEELFnResult<Duration> invoke(@ParameterName("n") Duration duration) {
        if (duration == null) {
            return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "duration", "cannot be null"));
        }
        return FEELFnResult.ofResult(duration.abs());
    }
}
/** * Copyright (C) 2013-2014 EaseMob Technologies. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package cn.ucai.fulicenter.activity; import java.util.List; import android.os.Bundle; import android.view.View; import android.widget.ListView; import cn.ucai.fulicenter.applib.controller.HXSDKHelper; import cn.ucai.fulicenter.Constant; import cn.ucai.fulicenter.DemoHXSDKHelper; import cn.ucai.fulicenter.R; import cn.ucai.fulicenter.adapter.NewFriendsMsgAdapter; import cn.ucai.fulicenter.db.InviteMessgeDao; import cn.ucai.fulicenter.domain.InviteMessage; /** * 申请与通知 * */ public class NewFriendsMsgActivity extends BaseActivity { private ListView listView; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_new_friends_msg); listView = (ListView) findViewById(R.id.list); InviteMessgeDao dao = new InviteMessgeDao(this); List<InviteMessage> msgs = dao.getMessagesList(); //设置adapter NewFriendsMsgAdapter adapter = new NewFriendsMsgAdapter(this, 1, msgs); listView.setAdapter(adapter); ((DemoHXSDKHelper)HXSDKHelper.getInstance()).getContactList().get(Constant.NEW_FRIENDS_USERNAME).setUnreadMsgCount(0); } public void back(View view) { finish(); } }
/**
 * Copyright (c) 2020 DisCo Group - Universidad de Zaragoza.
 *
 * This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License 1.0
 * which accompanies this distribution, and is available at
 * https://www.eclipse.org/legal/epl-1.0/
 *
 * SPDX-License-Identifier: EPL-1.0
 *
 * Contributors:
 * Abel Gómez
 * Ignacio Requeno
 * Diego Pérez
 */
package es.unizar.disco.pnextensions.pnutils.impl;

import es.unizar.disco.pnextensions.pnutils.PnUtils;
import es.unizar.disco.pnextensions.pnutils.PnutilsPackage;

import java.lang.reflect.InvocationTargetException;

import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Pn Utils</b></em>'.
 * EMF-generated class — do not hand-edit generated members; regenerate from
 * the model instead, or remove the {@code @generated} tag before customizing.
 * <!-- end-user-doc -->
 *
 * @generated
 */
public class PnUtilsImpl extends MinimalEObjectImpl.Container implements PnUtils {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public PnUtilsImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return PnutilsPackage.Literals.PN_UTILS;
    }

    /**
     * <!-- begin-user-doc -->
     * Unimplemented generated stub: always throws UnsupportedOperationException.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void layout(EObject petriNet) {
        // TODO: implement this method
        // Ensure that you remove @generated or mark it @generated NOT
        throw new UnsupportedOperationException();
    }

    /**
     * <!-- begin-user-doc -->
     * Reflective EMF dispatch: routes operation ids to the matching method.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eInvoke(int operationID, EList<?> arguments) throws InvocationTargetException {
        switch (operationID) {
            case PnutilsPackage.PN_UTILS___LAYOUT__EOBJECT:
                layout((EObject)arguments.get(0));
                return null;
        }
        return super.eInvoke(operationID, arguments);
    }

} //PnUtilsImpl
package zero.downtime.soa.api;

import org.apache.camel.builder.RouteBuilder;

import java.io.IOException;
import java.net.ServerSocket;

/**
 * Rest DSL using the jetty component to expose the Hello Service API.
 * Exposes a synchronous endpoint (delegating to the direct-vm hello route)
 * and an asynchronous endpoint that publishes the user header to Kafka.
 *
 * @author Mariano Gonzalez
 * @version 1.0
 * @since 1.0
 */
public class HelloServiceApi extends RouteBuilder {

    // Kafka producer endpoint; cluster/topic/serializer/acks resolved from properties.
    private static String KAFKA = "kafka:{{helloApi.kafkaCluster}}?topic={{helloApi.kafkaTopic}}&" +
            "serializerClass={{helloApi.kafkaSerializer}}&requestRequiredAcks={{helloApi.kafkaReqAcks}}";

    @Override
    public void configure() throws Exception {
        //@formatter:off
        restConfiguration().component("jetty")
                            .host("localhost")
                            .port("8080");

        rest("/rest-api")
            .get("/sync/{user}")
                .to("direct-vm:hello")
            .get("/async/{user}").route().transform(header("user"))
                .to(KAFKA)
                .setBody(simple("Thanks ${header.user} your messages is being processed. \\n"));
        //@formatter:on
    }

    /**
     * Looks up an available port number on the host.
     *
     * @return a port number available for use
     * @throws IOException if unable to find an available ServerSocket
     */
    protected int getRandomServerPort() throws IOException {
        // FIX: the previous implementation never closed the ServerSocket,
        // leaking the descriptor and leaving the port bound until GC.
        // try-with-resources releases it immediately after reading the port.
        try (ServerSocket socket = new ServerSocket(0)) {
            return socket.getLocalPort();
        }
    }
}
/*
 * Copyright 2013 serso aka se.solovyev
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * ---------------------------------------------------------------------
 * Contact details
 *
 * Email: se.solovyev@gmail.com
 * Site: http://se.solovyev.org
 */

package org.solovyev.common.collections.multimap;

import org.junit.Assert;
import org.junit.Test;
import org.solovyev.common.collections.multiset.ArrayListMultiSet;
import org.solovyev.common.collections.multiset.MultiSet;
import org.solovyev.common.collections.multiset.SynchronizedMultiSet;

import javax.annotation.Nonnull;
import java.util.Date;
import java.util.Iterator;
import java.util.Random;
import java.util.concurrent.CountDownLatch;

/**
 * Stress test for {@link SynchronizedMultiSet}: many threads hammer one wrapped
 * multiset with random add/remove/count/iterate operations; the test passes if
 * no exception or inconsistency surfaces.
 */
public class SynchronizedMultiSetTest {

    private static final int THREAD_NUMBER = 100;

    // random number generator (shared; accessed under its own lock below)
    private static final Random rg = new Random(new Date().getTime());

    @Test
    public void testOperations() throws Exception {
        final Object mutex = new Object();
        final MultiSet<String> m = SynchronizedMultiSet.wrap(ArrayListMultiSet.<String>newInstance(), mutex);

        // Gate so all workers start at once, and a latch to await their completion.
        final CountDownLatch startPoint = new CountDownLatch(1);
        final CountDownLatch finalPoint = new CountDownLatch(THREAD_NUMBER);

        for (int i = 0; i < THREAD_NUMBER; i++) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        startPoint.await();
                        doOperations(m, mutex);
                    } catch (InterruptedException e) {
                        throw new RuntimeException(e);
                    } finally {
                        finalPoint.countDown();
                    }
                }
            }).start();
        }

        // start test
        startPoint.countDown();

        // wait until all threads finish their jobs
        finalPoint.await();
    }

    private void doOperations(@Nonnull MultiSet<String> m, @Nonnull Object mutex) {
        for (int j = 0; j < 1000; j++) {
            final int operationCode;
            final int operationCount;
            synchronized (rg) {
                // FIX: was nextInt(10), which yields 0..9 and made `case 10`
                // (iterator-removal of "2") unreachable. nextInt(11) covers
                // every branch of the switch below.
                operationCode = rg.nextInt(11);
                operationCount = rg.nextInt(100);
            }

            switch (operationCode) {
                case 0:
                    m.add("1", operationCount);
                    break;
                case 1:
                    m.add("2", operationCount);
                    break;
                case 2:
                    m.add("3", operationCount);
                    break;
                case 3:
                    m.remove("1", operationCount);
                    break;
                case 4:
                    m.remove("2", operationCount);
                    break;
                case 5:
                    m.remove("3", operationCount);
                    break;
                case 6:
                    m.count("1");
                    break;
                case 7:
                    m.count("2");
                    break;
                case 8:
                    m.count("3");
                    break;
                case 9:
                    // Client-side iteration must hold the same mutex the wrapper uses.
                    synchronized (mutex) {
                        for (Iterator<String> it = m.iterator(); it.hasNext(); ) {
                            final String el = it.next();
                            if (el.equals("1")) {
                                it.remove();
                            }
                        }
                        Assert.assertEquals(0, m.count("1"));
                    }
                    break;
                case 10:
                    synchronized (mutex) {
                        for (Iterator<String> it = m.iterator(); it.hasNext(); ) {
                            final String el = it.next();
                            if (el.equals("2")) {
                                it.remove();
                            }
                        }
                        Assert.assertEquals(0, m.count("2"));
                    }
                    break;
            }
        }
    }
}
package cn.closeli.rtc.widget;

import android.content.Context;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.view.View;
import android.view.ViewTreeObserver;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.RelativeLayout;

import java.util.HashSet;
import java.util.Set;

import cn.closeli.rtc.R;

/**
 * RelativeLayout that tracks global focus changes and moves a floating
 * highlight view ({@code mFocusView}) over the currently focused child.
 * Certain widget types and explicitly-ignored ids get a collapsed/translucent
 * highlight instead of the full selector.
 */
public class FocusLayout extends RelativeLayout implements ViewTreeObserver.OnGlobalFocusChangeListener {

    private LayoutParams mFocusLayoutParams;
    private View mFocusView;
    private Set<Integer> ingoreIds;   // ids that get the collapsed highlight (需要忽略的ids)
    private Set<Integer> focusedIds;  // ids allowed to receive the highlight (filter currently commented out)
    private int heightPixels;
    private int widthPixels;

    public FocusLayout(Context context) {
        super(context);
        init(context);
    }

    public FocusLayout(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    public FocusLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context);
    }

    private void init(Context context) {
        // FIX: the id sets were only created in the single-arg constructor, so a
        // FocusLayout inflated from XML (AttributeSet constructors) had null sets
        // and onGlobalFocusChanged()/addIngoreIds() crashed with an NPE.
        // Initializing them here covers every constructor.
        ingoreIds = new HashSet<>();
        focusedIds = new HashSet<>();

        DisplayMetrics displayMetrics = getResources().getDisplayMetrics();
        heightPixels = displayMetrics.heightPixels;
        widthPixels = displayMetrics.widthPixels;

        this.mFocusLayoutParams = new RelativeLayout.LayoutParams(0, 0);
        this.mFocusView = new View(context);
        this.mFocusView.setBackgroundResource(R.drawable.bg_item_selector);
        this.addView(this.mFocusView, this.mFocusLayoutParams);
    }

    @Override
    public void onGlobalFocusChanged(View oldFocus, View newFocus) {
        if (newFocus == null) {
            return;
        }
//        if (!focusedIds.contains(newFocus.getId())) {
//            return;
//        }
        Rect viewRect = new Rect();
        newFocus.getGlobalVisibleRect(viewRect);
        correctLocation(viewRect);
        this.mFocusView.setBackgroundResource(R.drawable.bg_item_selector);

        if (newFocus instanceof CompoundButton) {
            // Full-size selector expanded by the focus view's padding.
            this.setFocusLocation(
                    viewRect.left - this.mFocusView.getPaddingLeft(),
                    viewRect.top - this.mFocusView.getPaddingTop(),
                    viewRect.right + this.mFocusView.getPaddingRight(),
                    viewRect.bottom + this.mFocusView.getPaddingBottom());
            return;
        } else if (newFocus instanceof Button || newFocus instanceof EditText) {
            // Buttons/EditTexts draw their own focus state: collapse the highlight
            // to a translucent point near the view's center.
            // NOTE(review): the fourth argument's parenthesization differs from the
            // other three (the padding term is subtracted outside the /2) — looks
            // like a copy/paste slip, but left untouched pending confirmation of
            // the intended geometry.
            this.mFocusView.setBackgroundResource(R.color.color_translate);
            this.setFocusLocation(
                    viewRect.left + (viewRect.right + this.mFocusView.getPaddingRight() - (viewRect.left - this.mFocusView.getPaddingLeft())) / 2 + 50,
                    viewRect.top + (viewRect.bottom + this.mFocusView.getPaddingBottom() - (viewRect.top - this.mFocusView.getPaddingTop())) / 2 + 50,
                    viewRect.right - ((viewRect.right + this.mFocusView.getPaddingRight() - (viewRect.left - this.mFocusView.getPaddingLeft())) / 2 - 50),
                    viewRect.bottom - (viewRect.bottom + this.mFocusView.getPaddingBottom()) - (viewRect.top - this.mFocusView.getPaddingTop()) / 2 - 50);
            return;
        }

        // Views requiring special handling (需要特殊处理的View): same collapsed
        // translucent highlight as buttons.
        if (ingoreIds.contains(newFocus.getId())) {
            this.mFocusView.setBackgroundResource(R.color.color_translate);
            this.setFocusLocation(
                    viewRect.left + (viewRect.right + this.mFocusView.getPaddingRight() - (viewRect.left - this.mFocusView.getPaddingLeft())) / 2 + 50,
                    viewRect.top + (viewRect.bottom + this.mFocusView.getPaddingBottom() - (viewRect.top - this.mFocusView.getPaddingTop())) / 2 + 50,
                    viewRect.right - ((viewRect.right + this.mFocusView.getPaddingRight() - (viewRect.left - this.mFocusView.getPaddingLeft())) / 2 - 50),
                    viewRect.bottom - (viewRect.bottom + this.mFocusView.getPaddingBottom()) - (viewRect.top - this.mFocusView.getPaddingTop()) / 2 - 50);
            return;
        }

        this.setFocusLocation(
                viewRect.left - this.mFocusView.getPaddingLeft(),
                viewRect.top - this.mFocusView.getPaddingTop(),
                viewRect.right + this.mFocusView.getPaddingRight(),
                viewRect.bottom + this.mFocusView.getPaddingBottom());
    }

    /**
     * getGlobalVisibleRect() returns screen coordinates; shift the rect by this
     * layout's own screen offset so it becomes relative to the FocusLayout.
     *
     * @param rect rect to adjust in place
     */
    private void correctLocation(Rect rect) {
        Rect layoutRect = new Rect();
        this.getGlobalVisibleRect(layoutRect);
        rect.left -= layoutRect.left;
        rect.right -= layoutRect.left;
        rect.top -= layoutRect.top;
        rect.bottom -= layoutRect.top;
    }

    /**
     * Positions and sizes the highlight view, clamped to the screen bounds.
     *
     * @param left   left edge (layout-relative)
     * @param top    top edge
     * @param right  right edge
     * @param bottom bottom edge
     */
    protected void setFocusLocation(int left, int top, int right, int bottom) {
        int width = right - left;
        int height = bottom - top;
        this.mFocusLayoutParams.width = width;
        this.mFocusLayoutParams.height = height;
        this.mFocusLayoutParams.leftMargin = left;
        this.mFocusLayoutParams.topMargin = top;
        this.mFocusView.layout(left, top, Math.min(right, widthPixels), Math.min(bottom, heightPixels));
    }

    /** Adds an id to the ignore set (添加需要忽略的ids). */
    public void addIngoreIds(int ids) {
        if (ingoreIds != null) {
            if (!ingoreIds.contains(ids)) {
                ingoreIds.add(ids);
            }
        }
    }

    public void removeingoreIds(int ids) {
        if (ingoreIds != null) {
            if (ingoreIds.contains(ids)) {
                ingoreIds.remove(ids);
            }
        }
    }

    // NOTE(review): despite its "add" name, this overload REPLACES the ignore
    // set with the given one; callers may depend on that, so the behavior is
    // preserved. Confirm intent before renaming/fixing.
    public void addIngoreIds(Set<Integer> map) {
        if (ingoreIds != null) {
            ingoreIds = map;
        }
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        ingoreIds.clear();
    }

    public void clearFocusView() {
        if (mFocusView != null) {
            this.mFocusView.setBackgroundResource(R.color.color_translate);
        }
    }

    public void addFocusedId(int id) {
        focusedIds.add(id);
    }

    public void addFocusedId(Set<Integer> ids) {
        focusedIds.addAll(ids);
    }
}
package moe.gensoukyo.rpgmaths.api.impl.data; import com.google.common.base.Preconditions; import moe.gensoukyo.rpgmaths.api.damage.type.IDamageType; import moe.gensoukyo.rpgmaths.api.damage.type.IResistanceMap; import moe.gensoukyo.rpgmaths.api.data.IRpgData; import javax.annotation.Nonnull; import javax.annotation.Nullable; /** * @author Chloe_koopa */ public abstract class AbstractRpgData implements IRpgData { @Nullable protected IResistanceMap resistanceMap; protected IDamageType[] defaultDamageTypes; protected AbstractRpgData(@Nullable IResistanceMap resMapIn, @Nonnull IDamageType[] defaultDamageTypeIn) { Preconditions.checkNotNull(defaultDamageTypeIn); this.resistanceMap = resMapIn; this.defaultDamageTypes = defaultDamageTypeIn; } @Override public boolean hasRpgData() { return true; } @Nonnull @Override public IResistanceMap getResistance() { return (this.resistanceMap == null) ? IResistanceMap.DEFAULT : this.resistanceMap; } @Nonnull @Override public IDamageType[] getDefaultDamageTypes() { return this.defaultDamageTypes; } }
package org.andengine.extension.physics.box2d;

import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;

import org.junit.Test;
import org.junit.runner.RunWith;

import static org.junit.Assert.*;

/**
 * Instrumentation test, which will execute on an Android device.
 * Template-generated sanity check: verifies the instrumentation target's
 * application context reports the expected package name.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
    @Test
    public void useAppContext() throws Exception {
        // Context of the app under test.
        Context appContext = InstrumentationRegistry.getTargetContext();

        assertEquals("org.andengine.extension.physics.box2d.test", appContext.getPackageName());
    }
}
import java.io.File; import java.util.Scanner; import java.io.FileNotFoundException; public class Labyrint implements Comparable<String>{ static Rute[][] RuteArr; static int kolonner; static int rader; //Endret til å ha lenkelisten i labyrint public static Lenkeliste<String> utskrift = new Lenkeliste<String>(); //Konstruktøren er privat, så labyrinter kan kun opprettes fra denne klassen. private Labyrint(Rute[][] _RuteArr, int _kolonner, int _rader){ RuteArr = _RuteArr; kolonner = _kolonner; rader = _rader; } //Metode for å finne utveien fra en bestemt koordinat i labyrinten. public Lenkeliste<String> finnUtveiFra(int startKolonne, int startRad){ System.out.println(startKolonne +", "+ startRad); //Tester om startkoordinatet er en åpen plass. if(RuteArr[startKolonne][startRad].tilTegn() == '#'){ System.out.println("Ingen utveier."); return null; } else { tomListe(); RuteArr[startKolonne][startRad].finnUtvei(); } fjernMerk(); return utskrift; } public static void leggTilUtvei(String x){ utskrift.leggTil(x); } void tomListe(){ int a = utskrift.stoerrelse(); for(int i = 0; i < a; i++){ utskrift.fjern(); } } void fjernMerk(){ for(int a = 0; a < rader; a++){ for(int b = 0; b < kolonner; b++){ RuteArr[a][b].merket = false; } } } //Metode som leser inn labyrinten fra en fil og setter naboene til rutene. public static Labyrint lesFraFil(File fil) throws FileNotFoundException{ int rad = 0; int kolonne = 0; try { //Oppretter filscanner. 
Scanner sc = new Scanner(fil); String[] linje = sc.nextLine().split(" ",0); rad = Integer.parseInt(linje[0]); kolonne = Integer.parseInt(linje[1]); RuteArr = new Rute[rad][kolonne]; //Leser inn og oppretter Rute-objekter Rute ny; for(int a = 0; a < rad; a++){ linje = sc.nextLine().split("",0); for(int b = 0; b < kolonne; b++){ if(linje[b].equals(".")){ if(a == 0 || b == 0 || a == rad-1 || b == kolonne-1){ ny = new Aapning(a, b); } else { ny = new HvitRute(a, b); } } else{ ny = new SortRute(a, b); } RuteArr[a][b] = ny; } } //Avslutter scanner-funksjonen sc.close(); //Finner naboer for(int a = 0; a < rad; a++){ for(int b = 0; b < kolonne; b++){ Rute over; Rute under; Rute venstre; Rute hoyre; //Over if(a==0){ over = null; }else{ over = RuteArr[a-1][b]; } //Under if(a == rad-1){ under = null; }else{ under = RuteArr[a+1][b]; } //Venstre if(b==0){ venstre = null; }else{ venstre = RuteArr[a][b-1]; } //Høyre if(b == kolonne-1){ hoyre = null; }else{ hoyre = RuteArr[a][b+1]; } //Kaller på settNabo-metoden i Rute-klassen. RuteArr[a][b].settNabo(over, under, venstre, hoyre); } } //Printer ut labyrinten for(int a = 0; a < rad; a++){ for(int b = 0; b < kolonne; b++){ System.out.print(RuteArr[a][b]); } System.out.print("\n"); } } catch (FileNotFoundException e) { //TODO: handle exception } Labyrint nyLab = new Labyrint(RuteArr, kolonne, rad); return nyLab; } @Override public int compareTo(String o) { // TODO Auto-generated method stub return 0; } }
/**
 * Copyright 2020 cedar12.zxd@qq.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cn.cedar.data;

import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Pattern;

/**
 * Shared constants and mutable global state for the cedar-data handling
 * pipeline: parser symbols, embedded-JavaScript helper snippets, per-method
 * caches, and the global {@code JdbcManager}.
 *
 * @author cedar12.zxd@qq.com
 */
public class HandlerConstant {

    // Not instantiable from outside the package; subclasses may extend.
    protected HandlerConstant(){};

    // Names of the runtime environments this library can run under.
    protected static final String ENV_CEDAR_DATA="cedar-data";
    protected static final String ENV_CEDAR_DATA_SPRING="cedar-data-spring";
    protected static final String ENV_CEDAR_DATA_SPRING_BOOT_STARTER="cedar-data-spring-boot-starter";
    // Currently active environment; defaults to plain cedar-data.
    protected static String env=ENV_CEDAR_DATA;

    public static String getEnv() {
        return env;
    }

    public static void setEnv(String env) {
        HandlerConstant.env = env;
    }

    // Single-character parser symbols declared by ASCII code:
    // 123='{' 125='}' 91='[' 93=']' 35='#' 39=single quote 34=double quote.
    protected static final char START_SYMBOL=123;
    protected static final char END_SYMBOL=125;
    protected static final char S_SYMBOL=91;
    protected static final char E_SYMBOL=93;
    protected static final char WELL_SYMBOL=35;
    protected static final char SINGLE_SYMBOL=39;
    protected static final char DOUBLE_SYMBOL=34;
    // Temporary markers used while rewriting expressions.
    protected static final char S_TMP_SYMBOL='@';
    protected static final char E_TMP_SYMBOL='`';
    protected static final char EXP_FLAG_SYMBOL='#';

    // String tokens recognised by the .cd file parser and SQL generator.
    protected static final String FILE_SPLIT_SYMBOL="/";
    protected static final String MAP_SYMBOL="map";
    protected static final String PACK_MAP_SYMBOL="java.util.Map";
    protected static final String EMPTY_SYMBOL="";
    protected static final String SELECT_SYMBOL="select";
    protected static final String JS_SYMBOL="JavaScript";
    protected static final String SPLIT_SYMBOL=",";
    protected static final String COLON_SYMBOL=":";
    protected static final String KEY_SYMBOL="KEY";
    protected static final String RETURN_SYMBOL="return";
    protected static final String ONE_EMPTY_SYMBOL=" ";
    protected static final String FUN_SYMBOL="function";
    protected static final String EVAL_NAME_SYMBOL="p_exp";
    protected static final String FLAG_SYMBOL="!S ";
    protected static final String PLACEHOLDER_SYMBOL="@?";
    protected static final String ARGS_SYMBOL="args";
    protected static final String EXP_SYMBOL="express";
    protected static final String SQL_SYMBOL="sql";

    // JavaScript snippet injected into the engine: adds Date.prototype.format
    // so script expressions can format dates; returns a quoted string literal.
    protected static final String DATE_FORMAT="Date.prototype.format = function (fmt) { var o = {'M+': this.getMonth() + 1, 'd+': this.getDate(), 'h+': this.getHours(), 'm+': this.getMinutes(), 's+': this.getSeconds(), 'q+': Math.floor((this.getMonth() + 3) / 3), 'S': this.getMilliseconds() }; if (/(y+)/.test(fmt)) fmt = fmt.replace(RegExp.$1, (this.getFullYear() + '').substr(4 - RegExp.$1.length)); for (var k in o) if (new RegExp('(' + k + ')').test(fmt)) fmt = fmt.replace(RegExp.$1, (RegExp.$1.length == 1) ? (o[k]) : (('00' + o[k]).substr(('' + o[k]).length))); return '\\''+fmt+'\\'';};";

    // JavaScript snippet: adds String.prototype.to, which normalises a string
    // so it is wrapped in exactly one pair of single quotes.
    protected static final String STRING_TO="String.prototype.to=function(){ var s=this.toString(); if(s[0]=='\\''&&s[s.length-1]=='\\''){ return s.substring(1,s.length-1); }else if(s[0]=='\\''&&s[s.length-1]!='\\''){ return s+'\\''; }else if(s[0]!='\\''&&s[s.length-1]=='\\''){ return '\\''+s; }else{ return '\\''+s+'\\''; }};";

    // Discriminators for supported numeric parameter/return types.
    protected static final int TYPE_INT=0;
    protected static final int TYPE_INTEGER=1;
    protected static final int TYPE_LONG=2;
    protected static final int TYPE_LONG_=3;
    protected static final int TYPE_OTHER=4;

    // Cached setter-name mappings per class (insertion-ordered).
    protected static Map<Class<?>,Map<String,String>> setMap=new LinkedHashMap<>();

    // Keywords of the .cd definition language.
    protected static final String KEYWORD_CONST="const";
    protected static final String KEYWORD_DEF="def";
    protected static final String KEYWORD_IMPORT="import";
    protected static final String KEYWORD_PRIVATE="private";
    // File extension of cedar-data definition files.
    protected static final String FILE_SUFFIX=".cd";

    // Discriminators for the kind of SQL statement being executed.
    protected static final int SQL_TYPE_QUERY=0;
    protected static final int SQL_TYPE_QUERY_ONE=1;
    protected static final int SQL_TYPE_UPDATE=2;
    protected static final int SQL_TYPE_INSERT_KEY=3;
    protected static final int SQL_TYPE_QUERY_COUNT=4;

    // Maximum nesting depth; mutable despite the constant-style name.
    protected static int MAX_LAYER=5;

    public static void setMaxLayer(int max){
        // Non-positive values are ignored to keep a sane minimum depth.
        if(max>0){
            MAX_LAYER=max;
        }
    }

    // Matches /* ... */ block comments (DOTALL so they may span lines).
    protected static Pattern ANNOTATION = Pattern.compile("\\/\\*.*?\\*\\/",Pattern.DOTALL);

    // Shared script engine used to evaluate embedded JavaScript expressions.
    protected static ScriptEngineManager MANAGER = new ScriptEngineManager();
    protected static ScriptEngine ENGINE = HandlerConstant.MANAGER.getEngineByName(HandlerConstant.JS_SYMBOL);

    // Per-method caches populated while parsing .cd files and building proxies.
    protected static Map<Method,String> sqlMap=new HashMap<>();
    protected static Map<Method,String> returnMap=new HashMap<>();
    protected static Map<Class,Object> proxyMap=new HashMap<>();
    protected static Map<Method,Map<String,Object>> parseSqlMap=new HashMap<>();

    // Global JDBC manager; replaced (and flagged) when a framework integration
    // supplies its own implementation via setJdbcManager().
    protected static JdbcManager jdbc=new JdbcManager();
    protected static boolean isExtended=false;

    public static void setJdbcManager(JdbcManager jdbc){
        isExtended=true;
        HandlerConstant.jdbc=jdbc;
    }

    public static JdbcManager getJdbcManager(){
        return jdbc;
    }

    // When true, generated SQL is echoed for debugging.
    protected static boolean displaySql=false;

    public static void setDisplaySql(boolean display){
        displaySql=display;
    }

    // Builds an indexed placeholder of the form "[<count>]"
    // (S_SYMBOL=91='[' and E_SYMBOL=93=']').
    protected static String placeholderSymbol(int count){
        return String.valueOf(S_SYMBOL)+count+String.valueOf(E_SYMBOL);
    }
}
package org.robolectric.shadows;

import static android.os.Build.VERSION_CODES.N;
import static org.robolectric.shadow.api.Shadow.directlyOn;

import android.animation.AnimationHandler;
import android.animation.ValueAnimator;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.annotation.RealObject;
import org.robolectric.annotation.Resetter;
import org.robolectric.util.ReflectionHelpers;

/**
 * Robolectric shadow for {@link ValueAnimator}. Records the repeat count a
 * test requested and coerces {@link ValueAnimator#INFINITE} to a single
 * repeat so animations cannot loop forever under test, and resets
 * thread-local animation state between tests.
 */
@Implements(ValueAnimator.class)
public class ShadowValueAnimator {

  @RealObject private ValueAnimator realObject;

  // Last repeat count requested by the caller, before any INFINITE coercion.
  private int actualRepeatCount;

  @Resetter
  public static void reset() {
    /* ValueAnimator.sAnimationHandler is a static thread local that otherwise would survive between
     * tests. The AnimationHandler.mAnimationScheduled is set to true when the scheduleAnimation() is
     * called and the reset to false when run() is called by the Choreographer. If an animation is
     * already scheduled, it will not post to the Choreographer. This is a problem if a previous
     * test leaves animations on the Choreographers callback queue without running them as it will
     * cause the AnimationHandler not to post a callback. We reset the thread local here so a new
     * one will be created for each test with a fresh state.
     */
    if (RuntimeEnvironment.getApiLevel() >= N) {
      // API >= N keeps the handler on AnimationHandler.sAnimatorHandler.
      ThreadLocal<AnimationHandler> animatorHandlerTL =
          ReflectionHelpers.getStaticField(AnimationHandler.class, "sAnimatorHandler");
      animatorHandlerTL.remove();
    } else {
      // Older APIs keep it on ValueAnimator.sAnimationHandler and also need
      // any pending animations cleared explicitly.
      ReflectionHelpers.callStaticMethod(ValueAnimator.class, "clearAllAnimations");
      ThreadLocal<AnimationHandler> animatorHandlerTL =
          ReflectionHelpers.getStaticField(ValueAnimator.class, "sAnimationHandler");
      animatorHandlerTL.remove();
    }
  }

  /**
   * Remembers the requested repeat count, then forwards to the real object,
   * substituting 1 for {@link ValueAnimator#INFINITE} so the animation
   * terminates under test.
   */
  @Implementation
  protected void setRepeatCount(int count) {
    actualRepeatCount = count;
    if (count == ValueAnimator.INFINITE) {
      count = 1;
    }
    directlyOn(realObject, ValueAnimator.class).setRepeatCount(count);
  }

  /**
   * Returns the value that was set as the repeat count. This is otherwise the same as
   * getRepeatCount(), except when the count was set to infinite.
   *
   * @return Repeat count.
   */
  public int getActualRepeatCount() {
    return actualRepeatCount;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.dbdiscovery.distsql.handler.update; import com.google.common.base.Preconditions; import org.apache.shardingsphere.dbdiscovery.api.config.DatabaseDiscoveryRuleConfiguration; import org.apache.shardingsphere.dbdiscovery.api.config.rule.DatabaseDiscoveryDataSourceRuleConfiguration; import org.apache.shardingsphere.dbdiscovery.distsql.handler.converter.DatabaseDiscoveryRuleStatementConverter; import org.apache.shardingsphere.dbdiscovery.distsql.parser.segment.AbstractDatabaseDiscoverySegment; import org.apache.shardingsphere.dbdiscovery.distsql.parser.segment.DatabaseDiscoveryConstructionSegment; import org.apache.shardingsphere.dbdiscovery.distsql.parser.segment.DatabaseDiscoveryDefinitionSegment; import org.apache.shardingsphere.dbdiscovery.distsql.parser.statement.AlterDatabaseDiscoveryRuleStatement; import org.apache.shardingsphere.dbdiscovery.spi.DatabaseDiscoveryType; import org.apache.shardingsphere.infra.config.RuleConfiguration; import org.apache.shardingsphere.infra.config.algorithm.ShardingSphereAlgorithmConfiguration; import org.apache.shardingsphere.infra.distsql.exception.DistSQLException; import 
org.apache.shardingsphere.infra.distsql.exception.resource.RequiredResourceMissedException; import org.apache.shardingsphere.infra.distsql.exception.rule.InvalidAlgorithmConfigurationException; import org.apache.shardingsphere.infra.distsql.exception.rule.RequiredAlgorithmMissedException; import org.apache.shardingsphere.infra.distsql.exception.rule.RequiredRuleMissedException; import org.apache.shardingsphere.infra.distsql.update.RuleDefinitionAlterUpdater; import org.apache.shardingsphere.infra.metadata.ShardingSphereMetaData; import org.apache.shardingsphere.infra.metadata.resource.ShardingSphereResource; import org.apache.shardingsphere.spi.ShardingSphereServiceLoader; import org.apache.shardingsphere.spi.typed.TypedSPIRegistry; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Properties; import java.util.stream.Collectors; /** * Alter database discovery rule statement updater. 
 */
public final class AlterDatabaseDiscoveryRuleStatementUpdater implements RuleDefinitionAlterUpdater<AlterDatabaseDiscoveryRuleStatement, DatabaseDiscoveryRuleConfiguration> {

    static {
        // TODO consider about register once only
        ShardingSphereServiceLoader.register(DatabaseDiscoveryType.class);
    }

    /**
     * Validates the ALTER statement against the current configuration:
     * a rule config must exist, every altered rule and resource must exist,
     * and every referenced discovery type/heartbeat must be resolvable.
     */
    @Override
    public void checkSQLStatement(final ShardingSphereMetaData shardingSphereMetaData, final AlterDatabaseDiscoveryRuleStatement sqlStatement,
                                  final DatabaseDiscoveryRuleConfiguration currentRuleConfig) throws DistSQLException {
        String schemaName = shardingSphereMetaData.getName();
        checkCurrentRuleConfiguration(schemaName, currentRuleConfig);
        checkToBeAlteredRules(schemaName, sqlStatement, currentRuleConfig);
        checkToBeAlteredResources(schemaName, sqlStatement, shardingSphereMetaData.getResource());
        checkDiscoverTypeAndHeartbeat(sqlStatement, currentRuleConfig);
    }

    // ALTER requires an existing database discovery rule configuration.
    private void checkCurrentRuleConfiguration(final String schemaName, final DatabaseDiscoveryRuleConfiguration currentRuleConfig) throws DistSQLException {
        DistSQLException.predictionThrow(null != currentRuleConfig, new RequiredRuleMissedException("database discovery", schemaName));
    }

    // Every rule named in the statement must already exist in the config.
    private void checkToBeAlteredRules(final String schemaName, final AlterDatabaseDiscoveryRuleStatement sqlStatement,
                                       final DatabaseDiscoveryRuleConfiguration currentRuleConfig) throws DistSQLException {
        Collection<String> currentRuleNames = currentRuleConfig.getDataSources().stream().map(DatabaseDiscoveryDataSourceRuleConfiguration::getName).collect(Collectors.toSet());
        Collection<String> notExistedRuleNames = getToBeAlteredRuleNames(sqlStatement).stream().filter(each -> !currentRuleNames.contains(each)).collect(Collectors.toList());
        DistSQLException.predictionThrow(notExistedRuleNames.isEmpty(), new RequiredRuleMissedException("database discovery", schemaName, notExistedRuleNames));
    }

    private Collection<String> getToBeAlteredRuleNames(final AlterDatabaseDiscoveryRuleStatement sqlStatement) {
        return sqlStatement.getRules().stream().map(AbstractDatabaseDiscoverySegment::getName).collect(Collectors.toList());
    }

    // Every data source referenced by the statement must exist as a resource.
    private void checkToBeAlteredResources(final String schemaName, final AlterDatabaseDiscoveryRuleStatement sqlStatement, final ShardingSphereResource resource) throws DistSQLException {
        Collection<String> notExistedResources = resource.getNotExistedResources(getToBeAlteredResourceNames(sqlStatement));
        DistSQLException.predictionThrow(notExistedResources.isEmpty(), new RequiredResourceMissedException(schemaName, notExistedResources));
    }

    // De-duplicated union of the data source names of all altered rules.
    private Collection<String> getToBeAlteredResourceNames(final AlterDatabaseDiscoveryRuleStatement sqlStatement) {
        Collection<String> result = new LinkedHashSet<>();
        sqlStatement.getRules().forEach(each -> result.addAll(each.getDataSources()));
        return result;
    }

    /**
     * Checks discovery types and heartbeats in two passes: definition
     * segments must name a registered discovery type SPI; construction
     * segments must reference a type and heartbeat already present in the
     * current configuration.
     */
    private void checkDiscoverTypeAndHeartbeat(final AlterDatabaseDiscoveryRuleStatement sqlStatement,
                                               final DatabaseDiscoveryRuleConfiguration currentRuleConfig) throws DistSQLException {
        Map<String, List<AbstractDatabaseDiscoverySegment>> segmentMap = sqlStatement.getRules().stream().collect(Collectors.groupingBy(each -> each.getClass().getSimpleName()));
        // NOTE(review): invalidInput is mutated again by the forEach below, so
        // this relies on Collectors.toList() returning a mutable list.
        Collection<String> invalidInput = segmentMap.getOrDefault(DatabaseDiscoveryDefinitionSegment.class.getSimpleName(), Collections.emptyList()).stream()
                .map(each -> ((DatabaseDiscoveryDefinitionSegment) each).getDiscoveryType().getName()).distinct()
                .filter(each -> !TypedSPIRegistry.findRegisteredService(DatabaseDiscoveryType.class, each, new Properties()).isPresent()).collect(Collectors.toList());
        DistSQLException.predictionThrow(invalidInput.isEmpty(), new InvalidAlgorithmConfigurationException("database discovery", invalidInput));
        segmentMap.getOrDefault(DatabaseDiscoveryConstructionSegment.class.getSimpleName(), Collections.emptyList()).stream().map(each -> (DatabaseDiscoveryConstructionSegment) each)
                .forEach(each -> {
                    if (!currentRuleConfig.getDiscoveryTypes().containsKey(each.getDiscoveryTypeName())) {
                        invalidInput.add(each.getDiscoveryTypeName());
                    }
                    if (!currentRuleConfig.getDiscoveryHeartbeats().containsKey(each.getDiscoveryHeartbeatName())) {
                        invalidInput.add(each.getDiscoveryHeartbeatName());
                    }
                });
        DistSQLException.predictionThrow(invalidInput.isEmpty(), new RequiredAlgorithmMissedException("database discovery", invalidInput));
    }

    @Override
    public RuleConfiguration buildToBeAlteredRuleConfiguration(final AlterDatabaseDiscoveryRuleStatement sqlStatement) {
        return DatabaseDiscoveryRuleStatementConverter.convert(sqlStatement.getRules());
    }

    /**
     * Applies the alteration: drops the old versions of the altered rules,
     * re-adds the new versions, then refreshes type properties.
     */
    @Override
    public void updateCurrentRuleConfiguration(final DatabaseDiscoveryRuleConfiguration currentRuleConfig, final DatabaseDiscoveryRuleConfiguration toBeAlteredRuleConfig) {
        dropRuleConfiguration(currentRuleConfig, toBeAlteredRuleConfig);
        addRuleConfiguration(currentRuleConfig, toBeAlteredRuleConfig);
        updateProperties(toBeAlteredRuleConfig);
    }

    private void dropRuleConfiguration(final DatabaseDiscoveryRuleConfiguration currentRuleConfig, final DatabaseDiscoveryRuleConfiguration toBeAlteredRuleConfig) {
        for (DatabaseDiscoveryDataSourceRuleConfiguration each : toBeAlteredRuleConfig.getDataSources()) {
            dropDataSourceRuleConfiguration(currentRuleConfig, each.getName());
        }
    }

    // Removes one data source rule plus the discovery type and heartbeat
    // entries it referenced; the rule is guaranteed to exist by the earlier
    // checkToBeAlteredRules() validation.
    private void dropDataSourceRuleConfiguration(final DatabaseDiscoveryRuleConfiguration currentRuleConfig, final String toBeDroppedRuleNames) {
        Optional<DatabaseDiscoveryDataSourceRuleConfiguration> toBeDroppedDataSourceRuleConfig = currentRuleConfig.getDataSources().stream()
                .filter(each -> each.getName().equals(toBeDroppedRuleNames)).findAny();
        Preconditions.checkState(toBeDroppedDataSourceRuleConfig.isPresent());
        currentRuleConfig.getDataSources().remove(toBeDroppedDataSourceRuleConfig.get());
        currentRuleConfig.getDiscoveryTypes().remove(toBeDroppedDataSourceRuleConfig.get().getDiscoveryTypeName());
        currentRuleConfig.getDiscoveryHeartbeats().remove(toBeDroppedDataSourceRuleConfig.get().getDiscoveryHeartbeatName());
    }

    private void addRuleConfiguration(final DatabaseDiscoveryRuleConfiguration currentRuleConfig, final DatabaseDiscoveryRuleConfiguration toBeAlteredRuleConfig) {
        currentRuleConfig.getDataSources().addAll(toBeAlteredRuleConfig.getDataSources());
        currentRuleConfig.getDiscoveryTypes().putAll(toBeAlteredRuleConfig.getDiscoveryTypes());
        currentRuleConfig.getDiscoveryHeartbeats().putAll(toBeAlteredRuleConfig.getDiscoveryHeartbeats());
    }

    // Pushes each rule's discovery-type properties into the matching SPI
    // implementation, when one is registered for the configured type.
    private void updateProperties(final DatabaseDiscoveryRuleConfiguration ruleConfiguration) {
        ruleConfiguration.getDataSources().forEach(each -> {
            ShardingSphereAlgorithmConfiguration configuration = ruleConfiguration.getDiscoveryTypes().get(each.getDiscoveryTypeName());
            TypedSPIRegistry.findRegisteredService(DatabaseDiscoveryType.class, configuration.getType(), new Properties()).ifPresent(databaseDiscoveryType
                    -> databaseDiscoveryType.updateProperties(each.getName(), configuration.getProps()));
        });
    }

    @Override
    public Class<DatabaseDiscoveryRuleConfiguration> getRuleConfigurationClass() {
        return DatabaseDiscoveryRuleConfiguration.class;
    }

    @Override
    public String getType() {
        return AlterDatabaseDiscoveryRuleStatement.class.getCanonicalName();
    }
}
package com.activity.analyzer.library; import android.util.EventLog; import android.util.EventLog.Event; import android.util.Log; import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.List; /** * 抓取EventLog的工具类 * * @author Megatron King * @since 2016-2-18 下午5:43:56 */ public class EventLogFetcher { private static final String TAG = EventLogFetcher.class.getSimpleName(); private static final int SECONDS_OFFSET = 12; public static final String AM_ACTIVITY_FULLY_DRAWN_TIME = "am_activity_fully_drawn_time"; public static final String AM_ACTIVITY_LAUNCH_TIME = "am_activity_launch_time"; public static List<Event> fetch(String tagName){ List<Event> events = new ArrayList<Event>(); try { EventLog.readEvents(new int[]{EventLog.getTagCode(tagName)}, events); } catch (IOException e) { Log.wtf(TAG, e); } return events; } public static List<Object> fetchData(String tagName){ List<Event> events = fetch(tagName); List<Object> objects = new ArrayList<Object>(); for (Event event : events) { objects.add(event.getData()); } return objects; } public static Event fetchLatest(String tagName){ List<Event> events = fetch(tagName); return events.isEmpty() ? null : events.get(events.size() - 1); } public static Object fetchLatestData(String tagName){ Event event = fetchLatest(tagName); return event == null ? 
null : event.getData(); } public static List<Object> fetchDataAfter(String tagName, long startTime){ List<Event> events = fetch(tagName); List<Object> objects = new ArrayList<Object>(); for (Event event : events) { long time = getEventTimeInMills(event); if(time >= startTime){ objects.add(event.getData()); } } return objects; } @SuppressWarnings("unchecked") public static HashMap<Integer, String> fetchTags(){ HashMap<Integer, String> tags = null; try { Method method = EventLog.class.getDeclaredMethod("readTagsFile"); method.setAccessible(true); method.invoke(null); Field field = EventLog.class.getDeclaredField("sTagNames"); field.setAccessible(true); tags = (HashMap<Integer, String>) field.get(null); } catch (Exception e) { Log.wtf(TAG, e); } return tags; } public static void logTags(){ HashMap<Integer, String> tags = fetchTags(); if(tags == null || tags.isEmpty()){ Log.wtf(TAG, "The event tag is empty!"); }else{ for (int tagCode : tags.keySet()) { Log.i(TAG, "code: " + tagCode + " name: " + tags.get(tagCode)); } } } private static long getEventTimeInMills(Event event){ long time = 0; try { Field field = event.getClass().getDeclaredField("mBuffer"); field.setAccessible(true); ByteBuffer buffer = (ByteBuffer) field.get(event); // The will some milliseconds lost, add 1000ms time = ((long)buffer.getInt(SECONDS_OFFSET)) * 1000L + 1000L; } catch (Exception e) { Log.wtf(TAG, e); } return time; } }
/* All Contributors (C) 2021 */
package io.github.poorguy.explore.learn.linkedlist;

/** Node of a singly linked list: an int payload plus a successor pointer. */
class ListNode {
    int val;
    ListNode next;

    /** Creates a node with the default value 0 and no successor. */
    public ListNode() {}

    /** Creates a node holding {@code x} with no successor. */
    ListNode(int x) {
        this.val = x;
        this.next = null;
    }

    /** Creates a node holding {@code x} linked to {@code next}. */
    ListNode(int x, ListNode next) {
        this.val = x;
        this.next = next;
    }
}
package io.smallrye.reactive.messaging.beans;

import org.eclipse.microprofile.reactive.messaging.Incoming;
import org.eclipse.microprofile.reactive.messaging.Outgoing;

import javax.enterprise.context.ApplicationScoped;

/**
 * Application-scoped bean that consumes integers from the "count" channel,
 * increments each one, and emits the result as a string on the "sink" channel.
 */
@ApplicationScoped
public class BeanConsumingItemsAndProducingItems {

    @Incoming("count")
    @Outgoing("sink")
    public String process(int value) {
        // Same result as Integer.toString(value + 1).
        int incremented = value + 1;
        return String.valueOf(incremented);
    }
}
package frc.robot.subsystems.arm.factory;

import frc.robot.components.hardware.TalonSRXComponent;
import frc.robot.subsystems.arm.Arm;

/**
 * {@link ArmFactory} that builds an {@link Arm} backed by a real Talon SRX
 * motor controller.
 */
public class HardwareArmFactory implements ArmFactory {

	/**
	 * Device id of the arm's Talon SRX motor controller.
	 */
	private static final int ARM_MOTOR_PORT = 8;

	/**
	 * Constructs the arm subsystem with hardware soft limits enabled so the
	 * motor stops at the ends of the arm's travel.
	 *
	 * @return a new {@link Arm} driving the configured Talon SRX
	 */
	public Arm makeArm(){
		var srx = new TalonSRXComponent(ARM_MOTOR_PORT);
		// Reverse travel is bounded at -3800, forward at 0.
		// NOTE(review): values are presumably encoder ticks — confirm against
		// the sensor configuration before changing them.
		srx.configReverseSoftLimitEnable(true);
		srx.configReverseSoftLimitThreshold(-3800);
		srx.configForwardSoftLimitEnable(true);
		srx.configForwardSoftLimitThreshold(0);
		return new Arm(srx);
	}
}
package com.yu.hu.emoji.widget;

import android.content.Context;

import androidx.appcompat.widget.AppCompatImageView;

import com.yu.hu.emoji.utils.TransformUtils;

/**
 * Created by Hy on 2019/12/30 18:05
 * <p>
 * Image view that renders one small emoji icon at a fixed height.
 **/
public class EmojiView extends AppCompatImageView {

    // Default icon size, in dp.
    private static final int DEFAULT_SIZE = 28;

    // Current icon size in dp; applied to the view height in onMeasure().
    private int size = DEFAULT_SIZE;

    public EmojiView(Context context) {
        super(context);
    }

    /**
     * Sets the emoji size.
     *
     * @param size size in dp
     * @return this view, for call chaining
     */
    @SuppressWarnings("unused")
    public EmojiView setSize(int size) {
        this.size = size;
        return this;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        // Keep the measured width but force the height to `size` dp, so the
        // emoji renders at a fixed height regardless of the measure spec.
        int measuredWidth = getMeasuredWidth();
        setMeasuredDimension(measuredWidth, TransformUtils.dip2px(getContext(), size));
    }
}
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================*/

// This class has been generated, DO NOT EDIT!
// NOTE(review): the javadoc below corrects a copy-paste from the ScatterAdd
// template ("contributions multiply" for a Div op; the illustration is also
// the ScatterAdd diagram). The lasting fix belongs in the op generator.

package org.tensorflow.op.core;

import org.tensorflow.Operand;
import org.tensorflow.Operation;
import org.tensorflow.OperationBuilder;
import org.tensorflow.op.PrimitiveOp;
import org.tensorflow.op.Scope;
import org.tensorflow.op.annotation.Endpoint;
import org.tensorflow.op.annotation.Operator;
import org.tensorflow.types.family.TNumber;
import org.tensorflow.types.family.TType;

/**
 * Divides sparse updates into the variable referenced by `resource`.
 * <p>
 * This operation computes
 * <p>
 *     # Scalar indices
 *     ref[indices, ...] /= updates[...]
 * <p>
 *     # Vector indices (for each i)
 *     ref[indices[i], ...] /= updates[i, ...]
 * <p>
 *     # High rank indices (for each i, ..., j)
 *     ref[indices[i, ..., j], ...] /= updates[i, ..., j, ...]
 * <p>
 * Duplicate entries are handled correctly: if multiple `indices` reference
 * the same location, their contributions divide.
 * <p>
 * Requires `updates.shape = indices.shape + ref.shape[1:]` or `updates.shape = []`.
 * <p>
 * <div style="width:70%; margin:auto; margin-bottom:10px; margin-top:20px;">
 * <img style="width:100%" src='https://www.tensorflow.org/images/ScatterAdd.png' alt>
 * </div>
 */
@Operator
public final class ResourceScatterDiv extends PrimitiveOp {

  /**
   * Factory method to create a class wrapping a new ResourceScatterDiv operation.
   *
   * @param scope current scope
   * @param resource Should be from a `Variable` node.
   * @param indices A tensor of indices into the first dimension of `ref`.
   * @param updates A tensor of updated values to add to `ref`.
   * @return a new instance of ResourceScatterDiv
   */
  @Endpoint(describeByClass = true)
  public static <T extends TNumber, U extends TType> ResourceScatterDiv create(Scope scope, Operand<?> resource, Operand<T> indices, Operand<U> updates) {
    OperationBuilder opBuilder = scope.env().opBuilder("ResourceScatterDiv", scope.makeOpName("ResourceScatterDiv"));
    opBuilder.addInput(resource.asOutput());
    opBuilder.addInput(indices.asOutput());
    opBuilder.addInput(updates.asOutput());
    opBuilder = scope.applyControlDependencies(opBuilder);
    return new ResourceScatterDiv(opBuilder.build());
  }

  private ResourceScatterDiv(Operation operation) {
    super(operation);
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.test; import com.carrotsearch.hppc.ObjectLongMap; import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.http.HttpHost; import org.apache.lucene.search.Sort; import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import 
org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.segments.IndexSegments; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; import 
org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.discovery.Discovery; import 
org.elasticsearch.discovery.zen.ElectMasterService; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.MergeSchedulerConfig; import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MockFieldFilterPlugin; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.node.NodeMocksPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.MockSearchService; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.client.RandomizingClient; import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.test.disruption.NetworkDisruption; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.AssertingTransportInterceptor; import org.hamcrest.Matchers; import org.junit.After; import 
org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import java.io.IOException; import java.lang.annotation.Annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.Random; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BooleanSupplier; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.client.Requests.syncedFlushRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.util.CollectionUtils.eagerPartition; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.XContentTestUtils.convertToMap; import static org.elasticsearch.test.XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; /** * {@link ESIntegTestCase} is an abstract base class to run integration * tests against a JVM private Elasticsearch Cluster. The test class supports 2 different * cluster scopes. * <ul> * <li>{@link Scope#TEST} - uses a new cluster for each individual test method.</li> * <li>{@link Scope#SUITE} - uses a cluster shared across all test methods in the same suite</li> * </ul> * <p> * The most common test scope is {@link Scope#SUITE} which shares a cluster per test suite. * <p> * If the test methods need specific node settings or change persistent and/or transient cluster settings {@link Scope#TEST} * should be used. To configure a scope for the test cluster the {@link ClusterScope} annotation * should be used, here is an example: * <pre> * * {@literal @}NodeScope(scope=Scope.TEST) public class SomeIT extends ESIntegTestCase { * public void testMethod() {} * } * </pre> * <p> * If no {@link ClusterScope} annotation is present on an integration test the default scope is {@link Scope#SUITE} * <p> * A test cluster creates a set of nodes in the background before the test starts. The number of nodes in the cluster is * determined at random and can change across tests. The {@link ClusterScope} allows configuring the initial number of nodes * that are created before the tests start. 
* <pre> * {@literal @}NodeScope(scope=Scope.SUITE, numDataNodes=3) * public class SomeIT extends ESIntegTestCase { * public void testMethod() {} * } * </pre> * <p> * Note, the {@link ESIntegTestCase} uses randomized settings on a cluster and index level. For instance * each test might use different directory implementation for each test or will return a random client to one of the * nodes in the cluster for each call to {@link #client()}. Test failures might only be reproducible if the correct * system properties are passed to the test execution environment. * <p> * This class supports the following system properties (passed with -Dkey=value to the application) * <ul> * <li>-D{@value #TESTS_CLIENT_RATIO} - a double value in the interval [0..1] which defines the ration between node and transport clients used</li> * <li>-D{@value #TESTS_ENABLE_MOCK_MODULES} - a boolean value to enable or disable mock modules. This is * useful to test the system without asserting modules that to make sure they don't hide any bugs in production.</li> * <li> - a random seed used to initialize the index random context. * </ul> */ @LuceneTestCase.SuppressFileSystems("ExtrasFS") // doesn't work with potential multi data path from test cluster yet public abstract class ESIntegTestCase extends ESTestCase { /** * Property that controls whether ThirdParty Integration tests are run (not the default). */ public static final String SYSPROP_THIRDPARTY = "tests.thirdparty"; /** * Annotation for third-party integration tests. * <p> * These are tests the require a third-party service in order to run. They * may require the user to manually configure an external process (such as rabbitmq), * or may additionally require some external configuration (e.g. AWS credentials) * via the {@code tests.config} system property. 
*/ @Inherited @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) @TestGroup(enabled = false, sysProperty = ESIntegTestCase.SYSPROP_THIRDPARTY) public @interface ThirdParty { } /** node names of the corresponding clusters will start with these prefixes */ public static final String SUITE_CLUSTER_NODE_PREFIX = "node_s"; public static final String TEST_CLUSTER_NODE_PREFIX = "node_t"; /** * Key used to set the transport client ratio via the commandline -D{@value #TESTS_CLIENT_RATIO} */ public static final String TESTS_CLIENT_RATIO = "tests.client.ratio"; /** * Key used to eventually switch to using an external cluster and provide its transport addresses */ public static final String TESTS_CLUSTER = "tests.cluster"; /** * Key used to retrieve the index random seed from the index settings on a running node. * The value of this seed can be used to initialize a random context for a specific index. * It's set once per test via a generic index template. */ public static final Setting<Long> INDEX_TEST_SEED_SETTING = Setting.longSetting("index.tests.seed", 0, Long.MIN_VALUE, Property.IndexScope); /** * A boolean value to enable or disable mock modules. This is useful to test the * system without asserting modules that to make sure they don't hide any bugs in * production. * * @see ESIntegTestCase */ public static final String TESTS_ENABLE_MOCK_MODULES = "tests.enable_mock_modules"; private static final boolean MOCK_MODULES_ENABLED = "true".equals(System.getProperty(TESTS_ENABLE_MOCK_MODULES, "true")); /** * Threshold at which indexing switches from frequently async to frequently bulk. */ private static final int FREQUENT_BULK_THRESHOLD = 300; /** * Threshold at which bulk indexing will always be used. */ private static final int ALWAYS_BULK_THRESHOLD = 3000; /** * Maximum number of async operations that indexRandom will kick off at one time. 
*/ private static final int MAX_IN_FLIGHT_ASYNC_INDEXES = 150; /** * Maximum number of documents in a single bulk index request. */ private static final int MAX_BULK_INDEX_REQUEST_SIZE = 1000; /** * Default minimum number of shards for an index */ protected static final int DEFAULT_MIN_NUM_SHARDS = 1; /** * Default maximum number of shards for an index */ protected static final int DEFAULT_MAX_NUM_SHARDS = 10; /** * The current cluster depending on the configured {@link Scope}. * By default if no {@link ClusterScope} is configured this will hold a reference to the suite cluster. */ private static TestCluster currentCluster; private static RestClient restClient = null; private static final double TRANSPORT_CLIENT_RATIO = transportClientRatio(); private static final Map<Class<?>, TestCluster> clusters = new IdentityHashMap<>(); private static ESIntegTestCase INSTANCE = null; // see @SuiteScope private static Long SUITE_SEED = null; @BeforeClass public static void beforeClass() throws Exception { SUITE_SEED = randomLong(); initializeSuiteScope(); } @Override protected final boolean enableWarningsCheck() { //In an integ test it doesn't make sense to keep track of warnings: if the cluster is external the warnings are in another jvm, //if the cluster is internal the deprecation logger is shared across all nodes return false; } protected final void beforeInternal() throws Exception { final Scope currentClusterScope = getCurrentClusterScope(); switch (currentClusterScope) { case SUITE: assert SUITE_SEED != null : "Suite seed was not initialized"; currentCluster = buildAndPutCluster(currentClusterScope, SUITE_SEED); break; case TEST: currentCluster = buildAndPutCluster(currentClusterScope, randomLong()); break; default: fail("Unknown Scope: [" + currentClusterScope + "]"); } cluster().beforeTest(random(), getPerTestTransportClientRatio()); cluster().wipe(excludeTemplates()); randomIndexTemplate(); } private void printTestMessage(String message) { if 
(isSuiteScopedTest(getClass()) && (getTestName().equals("<unknown>"))) { logger.info("[{}]: {} suite", getTestClass().getSimpleName(), message); } else { logger.info("[{}#{}]: {} test", getTestClass().getSimpleName(), getTestName(), message); } } /** * Creates a randomized index template. This template is used to pass in randomized settings on a * per index basis. Allows to enable/disable the randomization for number of shards and replicas */ public void randomIndexTemplate() throws IOException { // TODO move settings for random directory etc here into the index based randomized settings. if (cluster().size() > 0) { Settings.Builder randomSettingsBuilder = setRandomIndexSettings(random(), Settings.builder()); if (isInternalCluster()) { // this is only used by mock plugins and if the cluster is not internal we just can't set it randomSettingsBuilder.put(INDEX_TEST_SEED_SETTING.getKey(), random().nextLong()); } randomSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, numberOfShards()) .put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas()); // if the test class is annotated with SuppressCodecs("*"), it means don't use lucene's codec randomization // otherwise, use it, it has assertions and so on that can find bugs. SuppressCodecs annotation = getClass().getAnnotation(SuppressCodecs.class); if (annotation != null && annotation.value().length == 1 && "*".equals(annotation.value()[0])) { randomSettingsBuilder.put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC)); } else { randomSettingsBuilder.put("index.codec", CodecService.LUCENE_DEFAULT_CODEC); } for (String setting : randomSettingsBuilder.keys()) { assertThat("non index. 
prefix setting set on index template, its a node setting...", setting, startsWith("index.")); } // always default delayed allocation to 0 to make sure we have tests are not delayed randomSettingsBuilder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0); if (randomBoolean()) { randomSettingsBuilder.put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), randomBoolean()); } if (randomBoolean()) { randomSettingsBuilder.put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), randomBoolean()); } PutIndexTemplateRequestBuilder putTemplate = client().admin().indices() .preparePutTemplate("random_index_template") .setPatterns(Collections.singletonList("*")) .setOrder(0) .setSettings(randomSettingsBuilder); assertAcked(putTemplate.execute().actionGet()); } } protected Settings.Builder setRandomIndexSettings(Random random, Settings.Builder builder) { setRandomIndexMergeSettings(random, builder); setRandomIndexTranslogSettings(random, builder); if (random.nextBoolean()) { builder.put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), false); } if (random.nextBoolean()) { builder.put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), random.nextBoolean()); } if (random.nextBoolean()) { builder.put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("false", "checksum", "true")); } if (randomBoolean()) { // keep this low so we don't stall tests builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), RandomNumbers.randomIntBetween(random, 1, 15) + "ms"); } return builder; } private static Settings.Builder setRandomIndexMergeSettings(Random random, Settings.Builder builder) { if (random.nextBoolean()) { builder.put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(), (random.nextBoolean() ? 
random.nextDouble() : random.nextBoolean()).toString()); } switch (random.nextInt(4)) { case 3: final int maxThreadCount = RandomNumbers.randomIntBetween(random, 1, 4); final int maxMergeCount = RandomNumbers.randomIntBetween(random, maxThreadCount, maxThreadCount + 4); builder.put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), maxMergeCount); builder.put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), maxThreadCount); break; } return builder; } private static Settings.Builder setRandomIndexTranslogSettings(Random random, Settings.Builder builder) { if (random.nextBoolean()) { builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(RandomNumbers.randomIntBetween(random, 1, 300), ByteSizeUnit.MB)); } if (random.nextBoolean()) { builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush } if (random.nextBoolean()) { builder.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), RandomPicks.randomFrom(random, Translog.Durability.values())); } if (random.nextBoolean()) { builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), RandomNumbers.randomIntBetween(random, 100, 5000), TimeUnit.MILLISECONDS); } return builder; } private TestCluster buildWithPrivateContext(final Scope scope, final long seed) throws Exception { return RandomizedContext.current().runWithPrivateRandomness(seed, new Callable<TestCluster>() { @Override public TestCluster call() throws Exception { return buildTestCluster(scope, seed); } }); } private TestCluster buildAndPutCluster(Scope currentClusterScope, long seed) throws Exception { final Class<?> clazz = this.getClass(); TestCluster testCluster = clusters.remove(clazz); // remove this cluster first clearClusters(); // all leftovers are gone by now... 
this is really just a double safety if we miss something somewhere switch (currentClusterScope) { case SUITE: if (testCluster == null) { // only build if it's not there yet testCluster = buildWithPrivateContext(currentClusterScope, seed); } break; case TEST: // close the previous one and create a new one IOUtils.closeWhileHandlingException(testCluster); testCluster = buildTestCluster(currentClusterScope, seed); break; } clusters.put(clazz, testCluster); return testCluster; } private static void clearClusters() throws IOException { if (!clusters.isEmpty()) { IOUtils.close(clusters.values()); clusters.clear(); } if (restClient != null) { restClient.close(); restClient = null; } } protected final void afterInternal(boolean afterClass) throws Exception { boolean success = false; try { final Scope currentClusterScope = getCurrentClusterScope(); clearDisruptionScheme(); try { if (cluster() != null) { if (currentClusterScope != Scope.TEST) { MetaData metaData = client().admin().cluster().prepareState().execute().actionGet().getState().getMetaData(); final Set<String> persistent = metaData.persistentSettings().keySet(); assertThat("test leaves persistent cluster metadata behind: " + persistent, persistent.size(), equalTo(0)); final Set<String> transientSettings = new HashSet<>(metaData.transientSettings().keySet()); if (isInternalCluster() && internalCluster().getAutoManageMinMasterNode()) { // this is set by the test infra transientSettings.remove(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()); } assertThat("test leaves transient cluster metadata behind: " + transientSettings, transientSettings, empty()); } ensureClusterSizeConsistency(); ensureClusterStateConsistency(); if (isInternalCluster()) { // check no pending cluster states are leaked for (Discovery discovery : internalCluster().getInstances(Discovery.class)) { if (discovery instanceof ZenDiscovery) { final ZenDiscovery zenDiscovery = (ZenDiscovery) discovery; assertBusy(() -> { final 
ClusterState[] states = zenDiscovery.pendingClusterStates(); assertThat(zenDiscovery.clusterState().nodes().getLocalNode().getName() + " still having pending states:\n" + Stream.of(states).map(ClusterState::toString).collect(Collectors.joining("\n")), states, emptyArray()); }); } } } beforeIndexDeletion(); cluster().wipe(excludeTemplates()); // wipe after to make sure we fail in the test that didn't ack the delete if (afterClass || currentClusterScope == Scope.TEST) { cluster().close(); } cluster().assertAfterTest(); } } finally { if (currentClusterScope == Scope.TEST) { clearClusters(); // it is ok to leave persistent / transient cluster state behind if scope is TEST } } success = true; } finally { if (!success) { // if we failed here that means that something broke horribly so we should clear all clusters // TODO: just let the exception happen, WTF is all this horseshit // afterTestRule.forceFailure(); } } } /** * @return An exclude set of index templates that will not be removed in between tests. 
*/
    protected Set<String> excludeTemplates() {
        return Collections.emptySet();
    }

    /**
     * Hook invoked before indices are wiped; delegates to the cluster's pre-deletion checks.
     */
    protected void beforeIndexDeletion() throws Exception {
        cluster().beforeIndexDeletion();
    }

    /**
     * @return the current cluster for the configured scope.
     */
    public static TestCluster cluster() {
        return currentCluster;
    }

    /**
     * @return true if the current cluster is an {@link InternalTestCluster} (i.e. not external).
     */
    public static boolean isInternalCluster() {
        return (currentCluster instanceof InternalTestCluster);
    }

    /**
     * @return the current cluster as an {@link InternalTestCluster}.
     * @throws UnsupportedOperationException if the current cluster is external.
     */
    public static InternalTestCluster internalCluster() {
        if (!isInternalCluster()) {
            throw new UnsupportedOperationException("current test cluster is immutable");
        }
        return (InternalTestCluster) currentCluster;
    }

    /** @return the {@link ClusterService} of a node in the internal cluster. */
    public ClusterService clusterService() {
        return internalCluster().clusterService();
    }

    /** @return a client to a random node of the current cluster. */
    public static Client client() {
        return client(null);
    }

    /**
     * @param node the node to obtain a client for, or null for a random node.
     * @return a client, frequently wrapped in a {@link RandomizingClient}.
     */
    public static Client client(@Nullable String node) {
        if (node != null) {
            return internalCluster().client(node);
        }
        Client client = cluster().client();
        if (frequently()) {
            client = new RandomizingClient(client, random());
        }
        return client;
    }

    /** @return a client to a data node, frequently wrapped in a {@link RandomizingClient}. */
    public static Client dataNodeClient() {
        Client client = internalCluster().dataNodeClient();
        if (frequently()) {
            client = new RandomizingClient(client, random());
        }
        return client;
    }

    /** @return clients to all nodes of the current cluster. */
    public static Iterable<Client> clients() {
        return cluster().getClients();
    }

    /** Lower bound for the randomized number of shards per index. */
    protected int minimumNumberOfShards() {
        return DEFAULT_MIN_NUM_SHARDS;
    }

    /** Upper bound for the randomized number of shards per index. */
    protected int maximumNumberOfShards() {
        return DEFAULT_MAX_NUM_SHARDS;
    }

    /** @return a random shard count between the configured min and max. */
    protected int numberOfShards() {
        return between(minimumNumberOfShards(), maximumNumberOfShards());
    }

    /** Lower bound for the randomized number of replicas per index. */
    protected int minimumNumberOfReplicas() {
        return 0;
    }

    /** Upper bound for the randomized number of replicas, capped by the data node count. */
    protected int maximumNumberOfReplicas() {
        // use either 0 or 1 replica, yet a higher amount when possible, but only rarely
        int maxNumReplicas = Math.max(0, cluster().numDataNodes() - 1);
        return frequently() ?
Math.min(1, maxNumReplicas) : maxNumReplicas;
    }

    /** @return a random replica count between the configured min and max. */
    protected int numberOfReplicas() {
        return between(minimumNumberOfReplicas(), maximumNumberOfReplicas());
    }

    /** Installs the given disruption scheme on the internal cluster. */
    public void setDisruptionScheme(ServiceDisruptionScheme scheme) {
        internalCluster().setDisruptionScheme(scheme);
    }

    /** Removes any active disruption scheme; a no-op for external clusters. */
    public void clearDisruptionScheme() {
        if (isInternalCluster()) {
            internalCluster().clearDisruptionScheme();
        }
    }

    /**
     * Returns a settings object used in {@link #createIndex(String...)} and {@link #prepareCreate(String)} and friends.
     * This method can be overwritten by subclasses to set defaults for the indices that are created by the test.
     * By default it returns a settings object that sets a random number of shards. Number of shards and replicas
     * can be controlled through specific methods.
     */
    public Settings indexSettings() {
        Settings.Builder builder = Settings.builder();
        int numberOfShards = numberOfShards();
        if (numberOfShards > 0) {
            // NOTE(review): the .build() result here is discarded; put(...) alone mutates the builder
            builder.put(SETTING_NUMBER_OF_SHARDS, numberOfShards).build();
        }
        int numberOfReplicas = numberOfReplicas();
        if (numberOfReplicas >= 0) {
            // NOTE(review): discarded .build() here as well
            builder.put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas).build();
        }
        // 30% of the time
        if (randomInt(9) < 3) {
            final String dataPath = randomAlphaOfLength(10);
            logger.info("using custom data_path for index: [{}]", dataPath);
            builder.put(IndexMetaData.SETTING_DATA_PATH, dataPath);
        }
        // always default delayed allocation to 0 to make sure we have tests are not delayed
        builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0);
        return builder.build();
    }

    /**
     * Creates one or more indices and asserts that the indices are acknowledged. If one of the indices
     * already exists this method will fail and wipe all the indices created so far.
     */
    public final void createIndex(String...
names) {
        List<String> created = new ArrayList<>();
        for (String name : names) {
            boolean success = false;
            try {
                assertAcked(prepareCreate(name));
                created.add(name);
                success = true;
            } finally {
                // roll back every index created so far if this one failed
                if (!success && !created.isEmpty()) {
                    cluster().wipeIndices(created.toArray(new String[created.size()]));
                }
            }
        }
    }

    /**
     * creates an index with the given setting
     */
    public final void createIndex(String name, Settings indexSettings) {
        assertAcked(prepareCreate(name).setSettings(indexSettings));
    }

    /**
     * Creates a new {@link CreateIndexRequestBuilder} with the settings obtained from {@link #indexSettings()}.
     */
    public final CreateIndexRequestBuilder prepareCreate(String index) {
        return prepareCreate(index, -1);
    }

    /**
     * Creates a new {@link CreateIndexRequestBuilder} with the settings obtained from {@link #indexSettings()}.
     * The index that is created with this builder will only be allowed to allocate on the number of nodes passed to this
     * method.
     * <p>
     * This method uses allocation deciders to filter out certain nodes to allocate the created index on. It defines allocation
     * rules based on <code>index.routing.allocation.exclude._name</code>.
     * </p>
     */
    public final CreateIndexRequestBuilder prepareCreate(String index, int numNodes) {
        return prepareCreate(index, numNodes, Settings.builder());
    }

    /**
     * Creates a new {@link CreateIndexRequestBuilder} with the settings obtained from {@link #indexSettings()}, augmented
     * by the given builder
     */
    public CreateIndexRequestBuilder prepareCreate(String index, Settings.Builder settingsBuilder) {
        return prepareCreate(index, -1, settingsBuilder);
    }

    /**
     * Creates a new {@link CreateIndexRequestBuilder} with the settings obtained from {@link #indexSettings()}.
     * The index that is created with this builder will only be allowed to allocate on the number of nodes passed to this
     * method.
     * <p>
     * This method uses allocation deciders to filter out certain nodes to allocate the created index on.
It defines allocation
     * rules based on <code>index.routing.allocation.exclude._name</code>.
     * </p>
     */
    public CreateIndexRequestBuilder prepareCreate(String index, int numNodes, Settings.Builder settingsBuilder) {
        Settings.Builder builder = Settings.builder().put(indexSettings()).put(settingsBuilder.build());
        if (numNodes > 0) {
            internalCluster().ensureAtLeastNumDataNodes(numNodes);
            getExcludeSettings(index, numNodes, builder);
        }
        return client().admin().indices().prepareCreate(index).setSettings(builder.build());
    }

    // Adds an allocation-exclude rule so the index may only allocate on `num` data nodes.
    private Settings.Builder getExcludeSettings(String index, int num, Settings.Builder builder) {
        String exclude = String.join(",", internalCluster().allDataNodesButN(num));
        builder.put("index.routing.allocation.exclude._name", exclude);
        return builder;
    }

    /**
     * Waits until all nodes have no pending tasks.
     */
    public void waitNoPendingTasksOnAll() throws Exception {
        assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).get());
        assertBusy(() -> {
            for (Client client : clients()) {
                // check in-flight fetches both before and after the pending-tasks check
                ClusterHealthResponse clusterHealth = client.admin().cluster().prepareHealth().setLocal(true).get();
                assertThat("client " + client + " still has in flight fetch", clusterHealth.getNumberOfInFlightFetch(), equalTo(0));
                PendingClusterTasksResponse pendingTasks = client.admin().cluster().preparePendingClusterTasks().setLocal(true).get();
                assertThat("client " + client + " still has pending tasks " + pendingTasks, pendingTasks, Matchers.emptyIterable());
                clusterHealth = client.admin().cluster().prepareHealth().setLocal(true).get();
                assertThat("client " + client + " still has in flight fetch", clusterHealth.getNumberOfInFlightFetch(), equalTo(0));
            }
        });
        assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).get());
    }

    /**
     * Waits until mappings for the given (pattern) field names concretely exist on all nodes. Note, this waits for the current
     * started shards and checks for concrete mappings.
*/
    public void assertConcreteMappingsOnAll(final String index, final String type, final String... fieldNames) throws Exception {
        Set<String> nodes = internalCluster().nodesInclude(index);
        assertThat(nodes, Matchers.not(Matchers.emptyIterable()));
        for (String node : nodes) {
            IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node);
            IndexService indexService = indicesService.indexService(resolveIndex(index));
            assertThat("index service doesn't exists on " + node, indexService, notNullValue());
            DocumentMapper documentMapper = indexService.mapperService().documentMapper(type);
            assertThat("document mapper doesn't exists on " + node, documentMapper, notNullValue());
            for (String fieldName : fieldNames) {
                Collection<String> matches = documentMapper.mappers().simpleMatchToFullName(fieldName);
                assertThat("field " + fieldName + " doesn't exists on " + node, matches, Matchers.not(emptyIterable()));
            }
        }
        // finally verify the master's view of the mapping as well
        assertMappingOnMaster(index, type, fieldNames);
    }

    /**
     * Waits for the given mapping type to exist on the master node.
     */
    public void assertMappingOnMaster(final String index, final String type, final String...
fieldNames) throws Exception {
        GetMappingsResponse response = client().admin().indices().prepareGetMappings(index).setTypes(type).get();
        ImmutableOpenMap<String, MappingMetaData> mappings = response.getMappings().get(index);
        assertThat(mappings, notNullValue());
        MappingMetaData mappingMetaData = mappings.get(type);
        assertThat(mappingMetaData, notNullValue());

        Map<String, Object> mappingSource = mappingMetaData.getSourceAsMap();
        assertFalse(mappingSource.isEmpty());
        assertTrue(mappingSource.containsKey("properties"));

        for (String fieldName : fieldNames) {
            Map<String, Object> mappingProperties = (Map<String, Object>) mappingSource.get("properties");
            if (fieldName.indexOf('.') != -1) {
                // dotted field names address nested properties in the mapping source
                fieldName = fieldName.replace(".", ".properties.");
            }
            assertThat("field " + fieldName + " doesn't exists in mapping " + mappingMetaData.source().string(),
                XContentMapValues.extractValue(fieldName, mappingProperties), notNullValue());
        }
    }

    /** Ensures the result counts are as expected, and logs the results if different */
    public void assertResultsAndLogOnFailure(long expectedResults, SearchResponse searchResponse) {
        if (searchResponse.getHits().getTotalHits() != expectedResults) {
            StringBuilder sb = new StringBuilder("search result contains [");
            sb.append(searchResponse.getHits().getTotalHits()).append("] results. expected [").append(expectedResults).append("]");
            // the short message is used for fail(); the hit listing below is only logged
            String failMsg = sb.toString();
            for (SearchHit hit : searchResponse.getHits().getHits()) {
                sb.append("\n-> _index: [").append(hit.getIndex()).append("] type [").append(hit.getType())
                    .append("] id [").append(hit.getId()).append("]");
            }
            logger.warn("{}", sb);
            fail(failMsg);
        }
    }

    /**
     * Restricts the given index to be allocated on <code>n</code> nodes using the allocation deciders.
     * Yet if the shards can't be allocated on any other node shards for this index will remain allocated on
     * more than <code>n</code> nodes.
*/
    public void allowNodes(String index, int n) {
        assert index != null;
        internalCluster().ensureAtLeastNumDataNodes(n);
        Settings.Builder builder = Settings.builder();
        if (n > 0) {
            // Populate allocation-exclude settings so that only n nodes remain eligible.
            getExcludeSettings(index, n, builder);
        }
        Settings build = builder.build();
        if (!build.isEmpty()) {
            logger.debug("allowNodes: updating [{}]'s setting to [{}]", index, build.toDelimitedString(';'));
            client().admin().indices().prepareUpdateSettings(index).setSettings(build).execute().actionGet();
        }
    }

    /**
     * Ensures the cluster has a green state via the cluster health API. This method will also wait for relocations.
     * It is useful to ensure that all action on the cluster have finished and all shards that were currently relocating
     * are now allocated and started.
     */
    public ClusterHealthStatus ensureGreen(String... indices) {
        return ensureGreen(TimeValue.timeValueSeconds(30), indices);
    }

    /**
     * Ensures the cluster has a green state via the cluster health API. This method will also wait for relocations.
     * It is useful to ensure that all action on the cluster have finished and all shards that were currently relocating
     * are now allocated and started.
     *
     * @param timeout time out value to set on {@link org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest}
     */
    public ClusterHealthStatus ensureGreen(TimeValue timeout, String... indices) {
        return ensureColor(ClusterHealthStatus.GREEN, timeout, false, indices);
    }

    /**
     * Ensures the cluster has a yellow state via the cluster health API.
     */
    public ClusterHealthStatus ensureYellow(String... indices) {
        return ensureColor(ClusterHealthStatus.YELLOW, TimeValue.timeValueSeconds(30), false, indices);
    }

    /**
     * Ensures the cluster has a yellow state via the cluster health API and ensures the that cluster has no initializing shards
     * for the given indices
     */
    public ClusterHealthStatus ensureYellowAndNoInitializingShards(String... indices) {
        return ensureColor(ClusterHealthStatus.YELLOW, TimeValue.timeValueSeconds(30), true, indices);
    }

    /**
     * Shared implementation behind ensureGreen / ensureYellow: issues a cluster health request that
     * waits for the requested status (and optionally for no initializing shards) and fails the test
     * with a dump of the cluster state on timeout.
     */
    private ClusterHealthStatus ensureColor(ClusterHealthStatus clusterHealthStatus, TimeValue timeout, boolean waitForNoInitializingShards,
                                            String... indices) {
        String color = clusterHealthStatus.name().toLowerCase(Locale.ROOT);
        String method = "ensure" + Strings.capitalize(color);
        ClusterHealthRequest healthRequest = Requests.clusterHealthRequest(indices)
            .timeout(timeout)
            .waitForStatus(clusterHealthStatus)
            .waitForEvents(Priority.LANGUID)
            .waitForNoRelocatingShards(true)
            .waitForNoInitializingShards(waitForNoInitializingShards)
            // We currently often use ensureGreen or ensureYellow to check whether the cluster is back in a good state after shutting down
            // a node. If the node that is stopped is the master node, another node will become master and publish a cluster state where it
            // is master but where the node that was stopped hasn't been removed yet from the cluster state. It will only subsequently
            // publish a second state where the old master is removed. If the ensureGreen/ensureYellow is timed just right, it will get to
            // execute before the second cluster state update removes the old master and the condition ensureGreen / ensureYellow will
            // trivially hold if it held before the node was shut down. The following "waitForNodes" condition ensures that the node has
            // been removed by the master so that the health check applies to the set of nodes we expect to be part of the cluster.
            .waitForNodes(Integer.toString(cluster().size()));
        ClusterHealthResponse actionGet = client().admin().cluster().health(healthRequest).actionGet();
        if (actionGet.isTimedOut()) {
            logger.info("{} timed out, cluster state:\n{}\n{}",
                method,
                client().admin().cluster().prepareState().get().getState(),
                client().admin().cluster().preparePendingClusterTasks().get());
            fail("timed out waiting for " + color + " state");
        }
        // A "better" status (lower ordinal value) than requested is acceptable, e.g. GREEN when YELLOW was asked for.
        assertThat("Expected at least " + clusterHealthStatus + " but got " + actionGet.getStatus(),
            actionGet.getStatus().value(), lessThanOrEqualTo(clusterHealthStatus.value()));
        logger.debug("indices {} are {}", indices.length == 0 ? "[_all]" : indices, color);
        return actionGet.getStatus();
    }

    /**
     * Waits for all relocating shards to become active using the cluster health API.
     */
    public ClusterHealthStatus waitForRelocation() {
        return waitForRelocation(null);
    }

    /**
     * Waits for all relocating shards to become active and the cluster has reached the given health status
     * using the cluster health API.
     */
    public ClusterHealthStatus waitForRelocation(ClusterHealthStatus status) {
        ClusterHealthRequest request = Requests.clusterHealthRequest().waitForNoRelocatingShards(true);
        if (status != null) {
            request.waitForStatus(status);
        }
        ClusterHealthResponse actionGet = client().admin().cluster()
            .health(request).actionGet();
        if (actionGet.isTimedOut()) {
            logger.info("waitForRelocation timed out (status={}), cluster state:\n{}\n{}",
                status,
                client().admin().cluster().prepareState().get().getState(),
                client().admin().cluster().preparePendingClusterTasks().get());
            assertThat("timed out waiting for relocation", actionGet.isTimedOut(), equalTo(false));
        }
        if (status != null) {
            assertThat(actionGet.getStatus(), equalTo(status));
        }
        return actionGet.getStatus();
    }

    /**
     * Waits until at least a given number of documents is visible for searchers
     *
     * @param numDocs number of documents to wait for.
     * @return the actual number of docs seen.
*/ public long waitForDocs(final long numDocs) throws InterruptedException { return waitForDocs(numDocs, null); } /** * Waits until at least a give number of document is visible for searchers * * @param numDocs number of documents to wait for * @param indexer a {@link org.elasticsearch.test.BackgroundIndexer}. If supplied it will be first checked for documents indexed. * This saves on unneeded searches. * @return the actual number of docs seen. */ public long waitForDocs(final long numDocs, @Nullable final BackgroundIndexer indexer) throws InterruptedException { // indexing threads can wait for up to ~1m before retrying when they first try to index into a shard which is not STARTED. return waitForDocs(numDocs, 90, TimeUnit.SECONDS, indexer); } /** * Waits until at least a give number of document is visible for searchers * * @param numDocs number of documents to wait for * @param maxWaitTime if not progress have been made during this time, fail the test * @param maxWaitTimeUnit the unit in which maxWaitTime is specified * @param indexer a {@link org.elasticsearch.test.BackgroundIndexer}. If supplied it will be first checked for documents indexed. * This saves on unneeded searches. * @return the actual number of docs seen. 
*/ public long waitForDocs(final long numDocs, int maxWaitTime, TimeUnit maxWaitTimeUnit, @Nullable final BackgroundIndexer indexer) throws InterruptedException { final AtomicLong lastKnownCount = new AtomicLong(-1); long lastStartCount = -1; BooleanSupplier testDocs = () -> { if (indexer != null) { lastKnownCount.set(indexer.totalIndexedDocs()); } if (lastKnownCount.get() >= numDocs) { try { long count = client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet().getHits().getTotalHits(); if (count == lastKnownCount.get()) { // no progress - try to refresh for the next time client().admin().indices().prepareRefresh().get(); } lastKnownCount.set(count); } catch (Exception e) { // count now acts like search and barfs if all shards failed... logger.debug("failed to executed count", e); return false; } logger.debug("[{}] docs visible for search. waiting for [{}]", lastKnownCount.get(), numDocs); } else { logger.debug("[{}] docs indexed. waiting for [{}]", lastKnownCount.get(), numDocs); } return lastKnownCount.get() >= numDocs; }; while (!awaitBusy(testDocs, maxWaitTime, maxWaitTimeUnit)) { if (lastStartCount == lastKnownCount.get()) { // we didn't make any progress fail("failed to reach " + numDocs + "docs"); } lastStartCount = lastKnownCount.get(); } return lastKnownCount.get(); } /** * Sets the cluster's minimum master node and make sure the response is acknowledge. * Note: this doesn't guarantee that the new setting has taken effect, just that it has been received by all nodes. */ public void setMinimumMasterNodes(int n) { assertTrue(client().admin().cluster().prepareUpdateSettings().setTransientSettings( Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), n)) .get().isAcknowledged()); } /** * Prints the current cluster state as debug logging. 
*/
    public void logClusterState() {
        logger.debug("cluster state:\n{}\n{}",
            client().admin().cluster().prepareState().get().getState(),
            client().admin().cluster().preparePendingClusterTasks().get());
    }

    /**
     * Prints the segments info for the given indices as debug logging.
     */
    public void logSegmentsState(String... indices) throws Exception {
        IndicesSegmentResponse segsRsp = client().admin().indices().prepareSegments(indices).get();
        logger.debug("segments {} state: \n{}", indices.length == 0 ? "[_all]" : indices,
            Strings.toString(segsRsp.toXContent(JsonXContent.contentBuilder().prettyPrint(), ToXContent.EMPTY_PARAMS)));
    }

    /**
     * Prints current memory stats as info logging.
     */
    public void logMemoryStats() {
        logger.info("memory: {}", Strings.toString(client().admin().cluster().prepareNodesStats().clear().setJvm(true).get(), true, true));
    }

    // Verifies that the number of nodes seen through the health API matches the cluster's expected size.
    protected void ensureClusterSizeConsistency() {
        if (cluster() != null && cluster().size() > 0) { // if static init fails the cluster can be null
            logger.trace("Check consistency for [{}] nodes", cluster().size());
            assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(cluster().size())).get());
        }
    }

    /**
     * Verifies that all nodes that have the same version of the cluster state as master have same cluster state
     */
    protected void ensureClusterStateConsistency() throws IOException {
        if (cluster() != null && cluster().size() > 0) {
            final NamedWriteableRegistry namedWriteableRegistry = cluster().getNamedWriteableRegistry();
            ClusterState masterClusterState = client().admin().cluster().prepareState().all().get().getState();
            byte[] masterClusterStateBytes = ClusterState.Builder.toBytes(masterClusterState);
            // remove local node reference
            masterClusterState = ClusterState.Builder.fromBytes(masterClusterStateBytes, null, namedWriteableRegistry);
            Map<String, Object> masterStateMap = convertToMap(masterClusterState);
            int masterClusterStateSize = ClusterState.Builder.toBytes(masterClusterState).length;
            String masterId = masterClusterState.nodes().getMasterNodeId();
            for (Client client : cluster().getClients()) {
                // setLocal(true): read each node's own copy of the state rather than forwarding to the master.
                ClusterState localClusterState = client.admin().cluster().prepareState().all().setLocal(true).get().getState();
                byte[] localClusterStateBytes = ClusterState.Builder.toBytes(localClusterState);
                // remove local node reference
                localClusterState = ClusterState.Builder.fromBytes(localClusterStateBytes, null, namedWriteableRegistry);
                final Map<String, Object> localStateMap = convertToMap(localClusterState);
                final int localClusterStateSize = ClusterState.Builder.toBytes(localClusterState).length;
                // Check that the non-master node has the same version of the cluster state as the master and
                // that the master node matches the master (otherwise there is no requirement for the cluster state to match)
                if (masterClusterState.version() == localClusterState.version()
                    && masterId.equals(localClusterState.nodes().getMasterNodeId())) {
                    try {
                        assertEquals("clusterstate UUID does not match", masterClusterState.stateUUID(), localClusterState.stateUUID());
                        // We cannot compare serialization bytes since serialization order of maps is not guaranteed
                        // but we can compare serialization sizes - they should be the same
                        assertEquals("clusterstate size does not match", masterClusterStateSize, localClusterStateSize);
                        // Compare JSON serialization
                        assertNull("clusterstate JSON serialization does not match",
                            differenceBetweenMapsIgnoringArrayOrder(masterStateMap, localStateMap));
                    } catch (AssertionError error) {
                        logger.error("Cluster state from master:\n{}\nLocal cluster state:\n{}",
                            masterClusterState.toString(), localClusterState.toString());
                        throw error;
                    }
                }
            }
        }
    }

    /**
     * Ensures the cluster is in a searchable state for the given indices. This means a searchable copy of each
     * shard is available on the cluster.
     */
    protected ClusterHealthStatus ensureSearchable(String... indices) {
        // this is just a temporary thing but it's easier to change if it is encapsulated.
        return ensureGreen(indices);
    }

    protected void ensureStableCluster(int nodeCount) {
        ensureStableCluster(nodeCount, TimeValue.timeValueSeconds(30));
    }

    protected void ensureStableCluster(int nodeCount, TimeValue timeValue) {
        ensureStableCluster(nodeCount, timeValue, false, null);
    }

    protected void ensureStableCluster(int nodeCount, @Nullable String viaNode) {
        ensureStableCluster(nodeCount, TimeValue.timeValueSeconds(30), false, viaNode);
    }

    // Waits until the cluster seen from viaNode (or a random node) contains exactly nodeCount nodes
    // with no relocating shards, then verifies full node-to-node connectivity.
    protected void ensureStableCluster(int nodeCount, TimeValue timeValue, boolean local, @Nullable String viaNode) {
        if (viaNode == null) {
            viaNode = randomFrom(internalCluster().getNodeNames());
        }
        logger.debug("ensuring cluster is stable with [{}] nodes. access node: [{}]. timeout: [{}]", nodeCount, viaNode, timeValue);
        ClusterHealthResponse clusterHealthResponse = client(viaNode).admin().cluster().prepareHealth()
            .setWaitForEvents(Priority.LANGUID)
            .setWaitForNodes(Integer.toString(nodeCount))
            .setTimeout(timeValue)
            .setLocal(local)
            .setWaitForNoRelocatingShards(true)
            .get();
        if (clusterHealthResponse.isTimedOut()) {
            ClusterStateResponse stateResponse = client(viaNode).admin().cluster().prepareState().get();
            fail("failed to reach a stable cluster of [" + nodeCount + "] nodes. Tried via [" + viaNode + "]. last cluster state:\n"
                + stateResponse.getState());
        }
        assertThat(clusterHealthResponse.isTimedOut(), is(false));
        ensureFullyConnectedCluster();
    }

    /**
     * Ensures that all nodes in the cluster are connected to each other.
     *
     * Some network disruptions may leave nodes that are not the master disconnected from each other.
     * {@link org.elasticsearch.cluster.NodeConnectionsService} will eventually reconnect but it's
     * handy to be able to ensure this happens faster
     */
    protected void ensureFullyConnectedCluster() {
        NetworkDisruption.ensureFullyConnectedCluster(internalCluster());
    }

    /**
     * Syntactic sugar for:
     * <pre>
     *   client().prepareIndex(index, type).setSource(source).execute().actionGet();
     * </pre>
     */
    protected final IndexResponse index(String index, String type, XContentBuilder source) {
        return client().prepareIndex(index, type).setSource(source).execute().actionGet();
    }

    /**
     * Syntactic sugar for:
     * <pre>
     *   client().prepareIndex(index, type).setSource(source).execute().actionGet();
     * </pre>
     */
    protected final IndexResponse index(String index, String type, String id, Map<String, Object> source) {
        return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
    }

    /**
     * Syntactic sugar for:
     * <pre>
     *   client().prepareGet(index, type, id).execute().actionGet();
     * </pre>
     */
    protected final GetResponse get(String index, String type, String id) {
        return client().prepareGet(index, type, id).execute().actionGet();
    }

    /**
     * Syntactic sugar for:
     * <pre>
     *   return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
     * </pre>
     */
    protected final IndexResponse index(String index, String type, String id, XContentBuilder source) {
        return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
    }

    /**
     * Syntactic sugar for:
     * <pre>
     *   return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
     * </pre>
     */
    protected final IndexResponse index(String index, String type, String id, Object... source) {
        return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
    }

    /**
     * Syntactic sugar for:
     * <pre>
     *   return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
     * </pre>
     * <p>
     * where source is a JSON String.
     */
    protected final IndexResponse index(String index, String type, String id, String source) {
        return client().prepareIndex(index, type, id).setSource(source, XContentType.JSON).execute().actionGet();
    }

    /**
     * Waits for relocations and refreshes all indices in the cluster.
     *
     * @see #waitForRelocation()
     */
    protected final RefreshResponse refresh(String... indices) {
        waitForRelocation();
        // TODO RANDOMIZE with flush?
        RefreshResponse actionGet = client().admin().indices().prepareRefresh(indices).execute().actionGet();
        assertNoFailures(actionGet);
        return actionGet;
    }

    /**
     * Flushes and refreshes all indices in the cluster
     */
    protected final void flushAndRefresh(String... indices) {
        flush(indices);
        refresh(indices);
    }

    /**
     * Flush some or all indices in the cluster.
     */
    protected final FlushResponse flush(String... indices) {
        waitForRelocation();
        FlushResponse actionGet = client().admin().indices().prepareFlush(indices).execute().actionGet();
        for (DefaultShardOperationFailedException failure : actionGet.getShardFailures()) {
            // SERVICE_UNAVAILABLE failures are tolerated (shard temporarily unavailable); anything else fails the test.
            assertThat("unexpected flush failure " + failure.reason(), failure.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
        }
        return actionGet;
    }

    /**
     * Waits for all relocations and force merge all indices in the cluster to 1 segment.
     */
    protected ForceMergeResponse forceMerge() {
        waitForRelocation();
        ForceMergeResponse actionGet = client().admin().indices().prepareForceMerge().setMaxNumSegments(1).execute().actionGet();
        assertNoFailures(actionGet);
        return actionGet;
    }

    /**
     * Returns <code>true</code> iff the given index exists otherwise <code>false</code>
     */
    protected boolean indexExists(String index) {
        IndicesExistsResponse actionGet = client().admin().indices().prepareExists(index).execute().actionGet();
        return actionGet.isExists();
    }

    /**
     * Syntactic sugar for enabling allocation for <code>indices</code>
     */
    protected final void enableAllocation(String... indices) {
        client().admin().indices().prepareUpdateSettings(indices).setSettings(Settings.builder().put(
            EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all"
        )).get();
    }

    /**
     * Syntactic sugar for disabling allocation for <code>indices</code>
     */
    protected final void disableAllocation(String... indices) {
        client().admin().indices().prepareUpdateSettings(indices).setSettings(Settings.builder().put(
            EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"
        )).get();
    }

    /**
     * Returns a random admin client. This client can either be a node or a transport client pointing to any of
     * the nodes in the cluster.
     */
    protected AdminClient admin() {
        return client().admin();
    }

    /**
     * Convenience method that forwards to {@link #indexRandom(boolean, List)}.
     */
    public void indexRandom(boolean forceRefresh, IndexRequestBuilder... builders) throws InterruptedException, ExecutionException {
        indexRandom(forceRefresh, Arrays.asList(builders));
    }

    public void indexRandom(boolean forceRefresh, boolean dummyDocuments, IndexRequestBuilder... builders)
        throws InterruptedException, ExecutionException {
        indexRandom(forceRefresh, dummyDocuments, Arrays.asList(builders));
    }

    /**
     * Indexes the given {@link IndexRequestBuilder} instances randomly. It shuffles the given builders and either
     * indexes them in a blocking or async fashion. This is very useful to catch problems that relate to internal document
     * ids or index segment creations. Some features might have bug when a given document is the first or the last in a
     * segment or if only one document is in a segment etc. This method prevents issues like this by randomizing the index
     * layout.
     *
     * @param forceRefresh if <tt>true</tt> all involved indices are refreshed once the documents are indexed. Additionally if <tt>true</tt>
     *                     some empty dummy documents may be randomly inserted into the document list and deleted once all documents are indexed.
* This is useful to produce deleted documents on the server side. * @param builders the documents to index. * @see #indexRandom(boolean, boolean, java.util.List) */ public void indexRandom(boolean forceRefresh, List<IndexRequestBuilder> builders) throws InterruptedException, ExecutionException { indexRandom(forceRefresh, forceRefresh, builders); } /** * Indexes the given {@link IndexRequestBuilder} instances randomly. It shuffles the given builders and either * indexes them in a blocking or async fashion. This is very useful to catch problems that relate to internal document * ids or index segment creations. Some features might have bug when a given document is the first or the last in a * segment or if only one document is in a segment etc. This method prevents issues like this by randomizing the index * layout. * * @param forceRefresh if <tt>true</tt> all involved indices are refreshed once the documents are indexed. * @param dummyDocuments if <tt>true</tt> some empty dummy documents may be randomly inserted into the document list and deleted once * all documents are indexed. This is useful to produce deleted documents on the server side. * @param builders the documents to index. */ public void indexRandom(boolean forceRefresh, boolean dummyDocuments, List<IndexRequestBuilder> builders) throws InterruptedException, ExecutionException { indexRandom(forceRefresh, dummyDocuments, true, builders); } /** * Indexes the given {@link IndexRequestBuilder} instances randomly. It shuffles the given builders and either * indexes them in a blocking or async fashion. This is very useful to catch problems that relate to internal document * ids or index segment creations. Some features might have bug when a given document is the first or the last in a * segment or if only one document is in a segment etc. This method prevents issues like this by randomizing the index * layout. 
     *
     * @param forceRefresh   if <tt>true</tt> all involved indices are refreshed once the documents are indexed.
     * @param dummyDocuments if <tt>true</tt> some empty dummy documents may be randomly inserted into the document list and deleted once
     *                       all documents are indexed. This is useful to produce deleted documents on the server side.
     * @param maybeFlush     if <tt>true</tt> this method may randomly execute full flushes after index operations.
     * @param builders       the documents to index.
     */
    public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean maybeFlush, List<IndexRequestBuilder> builders)
        throws InterruptedException, ExecutionException {
        Random random = random();
        // Collect the (index -> types) universe of the supplied builders; bogus docs are drawn from this set.
        Map<String, Set<String>> indicesAndTypes = new HashMap<>();
        for (IndexRequestBuilder builder : builders) {
            final Set<String> types = indicesAndTypes.computeIfAbsent(builder.request().index(), index -> new HashSet<>());
            types.add(builder.request().type());
        }
        Set<List<String>> bogusIds = new HashSet<>(); // (index, type, id)
        if (random.nextBoolean() && !builders.isEmpty() && dummyDocuments) {
            builders = new ArrayList<>(builders);
            // inject some bogus docs
            final int numBogusDocs = scaledRandomIntBetween(1, builders.size() * 2);
            final int unicodeLen = between(1, 10);
            for (int i = 0; i < numBogusDocs; i++) {
                String id = "bogus_doc_" + randomRealisticUnicodeOfLength(unicodeLen)
                    + Integer.toString(dummmyDocIdGenerator.incrementAndGet());
                Map.Entry<String, Set<String>> indexAndTypes = RandomPicks.randomFrom(random, indicesAndTypes.entrySet());
                String index = indexAndTypes.getKey();
                String type = RandomPicks.randomFrom(random, indexAndTypes.getValue());
                bogusIds.add(Arrays.asList(index, type, id));
                // We configure a routing key in case the mapping requires it
                builders.add(client().prepareIndex(index, type, id).setSource("{}", XContentType.JSON).setRouting(id));
            }
        }
        Collections.shuffle(builders, random());
        final CopyOnWriteArrayList<Tuple<IndexRequestBuilder, Exception>> errors = new CopyOnWriteArrayList<>();
        List<CountDownLatch> inFlightAsyncOperations = new ArrayList<>();
        // If you are indexing just a few documents then frequently do it one at a time. If many then frequently in bulk.
        final String[] indices = indicesAndTypes.keySet().toArray(new String[0]);
        if (builders.size() < FREQUENT_BULK_THRESHOLD ? frequently() : builders.size() < ALWAYS_BULK_THRESHOLD ? rarely() : false) {
            if (frequently()) {
                // one-at-a-time, asynchronous: failures are collected in `errors` via the payload listener
                logger.info("Index [{}] docs async: [{}] bulk: [{}]", builders.size(), true, false);
                for (IndexRequestBuilder indexRequestBuilder : builders) {
                    indexRequestBuilder.execute(new PayloadLatchedActionListener<IndexResponse, IndexRequestBuilder>(
                        indexRequestBuilder, newLatch(inFlightAsyncOperations), errors));
                    postIndexAsyncActions(indices, inFlightAsyncOperations, maybeFlush);
                }
            } else {
                // one-at-a-time, blocking
                logger.info("Index [{}] docs async: [{}] bulk: [{}]", builders.size(), false, false);
                for (IndexRequestBuilder indexRequestBuilder : builders) {
                    indexRequestBuilder.execute().actionGet();
                    postIndexAsyncActions(indices, inFlightAsyncOperations, maybeFlush);
                }
            }
        } else {
            // bulk path: randomly-sized partitions, capped at MAX_BULK_INDEX_REQUEST_SIZE
            List<List<IndexRequestBuilder>> partition = eagerPartition(builders,
                Math.min(MAX_BULK_INDEX_REQUEST_SIZE, Math.max(1, (int) (builders.size() * randomDouble()))));
            logger.info("Index [{}] docs async: [{}] bulk: [{}] partitions [{}]", builders.size(), false, true, partition.size());
            for (List<IndexRequestBuilder> segmented : partition) {
                BulkRequestBuilder bulkBuilder = client().prepareBulk();
                for (IndexRequestBuilder indexRequestBuilder : segmented) {
                    bulkBuilder.add(indexRequestBuilder);
                }
                BulkResponse actionGet = bulkBuilder.execute().actionGet();
                assertThat(actionGet.hasFailures() ? actionGet.buildFailureMessage() : "", actionGet.hasFailures(), equalTo(false));
            }
        }
        // Wait for every async indexing / refresh / flush / merge operation started above to complete.
        for (CountDownLatch operation : inFlightAsyncOperations) {
            operation.await();
        }
        final List<Exception> actualErrors = new ArrayList<>();
        for (Tuple<IndexRequestBuilder, Exception> tuple : errors) {
            if (ExceptionsHelper.unwrapCause(tuple.v2()) instanceof EsRejectedExecutionException) {
                tuple.v1().execute().actionGet(); // re-index if rejected
            } else {
                actualErrors.add(tuple.v2());
            }
        }
        assertThat(actualErrors, emptyIterable());
        if (!bogusIds.isEmpty()) {
            // delete the bogus types again - it might trigger merges or at least holes in the segments and enforces deleted docs!
            for (List<String> doc : bogusIds) {
                assertEquals("failed to delete a dummy doc [" + doc.get(0) + "][" + doc.get(2) + "]",
                    DocWriteResponse.Result.DELETED,
                    client().prepareDelete(doc.get(0), doc.get(1), doc.get(2)).setRouting(doc.get(2)).get().getResult());
            }
        }
        if (forceRefresh) {
            assertNoFailures(client().admin().indices().prepareRefresh(indices)
                .setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get());
        }
    }

    // Monotonic suffix for bogus document ids (note: field name carries a historical triple-m typo).
    private AtomicInteger dummmyDocIdGenerator = new AtomicInteger();

    /** Disables an index block for the specified index */
    public static void disableIndexBlock(String index, String block) {
        Settings settings = Settings.builder().put(block, false).build();
        client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get();
    }

    /** Enables an index block for the specified index */
    public static void enableIndexBlock(String index, String block) {
        Settings settings = Settings.builder().put(block, true).build();
        client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get();
    }

    /** Sets or unsets the cluster read_only mode **/
    public static void setClusterReadOnly(boolean value) {
        // Unsetting uses putNull so the transient setting is removed rather than set to false.
        Settings settings = value ?
            Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), value).build() :
            Settings.builder().putNull(MetaData.SETTING_READ_ONLY_SETTING.getKey()).build() ;
        assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get());
    }

    // Creates a new one-shot latch and registers it so indexRandom can await all in-flight async operations.
    private static CountDownLatch newLatch(List<CountDownLatch> latches) {
        CountDownLatch l = new CountDownLatch(1);
        latches.add(l);
        return l;
    }

    /**
     * Maybe refresh, force merge, or flush then always make sure there aren't too many in flight async operations.
     */
    private void postIndexAsyncActions(String[] indices, List<CountDownLatch> inFlightAsyncOperations, boolean maybeFlush)
        throws InterruptedException {
        if (rarely()) {
            if (rarely()) {
                client().admin().indices().prepareRefresh(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute(
                    new LatchedActionListener<>(newLatch(inFlightAsyncOperations)));
            } else if (maybeFlush && rarely()) {
                if (randomBoolean()) {
                    client().admin().indices().prepareFlush(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute(
                        new LatchedActionListener<>(newLatch(inFlightAsyncOperations)));
                } else {
                    client().admin().indices().syncedFlush(syncedFlushRequest(indices).indicesOptions(IndicesOptions.lenientExpandOpen()),
                        new LatchedActionListener<>(newLatch(inFlightAsyncOperations)));
                }
            } else if (rarely()) {
                client().admin().indices().prepareForceMerge(indices)
                    .setIndicesOptions(IndicesOptions.lenientExpandOpen())
                    .setMaxNumSegments(between(1, 10))
                    .setFlush(maybeFlush && randomBoolean())
                    .execute(new LatchedActionListener<>(newLatch(inFlightAsyncOperations)));
            }
        }
        // Back-pressure: never allow more than MAX_IN_FLIGHT_ASYNC_INDEXES pending async operations.
        while (inFlightAsyncOperations.size() > MAX_IN_FLIGHT_ASYNC_INDEXES) {
            int waitFor = between(0, inFlightAsyncOperations.size() - 1);
            inFlightAsyncOperations.remove(waitFor).await();
        }
    }

    /**
     * The scope of a test cluster used together with
     * {@link ESIntegTestCase.ClusterScope} annotations on {@link ESIntegTestCase} subclasses.
*/
    public enum Scope {
        /**
         * A cluster shared across all method in a single test suite
         */
        SUITE,
        /**
         * A test exclusive test cluster
         */
        TEST
    }

    /**
     * Defines a cluster scope for a {@link ESIntegTestCase} subclass.
     * By default if no {@link ClusterScope} annotation is present {@link ESIntegTestCase.Scope#SUITE} is used
     * together with randomly chosen settings like number of nodes etc.
     */
    @Retention(RetentionPolicy.RUNTIME)
    @Target({ElementType.TYPE})
    public @interface ClusterScope {
        /**
         * Returns the scope. {@link ESIntegTestCase.Scope#SUITE} is default.
         */
        Scope scope() default Scope.SUITE;

        /**
         * Returns the number of nodes in the cluster. Default is <tt>-1</tt> which means
         * a random number of nodes is used, where the minimum and maximum number of nodes
         * are either the specified ones or the default ones if not specified.
         */
        int numDataNodes() default -1;

        /**
         * Returns the minimum number of data nodes in the cluster. Default is <tt>-1</tt>.
         * Ignored when {@link ClusterScope#numDataNodes()} is set.
         */
        int minNumDataNodes() default -1;

        /**
         * Returns the maximum number of data nodes in the cluster. Default is <tt>-1</tt>.
         * Ignored when {@link ClusterScope#numDataNodes()} is set.
         */
        int maxNumDataNodes() default -1;

        /**
         * Indicates whether the cluster can have dedicated master nodes. If <tt>false</tt> means data nodes will serve as master nodes
         * and there will be no dedicated master (and data) nodes. Default is <tt>true</tt> which means
         * dedicated master nodes will be randomly used.
         */
        boolean supportsDedicatedMasters() default true;

        /**
         * The cluster automatically manages the {@link ElectMasterService#DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING} by default
         * as nodes are started and stopped. Set this to false to manage the setting manually.
         */
        boolean autoMinMasterNodes() default true;

        /**
         * Returns the number of client nodes in the cluster. Default is {@link InternalTestCluster#DEFAULT_NUM_CLIENT_NODES}, a
         * negative value means that the number of client nodes will be randomized.
         */
        int numClientNodes() default InternalTestCluster.DEFAULT_NUM_CLIENT_NODES;

        /**
         * Returns the transport client ratio. By default this returns <code>-1</code> which means a random
         * ratio in the interval <code>[0..1]</code> is used.
         */
        double transportClientRatio() default -1;
    }

    // Counts down its latch on completion; failures are logged and forwarded to addError() before the latch fires.
    private class LatchedActionListener<Response> implements ActionListener<Response> {
        private final CountDownLatch latch;

        LatchedActionListener(CountDownLatch latch) {
            this.latch = latch;
        }

        @Override
        public final void onResponse(Response response) {
            latch.countDown();
        }

        @Override
        public final void onFailure(Exception t) {
            try {
                logger.info("Action Failed", t);
                addError(t);
            } finally {
                // Always release the latch, even if addError throws, so awaiting code can't hang.
                latch.countDown();
            }
        }

        // Hook for subclasses; no-op by default.
        protected void addError(Exception e) {
        }
    }

    // LatchedActionListener that records each failure together with the request (payload) that caused it.
    private class PayloadLatchedActionListener<Response, T> extends LatchedActionListener<Response> {
        private final CopyOnWriteArrayList<Tuple<T, Exception>> errors;
        private final T builder;

        PayloadLatchedActionListener(T builder, CountDownLatch latch, CopyOnWriteArrayList<Tuple<T, Exception>> errors) {
            super(latch);
            this.errors = errors;
            this.builder = builder;
        }

        @Override
        protected void addError(Exception e) {
            errors.add(new Tuple<>(builder, e));
        }
    }

    /**
     * Clears the given scroll Ids
     */
    public void clearScroll(String...
scrollIds) { ClearScrollResponse clearResponse = client().prepareClearScroll() .setScrollIds(Arrays.asList(scrollIds)).get(); assertThat(clearResponse.isSucceeded(), equalTo(true)); } private static <A extends Annotation> A getAnnotation(Class<?> clazz, Class<A> annotationClass) { if (clazz == Object.class || clazz == ESIntegTestCase.class) { return null; } A annotation = clazz.getAnnotation(annotationClass); if (annotation != null) { return annotation; } return getAnnotation(clazz.getSuperclass(), annotationClass); } private Scope getCurrentClusterScope() { return getCurrentClusterScope(this.getClass()); } private static Scope getCurrentClusterScope(Class<?> clazz) { ClusterScope annotation = getAnnotation(clazz, ClusterScope.class); // if we are not annotated assume suite! return annotation == null ? Scope.SUITE : annotation.scope(); } private boolean getSupportsDedicatedMasters() { ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class); return annotation == null ? true : annotation.supportsDedicatedMasters(); } private boolean getAutoMinMasterNodes() { ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class); return annotation == null ? true : annotation.autoMinMasterNodes(); } private int getNumDataNodes() { ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class); return annotation == null ? -1 : annotation.numDataNodes(); } private int getMinNumDataNodes() { ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class); return annotation == null || annotation.minNumDataNodes() == -1 ? InternalTestCluster.DEFAULT_MIN_NUM_DATA_NODES : annotation.minNumDataNodes(); } private int getMaxNumDataNodes() { ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class); return annotation == null || annotation.maxNumDataNodes() == -1 ? 
InternalTestCluster.DEFAULT_MAX_NUM_DATA_NODES : annotation.maxNumDataNodes(); } private int getNumClientNodes() { ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class); return annotation == null ? InternalTestCluster.DEFAULT_NUM_CLIENT_NODES : annotation.numClientNodes(); } /** * This method is used to obtain settings for the <tt>Nth</tt> node in the cluster. * Nodes in this cluster are associated with an ordinal number such that nodes can * be started with specific configurations. This method might be called multiple * times with the same ordinal and is expected to return the same value for each invocation. * In other words subclasses must ensure this method is idempotent. */ protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder() .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), Integer.MAX_VALUE) // Default the watermarks to absurdly low to prevent the tests // from failing on nodes without enough disk space .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b") .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b") .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "1b") .put(ScriptService.SCRIPT_MAX_COMPILATIONS_RATE.getKey(), "2048/1m") // by default we never cache below 10k docs in a segment, // bypass this limit so that caching gets some testing in // integration tests that usually create few documents .put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), nodeOrdinal % 2 == 0) // wait short time for other active shards before actually deleting, default 30s not needed in tests .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT.getKey(), new TimeValue(1, TimeUnit.SECONDS)) // randomly enable low-level search cancellation to make sure it does not alter results .put(SearchService.LOW_LEVEL_CANCELLATION_SETTING.getKey(), 
randomBoolean()); if (rarely()) { // Sometimes adjust the minimum search thread pool size, causing // QueueResizingEsThreadPoolExecutor to be used instead of a regular // fixed thread pool builder.put("thread_pool.search.min_queue_size", 100); } return builder.build(); } protected Path nodeConfigPath(int nodeOrdinal) { return null; } /** * Returns a collection of plugins that should be loaded on each node. */ protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.emptyList(); } /** * Returns a collection of plugins that should be loaded when creating a transport client. */ protected Collection<Class<? extends Plugin>> transportClientPlugins() { return Collections.emptyList(); } /** * This method is used to obtain additional settings for clients created by the internal cluster. * These settings will be applied on the client in addition to some randomized settings defined in * the cluster. These settings will also override any other settings the internal cluster might * add by default. 
*/ protected Settings transportClientSettings() { return Settings.EMPTY; } private ExternalTestCluster buildExternalCluster(String clusterAddresses) throws IOException { String[] stringAddresses = clusterAddresses.split(","); TransportAddress[] transportAddresses = new TransportAddress[stringAddresses.length]; int i = 0; for (String stringAddress : stringAddresses) { URL url = new URL("http://" + stringAddress); InetAddress inetAddress = InetAddress.getByName(url.getHost()); transportAddresses[i++] = new TransportAddress(new InetSocketAddress(inetAddress, url.getPort())); } return new ExternalTestCluster(createTempDir(), externalClusterClientSettings(), transportClientPlugins(), transportAddresses); } protected Settings externalClusterClientSettings() { return Settings.EMPTY; } protected boolean ignoreExternalCluster() { return false; } protected TestCluster buildTestCluster(Scope scope, long seed) throws IOException { String clusterAddresses = System.getProperty(TESTS_CLUSTER); if (Strings.hasLength(clusterAddresses) && ignoreExternalCluster() == false) { if (scope == Scope.TEST) { throw new IllegalArgumentException("Cannot run TEST scope test with " + TESTS_CLUSTER); } return buildExternalCluster(clusterAddresses); } final String nodePrefix; switch (scope) { case TEST: nodePrefix = TEST_CLUSTER_NODE_PREFIX; break; case SUITE: nodePrefix = SUITE_CLUSTER_NODE_PREFIX; break; default: throw new ElasticsearchException("Scope not supported: " + scope); } boolean supportsDedicatedMasters = getSupportsDedicatedMasters(); int numDataNodes = getNumDataNodes(); int minNumDataNodes; int maxNumDataNodes; if (numDataNodes >= 0) { minNumDataNodes = maxNumDataNodes = numDataNodes; } else { minNumDataNodes = getMinNumDataNodes(); maxNumDataNodes = getMaxNumDataNodes(); } Collection<Class<? extends Plugin>> mockPlugins = getMockPlugins(); final NodeConfigurationSource nodeConfigurationSource = getNodeConfigSource(); if (addMockTransportService()) { ArrayList<Class<? 
extends Plugin>> mocks = new ArrayList<>(mockPlugins); // add both mock plugins - local and tcp if they are not there // we do this in case somebody overrides getMockPlugins and misses to call super if (mockPlugins.contains(getTestTransportPlugin()) == false) { mocks.add(getTestTransportPlugin()); } mockPlugins = mocks; } return new InternalTestCluster(seed, createTempDir(), supportsDedicatedMasters, getAutoMinMasterNodes(), minNumDataNodes, maxNumDataNodes, InternalTestCluster.clusterName(scope.name(), seed) + "-cluster", nodeConfigurationSource, getNumClientNodes(), InternalTestCluster.DEFAULT_ENABLE_HTTP_PIPELINING, nodePrefix, mockPlugins, getClientWrapper()); } protected NodeConfigurationSource getNodeConfigSource() { Settings.Builder networkSettings = Settings.builder(); if (addMockTransportService()) { networkSettings.put(NetworkModule.TRANSPORT_TYPE_KEY, getTestTransportType()); } NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(NetworkModule.HTTP_ENABLED.getKey(), false) .put(networkSettings.build()) .put(ESIntegTestCase.this.nodeSettings(nodeOrdinal)).build(); } @Override public Path nodeConfigPath(int nodeOrdinal) { return ESIntegTestCase.this.nodeConfigPath(nodeOrdinal); } @Override public Collection<Class<? extends Plugin>> nodePlugins() { return ESIntegTestCase.this.nodePlugins(); } @Override public Settings transportClientSettings() { return Settings.builder().put(networkSettings.build()) .put(ESIntegTestCase.this.transportClientSettings()).build(); } @Override public Collection<Class<? extends Plugin>> transportClientPlugins() { Collection<Class<? 
extends Plugin>> plugins = ESIntegTestCase.this.transportClientPlugins(); if (plugins.contains(getTestTransportPlugin()) == false) { plugins = new ArrayList<>(plugins); plugins.add(getTestTransportPlugin()); } return Collections.unmodifiableCollection(plugins); } }; return nodeConfigurationSource; } /** * Iff this returns true mock transport implementations are used for the test runs. Otherwise not mock transport impls are used. * The default is <tt>true</tt> */ protected boolean addMockTransportService() { return true; } /** * Iff this returns true test zen discovery implementations is used for the test runs. * The default is <tt>true</tt> */ protected boolean addTestZenDiscovery() { return true; } /** * Returns a function that allows to wrap / filter all clients that are exposed by the test cluster. This is useful * for debugging or request / response pre and post processing. It also allows to intercept all calls done by the test * framework. By default this method returns an identity function {@link Function#identity()}. */ protected Function<Client,Client> getClientWrapper() { return Function.identity(); } /** Return the mock plugins the cluster should use */ protected Collection<Class<? extends Plugin>> getMockPlugins() { final ArrayList<Class<? 
extends Plugin>> mocks = new ArrayList<>(); if (MOCK_MODULES_ENABLED && randomBoolean()) { // sometimes run without those completely if (randomBoolean() && addMockTransportService()) { mocks.add(MockTransportService.TestPlugin.class); } if (randomBoolean()) { mocks.add(MockFSIndexStore.TestPlugin.class); } if (randomBoolean()) { mocks.add(NodeMocksPlugin.class); } if (randomBoolean()) { mocks.add(MockEngineFactoryPlugin.class); } if (randomBoolean()) { mocks.add(MockSearchService.TestPlugin.class); } if (randomBoolean()) { mocks.add(AssertingTransportInterceptor.TestPlugin.class); } if (randomBoolean()) { mocks.add(MockFieldFilterPlugin.class); } } if (addMockTransportService()) { mocks.add(getTestTransportPlugin()); } if (addTestZenDiscovery()) { mocks.add(TestZenDiscovery.TestPlugin.class); } mocks.add(TestSeedPlugin.class); return Collections.unmodifiableList(mocks); } public static final class TestSeedPlugin extends Plugin { @Override public List<Setting<?>> getSettings() { return Arrays.asList(INDEX_TEST_SEED_SETTING); } } /** * Returns the client ratio configured via */ private static double transportClientRatio() { String property = System.getProperty(TESTS_CLIENT_RATIO); if (property == null || property.isEmpty()) { return Double.NaN; } return Double.parseDouble(property); } /** * Returns the transport client ratio from the class level annotation or via * {@link System#getProperty(String)} if available. If both are not available this will * return a random ratio in the interval <tt>[0..1]</tt> */ protected double getPerTestTransportClientRatio() { final ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class); double perTestRatio = -1; if (annotation != null) { perTestRatio = annotation.transportClientRatio(); } if (perTestRatio == -1) { return Double.isNaN(TRANSPORT_CLIENT_RATIO) ? 
randomDouble() : TRANSPORT_CLIENT_RATIO; } assert perTestRatio >= 0.0 && perTestRatio <= 1.0; return perTestRatio; } /** * Returns path to a random directory that can be used to create a temporary file system repo */ public Path randomRepoPath() { if (currentCluster instanceof InternalTestCluster) { return randomRepoPath(((InternalTestCluster) currentCluster).getDefaultSettings()); } throw new UnsupportedOperationException("unsupported cluster type"); } /** * Returns path to a random directory that can be used to create a temporary file system repo */ public static Path randomRepoPath(Settings settings) { Environment environment = TestEnvironment.newEnvironment(settings); Path[] repoFiles = environment.repoFiles(); assert repoFiles.length > 0; Path path; do { path = repoFiles[0].resolve(randomAlphaOfLength(10)); } while (Files.exists(path)); return path; } protected NumShards getNumShards(String index) { MetaData metaData = client().admin().cluster().prepareState().get().getState().metaData(); assertThat(metaData.hasIndex(index), equalTo(true)); int numShards = Integer.valueOf(metaData.index(index).getSettings().get(SETTING_NUMBER_OF_SHARDS)); int numReplicas = Integer.valueOf(metaData.index(index).getSettings().get(SETTING_NUMBER_OF_REPLICAS)); return new NumShards(numShards, numReplicas); } /** * Asserts that all shards are allocated on nodes matching the given node pattern. */ public Set<String> assertAllShardsOnNodes(String index, String... 
pattern) { Set<String> nodes = new HashSet<>(); ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) { for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) { for (ShardRouting shardRouting : indexShardRoutingTable) { if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndexName())) { String name = clusterState.nodes().get(shardRouting.currentNodeId()).getName(); nodes.add(name); assertThat("Allocated on new node: " + name, Regex.simpleMatch(pattern, name), is(true)); } } } } return nodes; } /** * Asserts that all segments are sorted with the provided {@link Sort}. */ public void assertSortedSegments(String indexName, Sort expectedIndexSort) { IndicesSegmentResponse segmentResponse = client().admin().indices().prepareSegments(indexName).execute().actionGet(); IndexSegments indexSegments = segmentResponse.getIndices().get(indexName); for (IndexShardSegments indexShardSegments : indexSegments.getShards().values()) { for (ShardSegments shardSegments : indexShardSegments.getShards()) { for (Segment segment : shardSegments) { assertThat(expectedIndexSort, equalTo(segment.getSegmentSort())); } } } } protected static class NumShards { public final int numPrimaries; public final int numReplicas; public final int totalNumShards; public final int dataCopies; private NumShards(int numPrimaries, int numReplicas) { this.numPrimaries = numPrimaries; this.numReplicas = numReplicas; this.dataCopies = numReplicas + 1; this.totalNumShards = numPrimaries * dataCopies; } } private static boolean runTestScopeLifecycle() { return INSTANCE == null; } @Before public final void setupTestCluster() throws Exception { if (runTestScopeLifecycle()) { printTestMessage("setting up"); beforeInternal(); printTestMessage("all set up"); } } @After public final void cleanUpCluster() throws Exception { // Deleting indices is going to clear 
search contexts implicitly so we // need to check that there are no more in-flight search contexts before // we remove indices super.ensureAllSearchContextsReleased(); if (runTestScopeLifecycle()) { printTestMessage("cleaning up after"); afterInternal(false); printTestMessage("cleaned up after"); } } @AfterClass public static void afterClass() throws Exception { if (!runTestScopeLifecycle()) { try { INSTANCE.printTestMessage("cleaning up after"); INSTANCE.afterInternal(true); checkStaticState(true); } finally { INSTANCE = null; } } else { clearClusters(); } SUITE_SEED = null; currentCluster = null; } private static void initializeSuiteScope() throws Exception { Class<?> targetClass = getTestClass(); /** * Note we create these test class instance via reflection * since JUnit creates a new instance per test and that is also * the reason why INSTANCE is static since this entire method * must be executed in a static context. */ assert INSTANCE == null; if (isSuiteScopedTest(targetClass)) { // note we need to do this this way to make sure this is reproducible INSTANCE = (ESIntegTestCase) targetClass.getConstructor().newInstance(); boolean success = false; try { INSTANCE.printTestMessage("setup"); INSTANCE.beforeInternal(); INSTANCE.setupSuiteScopeCluster(); success = true; } finally { if (!success) { afterClass(); } } } else { INSTANCE = null; } } /** * Compute a routing key that will route documents to the <code>shard</code>-th shard * of the provided index. 
*/ protected String routingKeyForShard(String index, int shard) { return internalCluster().routingKeyForShard(resolveIndex(index), shard, random()); } @Override protected NamedXContentRegistry xContentRegistry() { if (isInternalCluster() && cluster().size() > 0) { // If it's internal cluster - using existing registry in case plugin registered custom data return internalCluster().getInstance(NamedXContentRegistry.class); } else { // If it's external cluster - fall back to the standard set return new NamedXContentRegistry(ClusterModule.getNamedXWriteables()); } } /** * Returns an instance of {@link RestClient} pointing to the current test cluster. * Creates a new client if the method is invoked for the first time in the context of the current test scope. * The returned client gets automatically closed when needed, it shouldn't be closed as part of tests otherwise * it cannot be reused by other tests anymore. */ protected static synchronized RestClient getRestClient() { if (restClient == null) { restClient = createRestClient(null); } return restClient; } protected static RestClient createRestClient(RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback) { return createRestClient(httpClientConfigCallback, "http"); } protected static RestClient createRestClient(RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, String protocol) { NodesInfoResponse nodesInfoResponse = client().admin().cluster().prepareNodesInfo().get(); assertFalse(nodesInfoResponse.hasFailures()); return createRestClient(nodesInfoResponse.getNodes(), httpClientConfigCallback, protocol); } protected static RestClient createRestClient(final List<NodeInfo> nodes, RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, String protocol) { List<HttpHost> hosts = new ArrayList<>(); for (NodeInfo node : nodes) { if (node.getHttp() != null) { TransportAddress publishAddress = node.getHttp().address().publishAddress(); InetSocketAddress address = 
publishAddress.address(); hosts.add(new HttpHost(NetworkAddress.format(address.getAddress()), address.getPort(), protocol)); } } RestClientBuilder builder = RestClient.builder(hosts.toArray(new HttpHost[hosts.size()])); if (httpClientConfigCallback != null) { builder.setHttpClientConfigCallback(httpClientConfigCallback); } return builder.build(); } /** * This method is executed iff the test is annotated with {@link SuiteScopeTestCase} * before the first test of this class is executed. * * @see SuiteScopeTestCase */ protected void setupSuiteScopeCluster() throws Exception { } private static boolean isSuiteScopedTest(Class<?> clazz) { return clazz.getAnnotation(SuiteScopeTestCase.class) != null; } /** * If a test is annotated with {@link SuiteScopeTestCase} * the checks and modifications that are applied to the used test cluster are only done after all tests * of this class are executed. This also has the side-effect of a suite level setup method {@link #setupSuiteScopeCluster()} * that is executed in a separate test instance. Variables that need to be accessible across test instances must be static. 
*/ @Retention(RetentionPolicy.RUNTIME) @Inherited @Target(ElementType.TYPE) public @interface SuiteScopeTestCase { } public static Index resolveIndex(String index) { GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().setIndices(index).get(); assertTrue("index " + index + " not found", getIndexResponse.getSettings().containsKey(index)); String uuid = getIndexResponse.getSettings().get(index).get(IndexMetaData.SETTING_INDEX_UUID); return new Index(index, uuid); } protected void assertSeqNos() throws Exception { assertBusy(() -> { IndicesStatsResponse stats = client().admin().indices().prepareStats().clear().get(); for (IndexStats indexStats : stats.getIndices().values()) { for (IndexShardStats indexShardStats : indexStats.getIndexShards().values()) { Optional<ShardStats> maybePrimary = Stream.of(indexShardStats.getShards()) .filter(s -> s.getShardRouting().active() && s.getShardRouting().primary()) .findFirst(); if (maybePrimary.isPresent() == false) { continue; } ShardStats primary = maybePrimary.get(); final SeqNoStats primarySeqNoStats = primary.getSeqNoStats(); final ShardRouting primaryShardRouting = primary.getShardRouting(); assertThat(primaryShardRouting + " should have set the global checkpoint", primarySeqNoStats.getGlobalCheckpoint(), not(equalTo(SequenceNumbers.UNASSIGNED_SEQ_NO))); final DiscoveryNode node = clusterService().state().nodes().get(primaryShardRouting.currentNodeId()); final IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node.getName()); final IndexShard indexShard = indicesService.getShardOrNull(primaryShardRouting.shardId()); final ObjectLongMap<String> globalCheckpoints = indexShard.getInSyncGlobalCheckpoints(); for (ShardStats shardStats : indexShardStats) { final SeqNoStats seqNoStats = shardStats.getSeqNoStats(); assertThat(shardStats.getShardRouting() + " local checkpoint mismatch", seqNoStats.getLocalCheckpoint(), equalTo(primarySeqNoStats.getLocalCheckpoint())); 
assertThat(shardStats.getShardRouting() + " global checkpoint mismatch", seqNoStats.getGlobalCheckpoint(), equalTo(primarySeqNoStats.getGlobalCheckpoint())); assertThat(shardStats.getShardRouting() + " max seq no mismatch", seqNoStats.getMaxSeqNo(), equalTo(primarySeqNoStats.getMaxSeqNo())); // the local knowledge on the primary of the global checkpoint equals the global checkpoint on the shard assertThat( seqNoStats.getGlobalCheckpoint(), equalTo(globalCheckpoints.get(shardStats.getShardRouting().allocationId().getId()))); } } } }); } }
package DesignPattern.behavioral.cor.Impl2;

import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;

/**
 * Chain-of-responsibility handler that pays out a requested amount in $1 bills.
 *
 * <p>As $1 is the smallest denomination, this handler is normally the terminal
 * link of the chain: any amount of at least 1 is dispensed entirely here and
 * never forwarded.
 */
@NoArgsConstructor
@AllArgsConstructor
public class Dollar1Dispenser implements DispenseChain {

    /** Successor handler; may be null when this is the last link in the chain. */
    private DispenseChain chain;

    @Override
    public void setNextChain(DispenseChain nextChain) {
        this.chain = nextChain;
    }

    /**
     * Dispenses the full amount as $1 bills when the amount is at least 1;
     * otherwise delegates to the successor, if any.
     *
     * @param cur the remaining amount to dispense
     */
    @Override
    public void dispense(Currency cur) {
        if (cur.getAmount() >= 1) {
            int num = cur.getAmount();
            System.out.println("dispense " + num + " $1");
        } else if (this.chain != null) {
            // Fix: original dereferenced `chain` unconditionally, throwing an NPE
            // for amounts below $1 when no successor was configured (the common
            // case for the smallest-denomination handler). A zero amount with no
            // successor now simply dispenses nothing.
            this.chain.dispense(cur);
        }
    }
}
package com.youzu.clan.base.json;

import com.youzu.clan.base.json.favforum.AddFavForumVariables;

/**
 * Response payload for the "add forum to favorites" API call.
 *
 * <p>Plain data holder extending {@link BaseJson}; the {@code variables}
 * section carries the operation-specific fields of the server response.
 */
public class AddForumJson extends BaseJson {

    // Keep a stable serialVersionUID: instances of this JSON model are serialized.
    private static final long serialVersionUID = -1948992055549669100L;

    /** Operation-specific portion of the response; may be null if absent. */
    private AddFavForumVariables variables;

    public void setVariables(AddFavForumVariables variables) {
        this.variables = variables;
    }

    public AddFavForumVariables getVariables() {
        return variables;
    }
}
package faith.noah.system.service.impl; import java.util.ArrayList; import java.util.List; import faith.noah.common.annotation.DataScope; import faith.noah.common.exception.BusinessException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import faith.noah.common.constant.UserConstants; import faith.noah.common.core.domain.entity.SysRole; import faith.noah.common.core.domain.entity.SysUser; import faith.noah.common.core.text.Convert; import faith.noah.common.utils.StringUtils; import faith.noah.common.utils.security.Md5Utils; import faith.noah.system.domain.SysPost; import faith.noah.system.domain.SysUserPost; import faith.noah.system.domain.SysUserRole; import faith.noah.system.mapper.SysPostMapper; import faith.noah.system.mapper.SysRoleMapper; import faith.noah.system.mapper.SysUserMapper; import faith.noah.system.mapper.SysUserPostMapper; import faith.noah.system.mapper.SysUserRoleMapper; import faith.noah.system.service.ISysConfigService; import faith.noah.system.service.ISysUserService; /** * 用户 业务层处理 * * @author ruoyi */ @Service public class SysUserServiceImpl implements ISysUserService { private static final Logger log = LoggerFactory.getLogger(SysUserServiceImpl.class); @Autowired private SysUserMapper userMapper; @Autowired private SysRoleMapper roleMapper; @Autowired private SysPostMapper postMapper; @Autowired private SysUserPostMapper userPostMapper; @Autowired private SysUserRoleMapper userRoleMapper; @Autowired private ISysConfigService configService; /** * 根据条件分页查询用户列表 * * @param user 用户信息 * @return 用户信息集合信息 */ @Override @DataScope(deptAlias = "d", userAlias = "u") public List<SysUser> selectUserList(SysUser user) { return userMapper.selectUserList(user); } /** * 根据条件分页查询已分配用户角色列表 * * @param user 用户信息 * @return 用户信息集合信息 */ @Override @DataScope(deptAlias = "d", 
userAlias = "u") public List<SysUser> selectAllocatedList(SysUser user) { return userMapper.selectAllocatedList(user); } /** * 根据条件分页查询未分配用户角色列表 * * @param user 用户信息 * @return 用户信息集合信息 */ @Override @DataScope(deptAlias = "d", userAlias = "u") public List<SysUser> selectUnallocatedList(SysUser user) { return userMapper.selectUnallocatedList(user); } /** * 通过用户名查询用户 * * @param userName 用户名 * @return 用户对象信息 */ @Override public SysUser selectUserByLoginName(String userName) { return userMapper.selectUserByLoginName(userName); } /** * 通过手机号码查询用户 * * @param phoneNumber 手机号码 * @return 用户对象信息 */ @Override public SysUser selectUserByPhoneNumber(String phoneNumber) { return userMapper.selectUserByPhoneNumber(phoneNumber); } /** * 通过邮箱查询用户 * * @param email 邮箱 * @return 用户对象信息 */ @Override public SysUser selectUserByEmail(String email) { return userMapper.selectUserByEmail(email); } /** * 通过用户ID查询用户 * * @param userId 用户ID * @return 用户对象信息 */ @Override public SysUser selectUserById(Long userId) { return userMapper.selectUserById(userId); } /** * 通过用户ID查询用户和角色关联 * * @param userId 用户ID * @return 用户和角色关联列表 */ @Override public List<SysUserRole> selectUserRoleByUserId(Long userId) { return userRoleMapper.selectUserRoleByUserId(userId); } /** * 通过用户ID删除用户 * * @param userId 用户ID * @return 结果 */ @Override @Transactional public int deleteUserById(Long userId) { // 删除用户与角色关联 userRoleMapper.deleteUserRoleByUserId(userId); // 删除用户与岗位表 userPostMapper.deleteUserPostByUserId(userId); return userMapper.deleteUserById(userId); } /** * 批量删除用户信息 * * @param ids 需要删除的数据ID * @return 结果 */ @Override @Transactional public int deleteUserByIds(String ids) { Long[] userIds = Convert.toLongArray(ids); for (Long userId : userIds) { checkUserAllowed(new SysUser(userId)); } // 删除用户与角色关联 userRoleMapper.deleteUserRole(userIds); // 删除用户与岗位关联 userPostMapper.deleteUserPost(userIds); return userMapper.deleteUserByIds(userIds); } /** * 新增保存用户信息 * * @param user 用户信息 * @return 结果 */ @Override @Transactional public 
int insertUser(SysUser user) { // 新增用户信息 int rows = userMapper.insertUser(user); // 新增用户岗位关联 insertUserPost(user); // 新增用户与角色管理 insertUserRole(user.getUserId(), user.getRoleIds()); return rows; } /** * 注册用户信息 * * @param user 用户信息 * @return 结果 */ @Override public boolean registerUser(SysUser user) { user.setUserType(UserConstants.REGISTER_USER_TYPE); return userMapper.insertUser(user) > 0; } /** * 修改保存用户信息 * * @param user 用户信息 * @return 结果 */ @Override @Transactional public int updateUser(SysUser user) { Long userId = user.getUserId(); // 删除用户与角色关联 userRoleMapper.deleteUserRoleByUserId(userId); // 新增用户与角色管理 insertUserRole(user.getUserId(), user.getRoleIds()); // 删除用户与岗位关联 userPostMapper.deleteUserPostByUserId(userId); // 新增用户与岗位管理 insertUserPost(user); return userMapper.updateUser(user); } /** * 修改用户个人详细信息 * * @param user 用户信息 * @return 结果 */ @Override public int updateUserInfo(SysUser user) { return userMapper.updateUser(user); } /** * 用户授权角色 * * @param userId 用户ID * @param roleIds 角色组 */ @Override @Transactional public void insertUserAuth(Long userId, Long[] roleIds) { userRoleMapper.deleteUserRoleByUserId(userId); insertUserRole(userId, roleIds); } /** * 修改用户密码 * * @param user 用户信息 * @return 结果 */ @Override public int resetUserPwd(SysUser user) { return updateUserInfo(user); } /** * 新增用户角色信息 * * @param userId 用户ID * @param roleIds 角色组 */ public void insertUserRole(Long userId, Long[] roleIds) { if (StringUtils.isNotNull(roleIds)) { // 新增用户与角色管理 List<SysUserRole> list = new ArrayList<SysUserRole>(); for (Long roleId : roleIds) { SysUserRole ur = new SysUserRole(); ur.setUserId(userId); ur.setRoleId(roleId); list.add(ur); } if (list.size() > 0) { userRoleMapper.batchUserRole(list); } } } /** * 新增用户岗位信息 * * @param user 用户对象 */ public void insertUserPost(SysUser user) { Long[] posts = user.getPostIds(); if (StringUtils.isNotNull(posts)) { // 新增用户与岗位管理 List<SysUserPost> list = new ArrayList<SysUserPost>(); for (Long postId : posts) { SysUserPost up = new 
SysUserPost(); up.setUserId(user.getUserId()); up.setPostId(postId); list.add(up); } if (list.size() > 0) { userPostMapper.batchUserPost(list); } } } /** * 校验登录名称是否唯一 * * @param loginName 用户名 * @return */ @Override public String checkLoginNameUnique(String loginName) { int count = userMapper.checkLoginNameUnique(loginName); if (count > 0) { return UserConstants.USER_NAME_NOT_UNIQUE; } return UserConstants.USER_NAME_UNIQUE; } /** * 校验手机号码是否唯一 * * @param user 用户信息 * @return */ @Override public String checkPhoneUnique(SysUser user) { Long userId = StringUtils.isNull(user.getUserId()) ? -1L : user.getUserId(); SysUser info = userMapper.checkPhoneUnique(user.getPhonenumber()); if (StringUtils.isNotNull(info) && info.getUserId().longValue() != userId.longValue()) { return UserConstants.USER_PHONE_NOT_UNIQUE; } return UserConstants.USER_PHONE_UNIQUE; } /** * 校验email是否唯一 * * @param user 用户信息 * @return */ @Override public String checkEmailUnique(SysUser user) { Long userId = StringUtils.isNull(user.getUserId()) ? 
-1L : user.getUserId(); SysUser info = userMapper.checkEmailUnique(user.getEmail()); if (StringUtils.isNotNull(info) && info.getUserId().longValue() != userId.longValue()) { return UserConstants.USER_EMAIL_NOT_UNIQUE; } return UserConstants.USER_EMAIL_UNIQUE; } /** * 校验用户是否允许操作 * * @param user 用户信息 */ @Override public void checkUserAllowed(SysUser user) { if (StringUtils.isNotNull(user.getUserId()) && user.isAdmin()) { throw new BusinessException("不允许操作超级管理员用户"); } } /** * 查询用户所属角色组 * * @param userId 用户ID * @return 结果 */ @Override public String selectUserRoleGroup(Long userId) { List<SysRole> list = roleMapper.selectRolesByUserId(userId); StringBuffer idsStr = new StringBuffer(); for (SysRole role : list) { idsStr.append(role.getRoleName()).append(","); } if (StringUtils.isNotEmpty(idsStr.toString())) { return idsStr.substring(0, idsStr.length() - 1); } return idsStr.toString(); } /** * 查询用户所属岗位组 * * @param userId 用户ID * @return 结果 */ @Override public String selectUserPostGroup(Long userId) { List<SysPost> list = postMapper.selectPostsByUserId(userId); StringBuffer idsStr = new StringBuffer(); for (SysPost post : list) { idsStr.append(post.getPostName()).append(","); } if (StringUtils.isNotEmpty(idsStr.toString())) { return idsStr.substring(0, idsStr.length() - 1); } return idsStr.toString(); } /** * 导入用户数据 * * @param userList 用户数据列表 * @param isUpdateSupport 是否更新支持,如果已存在,则进行更新数据 * @param operName 操作用户 * @return 结果 */ @Override public String importUser(List<SysUser> userList, Boolean isUpdateSupport, String operName) { if (StringUtils.isNull(userList) || userList.size() == 0) { throw new BusinessException("导入用户数据不能为空!"); } int successNum = 0; int failureNum = 0; StringBuilder successMsg = new StringBuilder(); StringBuilder failureMsg = new StringBuilder(); String password = configService.selectConfigByKey("sys.user.initPassword"); for (SysUser user : userList) { try { // 验证是否存在这个用户 SysUser u = userMapper.selectUserByLoginName(user.getLoginName()); if 
(StringUtils.isNull(u)) { user.setPassword(Md5Utils.hash(user.getLoginName() + password)); user.setCreateBy(operName); this.insertUser(user); successNum++; successMsg.append("<br/>" + successNum + "、账号 " + user.getLoginName() + " 导入成功"); } else if (isUpdateSupport) { user.setUpdateBy(operName); this.updateUser(user); successNum++; successMsg.append("<br/>" + successNum + "、账号 " + user.getLoginName() + " 更新成功"); } else { failureNum++; failureMsg.append("<br/>" + failureNum + "、账号 " + user.getLoginName() + " 已存在"); } } catch (Exception e) { failureNum++; String msg = "<br/>" + failureNum + "、账号 " + user.getLoginName() + " 导入失败:"; failureMsg.append(msg + e.getMessage()); log.error(msg, e); } } if (failureNum > 0) { failureMsg.insert(0, "很抱歉,导入失败!共 " + failureNum + " 条数据格式不正确,错误如下:"); throw new BusinessException(failureMsg.toString()); } else { successMsg.insert(0, "恭喜您,数据已全部导入成功!共 " + successNum + " 条,数据如下:"); } return successMsg.toString(); } /** * 用户状态修改 * * @param user 用户信息 * @return 结果 */ @Override public int changeStatus(SysUser user) { return userMapper.updateUser(user); } }
package com.arloor.forwardproxy.vo; import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.HashMap; import java.util.Map; import java.util.Properties; public class Config { private static final String TRUE = "true"; public static boolean ask4Authcate = false; private static final String POUND_SIGN = "\u00A3"; // £ private Ssl ssl; private Http http; public Ssl ssl() { return ssl; } public Http http() { return http; } public static Config parse(Properties properties) { Config config = new Config(); ask4Authcate = TRUE.equals(properties.getProperty("ask4Authcate")); String httpsEnable = properties.getProperty("https.enable"); if (TRUE.equals(httpsEnable)) { String httpsPortStr = properties.getProperty("https.port"); Integer port = Integer.parseInt(httpsPortStr); String auth = properties.getProperty("https.auth"); Map<String, String> users = new HashMap<>(); if (auth != null && auth.length() != 0) { for (String user : auth.split(",")) { users.computeIfAbsent(genBasicAuth(user), (cell) -> user); users.computeIfAbsent(genBasicAuthWithOut£(user), (cell) -> user); } } String fullchain = properties.getProperty("https.fullchain.pem"); String privkey = properties.getProperty("https.privkey.pem"); Ssl ssl = new Ssl(port, users, fullchain, privkey); config.ssl = ssl; } String httpEnable = properties.getProperty("http.enable"); if (TRUE.equals(httpEnable)) { String httpPortStr = properties.getProperty("http.port"); Integer port = Integer.parseInt(httpPortStr); String auth = properties.getProperty("http.auth"); Map<String, String> users = new HashMap<>(); if (auth != null && auth.length() != 0) { for (String user : auth.split(",")) { users.computeIfAbsent(genBasicAuth(user), (cell) -> user); users.computeIfAbsent(genBasicAuthWithOut£(user), (cell) -> user); } } Http http = new Http(port, users); config.http = http; } return config; } /** * https://datatracker.ietf.org/doc/html/rfc7617 * The user's name is "test", and the password is the string "123" 
* followed by the Unicode character U+00A3 (POUND SIGN). Using the * character encoding scheme UTF-8, the user-pass becomes: * <p> * 't' 'e' 's' 't' ':' '1' '2' '3' pound * 74 65 73 74 3A 31 32 33 C2 A3 * <p> * Encoding this octet sequence in Base64 ([RFC4648], Section 4) yields: * <p> * dGVzdDoxMjPCow== * * @param user * @return */ private static String genBasicAuth(String user) { user += POUND_SIGN; return "Basic " + Base64.getEncoder().encodeToString(user.getBytes(StandardCharsets.UTF_8)); } private static String genBasicAuthWithOut£(String user) { return "Basic " + Base64.getEncoder().encodeToString(user.getBytes(StandardCharsets.UTF_8)); } public static class Http { private Integer port; private Map<String, String> auth; // base64 - raw public Http(Integer port, Map<String, String> auth) { this.port = port; this.auth = auth; } public Integer getPort() { return port; } public String getAuth(String base64Auth) { return auth.get(base64Auth); } public Map<String, String> getAuthMap() { return auth; } public boolean needAuth() { return auth != null && auth.size() != 0; } } public static class Ssl { private Integer port; private Map<String, String> auth; // base64 - raw private String fullchain; private String privkey; public Ssl(Integer port, Map<String, String> auth, String fullchain, String privkey) { this.port = port; this.auth = auth; this.fullchain = fullchain; this.privkey = privkey; } public Integer getPort() { return port; } public String getAuth(String base64Auth) { return auth.get(base64Auth); } public Map<String, String> getAuthMap() { return auth; } public String getFullchain() { return fullchain; } public String getPrivkey() { return privkey; } public boolean needAuth() { return auth != null && auth.size() != 0; } } }
/* * Copyright © 2018 IBM Corp. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.ibm.mq.spring.boot; import java.lang.reflect.InvocationTargetException; import java.util.Collections; import java.util.List; import javax.jms.JMSException; import com.ibm.mq.jms.MQConnectionFactory; import com.ibm.msg.client.wmq.WMQConstants; /** * Factory to create a {@link MQConnectionFactory} instance from properties defined in {@link MQConfigurationProperties}. */ public class MQConnectionFactoryFactory { private final MQConfigurationProperties properties; private final List<MQConnectionFactoryCustomizer> factoryCustomizers; @SuppressWarnings("unchecked") public MQConnectionFactoryFactory(MQConfigurationProperties properties, List<MQConnectionFactoryCustomizer> factoryCustomizers) { this.properties = properties; this.factoryCustomizers = (List<MQConnectionFactoryCustomizer>) (factoryCustomizers != null ? factoryCustomizers : Collections.emptyList()); } // There are many properties that can be set on an MQ Connection Factory, but these are the most commonly-used // for both direct and client connections. // // If you use TLS for client connectivity, most properties related to that // (keystore, certificates, ciphers etc) must be set independently. That could be done in a customizer() method. // Keystores are often set in global properties defined by -D options on the command line. 
public <T extends MQConnectionFactory> T createConnectionFactory(Class<T> factoryClass) { String err = null; try { T cf = createConnectionFactoryInstance(factoryClass); // Should usually provide a queue manager name but it can be empty, to connect to the // default queue manager. String qmName = this.properties.getQueueManager(); cf.setStringProperty(WMQConstants.WMQ_QUEUE_MANAGER, qmName); // Use the channel name to decide whether to try to connect locally or as a client. If the queue manager // code has been installed locally, then this connection will try to use native JNI bindings to match. String channel = this.properties.getChannel(); String connName = this.properties.getConnName(); String ccdtUrl = this.properties.getCcdtUrl(); if (!isNullOrEmpty(ccdtUrl)) { cf.setIntProperty(WMQConstants.WMQ_CONNECTION_MODE, WMQConstants.WMQ_CM_CLIENT); cf.setStringProperty(WMQConstants.WMQ_CCDTURL, ccdtUrl); } else { if (isNullOrEmpty(channel) || isNullOrEmpty(connName)) { cf.setIntProperty(WMQConstants.WMQ_CONNECTION_MODE, WMQConstants.WMQ_CM_BINDINGS); } else { cf.setStringProperty(WMQConstants.WMQ_CONNECTION_NAME_LIST, connName); cf.setStringProperty(WMQConstants.WMQ_CHANNEL, channel); cf.setIntProperty(WMQConstants.WMQ_CONNECTION_MODE, WMQConstants.WMQ_CM_CLIENT); } } String clientId = this.properties.getClientId(); if(!isNullOrEmpty(clientId)){ cf.setStringProperty(WMQConstants.CLIENT_ID, clientId); } // Setup the authentication. If there is a userid defined, prefer to use the CSP model for // password checking. That is more general than the cf.connect(user,pass) method which has // some restrictions in the MQ client. But it is possible to override the choice via a // property, for some compatibility requirements. 
String u = this.properties.getUser(); if (!isNullOrEmpty(u)) { cf.setStringProperty(WMQConstants.USERID, u); cf.setStringProperty(WMQConstants.PASSWORD, this.properties.getPassword()); cf.setBooleanProperty(WMQConstants.USER_AUTHENTICATION_MQCSP, this.properties.isUserAuthenticationMQCSP()); } if (!isNullOrEmpty(this.properties.getSslCipherSuite())) cf.setStringProperty(WMQConstants.WMQ_SSL_CIPHER_SUITE, this.properties.getSslCipherSuite()); if (!isNullOrEmpty(this.properties.getSslCipherSpec())) cf.setStringProperty(WMQConstants.WMQ_SSL_CIPHER_SPEC, this.properties.getSslCipherSpec()); if (!isNullOrEmpty(this.properties.getSslPeerName())) { cf.setStringProperty(WMQConstants.WMQ_SSL_PEER_NAME, this.properties.getSslPeerName()); } customize(cf); return cf; } catch (JMSException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException ex) { throw new IllegalStateException("Unable to create MQConnectionFactory" + ((err != null) ? (": " + err) : ""), ex); } } private <T extends MQConnectionFactory> T createConnectionFactoryInstance(Class<T> factoryClass) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, NoSuchMethodException, SecurityException { return factoryClass.getConstructor().newInstance(); } private void customize(MQConnectionFactory connectionFactory) { for (MQConnectionFactoryCustomizer factoryCustomizer : this.factoryCustomizers) { factoryCustomizer.customize(connectionFactory); } } boolean isNullOrEmpty(String s) { if (s == null || s.isEmpty()) return true; else return false; } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices; import com.google.common.base.Function; import com.google.common.collect.*; import org.apache.lucene.util.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchIllegalStateException; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.*; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.*; import org.elasticsearch.index.aliases.IndexAliasesServiceModule; import org.elasticsearch.index.analysis.AnalysisModule; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.cache.IndexCacheModule; 
import org.elasticsearch.index.codec.CodecModule; import org.elasticsearch.index.fielddata.IndexFieldDataModule; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.flush.FlushStats; import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.indexing.IndexingStats; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperServiceModule; import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.query.IndexQueryParserModule; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; import org.elasticsearch.index.settings.IndexSettingsModule; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityModule; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.IndexStoreModule; import org.elasticsearch.indices.analysis.IndicesAnalysisService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.IndexPluginsModule; import org.elasticsearch.plugins.PluginsService; import java.io.Closeable; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import static com.google.common.collect.Maps.newHashMap; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder; import static 
org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;

/**
 * Node-level registry of per-index services.
 *
 * Creates, tracks and tears down one {@link IndexService} (with its own child
 * Guice injector) per index. Also aggregates per-shard stats, including stats
 * retained from shards that have already been closed (see OldShardsStats).
 */
public class IndicesService extends AbstractLifecycleComponent<IndicesService> implements Iterable<IndexService> {

    private final InternalIndicesLifecycle indicesLifecycle;

    private final IndicesAnalysisService indicesAnalysisService;

    private final Injector injector;

    private final PluginsService pluginsService;

    private final NodeEnvironment nodeEnv;

    // Child injectors per index name; guarded by synchronized blocks/methods below.
    private final Map<String, Injector> indicesInjectors = new HashMap<>();

    // Copy-on-write snapshot of the live indices; volatile so readers see a consistent map.
    private volatile ImmutableMap<String, IndexService> indices = ImmutableMap.of();

    private final OldShardsStats oldShardsStats = new OldShardsStats();

    @Inject
    public IndicesService(Settings settings, IndicesLifecycle indicesLifecycle, IndicesAnalysisService indicesAnalysisService, Injector injector, NodeEnvironment nodeEnv) {
        super(settings);
        this.indicesLifecycle = (InternalIndicesLifecycle) indicesLifecycle;
        this.indicesAnalysisService = indicesAnalysisService;
        this.injector = injector;
        this.pluginsService = injector.getInstance(PluginsService.class);
        // Keep stats of shards as they close, so node stats can include them later.
        this.indicesLifecycle.addListener(oldShardsStats);
        this.nodeEnv = nodeEnv;
    }

    @Override
    protected void doStart() throws ElasticsearchException {
    }

    // Removes all indices concurrently on a temporary 5-thread pool, waiting up
    // to 30 seconds for completion before giving up.
    @Override
    protected void doStop() throws ElasticsearchException {
        ImmutableSet<String> indices = ImmutableSet.copyOf(this.indices.keySet());
        final CountDownLatch latch = new CountDownLatch(indices.size());
        final ExecutorService indicesStopExecutor = Executors.newFixedThreadPool(5, EsExecutors.daemonThreadFactory("indices_shutdown"));
        for (final String index : indices) {
            indicesStopExecutor.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        removeIndex(index, "shutdown", false);
                    } catch (Throwable e) {
                        logger.warn("failed to delete index on stop [" + index + "]", e);
                    } finally {
                        latch.countDown();
                    }
                }
            });
        }
        try {
            if (latch.await(30, TimeUnit.SECONDS) == false) {
                logger.warn("Not all shards are closed yet, waited 30sec - stopping service");
            }
        } catch (InterruptedException e) {
            // ignore
            // NOTE(review): the interrupt is swallowed without re-interrupting the thread.
        } finally {
            indicesStopExecutor.shutdown();
        }
    }

    @Override
    protected void doClose() throws ElasticsearchException {
        IOUtils.closeWhileHandlingException(injector.getInstance(RecoverySettings.class), indicesAnalysisService);
    }

    /** @return the lifecycle listener registry for index-level events. */
    public IndicesLifecycle indicesLifecycle() {
        return this.indicesLifecycle;
    }

    /**
     * Returns the node stats indices stats. The <tt>includePrevious</tt> flag controls
     * if old shards stats will be aggregated as well (only for relevant stats, such as
     * refresh and indexing, not for docs/store).
     */
    public NodeIndicesStats stats(boolean includePrevious) {
        // NOTE(review): includePrevious is not forwarded — `true` is always passed
        // to the two-arg overload. Confirm whether this is intended.
        return stats(true, new CommonStatsFlags().all());
    }

    // Aggregates stats per shard across all live indices; when includePrevious
    // is set, stats retained from already-closed shards are folded in first.
    public NodeIndicesStats stats(boolean includePrevious, CommonStatsFlags flags) {
        CommonStats oldStats = new CommonStats(flags);
        if (includePrevious) {
            Flag[] setFlags = flags.getFlags();
            for (Flag flag : setFlags) {
                switch (flag) {
                    case Get:
                        oldStats.get.add(oldShardsStats.getStats);
                        break;
                    case Indexing:
                        oldStats.indexing.add(oldShardsStats.indexingStats);
                        break;
                    case Search:
                        oldStats.search.add(oldShardsStats.searchStats);
                        break;
                    case Merge:
                        oldStats.merge.add(oldShardsStats.mergeStats);
                        break;
                    case Refresh:
                        oldStats.refresh.add(oldShardsStats.refreshStats);
                        break;
                    case Flush:
                        oldStats.flush.add(oldShardsStats.flushStats);
                        break;
                }
            }
        }

        Map<Index, List<IndexShardStats>> statsByShard = Maps.newHashMap();
        for (IndexService indexService : indices.values()) {
            for (IndexShard indexShard : indexService) {
                try {
                    // Shards without a routing entry are not yet fully started; skip them.
                    if (indexShard.routingEntry() == null) {
                        continue;
                    }
                    IndexShardStats indexShardStats = new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard, indexShard.routingEntry(), flags) });
                    if (!statsByShard.containsKey(indexService.index())) {
                        statsByShard.put(indexService.index(), Lists.<IndexShardStats>newArrayList(indexShardStats));
                    } else {
                        statsByShard.get(indexService.index()).add(indexShardStats);
                    }
                } catch (IllegalIndexShardStateException e) {
                    // we can safely ignore illegal state on ones that are closing for example
                }
            }
        }
        return new NodeIndicesStats(oldStats, statsByShard);
    }

    /**
     * Returns <tt>true</tt> if changes (adding / removing) indices, shards and so on are allowed.
     */
    public boolean changesAllowed() {
        // we check on stop here since we defined stop when we delete the indices
        return lifecycle.started();
    }

    @Override
    public UnmodifiableIterator<IndexService> iterator() {
        return indices.values().iterator();
    }

    public boolean hasIndex(String index) {
        return indices.containsKey(index);
    }

    /**
     * Returns a snapshot of the started indices and the associated {@link IndexService} instances.
     *
     * The map being returned is not a live view and subsequent calls can return a different view.
     */
    public ImmutableMap<String, IndexService> indices() {
        return indices;
    }

    /**
     * Returns an IndexService for the specified index if exists otherwise returns <code>null</code>.
     *
     * Even if the index name appeared in {@link #indices()} <code>null</code> can still be returned as an
     * index maybe removed in the meantime, so preferable use the associated {@link IndexService} in order to prevent NPE.
     */
    @Nullable
    public IndexService indexService(String index) {
        return indices.get(index);
    }

    /**
     * Returns an IndexService for the specified index if exists otherwise a {@link IndexMissingException} is thrown.
     */
    public IndexService indexServiceSafe(String index) throws IndexMissingException {
        IndexService indexService = indexService(index);
        if (indexService == null) {
            throw new IndexMissingException(new Index(index));
        }
        return indexService;
    }

    // Builds a child Guice injector wired with all index-scoped modules and
    // registers the resulting IndexService. Synchronized so concurrent creates
    // of the same index cannot race past the duplicate check.
    public synchronized IndexService createIndex(String sIndexName, Settings settings, String localNodeId) throws ElasticsearchException {
        if (!lifecycle.started()) {
            throw new ElasticsearchIllegalStateException("Can't create an index [" + sIndexName + "], node is closed");
        }
        Index index = new Index(sIndexName);
        if (indicesInjectors.containsKey(index.name())) {
            throw new IndexAlreadyExistsException(index);
        }
        indicesLifecycle.beforeIndexCreated(index);
        logger.debug("creating Index [{}], shards [{}]/[{}]", sIndexName, settings.get(SETTING_NUMBER_OF_SHARDS), settings.get(SETTING_NUMBER_OF_REPLICAS));

        // Index settings layer on top of the node settings.
        Settings indexSettings = settingsBuilder()
                .put(this.settings)
                .put(settings)
                .classLoader(settings.getClassLoader())
                .build();

        ModulesBuilder modules = new ModulesBuilder();
        modules.add(new IndexNameModule(index));
        modules.add(new LocalNodeIdModule(localNodeId));
        modules.add(new IndexSettingsModule(index, indexSettings));
        modules.add(new IndexPluginsModule(indexSettings, pluginsService));
        modules.add(new IndexStoreModule(indexSettings));
        modules.add(new AnalysisModule(indexSettings, indicesAnalysisService));
        modules.add(new SimilarityModule(indexSettings));
        modules.add(new IndexCacheModule(indexSettings));
        modules.add(new IndexFieldDataModule(indexSettings));
        modules.add(new CodecModule(indexSettings));
        modules.add(new MapperServiceModule());
        modules.add(new IndexQueryParserModule(indexSettings));
        modules.add(new IndexAliasesServiceModule());
        modules.add(new IndexModule(indexSettings));

        Injector indexInjector;
        try {
            indexInjector = modules.createChildInjector(injector);
        } catch (CreationException e) {
            throw new IndexCreationException(index, Injectors.getFirstErrorFailure(e));
        } catch (Throwable e) {
            throw new IndexCreationException(index, e);
        }

        indicesInjectors.put(index.name(), indexInjector);
        IndexService indexService = indexInjector.getInstance(IndexService.class);
        indicesLifecycle.afterIndexCreated(indexService);
        // Publish the new index via a fresh immutable snapshot.
        indices = newMapBuilder(indices).put(index.name(), indexService).immutableMap();
        return indexService;
    }

    /**
     * Removes the given index from this service and releases all associated resources. Persistent parts of the index
     * like the shards files, state and transaction logs are kept around in the case of a disaster recovery.
     * @param index the index to remove
     * @param reason the high level reason causing this removal
     */
    public void removeIndex(String index, String reason) throws ElasticsearchException {
        removeIndex(index, reason, false);
    }

    /**
     * Deletes the given index. Persistent parts of the index
     * like the shards files, state and transaction logs are removed once all resources are released.
     *
     * Equivalent to {@link #removeIndex(String, String)} but fires
     * different lifecycle events to ensure pending resources of this index are immediately removed.
     * @param index the index to delete
     * @param reason the high level reason causing this delete
     */
    public void deleteIndex(String index, String reason) throws ElasticsearchException {
        removeIndex(index, reason, true);
    }

    // Shared removal path. First unregisters the index under the lock (so a
    // concurrent remove becomes a no-op via the null injector check), then
    // closes the index's services in a fixed order outside the lock.
    private void removeIndex(String index, String reason, boolean delete) throws ElasticsearchException {
        try {
            final IndexService indexService;
            final Injector indexInjector;
            synchronized (this) {
                indexInjector = indicesInjectors.remove(index);
                if (indexInjector == null) {
                    return;
                }

                logger.debug("[{}] closing ... (reason [{}])", index, reason);
                Map<String, IndexService> tmpMap = newHashMap(indices);
                indexService = tmpMap.remove(index);
                indices = ImmutableMap.copyOf(tmpMap);
            }

            indicesLifecycle.beforeIndexClosed(indexService);
            if (delete) {
                indicesLifecycle.beforeIndexDeleted(indexService);
            }
            // Close plugin-provided per-index services first.
            IOUtils.close(Iterables.transform(pluginsService.indexServices(), new Function<Class<? extends Closeable>, Closeable>() {
                @Override
                public Closeable apply(Class<? extends Closeable> input) {
                    return indexInjector.getInstance(input);
                }
            }));

            logger.debug("[{}] closing index service (reason [{}])", index, reason);
            indexService.close(reason);

            logger.debug("[{}] closing index cache (reason [{}])", index, reason);
            indexInjector.getInstance(IndexCache.class).close();
            logger.debug("[{}] clearing index field data (reason [{}])", index, reason);
            indexInjector.getInstance(IndexFieldDataService.class).clear();
            logger.debug("[{}] closing analysis service (reason [{}])", index, reason);
            indexInjector.getInstance(AnalysisService.class).close();
            logger.debug("[{}] closing mapper service (reason [{}])", index, reason);
            indexInjector.getInstance(MapperService.class).close();
            logger.debug("[{}] closing index query parser service (reason [{}])", index, reason);
            indexInjector.getInstance(IndexQueryParserService.class).close();

            logger.debug("[{}] closing index service (reason [{}])", index, reason);
            indexInjector.getInstance(IndexStore.class).close();

            // NOTE(review): this closes the node-level injector, not indexInjector —
            // verify this is intentional and not a copy/paste slip.
            Injectors.close(injector);

            logger.debug("[{}] closed... (reason [{}])", index, reason);
            indicesLifecycle.afterIndexClosed(indexService.index());
            if (delete) {
                indicesLifecycle.afterIndexDeleted(indexService.index());
            }
        } catch (IOException ex) {
            throw new ElasticsearchException("failed to remove index " + index, ex);
        }
    }

    /**
     * Accumulates selected stats from shards as they close, so node-level stats
     * can keep counting work done by shards that no longer exist.
     */
    static class OldShardsStats extends IndicesLifecycle.Listener {

        final SearchStats searchStats = new SearchStats();
        final GetStats getStats = new GetStats();
        final IndexingStats indexingStats = new IndexingStats();
        final MergeStats mergeStats = new MergeStats();
        final RefreshStats refreshStats = new RefreshStats();
        final FlushStats flushStats = new FlushStats();

        @Override
        public synchronized void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard) {
            if (indexShard != null) {
                getStats.add(indexShard.getStats());
                indexingStats.add(indexShard.indexingStats(), false);
                searchStats.add(indexShard.searchStats(), false);
                mergeStats.add(indexShard.mergeStats());
                refreshStats.add(indexShard.refreshStats());
                flushStats.add(indexShard.flushStats());
            }
        }
    }
}
package org.swtk.commons.dict.wiktionary.generated.h.a.c;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import org.swtk.common.dict.dto.wiktionary.Entry;

import com.trimc.blogger.commons.utils.GsonUtils;

/**
 * Static dictionary shard for Wiktionary terms ("hachure" .. "hackleback").
 *
 * NOTE(review): the package name ("generated") and the data-only structure
 * strongly suggest this file is machine-generated — confirm before hand-editing
 * the embedded JSON payloads.
 */
public class WiktionaryHAC000 {

    // Term -> deserialized Entry; populated once by the static initializer below.
    private static Map<String, Entry> map = new HashMap<String, Entry>();

    static {
        add("hachure", "{\"term\":\"hachure\", \"etymology\":{\"influencers\":[], \"languages\":[\"English\", \"French\"], \"text\":\"From French {{m|fr|hachure||crosshatching}}, from {{m|fr|hacher||to hatch}}.\"}, \"definitions\":{\"list\":[{\"upperType\":\"NOUN\", \"text\":\"A line on a map indicating the steepness of a slope\", \"priority\":1}]}, \"synonyms\":{}}");
        add("hacienda", "{\"term\":\"hacienda\", \"etymology\":{\"influencers\":[], \"languages\":[\"English\", \"Spanish\"], \"text\":\"Spanish \u0027hacienda\u0027\"}, \"definitions\":{\"list\":[{\"upperType\":\"NOUN\", \"text\":\"a large homestead in a ranch or estate usually in places where Colonial Spanish culture has had architectural influence\", \"priority\":1}]}, \"synonyms\":{}}");
        add("hackamore", "{\"term\":\"hackamore\", \"etymology\":{\"influencers\":[], \"languages\":[], \"text\":\"Perhaps from Spanish jáquima ‘halter’.\"}, \"definitions\":{\"list\":[{\"upperType\":\"NOUN\", \"text\":\"a kind of bridle with no bi\", \"priority\":1},{\"upperType\":\"NOUN\", \"text\":\"1992\u0027\u0027: Before they could agree or disagree he’d hauled the horse around by the \u0027\u0027hackamore\u0027\u0027 and was pounding off up the track. — Cormac McCarthy, \u0027All The Pretty Horses\", \"priority\":2}]}, \"synonyms\":{}}");
        add("hackathon", "{\"term\":\"hackathon\", \"etymology\":{\"influencers\":[], \"languages\":[], \"text\":\"{{suffix|hack|athon|lang\u003den}}\"}, \"definitions\":{\"list\":[{\"upperType\":\"NOUN\", \"text\":\"An event where programmers meet for collaborative computer programming\", \"priority\":1}]}, \"synonyms\":{}}");
        add("hacking", "{\"term\":\"hacking\", \"etymology\":{\"influencers\":[], \"languages\":[], \"text\":\"\"}, \"definitions\":{\"list\":[{\"upperType\":\"NOUN\", \"text\":\"Playful solving of technical work that requires deep understanding, especially of a computer system\", \"priority\":1},{\"upperType\":\"NOUN\", \"text\":\"From \u0027hacker\u0027: \u0027\u0026quot;A person who delights in having an intimate understanding of the internal workings of a system, computers and computer networks in particular.\u0026quot; \u0026amp;mdash; [http://tools.ietf.org/html/rfc1392 RFC 1392\", \"priority\":2},{\"upperType\":\"NOUN\", \"text\":\"Unauthorized attempts to bypass the security mechanisms of an information system or network. See also cracker\", \"priority\":3},{\"upperType\":\"NOUN\", \"text\":\"A dry coughing; the emission of a succession of short coughs\", \"priority\":4},{\"upperType\":\"NOUN\", \"text\":\"A kick in the shins\", \"priority\":5},{\"upperType\":\"NOUN\", \"text\":\"The act of striking the muscles with the side of the hand\", \"priority\":6},{\"upperType\":\"NOUN\", \"text\":\"A riding or journey on horseback. (Plural hackings\", \"priority\":7},{\"upperType\":\"NOUN\", \"text\":\"The operation of working over the faces of rough or worn grindstones with a hack-hammer\", \"priority\":8},{\"upperType\":\"NOUN\", \"text\":\"The separation of a course of stones into two smaller courses, when there are not enough large stones to form a single course\", \"priority\":9},{\"upperType\":\"NOUN\", \"text\":\"The cuts and grooves made in the metal laps by holding the cutting edge of a steel blade against them while in motion, for the purpose of providing receptacles or pockets for the powders using in cutting and polishing gems\", \"priority\":10},{\"upperType\":\"NOUN\", \"text\":\"The piling of bricks for drying\", \"priority\":11}]}, \"synonyms\":{}}");
        add("hackle", "{\"term\":\"hackle\", \"etymology\":{\"influencers\":[], \"languages\":[\"old english (ca. 450-1100)\", \"English\"], \"text\":\"From Old english (ca. 450-1100) {{m|ang|hæcla}}, {{m|ang|hacele}}, from Proto-germanic {{m|gem-pro|*hakulǭ}}. Cognate with Dutch {{m|nl|hekel}}, German {{m|de|Hechel}}.\"}, \"definitions\":{\"list\":[{\"upperType\":\"NOUN\", \"text\":\"An instrument with steel pins used to comb out flax or hemp\", \"priority\":1},{\"upperType\":\"NOUN\", \"text\":\"One of the long, narrow feathers on the neck of birds, most noticeable on the cock\", \"priority\":2},{\"upperType\":\"NOUN\", \"text\":\"A feather used to make a fishing lure or a fishing lure incorporating a feather\", \"priority\":3},{\"upperType\":\"NOUN\", \"text\":\"By extension (because the hackles of a cock are lifted when it is angry), the hair on the nape of the neck in dogs and other animals; also used figuratively for humans\", \"priority\":4},{\"upperType\":\"NOUN\", \"text\":\"When the dog got angry his \u0027\u0027hackles\u0027\u0027 rose and he growled\", \"priority\":5},{\"upperType\":\"NOUN\", \"text\":\"A plate with rows of pointed needles used to blend or straighten hair\", \"priority\":6},{\"upperType\":\"NOUN\", \"text\":\"A feather plume on some soldier\u0027s uniforms, especially the hat or helmet\", \"priority\":7},{\"upperType\":\"NOUN\", \"text\":\"Any flimsy substance unspun, such as raw silk\", \"priority\":8}]}, \"synonyms\":{}}");
        add("hackleback", "{\"term\":\"hackleback\", \"etymology\":{\"influencers\":[], \"languages\":[], \"text\":\"{{compound|hackle|back|lang\u003den}}\"}, \"definitions\":{\"list\":[{\"upperType\":\"NOUN\", \"text\":\"A small North American sturgeon, the roe of which is harvested for cavia\", \"priority\":1},{\"upperType\":\"NOUN\", \"text\":\"They served crackers with \u0027\u0027hackleback\u0027\u0027 caviar\", \"priority\":2}]}, \"synonyms\":{}}");
    }

    // Deserializes the JSON payload into an Entry and registers it under the term.
    private static void add(String term, String json) {
        map.put(term, GsonUtils.toObject(json, Entry.class));
    }

    // Looks up an entry; returns null when the term is not in this shard.
    public static Entry get(String term) {
        return map.get(term);
    }

    // True when this shard contains the term.
    public static boolean has(String term) {
        return null != get(term);
    }

    // All terms stored in this shard.
    public static Collection<String> terms() {
        return map.keySet();
    }
}
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.ActivityManager;
import android.app.Notification;
import android.app.SearchManager;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.SystemClock;

import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import androidx.browser.customtabs.CustomTabsIntent;
import androidx.browser.customtabs.CustomTabsSessionToken;
import androidx.browser.customtabs.TrustedWebUtils;

import org.chromium.base.ApplicationStatus;
import org.chromium.base.CommandLine;
import org.chromium.base.ContextUtils;
import org.chromium.base.IntentUtils;
import org.chromium.base.PackageManagerUtils;
import org.chromium.base.StrictModeContext;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.chrome.browser.app.ChromeActivity;
import org.chromium.chrome.browser.app.video_tutorials.VideoTutorialShareHelper;
import org.chromium.chrome.browser.attribution_reporting.AttributionIntentHandler;
import org.chromium.chrome.browser.attribution_reporting.AttributionIntentHandlerFactory;
import org.chromium.chrome.browser.browserservices.SessionDataHolder;
import org.chromium.chrome.browser.browserservices.ui.splashscreen.trustedwebactivity.TwaSplashController;
import org.chromium.chrome.browser.customtabs.CustomTabActivity;
import org.chromium.chrome.browser.customtabs.CustomTabsConnection;
import org.chromium.chrome.browser.firstrun.FirstRunFlowSequencer;
import org.chromium.chrome.browser.flags.ChromeSwitches;
import org.chromium.chrome.browser.instantapps.InstantAppsHandler;
import org.chromium.chrome.browser.multiwindow.MultiWindowUtils;
import org.chromium.chrome.browser.notifications.NotificationPlatformBridge;
import org.chromium.chrome.browser.partnercustomizations.PartnerBrowserCustomizations;
import org.chromium.chrome.browser.searchwidget.SearchActivity;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.translate.TranslateIntentHandler;
import org.chromium.chrome.browser.vr.VrModuleProvider;
import org.chromium.chrome.browser.webapps.WebappLauncherActivity;
import org.chromium.components.browser_ui.media.MediaNotificationUma;
import org.chromium.components.embedder_support.util.UrlConstants;
import org.chromium.ui.widget.Toast;
import org.chromium.url.Origin;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.List;

/**
 * Dispatches incoming intents to the appropriate activity based on the current configuration and
 * Intent fired.
 */
public class LaunchIntentDispatcher implements IntentHandler.IntentHandlerDelegate {
    /**
     * Extra indicating launch mode used.
     */
    public static final String EXTRA_LAUNCH_MODE =
            "com.google.android.apps.chrome.EXTRA_LAUNCH_MODE";

    // NOTE(review): "Activitiy" typo is pre-existing. The value is a runtime string (potentially
    // emitted in logs), so it is deliberately left unchanged here.
    private static final String TAG = "ActivitiyDispatcher";

    // Typically the number of tasks returned by getRecentTasks will be around 3 or less - the
    // Chrome Launcher Activity, a Tabbed Activity task, and the home screen on older Android
    // versions. However, theoretically this task list could be unbounded, so limit it to a number
    // that won't cause Chrome to blow up in degenerate cases.
    private static final int MAX_NUM_TASKS = 100;

    private final Activity mActivity;
    // Not final: handleAppAttributionIntent() may un-wrap this into an inner launch intent.
    private Intent mIntent;
    private final boolean mIsCustomTabIntent;
    private final boolean mIsVrIntent;
    private final AttributionIntentHandler mAttributionIntentHandler;

    /** Possible outcomes of dispatching; tells the caller what to do with its own activity. */
    @IntDef({Action.CONTINUE, Action.FINISH_ACTIVITY, Action.FINISH_ACTIVITY_REMOVE_TASK})
    @Retention(RetentionPolicy.SOURCE)
    public @interface Action {
        int CONTINUE = 0;
        int FINISH_ACTIVITY = 1;
        int FINISH_ACTIVITY_REMOVE_TASK = 2;
    }

    /**
     * Dispatches the intent in the context of the activity.
     * In most cases calling this method will result in starting a new activity, in which case
     * the current activity will need to be finished as per the action returned.
     *
     * @param currentActivity activity that received the intent
     * @param intent intent to dispatch
     * @return action to take
     */
    public static @Action int dispatch(Activity currentActivity, Intent intent) {
        return new LaunchIntentDispatcher(currentActivity, intent).dispatch();
    }

    /**
     * Dispatches the intent to proper tabbed activity.
     * This method is similar to {@link #dispatch()}, but only handles intents that result in
     * starting a tabbed activity (i.e. one of *TabbedActivity classes).
     *
     * @param currentActivity activity that received the intent
     * @param intent intent to dispatch
     * @return action to take
     */
    public static @Action int dispatchToTabbedActivity(Activity currentActivity, Intent intent) {
        return new LaunchIntentDispatcher(currentActivity, intent).dispatchToTabbedActivity();
    }

    /**
     * Dispatches the intent to proper tabbed activity.
     * This method is similar to {@link #dispatch()}, but only handles intents that result in
     * starting a custom tab activity.
     *
     * @param currentActivity activity that received the intent
     * @param intent intent to dispatch
     * @return {@link Action#CONTINUE} when the intent is not a Custom Tab intent, otherwise
     *         {@link Action#FINISH_ACTIVITY} after the Custom Tab has been launched
     */
    public static @Action int dispatchToCustomTabActivity(Activity currentActivity, Intent intent) {
        LaunchIntentDispatcher dispatcher = new LaunchIntentDispatcher(currentActivity, intent);
        if (!dispatcher.mIsCustomTabIntent) return Action.CONTINUE;
        dispatcher.launchCustomTabActivity();
        return Action.FINISH_ACTIVITY;
    }

    /**
     * Sanitizes the incoming intent, stamps a receipt timestamp on it if it does not already carry
     * one, records intent metrics, and classifies the intent (VR / Custom Tab) for later routing.
     */
    private LaunchIntentDispatcher(Activity activity, Intent intent) {
        mActivity = activity;
        mIntent = IntentUtils.sanitizeIntent(intent);
        mAttributionIntentHandler = AttributionIntentHandlerFactory.create();

        // Needs to be called as early as possible, to accurately capture the
        // time at which the intent was received.
        if (mIntent != null && IntentHandler.getTimestampFromIntent(mIntent) == -1) {
            IntentHandler.addTimestampToIntent(mIntent);
        }
        recordIntentMetrics();

        mIsVrIntent = VrModuleProvider.getIntentDelegate().isVrIntent(mIntent);
        mIsCustomTabIntent = isCustomTabIntent(mIntent);
    }

    /**
     * Figure out how to route the Intent. Because this is on the critical path to startup, please
     * avoid making the pathway any more complicated than it already is. Make sure that anything
     * you add _absolutely has_ to be here.
     */
    private @Action int dispatch() {
        // Read partner browser customizations information asynchronously.
        // We want to initialize early because when there are no tabs to restore, we should possibly
        // show homepage, which might require reading PartnerBrowserCustomizations provider.
        PartnerBrowserCustomizations.getInstance().initializeAsync(
                mActivity.getApplicationContext());

        // Must come before processing other intents, as we may un-wrap |mIntent| to another type of
        // Intent.
        if (handleAppAttributionIntent()) return Action.FINISH_ACTIVITY;

        int tabId = IntentHandler.getBringTabToFrontId(mIntent);
        boolean incognito =
                mIntent.getBooleanExtra(IntentHandler.EXTRA_OPEN_NEW_INCOGNITO_TAB, false);

        // Check if a web search Intent is being handled.
        IntentHandler intentHandler = new IntentHandler(mActivity, this);
        String url = IntentHandler.getUrlFromIntent(mIntent);
        if (url == null && tabId == Tab.INVALID_TAB_ID && !incognito
                && intentHandler.handleWebSearchIntent(mIntent)) {
            return Action.FINISH_ACTIVITY;
        }

        // Check if the URL is a video tutorial and needs to be handled in a video player.
        if (VideoTutorialShareHelper.handleVideoTutorialURL(url)) {
            return Action.FINISH_ACTIVITY;
        }

        // Check if a LIVE WebappActivity has to be brought back to the foreground. We can't
        // check for a dead WebappActivity because we don't have that information without a global
        // TabManager. If that ever lands, code to bring back any Tab could be consolidated
        // here instead of being spread between ChromeTabbedActivity and ChromeLauncherActivity.
        // https://crbug.com/443772, https://crbug.com/522918
        if (WebappLauncherActivity.bringWebappToFront(tabId)) {
            return Action.FINISH_ACTIVITY_REMOVE_TASK;
        }

        // The notification settings cog on the flipped side of Notifications and in the Android
        // Settings "App Notifications" view will open us with a specific category.
        if (mIntent.hasCategory(Notification.INTENT_CATEGORY_NOTIFICATION_PREFERENCES)) {
            NotificationPlatformBridge.launchNotificationPreferences(mIntent);
            return Action.FINISH_ACTIVITY;
        }

        // Check if we should launch an Instant App to handle the intent.
        if (InstantAppsHandler.getInstance().handleIncomingIntent(
                    mActivity, mIntent, mIsCustomTabIntent, false)) {
            return Action.FINISH_ACTIVITY;
        }

        // Check if we should push the user through First Run.
        if (FirstRunFlowSequencer.launch(mActivity, mIntent, false /* requiresBroadcast */,
                    false /* preferLightweightFre */)) {
            return Action.FINISH_ACTIVITY;
        }

        // Check if we should launch a Custom Tab.
        if (mIsCustomTabIntent) {
            launchCustomTabActivity();
            return Action.FINISH_ACTIVITY;
        }

        return dispatchToTabbedActivity();
    }

    /**
     * Handles a web search query: fires ACTION_WEB_SEARCH if the device has a handler for it,
     * otherwise falls back to Chrome's own {@link SearchActivity} with the same query.
     */
    @Override
    public void processWebSearchIntent(String query) {
        Intent searchIntent = new Intent(Intent.ACTION_WEB_SEARCH);
        searchIntent.putExtra(SearchManager.QUERY, query);
        // Package-manager queries can hit disk; explicitly permit reads under StrictMode.
        try (StrictModeContext ignored = StrictModeContext.allowDiskReads()) {
            int resolvers =
                    PackageManagerUtils
                            .queryIntentActivities(searchIntent, PackageManager.GET_RESOLVED_FILTER)
                            .size();
            if (resolvers == 0) {
                // Phone doesn't have a WEB_SEARCH action handler, open Search Activity with
                // the given query.
                Intent searchActivityIntent = new Intent(Intent.ACTION_MAIN);
                searchActivityIntent.setClass(
                        ContextUtils.getApplicationContext(), SearchActivity.class);
                searchActivityIntent.putExtra(SearchManager.QUERY, query);
                mActivity.startActivity(searchActivityIntent);
            } else {
                mActivity.startActivity(searchIntent);
            }
        }
    }

    // This delegate callback is not expected to be invoked on this class; fail fast in debug
    // builds via assert.
    @Override
    public void processTranslateTabIntent(
            @Nullable String targetLanguageCode, @Nullable String expectedUrl) {
        assert false;
    }

    // This delegate callback is not expected to be invoked on this class; fail fast in debug
    // builds via assert.
    @Override
    public void processUrlViewIntent(String url, String referer, String headers,
            @IntentHandler.TabOpenType int tabOpenType, String externalAppId,
            int tabIdToBringToFront, boolean hasUserGesture, boolean isRendererInitiated,
            @Nullable Origin initiatorOrigin, Intent intent) {
        assert false;
    }

    /** When started with an intent, maybe pre-resolve the domain. */
    private void maybePrefetchDnsInBackground() {
        if (mIntent != null && Intent.ACTION_VIEW.equals(mIntent.getAction())) {
            String maybeUrl = IntentHandler.getUrlFromIntent(mIntent);
            if (maybeUrl != null) {
                WarmupManager.getInstance().maybePrefetchDnsForUrlInBackground(mActivity, maybeUrl);
            }
        }
    }

    /**
     * Adds a token to TRANSLATE_TAB intents that we know were sent from a first party app.
     *
     * TRANSLATE_TAB requires a signature permission. We know that permission has been enforced (and
     * thus comes from a first party application) if it was routed via the TranslateDispatcher
     * activity-alias. In this case, add a token so IntentHandler knows the intent is from a first
     * party app.
     */
    private static void maybeAuthenticateFirstPartyTranslateIntent(Intent intent) {
        if (intent != null
                && TranslateIntentHandler.ACTION_TRANSLATE_TAB.equals(intent.getAction())
                && TranslateIntentHandler.COMPONENT_TRANSLATE_DISPATCHER.equals(
                        intent.getComponent().getClassName())) {
            IntentHandler.addTrustedIntentExtras(intent);
        }
    }

    /**
     * @return Whether the intent is for launching a Custom Tab.
     */
    public static boolean isCustomTabIntent(Intent intent) {
        if (intent == null) return false;
        if (CustomTabsIntent.shouldAlwaysUseBrowserUI(intent)
                || !intent.hasExtra(CustomTabsIntent.EXTRA_SESSION)) {
            return false;
        }
        // Only treat as a CCT intent if a URL can actually be extracted from it.
        return IntentHandler.getUrlFromIntent(intent) != null;
    }

    /**
     * Creates an Intent that can be used to launch a {@link CustomTabActivity}.
     *
     * @param context context used to resolve class names and grant URI permissions
     * @param intent the original intent; its extras are carried over via the copy constructor
     * @return a new VIEW intent targeting {@link CustomTabActivity} with flags adjusted for
     *         CLEAR_TOP routing, content:// permission propagation, and new-task/new-document
     *         launches
     */
    public static Intent createCustomTabActivityIntent(Context context, Intent intent) {
        // Use the copy constructor to carry over the myriad of extras.
        Uri uri = Uri.parse(IntentHandler.getUrlFromIntent(intent));

        Intent newIntent = new Intent(intent);
        newIntent.setAction(Intent.ACTION_VIEW);
        newIntent.setData(uri);
        newIntent.setClassName(context, CustomTabActivity.class.getName());

        if (clearTopIntentsForCustomTabsEnabled(intent)) {
            // Ensure the new intent is routed into the instance of CustomTabActivity in this task.
            // If the existing CustomTabActivity can't handle the intent, it will re-launch
            // the intent without these flags.
            // If you change this flow, please make sure it works correctly with
            // - "Don't keep activities",
            // - Multiple clients hosting CCTs,
            // - Multiwindow mode.
            Class<? extends Activity> handlerClass =
                    getSessionDataHolder().getActiveHandlerClassInCurrentTask(intent, context);
            if (handlerClass != null) {
                newIntent.setClassName(context, handlerClass.getName());
                newIntent.addFlags(
                        Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP);
            }
        }

        // If |uri| is a content:// URI, we want to propagate the URI permissions. This can't be
        // achieved by simply adding the FLAG_GRANT_READ_URI_PERMISSION to the Intent, since the
        // data URI on the Intent isn't |uri|, it just has |uri| as a query parameter.
        if (uri != null && UrlConstants.CONTENT_SCHEME.equals(uri.getScheme())) {
            context.grantUriPermission(
                    context.getPackageName(), uri, Intent.FLAG_GRANT_READ_URI_PERMISSION);
        }

        if (CommandLine.getInstance().hasSwitch(ChromeSwitches.OPEN_CUSTOM_TABS_IN_NEW_TASK)) {
            newIntent.setFlags(newIntent.getFlags() | Intent.FLAG_ACTIVITY_NEW_TASK);
        }

        // Handle activity started in a new task.
        // See https://developer.android.com/guide/components/activities/tasks-and-back-stack
        if ((newIntent.getFlags() & Intent.FLAG_ACTIVITY_NEW_TASK) != 0
                || (newIntent.getFlags() & Intent.FLAG_ACTIVITY_NEW_DOCUMENT) != 0) {
            // If a CCT intent triggers First Run, then NEW_TASK will be automatically applied. As
            // part of that, it will inherit the EXCLUDE_FROM_RECENTS bit from
            // ChromeLauncherActivity, so explicitly remove it to ensure the CCT does not get lost
            // in recents.
            newIntent.setFlags(newIntent.getFlags() & ~Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);
            // Android will try to find and reuse an existing CCT activity in the background. Use
            // this flag to always start a new one instead.
            newIntent.addFlags(Intent.FLAG_ACTIVITY_MULTIPLE_TASK);
            // Force a new document to ensure the proper task/stack creation.
            newIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_DOCUMENT);
        }

        return newIntent;
    }

    /** Convenience accessor for the app-scoped {@link SessionDataHolder}. */
    private static SessionDataHolder getSessionDataHolder() {
        return ChromeApplicationImpl.getComponent().resolveSessionDataHolder();
    }

    /**
     * Handles launching a {@link CustomTabActivity}, which will sit on top of a client's activity
     * in the same task.
     */
    private void launchCustomTabActivity() {
        CustomTabsConnection.getInstance().onHandledIntent(
                CustomTabsSessionToken.getSessionTokenFromIntent(mIntent), mIntent);
        if (!clearTopIntentsForCustomTabsEnabled(mIntent)) {
            // The old way of delivering intents relies on calling the activity directly via a
            // static reference. It doesn't allow using CLEAR_TOP, and also doesn't work when an
            // intent brings the task to foreground. The condition above is a temporary safety net.
            boolean handled = getSessionDataHolder().handleIntent(mIntent);
            if (handled) return;
        }
        maybePrefetchDnsInBackground();

        // Create and fire a launch intent.
        Intent launchIntent = createCustomTabActivityIntent(mActivity, mIntent);

        // Allow disk writes during startActivity() to avoid strict mode violations on some
        // Samsung devices, see https://crbug.com/796548.
        try (StrictModeContext ignored = StrictModeContext.allowDiskWrites()) {
            if (TwaSplashController.handleIntent(mActivity, launchIntent)) {
                return;
            }
            mActivity.startActivity(launchIntent, null);
        }
    }

    /**
     * Handles launching a {@link ChromeTabbedActivity}.
     */
    @SuppressLint("InlinedApi")
    @SuppressWarnings("checkstyle:SystemExitCheck") // Allowed due to https://crbug.com/847921#c17.
    private @Action int dispatchToTabbedActivity() {
        if (mIsVrIntent) {
            for (Activity activity : ApplicationStatus.getRunningActivities()) {
                if (activity instanceof ChromeTabbedActivity) {
                    if (VrModuleProvider.getDelegate().willChangeDensityInVr(
                                (ChromeActivity) activity)) {
                        // In the rare case that entering VR will trigger a density change (and
                        // hence an Activity recreation), just return to Daydream home and kill the
                        // process, as there's no good way to recreate without showing 2D UI
                        // in-headset.
                        mActivity.finish();
                        System.exit(0);
                    }
                }
            }
        }
        maybePrefetchDnsInBackground();
        maybeAuthenticateFirstPartyTranslateIntent(mIntent);

        Intent newIntent = new Intent(mIntent);

        if (Intent.ACTION_VIEW.equals(newIntent.getAction())
                && !IntentHandler.wasIntentSenderChrome(newIntent)) {
            long time = SystemClock.elapsedRealtime();
            if (!chromeTabbedTaskExists()) {
                newIntent.putExtra(IntentHandler.EXTRA_STARTED_TABBED_CHROME_TASK, true);
            }
            RecordHistogram.recordTimesHistogram("Startup.Android.ChromeTabbedTaskExistsTime",
                    SystemClock.elapsedRealtime() - time);
        }

        // Activity#getReferrer() is only available on API 22+ (LOLLIPOP_MR1).
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
            Uri extraReferrer = mActivity.getReferrer();
            if (extraReferrer != null) {
                newIntent.putExtra(IntentHandler.EXTRA_ACTIVITY_REFERRER, extraReferrer.toString());
            }
        }

        String targetActivityClassName = MultiWindowUtils.getInstance()
                                                 .getTabbedActivityForIntent(newIntent, mActivity)
                                                 .getName();
        newIntent.setClassName(
                mActivity.getApplicationContext().getPackageName(), targetActivityClassName);
        newIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK
                | Intent.FLAG_ACTIVITY_RETAIN_IN_RECENTS);
        Uri uri = newIntent.getData();
        boolean isContentScheme = false;
        if (uri != null && UrlConstants.CONTENT_SCHEME.equals(uri.getScheme())) {
            isContentScheme = true;
            newIntent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
        }
        if (MultiWindowUtils.getInstance().shouldRunInLegacyMultiInstanceMode(mActivity, mIntent)) {
            MultiWindowUtils.getInstance().makeLegacyMultiInstanceIntent(mActivity, newIntent);
        }

        if (newIntent.getComponent().getClassName().equals(mActivity.getClass().getName())) {
            // We're trying to start activity that is already running - just continue.
            return Action.CONTINUE;
        }

        // This system call is often modified by OEMs and not actionable. http://crbug.com/619646.
        try {
            Bundle options = mIsVrIntent
                    ? VrModuleProvider.getIntentDelegate().getVrIntentOptions(mActivity)
                    : null;
            mActivity.startActivity(newIntent, options);
        } catch (SecurityException ex) {
            if (isContentScheme) {
                // Likely a content:// URI whose provider restricted access; inform the user
                // instead of crashing.
                Toast.makeText(mActivity,
                             org.chromium.chrome.R.string.external_app_restricted_access_error,
                             Toast.LENGTH_LONG)
                        .show();
            } else {
                throw ex;
            }
        }

        return Action.FINISH_ACTIVITY;
    }

    /**
     * @return Whether a tabbed Chrome task already exists, determined first by scanning running
     *         activities and then, more slowly, by querying the recent-tasks list. On
     *         SecurityException the method conservatively reports {@code true}.
     */
    private boolean chromeTabbedTaskExists() {
        // Fast check for a running Chrome instance.
        for (Activity activity : ApplicationStatus.getRunningActivities()) {
            if (activity instanceof ChromeTabbedActivity) return true;
        }
        // Slightly slower check for an existing task (One IPC, usually ~2ms).
        final ActivityManager activityManager =
                (ActivityManager) mActivity.getSystemService(Context.ACTIVITY_SERVICE);
        try {
            // NOTE(review): |chromeTaskExists| is never read or written below — candidate for
            // removal in a follow-up code change.
            boolean chromeTaskExists = false;
            // getRecentTasks is deprecated, but still returns your app's tasks, and does so
            // without needing an extra IPC for each task you want to get the info for. It also
            // includes some known-safe tasks like the home screen on older Android versions, but
            // that's fine for this purpose.
            List<ActivityManager.RecentTaskInfo> tasks =
                    activityManager.getRecentTasks(MAX_NUM_TASKS, 0);
            if (tasks != null) {
                for (ActivityManager.RecentTaskInfo task : tasks) {
                    // Note that Android documentation lies, and TaskInfo#origActivity does not
                    // actually return the target of an alias, so we have to explicitly check
                    // for the target component of the base intent, which will have been set to
                    // the Activity that launched, in order to make this check more robust.
                    ComponentName component = task.baseIntent.getComponent();
                    if (component == null) continue;
                    if (ChromeTabbedActivity.isTabbedModeComponentName(component.getClassName())) {
                        return true;
                    }
                }
            }
        } catch (SecurityException ex) {
            // If we can't query task status, assume a Chrome task exists so this doesn't
            // mistakenly lead to a Chrome task being removed.
            return true;
        }
        return false;
    }

    /**
     * Records metrics gleaned from the Intent.
     */
    private void recordIntentMetrics() {
        @IntentHandler.ExternalAppId
        int source = IntentHandler.determineExternalIntentSource(mIntent);
        if (mIntent.getPackage() == null && source != IntentHandler.ExternalAppId.CHROME) {
            int flagsOfInterest = Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_NEW_DOCUMENT;
            int maskedFlags = mIntent.getFlags() & flagsOfInterest;
            RecordHistogram.recordSparseHistogram("Launch.IntentFlags", maskedFlags);
        }
        MediaNotificationUma.recordClickSource(mIntent);
    }

    /**
     * @return Whether CLEAR_TOP-based intent delivery to Custom Tabs is enabled for this intent.
     */
    private static boolean clearTopIntentsForCustomTabsEnabled(Intent intent) {
        // The new behavior is important for TWAs, but could potentially affect other clients.
        // For now we expose this risky change only to TWAs.
        return IntentUtils.safeGetBooleanExtra(
                intent, TrustedWebUtils.EXTRA_LAUNCH_AS_TRUSTED_WEB_ACTIVITY, false);
    }

    /**
     * Handles attribution-reporting intents. An "outer" attribution intent is fully consumed
     * (returns true so the caller finishes); an "inner" one is un-wrapped and replaces
     * {@link #mIntent} after re-sanitizing, then dispatching continues (returns false).
     */
    private boolean handleAppAttributionIntent() {
        if (mAttributionIntentHandler.handleOuterAttributionIntent(mIntent)) return true;
        Intent launchIntent = mAttributionIntentHandler.handleInnerAttributionIntent(mIntent);
        if (launchIntent != null) mIntent = IntentUtils.sanitizeIntent(launchIntent);
        return false;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.cache.query.internal; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.lang.reflect.Method; import java.math.BigDecimal; import java.math.BigInteger; import java.nio.charset.Charset; import java.sql.Timestamp; import java.util.Date; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.cache.query.internal.index.DummyQRegion; import org.apache.geode.internal.cache.EntrySnapshot; import org.apache.geode.internal.cache.LocalRegion; import org.apache.geode.internal.cache.PartitionedRegion; import org.apache.geode.test.junit.categories.SecurityTest; @Category({SecurityTest.class}) public class RestrictedMethodInvocationAuthorizerTest { RestrictedMethodInvocationAuthorizer methodInvocationAuthorizer = new RestrictedMethodInvocationAuthorizer(null); @Test public void getClassShouldFail() throws Exception { Method method = Integer.class.getMethod("getClass"); 
assertFalse(methodInvocationAuthorizer.isWhitelisted(method)); } @Test public void toStringOnAnyObject() throws Exception { Method stringMethod = Integer.class.getMethod("toString"); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void equalsOnAnyObject() throws Exception { Method equalsMethod = Integer.class.getMethod("equals", Object.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(equalsMethod)); } @Test public void booleanMethodsAreWhiteListed() throws Exception { Method booleanValue = Boolean.class.getMethod("booleanValue"); assertTrue(methodInvocationAuthorizer.isWhitelisted(booleanValue)); } @Test public void toCharAtOnStringObject() throws Exception { Method stringMethod = String.class.getMethod("charAt", int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void codePointAtStringObject() throws Exception { Method stringMethod = String.class.getMethod("codePointAt", int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void codePointBeforeStringObject() throws Exception { Method stringMethod = String.class.getMethod("codePointBefore", int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void codePointCountStringObject() throws Exception { Method stringMethod = String.class.getMethod("codePointCount", int.class, int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void compareToStringObject() throws Exception { Method stringMethod = String.class.getMethod("compareTo", String.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void compareToIgnoreCaseStringObject() throws Exception { Method stringMethod = String.class.getMethod("compareToIgnoreCase", String.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void concatStringObject() throws Exception { Method stringMethod = 
String.class.getMethod("compareTo", String.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void containsStringObject() throws Exception { Method stringMethod = String.class.getMethod("contains", CharSequence.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void contentEqualsStringObject() throws Exception { Method stringMethod = String.class.getMethod("contentEquals", CharSequence.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void contentEqualsWithStringBufferStringObject() throws Exception { Method stringMethod = String.class.getMethod("contentEquals", StringBuffer.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void endsWithOnStringObject() throws Exception { Method stringMethod = String.class.getMethod("endsWith", String.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void equalsIgnoreCase() throws Exception { Method stringMethod = String.class.getMethod("equalsIgnoreCase", String.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void getBytesOnString() throws Exception { Method stringMethod = String.class.getMethod("getBytes"); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void getBytesWithCharsetOnString() throws Exception { Method stringMethod = String.class.getMethod("getBytes", Charset.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void hashCodeOnStringObject() throws Exception { Method stringMethod = String.class.getMethod("hashCode"); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void indexOfOnStringObject() throws Exception { Method stringMethod = String.class.getMethod("indexOf", int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void 
indexOfWithStringOnStringObject() throws Exception { Method stringMethod = String.class.getMethod("indexOf", String.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void indexOfWithStringAndIntOnStringObject() throws Exception { Method stringMethod = String.class.getMethod("indexOf", String.class, int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void internOnStringObject() throws Exception { Method stringMethod = String.class.getMethod("intern"); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void isEmpty() throws Exception { Method stringMethod = String.class.getMethod("isEmpty"); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void lastIndexOfWithIntOnString() throws Exception { Method stringMethod = String.class.getMethod("lastIndexOf", int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void lastIndexOfWithIntAndFronIndexOnString() throws Exception { Method stringMethod = String.class.getMethod("lastIndexOf", int.class, int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void lastIndexOfWithStringOnString() throws Exception { Method stringMethod = String.class.getMethod("lastIndexOf", String.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void lastIndexOfWithStringAndFromIndexOnString() throws Exception { Method stringMethod = String.class.getMethod("lastIndexOf", String.class, int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void lengthOnString() throws Exception { Method stringMethod = String.class.getMethod("length"); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void matchesOnString() throws Exception { Method stringMethod = String.class.getMethod("matches", String.class); 
assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void offsetByCodePointsOnString() throws Exception { Method stringMethod = String.class.getMethod("offsetByCodePoints", int.class, int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void regionMatchesWith5ParamsOnString() throws Exception { Method stringMethod = String.class.getMethod("regionMatches", boolean.class, int.class, String.class, int.class, int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void regionMatchesWith4ParamsOnString() throws Exception { Method stringMethod = String.class.getMethod("regionMatches", int.class, String.class, int.class, int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void replaceOnString() throws Exception { Method stringMethod = String.class.getMethod("replace", char.class, char.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void replaceWithCharSequenceOnString() throws Exception { Method stringMethod = String.class.getMethod("replace", CharSequence.class, CharSequence.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void replaceAllOnString() throws Exception { Method stringMethod = String.class.getMethod("replaceAll", String.class, String.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void replaceFirstOnString() throws Exception { Method stringMethod = String.class.getMethod("replaceFirst", String.class, String.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void splitOnString() throws Exception { Method stringMethod = String.class.getMethod("split", String.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void splitWithLimitOnString() throws Exception { Method stringMethod = 
String.class.getMethod("split", String.class, int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void startsOnString() throws Exception { Method stringMethod = String.class.getMethod("startsWith", String.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void startsWithOffsetOnString() throws Exception { Method stringMethod = String.class.getMethod("startsWith", String.class, int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void substringOnString() throws Exception { Method stringMethod = String.class.getMethod("substring", int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void substringWithEndIndexOnString() throws Exception { Method stringMethod = String.class.getMethod("substring", int.class, int.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void toCharArrayOnString() throws Exception { Method stringMethod = String.class.getMethod("toCharArray"); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void toLowerCaseOnStringObject() throws Exception { Method stringMethod = String.class.getMethod("toLowerCase"); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void toUpperCaseOnStringObject() throws Exception { Method stringMethod = String.class.getMethod("toUpperCase"); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void trimOnString() throws Exception { Method stringMethod = String.class.getMethod("trim"); assertTrue(methodInvocationAuthorizer.isWhitelisted(stringMethod)); } @Test public void utilDateAfterMethodIsWhiteListed() throws Exception { Method method = Date.class.getMethod("after", Date.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(method)); } @Test public void sqlDateAfterMethodIsWhiteListed() throws Exception { Method method 
= java.sql.Date.class.getMethod("after", Date.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(method)); } @Test public void utilDateBeforeMethodIsWhiteListed() throws Exception { Method method = Date.class.getMethod("before", Date.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(method)); } @Test public void sqlDateBeforeMethodIsWhiteListed() throws Exception { Method method = java.sql.Date.class.getMethod("before", Date.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(method)); } @Test public void timestampAfterMethodIsWhiteListed() throws Exception { Method method = Timestamp.class.getMethod("after", Timestamp.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(method)); } @Test public void sqlTimestampBeforeMethodIsWhiteListed() throws Exception { Method method = Timestamp.class.getMethod("before", Timestamp.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(method)); } @Test public void sqlTimestampGetNanosIsWhiteListed() throws Exception { Method method = Timestamp.class.getMethod("getNanos"); assertTrue(methodInvocationAuthorizer.isWhitelisted(method)); } @Test public void sqlTimestampGetTimeIsWhiteListed() throws Exception { Method method = Timestamp.class.getMethod("getTime"); assertTrue(methodInvocationAuthorizer.isWhitelisted(method)); } @Test public void getKeyForMapEntryIsWhiteListed() throws Exception { Method getKeyMethod = Map.Entry.class.getMethod("getKey"); assertTrue(methodInvocationAuthorizer.isWhitelisted(getKeyMethod)); } @Test public void getValueForMapEntryIsWhiteListed() throws Exception { Method getValueMethod = Map.Entry.class.getMethod("getValue"); assertTrue(methodInvocationAuthorizer.isWhitelisted(getValueMethod)); } @Test public void getKeyForMapEntrySnapShotIsWhiteListed() throws Exception { Method getKeyMethod = EntrySnapshot.class.getMethod("getKey"); assertTrue(methodInvocationAuthorizer.isWhitelisted(getKeyMethod)); } @Test public void getValueForMapEntrySnapShotIsWhiteListed() 
throws Exception { Method getValueMethod = EntrySnapshot.class.getMethod("getValue"); assertTrue(methodInvocationAuthorizer.isWhitelisted(getValueMethod)); } @Test public void getKeyForNonTXEntryIsWhiteListed() throws Exception { Method getKeyMethod = LocalRegion.NonTXEntry.class.getMethod("getKey"); assertTrue(methodInvocationAuthorizer.isWhitelisted(getKeyMethod)); } @Test public void getValueForNonTXEntryIsWhiteListed() throws Exception { Method getValueMethod = LocalRegion.NonTXEntry.class.getMethod("getValue"); assertTrue(methodInvocationAuthorizer.isWhitelisted(getValueMethod)); } @Test public void mapMethodsForQRegionAreWhiteListed() throws Exception { testMapMethods(QRegion.class); } @Test public void mapMethodsForDummyQRegionAreWhiteListed() throws Exception { testMapMethods(DummyQRegion.class); } @Test public void mapMethodsForPartitionedRegionAreWhiteListed() throws Exception { Class<PartitionedRegion> clazz = PartitionedRegion.class; Method get = clazz.getMethod("get", Object.class); Method entrySet = clazz.getMethod("entrySet"); Method keySet = clazz.getMethod("keySet"); Method values = clazz.getMethod("values"); Method containsKey = clazz.getMethod("containsKey", Object.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(get)); assertTrue(methodInvocationAuthorizer.isWhitelisted(entrySet)); assertTrue(methodInvocationAuthorizer.isWhitelisted(keySet)); assertTrue(methodInvocationAuthorizer.isWhitelisted(values)); assertTrue(methodInvocationAuthorizer.isWhitelisted(containsKey)); } @Test public void numberMethodsForByteAreWhiteListed() throws Exception { testNumberMethods(Byte.class); } @Test public void numberMethodsForDoubleAreWhiteListed() throws Exception { testNumberMethods(Double.class); } @Test public void numberMethodsForFloatAreWhiteListed() throws Exception { testNumberMethods(Float.class); } @Test public void numberMethodsForIntegerAreWhiteListed() throws Exception { testNumberMethods(Integer.class); } @Test public void 
numberMethodsForShortAreWhiteListed() throws Exception { testNumberMethods(Short.class); } @Test public void numberMethodsForBigDecimalAreWhiteListed() throws Exception { testNumberMethods(BigDecimal.class); } @Test public void numberMethodsForNumberAreWhiteListed() throws Exception { testNumberMethods(BigInteger.class); } @Test public void numberMethodsForAtomicIntegerAreWhiteListed() throws Exception { testNumberMethods(AtomicInteger.class); } @Test public void numberMethodsForAtomicLongAreWhiteListed() throws Exception { testNumberMethods(AtomicLong.class); } @Test public void verifyAuthorizersUseDefaultWhiteList() { RestrictedMethodInvocationAuthorizer authorizer1 = new RestrictedMethodInvocationAuthorizer(null); RestrictedMethodInvocationAuthorizer authorizer2 = new RestrictedMethodInvocationAuthorizer(null); assertThat(authorizer1.getWhiteList()).isSameAs(authorizer2.getWhiteList()); assertThat(authorizer1.getWhiteList()) .isSameAs(RestrictedMethodInvocationAuthorizer.DEFAULT_WHITELIST); assertThat(authorizer2.getWhiteList()) .isSameAs(RestrictedMethodInvocationAuthorizer.DEFAULT_WHITELIST); } private void testNumberMethods(Class<?> clazz) throws NoSuchMethodException { Method byteValue = clazz.getMethod("byteValue"); Method doubleValue = clazz.getMethod("doubleValue"); Method intValue = clazz.getMethod("intValue"); Method floatValue = clazz.getMethod("longValue"); Method longValue = clazz.getMethod("floatValue"); Method shortValue = clazz.getMethod("shortValue"); assertTrue(methodInvocationAuthorizer.isWhitelisted(byteValue)); assertTrue(methodInvocationAuthorizer.isWhitelisted(doubleValue)); assertTrue(methodInvocationAuthorizer.isWhitelisted(intValue)); assertTrue(methodInvocationAuthorizer.isWhitelisted(floatValue)); assertTrue(methodInvocationAuthorizer.isWhitelisted(longValue)); assertTrue(methodInvocationAuthorizer.isWhitelisted(shortValue)); } private void testMapMethods(Class<?> clazz) throws NoSuchMethodException { Method get = 
clazz.getMethod("get", Object.class); Method entrySet = clazz.getMethod("entrySet"); Method keySet = clazz.getMethod("keySet"); Method values = clazz.getMethod("values"); Method getEntries = clazz.getMethod("getEntries"); Method getValues = clazz.getMethod("getValues"); Method containsKey = clazz.getMethod("containsKey", Object.class); assertTrue(methodInvocationAuthorizer.isWhitelisted(get)); assertTrue(methodInvocationAuthorizer.isWhitelisted(entrySet)); assertTrue(methodInvocationAuthorizer.isWhitelisted(keySet)); assertTrue(methodInvocationAuthorizer.isWhitelisted(values)); assertTrue(methodInvocationAuthorizer.isWhitelisted(getEntries)); assertTrue(methodInvocationAuthorizer.isWhitelisted(getValues)); assertTrue(methodInvocationAuthorizer.isWhitelisted(containsKey)); } }