gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/**
*
*/
package de.csw.cl.importer.model;
import static util.XMLUtil.NS_XCL2;
import java.io.File;
import java.io.FilenameFilter;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jdom2.Content;
import org.jdom2.Document;
import org.jdom2.Element;
import de.csw.cl.importer.algorithm.FolderCreationException;
import util.XMLUtil;
/**
* This class represents a CL text corpus and its elements which are relevant
* during the importation process. The availability of such elements is subject
* to change during the importation process. E.g., a nested titling can become
* available for importation once its surrounding titling has been imported.
* Likewise, the stack of processable import directives can grow if contents of
* titlings including import directives are asserted after importation, or
* shrink after import directives have been executed and replaced by the actual
* content of the imported titled texts.
*
* @author ralph
*
*/
public class Corpus {
//TODO : why is logging not working here for the JUnit tests?
private static Logger LOG = LogManager.getLogger(Corpus.class);
// The top-level XML documents of the corpus.
// The documents should be valid with respect to the XCL2 schema, when
// XInclude directives are resolved.
// The state of the Corpus is inconsistent if there are circular XInclude directives.
// Titlings should not contain XInclude directives in the Ontomaven namespace.
private final HashMap<Document, File> documents = new HashMap<Document, File>();
// All top-level titlings (by name) that are available for importation.
// During the importation process, new titlings, which become importable
// because their surrounding titling has been imported, might be added to
// this map.
private HashMap<String, Element> importableTitlings = new HashMap<String, Element>();
//TODO: maintain a hashmap of accessible importation elements and their include IRI
//private HashMap<Element, String> accessibleImportations = new HashMap<Element, String>();
// An Includes data structure containing all XML elements that are
// included by XInclude directives in the corpus,
// indexed by the relative path that will be used
// for local caching when the corpus is written.
// During the importation process, new entries may be added.
public Includes includes;
// Contains mappings between the IRIs of XInclude directives and the
// relative path for the cached copy.
// All Xinclude directives in the XInclude closure of documents that reference remote objects
// should have mappings in the catalog that map to relative file paths.
// The catalog should not contain mappings in the Ontomaven domain that are not used by some XInclude directive.
// The catalog should not contain mappings to remote IRIs.
// During the importation process, new entries may be added.
public XMLCatalog catalog;
/**
* @param corpusDir
* the directory containing XCL2 documents and optionally
* the catalog.xml file and /incldues directory
* @throws ConflictingTitlingException
* @throws MissingIncludeEntryException
* @throws MissingCatalogEntryException
*/
public Corpus(File corpusDir) throws ConflictingTitlingException, MissingCatalogEntryException, MissingIncludeEntryException {
includes = new Includes(corpusDir);
catalog = new XMLCatalog(new File(corpusDir, "catalog.xml"));
if(!(includes.verifySequentialFileNames())) {
//TODO: do something if the includes are not named as sequential numbers
System.out.println("Warning: include files are not sequentially numbered.");
}
File[] docFiles = corpusDir.listFiles(new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.toLowerCase().endsWith(".xcl");
}
});
if (!(docFiles == null)) {
for (File docFile : docFiles) {
Document doc = XMLUtil.readLocalDoc(docFile);
// TODO wenn schemaURL, dann:
// XMLUtil.readAndValidate(file, schemaURL);
addDocument(doc, docFile);
}
}
catalog = cleanCatalog();
//TODO: verify that each file in includes is used in some XInclude directive.
}
/**
* Adds a loaded document to the corpus.
* @param doc
* the document to add.
* @param documentFile
* the original file of the document.
* @throws ConflictingTitlingException
* @throws MissingIncludeEntryException
* @throws MissingCatalogEntryException
*/
public void addDocument(Document doc, File documentFile) throws ConflictingTitlingException, MissingCatalogEntryException, MissingIncludeEntryException {
LOG.debug("Adding document: " + documentFile.getName());
System.out.println("Adding document: " + documentFile.getName());
documents.put(doc, documentFile);
extractTitlings(doc.getRootElement());
}
public XMLCatalog cleanCatalog() throws MissingCatalogEntryException, MissingIncludeEntryException {
//remove catalog entries in the Ontomaven domain that are unused,
XMLCatalog newCatalog = catalog.clone();
for ( Entry<String, String> entry: catalog.getMappings().entrySet() ){
String name = entry.getKey();
if (name.startsWith(XMLUtil.NS_ONTOMAVEN.getURI().toString())) {
if (!(verifyInclude(name))) {
LOG.warn("Warning: An unused catalog entries has been removed: " + name);
System.out.println("Warning: An unused catalog entries has been removed: " + name);
newCatalog.removeMapping(name);
}
}
}
//TODO: verify that each catalog entry maps to a relative path.
//TODO: verify that each catalog entry has an includes entry.
return newCatalog;
}
/**
* Returns a {@link Boolean} indicating if an IRI is used for XInclude in the corpus.
* This implementation only checks for an exact String match of the IRI.
* TODO: also check for equivalent IRIs?
* @param uri the IRI to be checked.
* @return true if there is an XInclude that references uri
* in the XInclude closure of the corpus documents.
* @throws MissingIncludeEntryException
* @throws MissingCatalogEntryException
*/
private boolean verifyInclude(String uri) throws MissingCatalogEntryException, MissingIncludeEntryException {
for( Document doc: getDocuments()) {
//System.out.println("Checking:" + getOriginalFile(doc).getName());
if(verifyIncludeLoop(doc.getRootElement(), uri ))
return true;
}
return false;
}
/**
* Returns a {@link Boolean} indicating if an IRI is used for XInclude in an {@link Element}.
* @param e the {@link Element} to check.
* @param uri the IRI to check.
* @return true if there is an XInclude that references uri
* in the XInclude closure of e.
* @throws MissingIncludeEntryException
* @throws MissingCatalogEntryException
*/
private boolean verifyIncludeLoop(Element e, String uri) throws MissingCatalogEntryException, MissingIncludeEntryException {
String ename = e.getName();
//System.out.println("Checking: " + ename);
if(e.getNamespace().equals(XMLUtil.NS_XINCLUDE)) {
if(ename.equals("include")){
if (e.getAttributeValue("href").equals(uri)) return true;
if (verifyIncludeLoop(followInclude(e), uri)) return true;
}
return false;
}
if(!(e.getNamespace().equals(XMLUtil.NS_XCL2))) return false;
if(ename.equals("Restrict") || ename.equals("Construct")){
for( Element c : e.getChildren()) {
if(verifyIncludeLoop(c, uri)) return true;
}
}
return false;
}
/**
* Returns an {@link Iterable} over all documents contained in this {@link Corpus}.
* @return an {@link Iterable} over all documents contained in this {@link Corpus}.
*/
public Iterable<Document> getDocuments() {
return documents.keySet();
}
/**
* Gets the original file of a document
* @param doc the document
* @return the original file
*/
public File getOriginalFile(Document doc) {
return documents.get(doc);
}
/**
* Returns the number of documents in the corpus
*
* @return
*/
public Integer size() {
return documents.size();
}
/**
* Returns an importable titling with a given name, or null if no such
* titling exists.
*
* @param name
* @return
*/
public Element getImportableTitling(String name) {
return importableTitlings.get(name);
}
/**
* Performs a recursive depth-first traversion of the subtree under the
* given element e for Titling elements and adds the found Titling elements
* to the map of importable Titlings. The search will not continue under the
* found Titling elements.
*
* @param e
* @throws ConflictingTitlingException
* @throws MissingIncludeEntryException
* @throws MissingCatalogEntryException
*/
public void extractTitlings(Element e) throws ConflictingTitlingException, MissingCatalogEntryException, MissingIncludeEntryException {
e = followInclude(e);
List<Element> childElements = e.getChildren();
for (Element child : childElements) {
LOG.debug("Checking child: "+ child.getName());
System.out.println("Checking child: "+ child.getName());
child = followInclude(child);
if (Corpus.isTitling(child)) {
LOG.debug("Adding titling");
System.out.println("Adding titling");
addImportableTitling(child);
}
if (Corpus.isConstruct(child) || Corpus.isRestrict(child)) {
extractTitlings(child);
}
}
}
/**
* Adds an importable titling to the map of importable titlings.
* @param e
* @throws ConflictingTitlingException
*/
private void addImportableTitling(Element e)
throws ConflictingTitlingException {
String titlingName = getName(e);
//TODO if titlingName is CURIE, expand to IRI
//TODO if nameElement has no cri attribute, get symbol
Element existingImportableTitling = importableTitlings.get(titlingName);
if (existingImportableTitling != null) {
if (!XMLUtil.equal(e, existingImportableTitling)) {
// conflicting Titling
throw new ConflictingTitlingException(titlingName,
existingImportableTitling, e);
} else {
LOG.debug(" Titling " + titlingName + " exists. Skipping.");
System.out.println(" Titling " + titlingName + " exists. Skipping.");
}
} else {
LOG.debug(" Adding titling " + titlingName);
System.out.println(" Adding titling " + titlingName);
importableTitlings.put(titlingName, e.clone());
}
}
public Element followInclude(Element e) throws MissingCatalogEntryException, MissingIncludeEntryException {
if(isXInclude(e)) {
String uri = e.getAttributeValue("href");
try {
String filePath = catalog.getFileHash(uri);
try {
Element enew = includes.getInclude(filePath, null);
return followInclude(enew);
} catch (NullPointerException ex) {
LOG.warn("Warning: no entry for this key in includes. Not following.");
System.out.println("Warning: no entry for this key in includes. Not following.");
throw new MissingIncludeEntryException(filePath);
}
} catch (NullPointerException ex) {
LOG.warn("Warning: no entry for this key in includes. Not following.");
System.out.println("Warning: no entry for this key in includes. Not following.");
throw new MissingCatalogEntryException(uri);
//TODO try to resolve includes that are not in catalog
}
}
return e;
}
public void write(File resultDir) throws FolderCreationException {
if (size() > 0) {
if (!resultDir.mkdir()) {
throw new FolderCreationException("Error creating directory " + resultDir.getAbsolutePath());
}
for (Document document : getDocuments()) {
XMLUtil.writeXML(document, getOutputFile(resultDir, document) );
}
catalog.write(resultDir);
includes.writeIncludes(resultDir);
List<String> unresolvedImports = getUnresolvedImports();
if (!(unresolvedImports.isEmpty())) {
LOG.warn("Warning. There are unresolved importations:");
System.out.println("Warning. There are unresolved importations:");
for (String unresolvedImport : unresolvedImports) {
LOG.info(unresolvedImport);
System.out.println(unresolvedImport);
}
}
}
}
private File getOutputFile(File resultDir, Document document) {
return new File(resultDir, getOriginalFile(document).getName().replaceAll("myText", "resultText"));
}
private LinkedList<String> getUnresolvedImports() {
final LinkedList<String> unresolvedImports = new LinkedList<String>();
Iterable<Document> documents = getDocuments();
for (Document document : documents) {
final String fileName = getOutputFile( null, document).getName() ;
XMLUtil.performRecursivelAction(document.getRootElement(), new XMLUtil.Action() {
public void doAction(Element e) {
if (e.getName().equals("Import") && isUntitled(e)) {
unresolvedImports.add(getName(e) + " in " + fileName);
}
}
});
}
return unresolvedImports;
}
private boolean isUntitled(Element e) {
if(!(e.isRootElement())) {
Element enew = e.getParentElement();
if(isTitling(enew)) {
return false;
}
return isUntitled(enew);
}
return true;
}
public static String getName(Element e) {
Element nameElement = e.getChild("Name", NS_XCL2);
//TODO: resolve CURIEs
//TODO: get non-IRI names
return nameElement == null ? null : nameElement.getAttributeValue("cri");
}
public static boolean isTitling(Content e) {
switch(e.getCType()) {
case Element :
return (((Element) e).getName().equals("Titling") && ((Element) e).getNamespace().equals(XMLUtil.NS_XCL2));
default:
return false;
}
}
public static boolean isRestrict(Content e) {
switch(e.getCType()) {
case Element :
return (((Element) e).getName().equals("Restrict") && ((Element) e).getNamespace().equals(XMLUtil.NS_XCL2));
default:
return false;
}
}
public static boolean isConstruct(Content e) {
switch(e.getCType()) {
case Element :
return (((Element) e).getName().equals("Construct") && ((Element) e).getNamespace().equals(XMLUtil.NS_XCL2));
default:
return false;
}
}
public static boolean isImport(Content e) {
switch(e.getCType()) {
case Element :
return (((Element) e).getName().equals("Import") && ((Element) e).getNamespace().equals(XMLUtil.NS_XCL2));
default:
return false;
}
}
public static boolean isXInclude(Content e) {
switch(e.getCType()) {
case Element :
return (((Element) e).getName().equals("include") && ((Element) e).getNamespace().equals(XMLUtil.NS_XINCLUDE) );
default:
return false;
}
}
public static boolean isXMLComment(Content e) {
switch(e.getCType()) {
case Comment :
return true;
default:
return false;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.LoggingLevel;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;
import org.apache.camel.spi.PollingConsumerPollStrategy;
/**
* Poll for changes in Google Sheets.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface GoogleSheetsStreamEndpointBuilderFactory {
/**
* Builder for endpoint for the Google Sheets Stream component.
*/
public interface GoogleSheetsStreamEndpointBuilder
extends
EndpointConsumerBuilder {
default AdvancedGoogleSheetsStreamEndpointBuilder advanced() {
return (AdvancedGoogleSheetsStreamEndpointBuilder) this;
}
/**
* Google sheets application name. Example would be
* camel-google-sheets/1.0.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder applicationName(
String applicationName) {
doSetProperty("applicationName", applicationName);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder bridgeErrorHandler(
boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder bridgeErrorHandler(
String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Client ID of the sheets application.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder clientId(String clientId) {
doSetProperty("clientId", clientId);
return this;
}
/**
* True if grid data should be returned.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder includeGridData(
boolean includeGridData) {
doSetProperty("includeGridData", includeGridData);
return this;
}
/**
* True if grid data should be returned.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder includeGridData(
String includeGridData) {
doSetProperty("includeGridData", includeGridData);
return this;
}
/**
* Specifies the major dimension that results should use..
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: ROWS
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder majorDimension(
String majorDimension) {
doSetProperty("majorDimension", majorDimension);
return this;
}
/**
* Specify the maximum number of returned results. This will limit the
* number of rows in a returned value range data set or the number of
* returned value ranges in a batch request.
*
* The option is a: <code>int</code> type.
*
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder maxResults(int maxResults) {
doSetProperty("maxResults", maxResults);
return this;
}
/**
* Specify the maximum number of returned results. This will limit the
* number of rows in a returned value range data set or the number of
* returned value ranges in a batch request.
*
* The option will be converted to a <code>int</code> type.
*
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder maxResults(String maxResults) {
doSetProperty("maxResults", maxResults);
return this;
}
/**
* Specifies the range of rows and columns in a sheet to get data from.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder range(String range) {
doSetProperty("range", range);
return this;
}
/**
* Specifies the level of permissions you want a sheets application to
* have to a user account. See
* https://developers.google.com/identity/protocols/googlescopes for
* more info.
*
* The option is a: <code>java.util.List<java.lang.String></code>
* type.
*
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder scopes(List<String> scopes) {
doSetProperty("scopes", scopes);
return this;
}
/**
* Specifies the level of permissions you want a sheets application to
* have to a user account. See
* https://developers.google.com/identity/protocols/googlescopes for
* more info.
*
* The option will be converted to a
* <code>java.util.List<java.lang.String></code> type.
*
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder scopes(String scopes) {
doSetProperty("scopes", scopes);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder sendEmptyMessageWhenIdle(
boolean sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder sendEmptyMessageWhenIdle(
String sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* True if value range result should be split into rows or columns to
* process each of them individually. When true each row or column is
* represented with a separate exchange in batch processing. Otherwise
* value range object is used as exchange junk size.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder splitResults(
boolean splitResults) {
doSetProperty("splitResults", splitResults);
return this;
}
/**
* True if value range result should be split into rows or columns to
* process each of them individually. When true each row or column is
* represented with a separate exchange in batch processing. Otherwise
* value range object is used as exchange junk size.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder splitResults(
String splitResults) {
doSetProperty("splitResults", splitResults);
return this;
}
/**
* Specifies the spreadsheet identifier that is used to identify the
* target to obtain.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder spreadsheetId(
String spreadsheetId) {
doSetProperty("spreadsheetId", spreadsheetId);
return this;
}
/**
* Determines how values should be rendered in the output.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: FORMATTED_VALUE
* Group: consumer
*/
default GoogleSheetsStreamEndpointBuilder valueRenderOption(
String valueRenderOption) {
doSetProperty("valueRenderOption", valueRenderOption);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder backoffErrorThreshold(
int backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder backoffErrorThreshold(
String backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder backoffIdleThreshold(
int backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder backoffIdleThreshold(
String backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder backoffMultiplier(
int backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder backoffMultiplier(
String backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option is a: <code>long</code> type.
*
* Default: 500
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder delay(long delay) {
doSetProperty("delay", delay);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 500
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder delay(String delay) {
doSetProperty("delay", delay);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder greedy(boolean greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder greedy(String greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option is a: <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder initialDelay(long initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder initialDelay(
String initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option is a: <code>long</code> type.
*
* Default: 0
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder repeatCount(long repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 0
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder repeatCount(String repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option is a: <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder runLoggingLevel(
LoggingLevel runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option will be converted to a
* <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder runLoggingLevel(
String runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option is a:
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder scheduledExecutorService(
ScheduledExecutorService scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option will be converted to a
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder scheduledExecutorService(
String scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option is a: <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder scheduler(Object scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option will be converted to a <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder scheduler(String scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder schedulerProperties(
String key,
Object value) {
doSetMultiValueProperty("schedulerProperties", "scheduler." + key, value);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder schedulerProperties(Map values) {
doSetMultiValueProperties("schedulerProperties", "scheduler.", values);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder startScheduler(
boolean startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder startScheduler(
String startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option is a: <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder timeUnit(TimeUnit timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option will be converted to a
* <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder timeUnit(String timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder useFixedDelay(
boolean useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*/
default GoogleSheetsStreamEndpointBuilder useFixedDelay(
String useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* OAuth 2 access token. This typically expires after an hour so
* refreshToken is recommended for long term usage.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default GoogleSheetsStreamEndpointBuilder accessToken(String accessToken) {
doSetProperty("accessToken", accessToken);
return this;
}
/**
* Client secret of the sheets application.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default GoogleSheetsStreamEndpointBuilder clientSecret(
String clientSecret) {
doSetProperty("clientSecret", clientSecret);
return this;
}
/**
* OAuth 2 refresh token. Using this, the Google Calendar component can
* obtain a new accessToken whenever the current one expires - a
* necessity if the application is long-lived.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*/
default GoogleSheetsStreamEndpointBuilder refreshToken(
String refreshToken) {
doSetProperty("refreshToken", refreshToken);
return this;
}
}
    /**
     * Advanced builder for endpoint for the Google Sheets Stream component.
     */
    public interface AdvancedGoogleSheetsStreamEndpointBuilder
            extends
                EndpointConsumerBuilder {
        /**
         * Returns the basic, non-advanced view of this endpoint builder.
         */
        default GoogleSheetsStreamEndpointBuilder basic() {
            return (GoogleSheetsStreamEndpointBuilder) this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         *
         * The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
         * type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedGoogleSheetsStreamEndpointBuilder exceptionHandler(
                ExceptionHandler exceptionHandler) {
            doSetProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         *
         * The option will be converted to a
         * <code>org.apache.camel.spi.ExceptionHandler</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedGoogleSheetsStreamEndpointBuilder exceptionHandler(
                String exceptionHandler) {
            doSetProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option is a: <code>org.apache.camel.ExchangePattern</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedGoogleSheetsStreamEndpointBuilder exchangePattern(
                ExchangePattern exchangePattern) {
            doSetProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option will be converted to a
         * <code>org.apache.camel.ExchangePattern</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedGoogleSheetsStreamEndpointBuilder exchangePattern(
                String exchangePattern) {
            doSetProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
         * you to provide your custom implementation to control error handling
         * usually occurred during the poll operation before an Exchange have
         * been created and being routed in Camel.
         *
         * The option is a:
         * <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedGoogleSheetsStreamEndpointBuilder pollStrategy(
                PollingConsumerPollStrategy pollStrategy) {
            doSetProperty("pollStrategy", pollStrategy);
            return this;
        }
        /**
         * A pluggable org.apache.camel.PollingConsumerPollingStrategy allowing
         * you to provide your custom implementation to control error handling
         * usually occurred during the poll operation before an Exchange have
         * been created and being routed in Camel.
         *
         * The option will be converted to a
         * <code>org.apache.camel.spi.PollingConsumerPollStrategy</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedGoogleSheetsStreamEndpointBuilder pollStrategy(
                String pollStrategy) {
            doSetProperty("pollStrategy", pollStrategy);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedGoogleSheetsStreamEndpointBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedGoogleSheetsStreamEndpointBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedGoogleSheetsStreamEndpointBuilder synchronous(
                boolean synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedGoogleSheetsStreamEndpointBuilder synchronous(
                String synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
    }
    public interface GoogleSheetsStreamBuilders {
        /**
         * Google Sheets Stream (camel-google-sheets)
         * Poll for changes in Google Sheets.
         *
         * Category: api,cloud,sheets
         * Since: 2.23
         * Maven coordinates: org.apache.camel:camel-google-sheets
         *
         * Syntax: <code>google-sheets-stream:apiName</code>
         *
         * Path parameter: apiName (required)
         * Sets the apiName.
         *
         * @param path apiName
         * @return the endpoint builder
         */
        default GoogleSheetsStreamEndpointBuilder googleSheetsStream(String path) {
            return GoogleSheetsStreamEndpointBuilderFactory.endpointBuilder("google-sheets-stream", path);
        }
        /**
         * Google Sheets Stream (camel-google-sheets)
         * Poll for changes in Google Sheets.
         *
         * Category: api,cloud,sheets
         * Since: 2.23
         * Maven coordinates: org.apache.camel:camel-google-sheets
         *
         * Syntax: <code>google-sheets-stream:apiName</code>
         *
         * Path parameter: apiName (required)
         * Sets the apiName.
         *
         * @param componentName to use a custom component name for the endpoint
         * instead of the default name
         * @param path apiName
         * @return the endpoint builder
         */
        default GoogleSheetsStreamEndpointBuilder googleSheetsStream(
                String componentName,
                String path) {
            return GoogleSheetsStreamEndpointBuilderFactory.endpointBuilder(componentName, path);
        }
    }
    /**
     * Creates an endpoint builder for the Google Sheets Stream component.
     *
     * @param componentName the component scheme to build the endpoint URI from
     * @param path the endpoint context path (the apiName)
     * @return a builder implementing both the basic and the advanced views
     */
    static GoogleSheetsStreamEndpointBuilder endpointBuilder(
            String componentName,
            String path) {
        // A method-local class is used so it can capture componentName from the
        // enclosing method while implementing both builder interfaces.
        class GoogleSheetsStreamEndpointBuilderImpl extends AbstractEndpointBuilder implements GoogleSheetsStreamEndpointBuilder, AdvancedGoogleSheetsStreamEndpointBuilder {
            public GoogleSheetsStreamEndpointBuilderImpl(String path) {
                super(componentName, path);
            }
        }
        return new GoogleSheetsStreamEndpointBuilderImpl(path);
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.tests;
import com.google.common.collect.ImmutableMap;
import io.prestosql.spi.type.VarcharType;
import io.prestosql.testing.MaterializedResult;
import org.testng.annotations.Test;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static io.prestosql.spi.type.VarcharType.createVarcharType;
import static io.prestosql.testing.MaterializedResult.resultBuilder;
import static io.prestosql.testing.assertions.Assert.assertEquals;
import static io.prestosql.tests.QueryAssertions.assertEqualsIgnoreOrder;
import static io.prestosql.tests.StructuralTestUtil.mapType;
/**
 * Correctness tests for SQL window functions, executed against the query
 * runner supplied by the concrete subclass (queries reference the standard
 * TPC-H tables: orders, lineitem, part).
 */
public class AbstractTestWindowQueries
        extends AbstractTestQueryFramework
{
    public AbstractTestWindowQueries(QueryRunnerSupplier supplier)
    {
        super(supplier);
    }
    // Window functions over row/array field dereference expressions
    // (a.col1[1].col0 etc.) used inside PARTITION BY and the aggregate input.
    @Test
    public void testRowFieldAccessorInWindowFunction()
    {
        assertQuery("SELECT a.col0, " +
                        "SUM(a.col1[1].col1) OVER(PARTITION BY a.col2.col0), " +
                        "SUM(a.col2.col1) OVER(PARTITION BY a.col2.col0) FROM " +
                        "(VALUES " +
                        "ROW(CAST(ROW(1.0, ARRAY[row(31, 14.5E0), row(12, 4.2E0)], row(3, 4.0E0)) AS ROW(col0 double, col1 array(ROW(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " +
                        "ROW(CAST(ROW(2.2, ARRAY[row(41, 13.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(ROW(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " +
                        "ROW(CAST(ROW(2.2, ARRAY[row(41, 17.1E0), row(45, 4.2E0)], row(7, 16.0E0)) AS ROW(col0 double, col1 array(ROW(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " +
                        "ROW(CAST(ROW(2.2, ARRAY[row(41, 13.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(ROW(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " +
                        "ROW(CAST(ROW(3.1, ARRAY[row(41, 13.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(ROW(col0 integer, col1 double)), col2 row(col0 integer, col1 double))))) t(a) ",
                "SELECT * FROM VALUES (1.0, 14.5, 4.0), (2.2, 39.3, 18.0), (2.2, 39.3, 18.0), (2.2, 17.1, 16.0), (3.1, 39.3, 18.0)");
        assertQuery("SELECT a.col1[1].col0, " +
                        "SUM(a.col0) OVER(PARTITION BY a.col1[1].col0), " +
                        "SUM(a.col1[1].col1) OVER(PARTITION BY a.col1[1].col0), " +
                        "SUM(a.col2.col1) OVER(PARTITION BY a.col1[1].col0) FROM " +
                        "(VALUES " +
                        "ROW(CAST(ROW(1.0, ARRAY[row(31, 14.5E0), row(12, 4.2E0)], row(3, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " +
                        "ROW(CAST(ROW(3.1, ARRAY[row(41, 13.1E0), row(32, 4.2E0)], row(6, 6.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double)))), " +
                        "ROW(CAST(ROW(2.2, ARRAY[row(31, 14.2E0), row(22, 5.2E0)], row(5, 4.0E0)) AS ROW(col0 double, col1 array(row(col0 integer, col1 double)), col2 row(col0 integer, col1 double))))) t(a) " +
                        "WHERE a.col1[2].col1 > a.col2.col0",
                "SELECT * FROM VALUES (31, 3.2, 28.7, 8.0), (31, 3.2, 28.7, 8.0)");
    }
    // Window ORDER BY over a DISTINCT aggregate.
    @Test
    public void testDistinctWindow()
    {
        MaterializedResult actual = computeActual(
                "SELECT RANK() OVER (PARTITION BY orderdate ORDER BY COUNT(DISTINCT clerk)) rnk " +
                        "FROM orders " +
                        "GROUP BY orderdate, custkey " +
                        "ORDER BY rnk " +
                        "LIMIT 1");
        MaterializedResult expected = resultBuilder(getSession(), BIGINT).row(1L).build();
        assertEquals(actual, expected);
    }
    // grouping() calls used both as an output column and inside a window's
    // PARTITION BY, combined with ROLLUP.
    @Test
    public void testGroupingInWindowFunction()
    {
        assertQuery(
                "SELECT orderkey, custkey, sum(totalprice), grouping(orderkey)+grouping(custkey) AS g, " +
                        "       rank() OVER (PARTITION BY grouping(orderkey)+grouping(custkey), " +
                        "       CASE WHEN grouping(orderkey) = 0 THEN custkey END ORDER BY orderkey ASC) AS r " +
                        "FROM orders " +
                        "GROUP BY ROLLUP (orderkey, custkey) " +
                        "ORDER BY orderkey, custkey " +
                        "LIMIT 10",
                "VALUES (1, 370, 172799.49, 0, 1), " +
                        "       (1, NULL, 172799.49, 1, 1), " +
                        "       (2, 781, 38426.09, 0, 1), " +
                        "       (2, NULL, 38426.09, 1, 2), " +
                        "       (3, 1234, 205654.30, 0, 1), " +
                        "       (3, NULL, 205654.30, 1, 3), " +
                        "       (4, 1369, 56000.91, 0, 1), " +
                        "       (4, NULL, 56000.91, 1, 4), " +
                        "       (5, 445, 105367.67, 0, 1), " +
                        "       (5, NULL, 105367.67, 1, 5)");
    }
    @Test
    public void testWindowImplicitCoercion()
    {
        assertQueryOrdered(
                "SELECT orderkey, 1e0 / row_number() OVER (ORDER BY orderkey) FROM orders LIMIT 2",
                "VALUES (1, 1.0), (2, 0.5)");
    }
    // Two window functions sharing the same partitioning but different orderings.
    @Test
    public void testWindowsSameOrdering()
    {
        MaterializedResult actual = computeActual("SELECT " +
                "sum(quantity) OVER(PARTITION BY suppkey ORDER BY orderkey)," +
                "min(tax) OVER(PARTITION BY suppkey ORDER BY shipdate)" +
                "FROM lineitem " +
                "ORDER BY 1 " +
                "LIMIT 10");
        MaterializedResult expected = resultBuilder(getSession(), DOUBLE, DOUBLE)
                .row(1.0, 0.0)
                .row(2.0, 0.0)
                .row(2.0, 0.0)
                .row(3.0, 0.0)
                .row(3.0, 0.0)
                .row(4.0, 0.0)
                .row(4.0, 0.0)
                .row(5.0, 0.0)
                .row(5.0, 0.0)
                .row(5.0, 0.0)
                .build();
        assertEquals(actual, expected);
    }
    // One window's partitioning is a prefix of the other's.
    @Test
    public void testWindowsPrefixPartitioning()
    {
        MaterializedResult actual = computeActual("SELECT " +
                "max(tax) OVER(PARTITION BY suppkey, tax ORDER BY receiptdate)," +
                "sum(quantity) OVER(PARTITION BY suppkey ORDER BY orderkey)" +
                "FROM lineitem " +
                "ORDER BY 2, 1 " +
                "LIMIT 10");
        MaterializedResult expected = resultBuilder(getSession(), DOUBLE, DOUBLE)
                .row(0.06, 1.0)
                .row(0.02, 2.0)
                .row(0.06, 2.0)
                .row(0.02, 3.0)
                .row(0.08, 3.0)
                .row(0.03, 4.0)
                .row(0.03, 4.0)
                .row(0.02, 5.0)
                .row(0.03, 5.0)
                .row(0.07, 5.0)
                .build();
        assertEquals(actual, expected);
    }
    // Three window functions with mutually different partitionings.
    @Test
    public void testWindowsDifferentPartitions()
    {
        MaterializedResult actual = computeActual("SELECT " +
                "sum(quantity) OVER(PARTITION BY suppkey ORDER BY orderkey)," +
                "count(discount) OVER(PARTITION BY partkey ORDER BY receiptdate)," +
                "min(tax) OVER(PARTITION BY suppkey, tax ORDER BY receiptdate)" +
                "FROM lineitem " +
                "ORDER BY 1, 2 " +
                "LIMIT 10");
        MaterializedResult expected = resultBuilder(getSession(), DOUBLE, BIGINT, DOUBLE)
                .row(1.0, 10L, 0.06)
                .row(2.0, 4L, 0.06)
                .row(2.0, 16L, 0.02)
                .row(3.0, 3L, 0.08)
                .row(3.0, 38L, 0.02)
                .row(4.0, 10L, 0.03)
                .row(4.0, 10L, 0.03)
                .row(5.0, 9L, 0.03)
                .row(5.0, 13L, 0.07)
                .row(5.0, 15L, 0.02)
                .build();
        assertEquals(actual, expected);
    }
    // lag() with a constant offset argument.
    @Test
    public void testWindowsConstantExpression()
    {
        assertQueryOrdered(
                "SELECT " +
                        "sum(size) OVER(PARTITION BY type ORDER BY brand)," +
                        "lag(partkey, 1) OVER(PARTITION BY type ORDER BY name)" +
                        "FROM part " +
                        "ORDER BY 1, 2 " +
                        "LIMIT 10",
                "VALUES " +
                        "(1, 315), " +
                        "(1, 881), " +
                        "(1, 1009), " +
                        "(3, 1087), " +
                        "(3, 1187), " +
                        "(3, 1529), " +
                        "(4, 969), " +
                        "(5, 151), " +
                        "(5, 505), " +
                        "(5, 872)");
    }
    @Test
    public void testDependentWindows()
    {
        // For such query as below generated plan has two adjacent window nodes where second depends on output of first.
        String sql = "WITH " +
                "t1 AS (" +
                "SELECT extendedprice FROM lineitem ORDER BY orderkey, partkey LIMIT 2)," +
                "t2 AS (" +
                "SELECT extendedprice, sum(extendedprice) OVER() AS x FROM t1)," +
                "t3 AS (" +
                "SELECT max(x) OVER() FROM t2) " +
                "SELECT * FROM t3";
        assertQuery(sql, "VALUES 59645.36, 59645.36");
    }
    // count() with an empty argument list used as a window function.
    @Test
    public void testWindowFunctionWithoutParameters()
    {
        MaterializedResult actual = computeActual("SELECT count() over(partition by custkey) FROM orders WHERE custkey < 3 ORDER BY custkey");
        MaterializedResult expected = resultBuilder(getSession(), BIGINT)
                .row(9L)
                .row(9L)
                .row(9L)
                .row(9L)
                .row(9L)
                .row(9L)
                .row(9L)
                .row(9L)
                .row(9L)
                .row(10L)
                .row(10L)
                .row(10L)
                .row(10L)
                .row(10L)
                .row(10L)
                .row(10L)
                .row(10L)
                .row(10L)
                .row(10L)
                .build();
        assertEquals(actual, expected);
    }
    @Test
    public void testWindowFunctionWithImplicitCoercion()
    {
        assertQuery("SELECT *, 1.0 * sum(x) OVER () FROM (VALUES 1) t(x)", "SELECT 1, 1.0");
    }
    @SuppressWarnings("PointlessArithmeticExpression")
    @Test
    public void testWindowFunctionsExpressions()
    {
        assertQueryOrdered(
                "SELECT orderkey, orderstatus " +
                        ", row_number() OVER (ORDER BY orderkey * 2) * " +
                        "  row_number() OVER (ORDER BY orderkey DESC) + 100 " +
                        "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) x " +
                        "ORDER BY orderkey LIMIT 5",
                "VALUES " +
                        "(1, 'O', 110), " +
                        "(2, 'O', 118), " +
                        "(3, 'F', 124), " +
                        "(4, 'O', 128), " +
                        "(5, 'F', 130)");
    }
    // Window function applied on top of an aggregation subquery.
    @Test
    public void testWindowFunctionsFromAggregate()
    {
        MaterializedResult actual = computeActual("" +
                "SELECT * FROM (\n" +
                "  SELECT orderstatus, clerk, sales\n" +
                "  , rank() OVER (PARTITION BY x.orderstatus ORDER BY sales DESC) rnk\n" +
                "  FROM (\n" +
                "    SELECT orderstatus, clerk, sum(totalprice) sales\n" +
                "    FROM orders\n" +
                "    GROUP BY orderstatus, clerk\n" +
                "   ) x\n" +
                ") x\n" +
                "WHERE rnk <= 2\n" +
                "ORDER BY orderstatus, rnk");
        MaterializedResult expected = resultBuilder(getSession(), VARCHAR, VARCHAR, DOUBLE, BIGINT)
                .row("F", "Clerk#000000090", 2784836.61, 1L)
                .row("F", "Clerk#000000084", 2674447.15, 2L)
                .row("O", "Clerk#000000500", 2569878.29, 1L)
                .row("O", "Clerk#000000050", 2500162.92, 2L)
                .row("P", "Clerk#000000071", 841820.99, 1L)
                .row("P", "Clerk#000001000", 643679.49, 2L)
                .build();
        assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows());
    }
    @Test
    public void testOrderByWindowFunction()
    {
        assertQueryOrdered(
                "SELECT orderkey, row_number() OVER (ORDER BY orderkey) " +
                        "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) " +
                        "ORDER BY 2 DESC " +
                        "LIMIT 5",
                "VALUES (34, 10), " +
                        "(33, 9), " +
                        "(32, 8), " +
                        "(7, 7), " +
                        "(6, 6)");
    }
    // Identical window functions coerced in two different expression contexts.
    @Test
    public void testSameWindowFunctionsTwoCoerces()
    {
        MaterializedResult actual = computeActual("" +
                "SELECT 12.0E0 * row_number() OVER ()/row_number() OVER(),\n" +
                "row_number() OVER()\n" +
                "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10)\n" +
                "ORDER BY 2 DESC\n" +
                "LIMIT 5");
        MaterializedResult expected = resultBuilder(getSession(), DOUBLE, BIGINT)
                .row(12.0, 10L)
                .row(12.0, 9L)
                .row(12.0, 8L)
                .row(12.0, 7L)
                .row(12.0, 6L)
                .build();
        assertEquals(actual, expected);
        actual = computeActual("" +
                "SELECT (MAX(x.a) OVER () - x.a) * 100.0E0 / MAX(x.a) OVER ()\n" +
                "FROM (VALUES 1, 2, 3, 4) x(a)");
        expected = resultBuilder(getSession(), DOUBLE)
                .row(75.0)
                .row(50.0)
                .row(25.0)
                .row(0.0)
                .build();
        assertEquals(actual, expected);
    }
    // map_agg as a window function; row order within the result is not
    // deterministic, hence the order-insensitive comparison.
    @Test
    public void testWindowMapAgg()
    {
        MaterializedResult actual = computeActual("" +
                "SELECT map_agg(orderkey, orderpriority) OVER(PARTITION BY orderstatus) FROM\n" +
                "(SELECT * FROM orders ORDER BY orderkey LIMIT 5) t");
        MaterializedResult expected = resultBuilder(getSession(), mapType(BIGINT, VarcharType.createVarcharType(1)))
                .row(ImmutableMap.of(1L, "5-LOW", 2L, "1-URGENT", 4L, "5-LOW"))
                .row(ImmutableMap.of(1L, "5-LOW", 2L, "1-URGENT", 4L, "5-LOW"))
                .row(ImmutableMap.of(1L, "5-LOW", 2L, "1-URGENT", 4L, "5-LOW"))
                .row(ImmutableMap.of(3L, "5-LOW", 5L, "5-LOW"))
                .row(ImmutableMap.of(3L, "5-LOW", 5L, "5-LOW"))
                .build();
        assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows());
    }
    // Several windows over an input that is itself produced by a window
    // function, exercising property derivation across window nodes.
    @Test
    public void testWindowPropertyDerivation()
    {
        assertQuery(
                "SELECT orderstatus, orderkey, " +
                        "SUM(s) OVER (PARTITION BY orderstatus), " +
                        "SUM(s) OVER (PARTITION BY orderstatus, orderkey), " +
                        "SUM(s) OVER (PARTITION BY orderstatus ORDER BY orderkey), " +
                        "SUM(s) OVER (ORDER BY orderstatus, orderkey) " +
                        "FROM ( " +
                        "   SELECT orderkey, orderstatus, SUM(orderkey) OVER (ORDER BY orderstatus, orderkey) s " +
                        "   FROM ( " +
                        "       SELECT * FROM orders ORDER BY orderkey LIMIT 10 " +
                        "   ) " +
                        ")",
                "VALUES " +
                        "('F', 3, 72, 3, 3, 3), " +
                        "('F', 5, 72, 8, 11, 11), " +
                        "('F', 6, 72, 14, 25, 25), " +
                        "('F', 33, 72, 47, 72, 72), " +
                        "('O', 1, 433, 48, 48, 120), " +
                        "('O', 2, 433, 50, 98, 170), " +
                        "('O', 4, 433, 54, 152, 224), " +
                        "('O', 7, 433, 61, 213, 285), " +
                        "('O', 32, 433, 93, 306, 378), " +
                        "('O', 34, 433, 127, 433, 505)");
    }
    @Test
    public void testWindowFunctionWithGroupBy()
    {
        MaterializedResult actual = computeActual("" +
                "SELECT *, rank() OVER (PARTITION BY x)\n" +
                "FROM (SELECT 'foo' x)\n" +
                "GROUP BY 1");
        MaterializedResult expected = resultBuilder(getSession(), createVarcharType(3), BIGINT)
                .row("foo", 1L)
                .build();
        assertEquals(actual, expected);
    }
    @Test
    public void testPartialPrePartitionedWindowFunction()
    {
        assertQueryOrdered("" +
                        "SELECT orderkey, COUNT(*) OVER (PARTITION BY orderkey, custkey) " +
                        "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) " +
                        "ORDER BY orderkey LIMIT 5",
                "VALUES (1, 1), " +
                        "(2, 1), " +
                        "(3, 1), " +
                        "(4, 1), " +
                        "(5, 1)");
    }
    @Test
    public void testFullPrePartitionedWindowFunction()
    {
        assertQueryOrdered(
                "SELECT orderkey, COUNT(*) OVER (PARTITION BY orderkey) " +
                        "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) " +
                        "ORDER BY orderkey LIMIT 5",
                "VALUES (1, 1), (2, 1), (3, 1), (4, 1), (5, 1)");
    }
    @Test
    public void testPartialPreSortedWindowFunction()
    {
        assertQueryOrdered(
                "SELECT orderkey, COUNT(*) OVER (ORDER BY orderkey, custkey) " +
                        "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) " +
                        "ORDER BY orderkey LIMIT 5",
                "VALUES (1, 1), " +
                        "(2, 2), " +
                        "(3, 3), " +
                        "(4, 4), " +
                        "(5, 5)");
    }
    @Test
    public void testFullPreSortedWindowFunction()
    {
        assertQueryOrdered(
                "SELECT orderkey, COUNT(*) OVER (ORDER BY orderkey) " +
                        "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) " +
                        "ORDER BY orderkey LIMIT 5",
                "VALUES (1, 1), (2, 2), (3, 3), (4, 4), (5, 5)");
    }
    @Test
    public void testFullyPartitionedAndPartiallySortedWindowFunction()
    {
        assertQueryOrdered(
                "SELECT orderkey, custkey, orderPriority, COUNT(*) OVER (PARTITION BY orderkey ORDER BY custkey, orderPriority) " +
                        "FROM (SELECT * FROM orders ORDER BY orderkey, custkey LIMIT 10) " +
                        "ORDER BY orderkey LIMIT 5",
                "VALUES (1, 370, '5-LOW', 1), " +
                        "(2, 781, '1-URGENT', 1), " +
                        "(3, 1234, '5-LOW', 1), " +
                        "(4, 1369, '5-LOW', 1), " +
                        "(5, 445, '5-LOW', 1)");
    }
    @Test
    public void testFullyPartitionedAndFullySortedWindowFunction()
    {
        assertQueryOrdered(
                "SELECT orderkey, custkey, COUNT(*) OVER (PARTITION BY orderkey ORDER BY custkey) " +
                        "FROM (SELECT * FROM orders ORDER BY orderkey, custkey LIMIT 10) " +
                        "ORDER BY orderkey LIMIT 5",
                "VALUES (1, 370, 1), " +
                        "(2, 781, 1), " +
                        "(3, 1234, 1), " +
                        "(4, 1369, 1), " +
                        "(5, 445, 1)");
    }
    @Test
    public void testOrderByWindowFunctionWithNulls()
    {
        // Nulls first
        assertQueryOrdered(
                "SELECT orderkey, row_number() OVER (ORDER BY nullif(orderkey, 3) NULLS FIRST) " +
                        "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) " +
                        "ORDER BY 2 ASC " +
                        "LIMIT 5",
                "VALUES (3, 1), " +
                        "(1, 2), " +
                        "(2, 3), " +
                        "(4, 4)," +
                        "(5, 5)");
        // Nulls last
        String nullsLastExpected = "VALUES (3, 10), " +
                "(34, 9), " +
                "(33, 8), " +
                "(32, 7), " +
                "(7, 6)";
        assertQueryOrdered(
                "SELECT orderkey, row_number() OVER (ORDER BY nullif(orderkey, 3) NULLS LAST) " +
                        "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) " +
                        "ORDER BY 2 DESC " +
                        "LIMIT 5",
                nullsLastExpected);
        // and nulls last should be the default
        assertQueryOrdered(
                "SELECT orderkey, row_number() OVER (ORDER BY nullif(orderkey, 3)) " +
                        "FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) " +
                        "ORDER BY 2 DESC " +
                        "LIMIT 5",
                nullsLastExpected);
    }
    // first_value/nth_value with an explicit ROWS frame.
    @Test
    public void testValueWindowFunctions()
    {
        assertQueryOrdered(
                "SELECT * FROM ( " +
                        "  SELECT orderkey, orderstatus " +
                        "    , first_value(orderkey + 1000) OVER (PARTITION BY orderstatus ORDER BY orderkey) fvalue " +
                        "    , nth_value(orderkey + 1000, 2) OVER (PARTITION BY orderstatus ORDER BY orderkey " +
                        "        ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) nvalue " +
                        "  FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) x " +
                        "  ) x " +
                        "ORDER BY orderkey LIMIT 5",
                "VALUES " +
                        "(1, 'O', 1001, 1002), " +
                        "(2, 'O', 1001, 1002), " +
                        "(3, 'F', 1003, 1005), " +
                        "(4, 'O', 1001, 1002), " +
                        "(5, 'F', 1003, 1005)");
    }
    // Frame bounds that are themselves expressions over input columns.
    @Test
    public void testWindowFrames()
    {
        MaterializedResult actual = computeActual("SELECT * FROM (\n" +
                "  SELECT orderkey, orderstatus\n" +
                "    , sum(orderkey + 1000) OVER (PARTITION BY orderstatus ORDER BY orderkey\n" +
                "        ROWS BETWEEN mod(custkey, 2) PRECEDING AND custkey / 500 FOLLOWING)\n" +
                "  FROM (SELECT * FROM orders ORDER BY orderkey LIMIT 10) x\n" +
                "  ) x\n" +
                "ORDER BY orderkey LIMIT 5");
        MaterializedResult expected = resultBuilder(getSession(), BIGINT, VARCHAR, BIGINT)
                .row(1L, "O", 1001L)
                .row(2L, "O", 3007L)
                .row(3L, "F", 3014L)
                .row(4L, "O", 4045L)
                .row(5L, "F", 2008L)
                .build();
        assertEquals(actual.getMaterializedRows(), expected.getMaterializedRows());
    }
    @Test
    public void testWindowNoChannels()
    {
        MaterializedResult actual = computeActual("SELECT rank() OVER ()\n" +
                "FROM (SELECT * FROM orders LIMIT 10)\n" +
                "LIMIT 3");
        MaterializedResult expected = resultBuilder(getSession(), BIGINT)
                .row(1L)
                .row(1L)
                .row(1L)
                .build();
        assertEquals(actual, expected);
    }
    @Test
    public void testDuplicateColumnsInWindowOrderByClause()
    {
        MaterializedResult actual = computeActual("SELECT a, row_number() OVER (ORDER BY a ASC, a DESC) FROM (VALUES 3, 2, 1) t(a)");
        MaterializedResult expected = resultBuilder(getSession(), BIGINT, BIGINT)
                .row(1, 1L)
                .row(2, 2L)
                .row(3, 3L)
                .build();
        assertEqualsIgnoreOrder(actual, expected);
    }
}
| |
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.config.annotation.web.configurers;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationListener;
import org.springframework.context.event.GenericApplicationListenerAdapter;
import org.springframework.context.event.SmartApplicationListener;
import org.springframework.security.authentication.AuthenticationTrustResolver;
import org.springframework.security.config.Customizer;
import org.springframework.security.config.annotation.web.HttpSecurityBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.context.DelegatingApplicationListener;
import org.springframework.security.core.session.SessionRegistry;
import org.springframework.security.core.session.SessionRegistryImpl;
import org.springframework.security.web.authentication.AuthenticationFailureHandler;
import org.springframework.security.web.authentication.SimpleUrlAuthenticationFailureHandler;
import org.springframework.security.web.authentication.logout.LogoutHandler;
import org.springframework.security.web.authentication.session.ChangeSessionIdAuthenticationStrategy;
import org.springframework.security.web.authentication.session.CompositeSessionAuthenticationStrategy;
import org.springframework.security.web.authentication.session.ConcurrentSessionControlAuthenticationStrategy;
import org.springframework.security.web.authentication.session.NullAuthenticatedSessionStrategy;
import org.springframework.security.web.authentication.session.RegisterSessionAuthenticationStrategy;
import org.springframework.security.web.authentication.session.SessionAuthenticationStrategy;
import org.springframework.security.web.authentication.session.SessionFixationProtectionStrategy;
import org.springframework.security.web.context.HttpSessionSecurityContextRepository;
import org.springframework.security.web.context.NullSecurityContextRepository;
import org.springframework.security.web.context.SecurityContextRepository;
import org.springframework.security.web.savedrequest.NullRequestCache;
import org.springframework.security.web.savedrequest.RequestCache;
import org.springframework.security.web.session.ConcurrentSessionFilter;
import org.springframework.security.web.session.InvalidSessionStrategy;
import org.springframework.security.web.session.SessionInformationExpiredStrategy;
import org.springframework.security.web.session.SessionManagementFilter;
import org.springframework.security.web.session.SimpleRedirectInvalidSessionStrategy;
import org.springframework.security.web.session.SimpleRedirectSessionInformationExpiredStrategy;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
/**
* Allows configuring session management.
*
* <h2>Security Filters</h2>
*
* The following Filters are populated
*
* <ul>
* <li>{@link SessionManagementFilter}</li>
* <li>{@link ConcurrentSessionFilter} if there are restrictions on how many concurrent
* sessions a user can have</li>
* </ul>
*
* <h2>Shared Objects Created</h2>
*
* The following shared objects are created:
*
* <ul>
* <li>{@link RequestCache}</li>
* <li>{@link SecurityContextRepository}</li>
* <li>{@link SessionManagementConfigurer}</li>
* <li>{@link InvalidSessionStrategy}</li>
* </ul>
*
* <h2>Shared Objects Used</h2>
*
* <ul>
* <li>{@link SecurityContextRepository}</li>
* <li>{@link AuthenticationTrustResolver} is optionally used to populate the
* {@link HttpSessionSecurityContextRepository} and {@link SessionManagementFilter}</li>
* </ul>
*
* @author Rob Winch
* @author Onur Kagan Ozcan
* @since 3.2
* @see SessionManagementFilter
* @see ConcurrentSessionFilter
*/
public final class SessionManagementConfigurer<H extends HttpSecurityBuilder<H>>
extends AbstractHttpConfigurer<SessionManagementConfigurer<H>, H> {
	// Fallback session-fixation protection used when no explicit strategy is provided.
	private final SessionAuthenticationStrategy DEFAULT_SESSION_FIXATION_STRATEGY = createDefaultSessionFixationProtectionStrategy();
	// Strategy applied for session fixation protection; defaults to the one above.
	private SessionAuthenticationStrategy sessionFixationAuthenticationStrategy = this.DEFAULT_SESSION_FIXATION_STRATEGY;
	// Effective (possibly composite) strategy; presumably resolved lazily by this
	// configurer — TODO confirm against the resolution logic outside this view.
	private SessionAuthenticationStrategy sessionAuthenticationStrategy;
	// Strategy explicitly supplied by the user, if any.
	private SessionAuthenticationStrategy providedSessionAuthenticationStrategy;
	// Invoked when an invalid session id is submitted (see invalidSessionUrl/-Strategy).
	private InvalidSessionStrategy invalidSessionStrategy;
	// Invoked when a concurrent session has expired.
	private SessionInformationExpiredStrategy expiredSessionStrategy;
	// Additional strategies contributed via configuration.
	private List<SessionAuthenticationStrategy> sessionAuthenticationStrategies = new ArrayList<>();
	private SessionRegistry sessionRegistry;
	// Maximum concurrent sessions; null presumably means concurrency control is
	// not configured — TODO confirm against the concurrency-control wiring.
	private Integer maximumSessions;
	private String expiredUrl;
	private boolean maxSessionsPreventsLogin;
	private SessionCreationPolicy sessionPolicy;
	private boolean enableSessionUrlRewriting;
	private String invalidSessionUrl;
	private String sessionAuthenticationErrorUrl;
	private AuthenticationFailureHandler sessionAuthenticationFailureHandler;
	/**
	 * Creates a new instance
	 * @see HttpSecurity#sessionManagement()
	 */
	public SessionManagementConfigurer() {
	}
	/**
	 * Setting this attribute will inject the {@link SessionManagementFilter} with a
	 * {@link SimpleRedirectInvalidSessionStrategy} configured with the attribute value.
	 * When an invalid session ID is submitted, the strategy will be invoked, redirecting
	 * to the configured URL.
	 * @param invalidSessionUrl the URL to redirect to when an invalid session is detected
	 * @return the {@link SessionManagementConfigurer} for further customization
	 * @see #invalidSessionStrategy(InvalidSessionStrategy)
	 */
	public SessionManagementConfigurer<H> invalidSessionUrl(String invalidSessionUrl) {
		this.invalidSessionUrl = invalidSessionUrl;
		return this;
	}
/**
* Setting this attribute will inject the provided invalidSessionStrategy into the
* {@link SessionManagementFilter}. When an invalid session ID is submitted, the
* strategy will be invoked, redirecting to the configured URL.
* @param invalidSessionStrategy the strategy to use when an invalid session ID is
* submitted.
* @return the {@link SessionManagementConfigurer} for further customization
*/
public SessionManagementConfigurer<H> invalidSessionStrategy(InvalidSessionStrategy invalidSessionStrategy) {
Assert.notNull(invalidSessionStrategy, "invalidSessionStrategy");
this.invalidSessionStrategy = invalidSessionStrategy;
return this;
}
	/**
	 * Defines the URL of the error page which should be shown when the
	 * SessionAuthenticationStrategy raises an exception. If not set, an unauthorized
	 * (401) error code will be returned to the client. Note that this attribute doesn't
	 * apply if the error occurs during a form-based login, where the URL for
	 * authentication failure will take precedence.
	 * @param sessionAuthenticationErrorUrl the URL to redirect to
	 * @return the {@link SessionManagementConfigurer} for further customization
	 */
	public SessionManagementConfigurer<H> sessionAuthenticationErrorUrl(String sessionAuthenticationErrorUrl) {
		this.sessionAuthenticationErrorUrl = sessionAuthenticationErrorUrl;
		return this;
	}
	/**
	 * Defines the {@code AuthenticationFailureHandler} which will be used when the
	 * SessionAuthenticationStrategy raises an exception. If not set, an unauthorized
	 * (401) error code will be returned to the client. Note that this attribute doesn't
	 * apply if the error occurs during a form-based login, where the URL for
	 * authentication failure will take precedence.
	 * @param sessionAuthenticationFailureHandler the handler to use
	 * @return the {@link SessionManagementConfigurer} for further customization
	 */
	public SessionManagementConfigurer<H> sessionAuthenticationFailureHandler(
			AuthenticationFailureHandler sessionAuthenticationFailureHandler) {
		this.sessionAuthenticationFailureHandler = sessionAuthenticationFailureHandler;
		return this;
	}
/**
* If set to true, allows HTTP sessions to be rewritten in the URLs when using
* {@link HttpServletResponse#encodeRedirectURL(String)} or
* {@link HttpServletResponse#encodeURL(String)}, otherwise disallows HTTP sessions to
* be included in the URL. This prevents leaking information to external domains.
* @param enableSessionUrlRewriting true if should allow the JSESSIONID to be
* rewritten into the URLs, else false (default)
* @return the {@link SessionManagementConfigurer} for further customization
* @see HttpSessionSecurityContextRepository#setDisableUrlRewriting(boolean)
*/
public SessionManagementConfigurer<H> enableSessionUrlRewriting(boolean enableSessionUrlRewriting) {
this.enableSessionUrlRewriting = enableSessionUrlRewriting;
return this;
}
/**
* Allows specifying the {@link SessionCreationPolicy}
* @param sessionCreationPolicy the {@link SessionCreationPolicy} to use. Cannot be
* null.
* @return the {@link SessionManagementConfigurer} for further customizations
* @throws IllegalArgumentException if {@link SessionCreationPolicy} is null.
* @see SessionCreationPolicy
*/
public SessionManagementConfigurer<H> sessionCreationPolicy(SessionCreationPolicy sessionCreationPolicy) {
Assert.notNull(sessionCreationPolicy, "sessionCreationPolicy cannot be null");
this.sessionPolicy = sessionCreationPolicy;
return this;
}
/**
* Allows explicitly specifying the {@link SessionAuthenticationStrategy}. The default
* is to use {@link ChangeSessionIdAuthenticationStrategy}. If restricting the maximum
* number of sessions is configured, then a
* {@link CompositeSessionAuthenticationStrategy} delegating to
* {@link ConcurrentSessionControlAuthenticationStrategy}, the default OR supplied
* {@code SessionAuthenticationStrategy} and
* {@link RegisterSessionAuthenticationStrategy} is used.
*
* <p>
* NOTE: Supplying a custom {@link SessionAuthenticationStrategy} will override the
* default session fixation strategy.
* @param sessionAuthenticationStrategy the strategy to use; it is stored and later
* wired into the composite built by getSessionAuthenticationStrategy(..)
* @return the {@link SessionManagementConfigurer} for further customizations
*/
public SessionManagementConfigurer<H> sessionAuthenticationStrategy(
SessionAuthenticationStrategy sessionAuthenticationStrategy) {
this.providedSessionAuthenticationStrategy = sessionAuthenticationStrategy;
return this;
}
/**
* Adds an additional {@link SessionAuthenticationStrategy} to be used within the
* {@link CompositeSessionAuthenticationStrategy}.
* @param sessionAuthenticationStrategy the additional strategy to append to the
* composite's delegate list
* @return the {@link SessionManagementConfigurer} for further customizations
*/
SessionManagementConfigurer<H> addSessionAuthenticationStrategy(
SessionAuthenticationStrategy sessionAuthenticationStrategy) {
this.sessionAuthenticationStrategies.add(sessionAuthenticationStrategy);
return this;
}
/**
* Allows changing the default {@link SessionFixationProtectionStrategy}.
* @return the {@link SessionFixationConfigurer} for further customizations
*/
public SessionFixationConfigurer sessionFixation() {
return new SessionFixationConfigurer();
}
/**
* Allows configuring session fixation protection.
* @param sessionFixationCustomizer the {@link Customizer} to provide more options for
* the {@link SessionFixationConfigurer}
* @return the {@link SessionManagementConfigurer} for further customizations
*/
public SessionManagementConfigurer<H> sessionFixation(
Customizer<SessionFixationConfigurer> sessionFixationCustomizer) {
sessionFixationCustomizer.customize(new SessionFixationConfigurer());
return this;
}
/**
* Controls the maximum number of sessions for a user. The default is to allow any
* number of sessions.
* @param maximumSessions the maximum number of sessions for a user
* @return the {@link ConcurrencyControlConfigurer} for further customizations
*/
public ConcurrencyControlConfigurer maximumSessions(int maximumSessions) {
this.maximumSessions = maximumSessions;
return new ConcurrencyControlConfigurer();
}
/**
* Controls the maximum number of sessions for a user. The default is to allow any
* number of sessions.
* @param sessionConcurrencyCustomizer the {@link Customizer} to provide more options
* for the {@link ConcurrencyControlConfigurer}
* @return the {@link SessionManagementConfigurer} for further customizations
*/
public SessionManagementConfigurer<H> sessionConcurrency(
Customizer<ConcurrencyControlConfigurer> sessionConcurrencyCustomizer) {
sessionConcurrencyCustomizer.customize(new ConcurrencyControlConfigurer());
return this;
}
/**
* Invokes {@link #postProcess(Object)} and sets the
* {@link SessionAuthenticationStrategy} for session fixation.
* @param sessionFixationAuthenticationStrategy the session fixation strategy to
* post-process and store
*/
private void setSessionFixationAuthenticationStrategy(
SessionAuthenticationStrategy sessionFixationAuthenticationStrategy) {
this.sessionFixationAuthenticationStrategy = postProcess(sessionFixationAuthenticationStrategy);
}
/**
* Populates the shared objects used by session management: a
* {@link SecurityContextRepository} (only when none was shared yet), a null
* {@link RequestCache} for the stateless case, plus the composite
* {@link SessionAuthenticationStrategy} and {@link InvalidSessionStrategy}.
*/
@Override
public void init(H http) {
SecurityContextRepository securityContextRepository = http.getSharedObject(SecurityContextRepository.class);
boolean stateless = isStateless();
// Only install a repository if nothing else configured one already.
if (securityContextRepository == null) {
if (stateless) {
// STATELESS: never persist the SecurityContext between requests.
http.setSharedObject(SecurityContextRepository.class, new NullSecurityContextRepository());
}
else {
HttpSessionSecurityContextRepository httpSecurityRepository = new HttpSessionSecurityContextRepository();
// URL rewriting is disabled unless explicitly enabled via enableSessionUrlRewriting(true).
httpSecurityRepository.setDisableUrlRewriting(!this.enableSessionUrlRewriting);
httpSecurityRepository.setAllowSessionCreation(isAllowSessionCreation());
AuthenticationTrustResolver trustResolver = http.getSharedObject(AuthenticationTrustResolver.class);
if (trustResolver != null) {
httpSecurityRepository.setTrustResolver(trustResolver);
}
http.setSharedObject(SecurityContextRepository.class, httpSecurityRepository);
}
}
RequestCache requestCache = http.getSharedObject(RequestCache.class);
if (requestCache == null) {
if (stateless) {
// No session means nowhere to cache requests; disable the cache explicitly.
http.setSharedObject(RequestCache.class, new NullRequestCache());
}
}
http.setSharedObject(SessionAuthenticationStrategy.class, getSessionAuthenticationStrategy(http));
http.setSharedObject(InvalidSessionStrategy.class, getInvalidSessionStrategy());
}
/**
* Builds and registers the {@link SessionManagementFilter} and, when a maximum
* session count was configured, the {@link ConcurrentSessionFilter}.
*/
@Override
public void configure(H http) {
SecurityContextRepository securityContextRepository = http.getSharedObject(SecurityContextRepository.class);
SessionManagementFilter sessionManagementFilter = new SessionManagementFilter(securityContextRepository,
getSessionAuthenticationStrategy(http));
if (this.sessionAuthenticationErrorUrl != null) {
sessionManagementFilter.setAuthenticationFailureHandler(
new SimpleUrlAuthenticationFailureHandler(this.sessionAuthenticationErrorUrl));
}
InvalidSessionStrategy strategy = getInvalidSessionStrategy();
if (strategy != null) {
sessionManagementFilter.setInvalidSessionStrategy(strategy);
}
// NOTE: when sessionAuthenticationErrorUrl is set and no explicit handler was
// provided, getSessionAuthenticationFailureHandler() lazily creates an equivalent
// SimpleUrlAuthenticationFailureHandler, so this overwrites the handler set above
// with an equivalent one; an explicit handler always wins.
AuthenticationFailureHandler failureHandler = getSessionAuthenticationFailureHandler();
if (failureHandler != null) {
sessionManagementFilter.setAuthenticationFailureHandler(failureHandler);
}
AuthenticationTrustResolver trustResolver = http.getSharedObject(AuthenticationTrustResolver.class);
if (trustResolver != null) {
sessionManagementFilter.setTrustResolver(trustResolver);
}
sessionManagementFilter = postProcess(sessionManagementFilter);
http.addFilter(sessionManagementFilter);
if (isConcurrentSessionControlEnabled()) {
ConcurrentSessionFilter concurrentSessionFilter = createConcurrencyFilter(http);
concurrentSessionFilter = postProcess(concurrentSessionFilter);
http.addFilter(concurrentSessionFilter);
}
}
/**
 * Builds the {@link ConcurrentSessionFilter}, wiring in the expired-session
 * strategy (when configured) and any logout handlers registered via the
 * {@link LogoutConfigurer}.
 * @param http the builder, used to look up the shared registry and configurers
 * @return the filter, not yet post-processed
 */
private ConcurrentSessionFilter createConcurrencyFilter(H http) {
    SessionRegistry registry = getSessionRegistry(http);
    SessionInformationExpiredStrategy expiredStrategy = getExpiredSessionStrategy();
    ConcurrentSessionFilter filter;
    if (expiredStrategy == null) {
        filter = new ConcurrentSessionFilter(registry);
    }
    else {
        filter = new ConcurrentSessionFilter(registry, expiredStrategy);
    }
    LogoutConfigurer<H> logoutConfigurer = http.getConfigurer(LogoutConfigurer.class);
    if (logoutConfigurer != null) {
        List<LogoutHandler> handlers = logoutConfigurer.getLogoutHandlers();
        if (handlers != null && !handlers.isEmpty()) {
            filter.setLogoutHandlers(handlers);
        }
    }
    return filter;
}
/**
 * Gets the {@link InvalidSessionStrategy} to use. If none was set explicitly and
 * an invalid-session URL was configured, lazily creates and caches a
 * {@link SimpleRedirectInvalidSessionStrategy} targeting that URL.
 * @return the {@link InvalidSessionStrategy} to use, or null when neither a
 * strategy nor a URL was configured
 */
InvalidSessionStrategy getInvalidSessionStrategy() {
    if (this.invalidSessionStrategy == null && this.invalidSessionUrl != null) {
        this.invalidSessionStrategy = new SimpleRedirectInvalidSessionStrategy(this.invalidSessionUrl);
    }
    return this.invalidSessionStrategy;
}
/**
 * Gets the {@link SessionInformationExpiredStrategy} to use. If none was set
 * explicitly and an expired URL was configured, lazily creates and caches a
 * {@link SimpleRedirectSessionInformationExpiredStrategy} targeting that URL.
 * @return the strategy, or null when neither a strategy nor a URL was configured
 */
SessionInformationExpiredStrategy getExpiredSessionStrategy() {
    if (this.expiredSessionStrategy == null && this.expiredUrl != null) {
        this.expiredSessionStrategy = new SimpleRedirectSessionInformationExpiredStrategy(this.expiredUrl);
    }
    return this.expiredSessionStrategy;
}
/**
 * Gets the {@link AuthenticationFailureHandler} used when the
 * SessionAuthenticationStrategy raises an exception. If none was set explicitly
 * and an error URL was configured, lazily creates and caches a
 * {@link SimpleUrlAuthenticationFailureHandler} targeting that URL.
 * @return the handler, or null when neither a handler nor a URL was configured
 */
AuthenticationFailureHandler getSessionAuthenticationFailureHandler() {
    if (this.sessionAuthenticationFailureHandler == null && this.sessionAuthenticationErrorUrl != null) {
        this.sessionAuthenticationFailureHandler = new SimpleUrlAuthenticationFailureHandler(
                this.sessionAuthenticationErrorUrl);
    }
    return this.sessionAuthenticationFailureHandler;
}
/**
 * Gets the {@link SessionCreationPolicy}. Can not be null.
 * Resolution order: explicit policy, then the builder's shared object, then
 * {@link SessionCreationPolicy#IF_REQUIRED}.
 * @return the {@link SessionCreationPolicy}, never null
 */
SessionCreationPolicy getSessionCreationPolicy() {
    if (this.sessionPolicy != null) {
        return this.sessionPolicy;
    }
    SessionCreationPolicy sharedPolicy = getBuilder().getSharedObject(SessionCreationPolicy.class);
    if (sharedPolicy == null) {
        return SessionCreationPolicy.IF_REQUIRED;
    }
    return sharedPolicy;
}
/**
 * Returns true if the {@link SessionCreationPolicy} allows session creation, else
 * false.
 * @return true for {@link SessionCreationPolicy#ALWAYS} or
 * {@link SessionCreationPolicy#IF_REQUIRED}
 */
private boolean isAllowSessionCreation() {
    SessionCreationPolicy policy = getSessionCreationPolicy();
    return policy == SessionCreationPolicy.ALWAYS || policy == SessionCreationPolicy.IF_REQUIRED;
}
/**
 * Returns true if the {@link SessionCreationPolicy} is
 * {@link SessionCreationPolicy#STATELESS}.
 * @return true when the resolved policy is STATELESS
 */
private boolean isStateless() {
    return getSessionCreationPolicy() == SessionCreationPolicy.STATELESS;
}
/**
* Gets the customized {@link SessionAuthenticationStrategy} if
* {@link #sessionAuthenticationStrategy(SessionAuthenticationStrategy)} was
* specified. Otherwise creates a default {@link SessionAuthenticationStrategy}.
* The result is cached so init(..) and configure(..) see the same instance.
* @param http the builder, used to look up the shared {@link SessionRegistry}
* @return the {@link SessionAuthenticationStrategy} to use
*/
private SessionAuthenticationStrategy getSessionAuthenticationStrategy(H http) {
// Already built on a previous call: return the cached composite.
if (this.sessionAuthenticationStrategy != null) {
return this.sessionAuthenticationStrategy;
}
// NOTE: aliases the sessionAuthenticationStrategies field; the adds below mutate it.
List<SessionAuthenticationStrategy> delegateStrategies = this.sessionAuthenticationStrategies;
SessionAuthenticationStrategy defaultSessionAuthenticationStrategy;
if (this.providedSessionAuthenticationStrategy == null) {
// If the user did not provide a SessionAuthenticationStrategy
// then default to sessionFixationAuthenticationStrategy
defaultSessionAuthenticationStrategy = postProcess(this.sessionFixationAuthenticationStrategy);
}
else {
defaultSessionAuthenticationStrategy = this.providedSessionAuthenticationStrategy;
}
if (isConcurrentSessionControlEnabled()) {
SessionRegistry sessionRegistry = getSessionRegistry(http);
ConcurrentSessionControlAuthenticationStrategy concurrentSessionControlStrategy = new ConcurrentSessionControlAuthenticationStrategy(
sessionRegistry);
concurrentSessionControlStrategy.setMaximumSessions(this.maximumSessions);
concurrentSessionControlStrategy.setExceptionIfMaximumExceeded(this.maxSessionsPreventsLogin);
concurrentSessionControlStrategy = postProcess(concurrentSessionControlStrategy);
RegisterSessionAuthenticationStrategy registerSessionStrategy = new RegisterSessionAuthenticationStrategy(
sessionRegistry);
registerSessionStrategy = postProcess(registerSessionStrategy);
// Order matters: the concurrency check must run before the session is registered.
delegateStrategies.addAll(Arrays.asList(concurrentSessionControlStrategy,
defaultSessionAuthenticationStrategy, registerSessionStrategy));
}
else {
delegateStrategies.add(defaultSessionAuthenticationStrategy);
}
this.sessionAuthenticationStrategy = postProcess(
new CompositeSessionAuthenticationStrategy(delegateStrategies));
return this.sessionAuthenticationStrategy;
}
/**
 * Resolves the {@link SessionRegistry} to use: the explicitly configured one, a
 * bean from the application context, or a new {@link SessionRegistryImpl}
 * registered as an application listener. The result is cached.
 * @param http the builder, needed to register the default registry as a listener
 * @return the registry, never null
 */
private SessionRegistry getSessionRegistry(H http) {
    if (this.sessionRegistry != null) {
        return this.sessionRegistry;
    }
    SessionRegistry registryBean = getBeanOrNull(SessionRegistry.class);
    if (registryBean != null) {
        this.sessionRegistry = registryBean;
        return this.sessionRegistry;
    }
    SessionRegistryImpl defaultRegistry = new SessionRegistryImpl();
    registerDelegateApplicationListener(http, defaultRegistry);
    this.sessionRegistry = defaultRegistry;
    return this.sessionRegistry;
}
/**
 * Registers the given listener with the context's
 * {@link DelegatingApplicationListener}, when one exists; otherwise does nothing.
 * @param http the builder (unused here beyond signature symmetry with callers)
 * @param delegate the listener to register
 */
private void registerDelegateApplicationListener(H http, ApplicationListener<?> delegate) {
    DelegatingApplicationListener delegating = getBeanOrNull(DelegatingApplicationListener.class);
    if (delegating != null) {
        delegating.addListener(new GenericApplicationListenerAdapter(delegate));
    }
}
/**
* Returns true if the number of concurrent sessions per user should be restricted.
* @return true when maximumSessions(int) was invoked (the field is non-null only
* after that call)
*/
private boolean isConcurrentSessionControlEnabled() {
return this.maximumSessions != null;
}
/**
* Creates the default {@link SessionAuthenticationStrategy} for session fixation
* @return the default {@link SessionAuthenticationStrategy} for session fixation
*/
private static SessionAuthenticationStrategy createDefaultSessionFixationProtectionStrategy() {
return new ChangeSessionIdAuthenticationStrategy();
}
/**
 * Looks up a bean of the given type from the shared {@link ApplicationContext}.
 * @param type the bean type to look up
 * @return the bean, or null when no context is shared or no such bean exists
 */
private <T> T getBeanOrNull(Class<T> type) {
    ApplicationContext context = getBuilder().getSharedObject(ApplicationContext.class);
    if (context != null) {
        try {
            return context.getBean(type);
        }
        catch (NoSuchBeanDefinitionException ex) {
            // Absence of the bean is an expected outcome; fall through to null.
        }
    }
    return null;
}
/**
* Allows configuring SessionFixation protection. Every option delegates to
* setSessionFixationAuthenticationStrategy(..), which applies postProcess(..) to
* the chosen strategy before storing it.
*
* @author Rob Winch
*/
public final class SessionFixationConfigurer {
/**
* Specifies that a new session should be created, but the session attributes from
* the original {@link HttpSession} should not be retained.
* @return the {@link SessionManagementConfigurer} for further customizations
*/
public SessionManagementConfigurer<H> newSession() {
SessionFixationProtectionStrategy sessionFixationProtectionStrategy = new SessionFixationProtectionStrategy();
sessionFixationProtectionStrategy.setMigrateSessionAttributes(false);
setSessionFixationAuthenticationStrategy(sessionFixationProtectionStrategy);
return SessionManagementConfigurer.this;
}
/**
* Specifies that a new session should be created and the session attributes from
* the original {@link HttpSession} should be retained.
* @return the {@link SessionManagementConfigurer} for further customizations
*/
public SessionManagementConfigurer<H> migrateSession() {
setSessionFixationAuthenticationStrategy(new SessionFixationProtectionStrategy());
return SessionManagementConfigurer.this;
}
/**
* Specifies that the Servlet container-provided session fixation protection
* should be used. When a session authenticates, the Servlet method
* {@code HttpServletRequest#changeSessionId()} is called to change the session ID
* and retain all session attributes.
* @return the {@link SessionManagementConfigurer} for further customizations
*/
public SessionManagementConfigurer<H> changeSessionId() {
setSessionFixationAuthenticationStrategy(new ChangeSessionIdAuthenticationStrategy());
return SessionManagementConfigurer.this;
}
/**
* Specifies that no session fixation protection should be enabled. This may be
* useful when utilizing other mechanisms for protecting against session fixation.
* For example, if application container session fixation protection is already in
* use. Otherwise, this option is not recommended.
* @return the {@link SessionManagementConfigurer} for further customizations
*/
public SessionManagementConfigurer<H> none() {
setSessionFixationAuthenticationStrategy(new NullAuthenticatedSessionStrategy());
return SessionManagementConfigurer.this;
}
}
/**
* Allows configuring controlling of multiple sessions. All options write through
* to the enclosing {@link SessionManagementConfigurer}'s state.
*
* @author Rob Winch
*/
public final class ConcurrencyControlConfigurer {
// Only the enclosing configurer creates instances (see maximumSessions(int)).
private ConcurrencyControlConfigurer() {
}
/**
* Controls the maximum number of sessions for a user. The default is to allow any
* number of sessions.
* @param maximumSessions the maximum number of sessions for a user
* @return the {@link ConcurrencyControlConfigurer} for further customizations
*/
public ConcurrencyControlConfigurer maximumSessions(int maximumSessions) {
SessionManagementConfigurer.this.maximumSessions = maximumSessions;
return this;
}
/**
* The URL to redirect to if a user tries to access a resource and their session
* has been expired due to too many sessions for the current user. The default is
* to write a simple error message to the response.
* @param expiredUrl the URL to redirect to
* @return the {@link ConcurrencyControlConfigurer} for further customizations
*/
public ConcurrencyControlConfigurer expiredUrl(String expiredUrl) {
SessionManagementConfigurer.this.expiredUrl = expiredUrl;
return this;
}
/**
* Determines the behaviour when an expired session is detected.
* @param expiredSessionStrategy the {@link SessionInformationExpiredStrategy} to
* use when an expired session is detected.
* @return the {@link ConcurrencyControlConfigurer} for further customizations
*/
public ConcurrencyControlConfigurer expiredSessionStrategy(
SessionInformationExpiredStrategy expiredSessionStrategy) {
SessionManagementConfigurer.this.expiredSessionStrategy = expiredSessionStrategy;
return this;
}
/**
* If true, prevents a user from authenticating when the
* {@link #maximumSessions(int)} has been reached. Otherwise (default), the user
* who authenticates is allowed access and an existing user's session is expired.
* The user whose session is forcibly expired is sent to
* {@link #expiredUrl(String)}. The advantage of this approach is if a user
* accidentally does not log out, there is no need for an administrator to
* intervene or wait till their session expires.
* @param maxSessionsPreventsLogin true to have an error at time of
* authentication, else false (default)
* @return the {@link ConcurrencyControlConfigurer} for further customizations
*/
public ConcurrencyControlConfigurer maxSessionsPreventsLogin(boolean maxSessionsPreventsLogin) {
SessionManagementConfigurer.this.maxSessionsPreventsLogin = maxSessionsPreventsLogin;
return this;
}
/**
* Controls the {@link SessionRegistry} implementation used. The default is
* {@link SessionRegistryImpl} which is an in memory implementation.
* @param sessionRegistry the {@link SessionRegistry} to use
* @return the {@link ConcurrencyControlConfigurer} for further customizations
*/
public ConcurrencyControlConfigurer sessionRegistry(SessionRegistry sessionRegistry) {
SessionManagementConfigurer.this.sessionRegistry = sessionRegistry;
return this;
}
/**
* Used to chain back to the {@link SessionManagementConfigurer}
* @return the {@link SessionManagementConfigurer} for further customizations
*/
public SessionManagementConfigurer<H> and() {
return SessionManagementConfigurer.this;
}
}
}
| |
package com.qozix.widgets;
import android.content.Context;
import android.hardware.SensorManager;
import android.util.FloatMath;
import android.view.ViewConfiguration;
import android.view.animation.AnimationUtils;
import android.view.animation.Interpolator;
/**
 * This class encapsulates scrolling. The duration of the scroll
 * can be passed in the constructor and specifies the maximum time that
 * the scrolling animation should take. Past this time, the scrolling is
 * automatically moved to its final stage and computeScrollOffset()
 * will always return false to indicate that scrolling is over.
 */
public class Scroller {

    // Animation modes: a time-interpolated scroll vs. a friction/spline-based fling.
    private static final int SCROLL_MODE = 0;
    private static final int FLING_MODE = 1;

    // Default duration (ms) used by startScroll(int, int, int, int).
    private static final int DEFAULT_DURATION = 250;

    // Constants of the fling deceleration model; see fling() for how they combine.
    private static final float DECELERATION_RATE = (float) (Math.log(0.75) / Math.log(0.9));
    private static final float ALPHA = 800; // pixels / seconds
    private static final float START_TENSION = 0.4f; // Tension at start: (0.4 * total T, 1.0 * Distance)
    private static final float END_TENSION = 1.0f - START_TENSION;

    // Distance spline used by FLING_MODE, sampled at NB_SAMPLES + 1 uniform times.
    private static final int NB_SAMPLES = 100;
    private static final float[] SPLINE = new float[NB_SAMPLES + 1];

    // Parameters of the viscousFluid() interpolator; set once in the static block.
    private static float sViscousFluidScale;
    private static float sViscousFluidNormalize;

    static {
        // For each sample time t, bisect for the Bezier parameter x whose time
        // coordinate equals t, then store the matching distance coordinate. The
        // sample times are increasing, so x_min carries over as the lower bound.
        float x_min = 0.0f;
        // i < NB_SAMPLES (not <=): the last sample is pinned to exactly 1.0f below,
        // so computing it here was wasted work in the original.
        for (int i = 0; i < NB_SAMPLES; i++) {
            final float t = (float) i / NB_SAMPLES;
            float x_max = 1.0f;
            float x, tx, coef;
            while (true) {
                x = x_min + (x_max - x_min) / 2.0f;
                coef = 3.0f * x * (1.0f - x);
                tx = coef * ((1.0f - x) * START_TENSION + x * END_TENSION) + x * x * x;
                if (Math.abs(tx - t) < 1E-5) break;
                if (tx > t) x_max = x;
                else x_min = x;
            }
            // Distance cubic with both inner control points at 1: 3x(1-x) + x^3.
            SPLINE[i] = coef + x * x * x;
        }
        SPLINE[NB_SAMPLES] = 1.0f;

        // This controls the viscous fluid effect (how much of it).
        sViscousFluidScale = 8.0f;
        // Must be 1.0 while computing the normalization factor, because
        // viscousFluid() itself multiplies by sViscousFluidNormalize.
        sViscousFluidNormalize = 1.0f;
        sViscousFluidNormalize = 1.0f / viscousFluid(1.0f);
    }

    private int mMode;

    private int mStartX;
    private int mStartY;
    private int mFinalX;
    private int mFinalY;

    // Clamping range for fling positions.
    private int mMinX;
    private int mMaxX;
    private int mMinY;
    private int mMaxY;

    private int mCurrX;
    private int mCurrY;
    private long mStartTime; // AnimationUtils animation clock, ms
    private int mDuration; // ms
    private float mDurationReciprocal;
    private float mDeltaX; // total scroll distance (SCROLL_MODE only)
    private float mDeltaY;
    private boolean mFinished;
    private Interpolator mInterpolator; // null => viscousFluid() is used

    // NOTE(review): never assigned anywhere in this class, so it is always false
    // and the flywheel branch in fling() is unreachable. Confirm whether a
    // flywheel-enabling constructor was dropped when copying from the framework.
    private boolean mFlywheel;

    private float mVelocity; // initial fling speed, px/s
    private float mDeceleration; // px/s^2, derived from friction
    private final float mPpi; // screen density expressed as pixels per inch

    /**
     * Create a Scroller with the default duration and interpolator.
     */
    public Scroller(Context context) {
        this(context, null);
    }

    /**
     * Create a Scroller with the given interpolator. A null interpolator selects
     * the built-in viscous-fluid interpolation.
     */
    public Scroller(Context context, Interpolator interpolator) {
        mFinished = true;
        mInterpolator = interpolator;
        // density is dpi / 160, so this recovers the dpi value.
        mPpi = context.getResources().getDisplayMetrics().density * 160.0f;
        mDeceleration = computeDeceleration(ViewConfiguration.getScrollFriction());
    }

    /**
     * The amount of friction applied to flings. The default value
     * is {@link ViewConfiguration#getScrollFriction}.
     *
     * @param friction A scalar dimension-less value representing the coefficient of
     *         friction.
     */
    public final void setFriction(float friction) {
        mDeceleration = computeDeceleration(friction);
    }

    // Converts a dimensionless friction coefficient into px/s^2.
    private float computeDeceleration(float friction) {
        return SensorManager.GRAVITY_EARTH // g (m/s^2)
                * 39.37f // inch/meter
                * mPpi // pixels per inch
                * friction;
    }

    /**
     * Returns whether the scroller has finished scrolling.
     *
     * @return True if the scroller has finished scrolling, false otherwise.
     */
    public final boolean isFinished() {
        return mFinished;
    }

    /**
     * Force the finished field to a particular value.
     *
     * @param finished The new finished value.
     */
    public final void forceFinished(boolean finished) {
        mFinished = finished;
    }

    /**
     * Returns how long the scroll event will take, in milliseconds.
     *
     * @return The duration of the scroll in milliseconds.
     */
    public final int getDuration() {
        return mDuration;
    }

    /**
     * Returns the current X offset in the scroll.
     *
     * @return The new X offset as an absolute distance from the origin.
     */
    public final int getCurrX() {
        return mCurrX;
    }

    /**
     * Returns the current Y offset in the scroll.
     *
     * @return The new Y offset as an absolute distance from the origin.
     */
    public final int getCurrY() {
        return mCurrY;
    }

    /**
     * Returns the current velocity.
     *
     * @return The original velocity less the deceleration. Result may be
     * negative. Only meaningful for flings; mVelocity is not set by startScroll().
     */
    public float getCurrVelocity() {
        // timePassed() is in ms; 2000 = 1000 (ms->s) * 2 (average over the ramp).
        return mVelocity - mDeceleration * timePassed() / 2000.0f;
    }

    /**
     * Returns the start X offset in the scroll.
     *
     * @return The start X offset as an absolute distance from the origin.
     */
    public final int getStartX() {
        return mStartX;
    }

    /**
     * Returns the start Y offset in the scroll.
     *
     * @return The start Y offset as an absolute distance from the origin.
     */
    public final int getStartY() {
        return mStartY;
    }

    /**
     * Returns where the scroll will end. Valid only for "fling" scrolls.
     *
     * @return The final X offset as an absolute distance from the origin.
     */
    public final int getFinalX() {
        return mFinalX;
    }

    /**
     * Returns where the scroll will end. Valid only for "fling" scrolls.
     *
     * @return The final Y offset as an absolute distance from the origin.
     */
    public final int getFinalY() {
        return mFinalY;
    }

    /**
     * Call this when you want to know the new location. If it returns true,
     * the animation is not yet finished. The current position is updated as a
     * side effect and can be read via getCurrX()/getCurrY().
     */
    public boolean computeScrollOffset() {
        if (mFinished) {
            return false;
        }
        int timePassed = (int) (AnimationUtils.currentAnimationTimeMillis() - mStartTime);
        if (timePassed < mDuration) {
            switch (mMode) {
            case SCROLL_MODE:
                float x = timePassed * mDurationReciprocal;
                if (mInterpolator == null)
                    x = viscousFluid(x);
                else
                    x = mInterpolator.getInterpolation(x);
                mCurrX = mStartX + Math.round(x * mDeltaX);
                mCurrY = mStartY + Math.round(x * mDeltaY);
                break;
            case FLING_MODE:
                // timePassed < mDuration guarantees t < 1, so index <= NB_SAMPLES - 1
                // and index + 1 stays inside SPLINE.
                final float t = (float) timePassed / mDuration;
                final int index = (int) (NB_SAMPLES * t);
                final float t_inf = (float) index / NB_SAMPLES;
                final float t_sup = (float) (index + 1) / NB_SAMPLES;
                final float d_inf = SPLINE[index];
                final float d_sup = SPLINE[index + 1];
                // Linear interpolation between the two nearest spline samples.
                final float distanceCoef = d_inf + (t - t_inf) / (t_sup - t_inf) * (d_sup - d_inf);
                mCurrX = mStartX + Math.round(distanceCoef * (mFinalX - mStartX));
                // Pin to mMinX <= mCurrX <= mMaxX
                mCurrX = Math.min(mCurrX, mMaxX);
                mCurrX = Math.max(mCurrX, mMinX);
                mCurrY = mStartY + Math.round(distanceCoef * (mFinalY - mStartY));
                // Pin to mMinY <= mCurrY <= mMaxY
                mCurrY = Math.min(mCurrY, mMaxY);
                mCurrY = Math.max(mCurrY, mMinY);
                if (mCurrX == mFinalX && mCurrY == mFinalY) {
                    mFinished = true;
                }
                break;
            }
        }
        else {
            mCurrX = mFinalX;
            mCurrY = mFinalY;
            mFinished = true;
        }
        return true;
    }

    /**
     * Start scrolling by providing a starting point and the distance to travel.
     * The scroll will use the default value of 250 milliseconds for the
     * duration.
     *
     * @param startX Starting horizontal scroll offset in pixels. Positive
     *        numbers will scroll the content to the left.
     * @param startY Starting vertical scroll offset in pixels. Positive numbers
     *        will scroll the content up.
     * @param dx Horizontal distance to travel. Positive numbers will scroll the
     *        content to the left.
     * @param dy Vertical distance to travel. Positive numbers will scroll the
     *        content up.
     */
    public void startScroll(int startX, int startY, int dx, int dy) {
        startScroll(startX, startY, dx, dy, DEFAULT_DURATION);
    }

    /**
     * Start scrolling by providing a starting point and the distance to travel.
     *
     * @param startX Starting horizontal scroll offset in pixels. Positive
     *        numbers will scroll the content to the left.
     * @param startY Starting vertical scroll offset in pixels. Positive numbers
     *        will scroll the content up.
     * @param dx Horizontal distance to travel. Positive numbers will scroll the
     *        content to the left.
     * @param dy Vertical distance to travel. Positive numbers will scroll the
     *        content up.
     * @param duration Duration of the scroll in milliseconds.
     */
    public void startScroll(int startX, int startY, int dx, int dy, int duration) {
        mMode = SCROLL_MODE;
        mFinished = false;
        mDuration = duration;
        mStartTime = AnimationUtils.currentAnimationTimeMillis();
        mStartX = startX;
        mStartY = startY;
        mFinalX = startX + dx;
        mFinalY = startY + dy;
        mDeltaX = dx;
        mDeltaY = dy;
        mDurationReciprocal = 1.0f / (float) mDuration;
    }

    /**
     * Start scrolling based on a fling gesture. The distance travelled will
     * depend on the initial velocity of the fling.
     *
     * @param startX Starting point of the scroll (X)
     * @param startY Starting point of the scroll (Y)
     * @param velocityX Initial velocity of the fling (X) measured in pixels per
     *        second.
     * @param velocityY Initial velocity of the fling (Y) measured in pixels per
     *        second
     * @param minX Minimum X value. The scroller will not scroll past this point.
     * @param maxX Maximum X value. The scroller will not scroll past this point.
     * @param minY Minimum Y value. The scroller will not scroll past this point.
     * @param maxY Maximum Y value. The scroller will not scroll past this point.
     */
    public void fling(int startX, int startY, int velocityX, int velocityY,
            int minX, int maxX, int minY, int maxY) {
        // Continue a scroll or fling in progress (flywheel). Currently unreachable
        // because mFlywheel is never set — see the field comment.
        if (mFlywheel && !mFinished) {
            float oldVel = getCurrVelocity();
            float dx = (float) (mFinalX - mStartX);
            float dy = (float) (mFinalY - mStartY);
            // FloatMath was deprecated in API 22 and removed in API 23; Math.sqrt
            // is the documented replacement.
            float hyp = (float) Math.sqrt(dx * dx + dy * dy);
            float ndx = dx / hyp;
            float ndy = dy / hyp;
            float oldVelocityX = ndx * oldVel;
            float oldVelocityY = ndy * oldVel;
            if (Math.signum(velocityX) == Math.signum(oldVelocityX)
                    && Math.signum(velocityY) == Math.signum(oldVelocityY)) {
                velocityX += oldVelocityX;
                velocityY += oldVelocityY;
            }
        }
        mMode = FLING_MODE;
        mFinished = false;
        // Promote to float BEFORE squaring: velocityX * velocityX as int arithmetic
        // overflows for speeds above ~46340 px/s.
        float velocity = (float) Math.sqrt(
                (float) velocityX * velocityX + (float) velocityY * velocityY);
        mVelocity = velocity;
        // Solve the friction model for the time it takes the fling to stop.
        final double l = Math.log(START_TENSION * velocity / ALPHA);
        mDuration = (int) (1000.0 * Math.exp(l / (DECELERATION_RATE - 1.0)));
        mStartTime = AnimationUtils.currentAnimationTimeMillis();
        mStartX = startX;
        mStartY = startY;
        float coeffX = velocity == 0 ? 1.0f : velocityX / velocity;
        float coeffY = velocity == 0 ? 1.0f : velocityY / velocity;
        int totalDistance =
                (int) (ALPHA * Math.exp(DECELERATION_RATE / (DECELERATION_RATE - 1.0) * l));
        mMinX = minX;
        mMaxX = maxX;
        mMinY = minY;
        mMaxY = maxY;
        mFinalX = startX + Math.round(totalDistance * coeffX);
        // Pin to mMinX <= mFinalX <= mMaxX
        mFinalX = Math.min(mFinalX, mMaxX);
        mFinalX = Math.max(mFinalX, mMinX);
        mFinalY = startY + Math.round(totalDistance * coeffY);
        // Pin to mMinY <= mFinalY <= mMaxY
        mFinalY = Math.min(mFinalY, mMaxY);
        mFinalY = Math.max(mFinalY, mMinY);
    }

    // Built-in interpolator used when no Interpolator was supplied: an
    // exponential ease-out normalized so that viscousFluid(1) == 1.
    static float viscousFluid(float x) {
        x *= sViscousFluidScale;
        if (x < 1.0f) {
            x -= (1.0f - (float) Math.exp(-x));
        } else {
            float start = 0.36787944117f; // 1/e == exp(-1)
            x = 1.0f - (float) Math.exp(1.0f - x);
            x = start + x * (1.0f - start);
        }
        x *= sViscousFluidNormalize;
        return x;
    }

    /**
     * Stops the animation. Contrary to {@link #forceFinished(boolean)},
     * aborting the animation causes the scroller to move to the final x and y
     * position.
     *
     * @see #forceFinished(boolean)
     */
    public void abortAnimation() {
        mCurrX = mFinalX;
        mCurrY = mFinalY;
        mFinished = true;
    }

    /**
     * Extend the scroll animation. This allows a running animation to scroll
     * further and longer, when used with {@link #setFinalX(int)} or {@link #setFinalY(int)}.
     *
     * @param extend Additional time to scroll in milliseconds.
     * @see #setFinalX(int)
     * @see #setFinalY(int)
     */
    public void extendDuration(int extend) {
        int passed = timePassed();
        mDuration = passed + extend;
        mDurationReciprocal = 1.0f / mDuration;
        mFinished = false;
    }

    /**
     * Returns the time elapsed since the beginning of the scrolling.
     *
     * @return The elapsed time in milliseconds.
     */
    public int timePassed() {
        return (int) (AnimationUtils.currentAnimationTimeMillis() - mStartTime);
    }

    /**
     * Sets the final position (X) for this scroller.
     *
     * @param newX The new X offset as an absolute distance from the origin.
     * @see #extendDuration(int)
     * @see #setFinalY(int)
     */
    public void setFinalX(int newX) {
        mFinalX = newX;
        mDeltaX = mFinalX - mStartX;
        mFinished = false;
    }

    /**
     * Sets the final position (Y) for this scroller.
     *
     * @param newY The new Y offset as an absolute distance from the origin.
     * @see #extendDuration(int)
     * @see #setFinalX(int)
     */
    public void setFinalY(int newY) {
        mFinalY = newY;
        mDeltaY = mFinalY - mStartY;
        mFinished = false;
    }

    /**
     * Returns whether the current (unfinished) animation is moving in the same
     * direction as the given velocity components.
     * @hide
     */
    public boolean isScrollingInDirection(float xvel, float yvel) {
        return !mFinished && Math.signum(xvel) == Math.signum(mFinalX - mStartX)
                && Math.signum(yvel) == Math.signum(mFinalY - mStartY);
    }
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.jet.impl.pipeline;
import com.hazelcast.function.BiConsumerEx;
import com.hazelcast.function.BiFunctionEx;
import com.hazelcast.function.BiPredicateEx;
import com.hazelcast.function.FunctionEx;
import com.hazelcast.function.PredicateEx;
import com.hazelcast.function.ToLongFunctionEx;
import com.hazelcast.jet.JetException;
import com.hazelcast.jet.Traverser;
import com.hazelcast.jet.aggregate.AggregateOperation;
import com.hazelcast.jet.aggregate.AggregateOperation1;
import com.hazelcast.jet.aggregate.AggregateOperation2;
import com.hazelcast.jet.aggregate.AggregateOperation3;
import com.hazelcast.jet.core.Inbox;
import com.hazelcast.jet.core.Processor;
import com.hazelcast.jet.core.ProcessorMetaSupplier;
import com.hazelcast.jet.function.TriFunction;
import com.hazelcast.jet.impl.JetEvent;
import com.hazelcast.jet.impl.processor.ProcessorWrapper;
import com.hazelcast.jet.impl.util.WrappingProcessorMetaSupplier;
import com.hazelcast.jet.pipeline.JoinClause;
import javax.annotation.Nonnull;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import static com.hazelcast.jet.impl.JetEvent.jetEvent;
import static com.hazelcast.jet.impl.util.Util.toList;
/**
 * Adapts user-supplied lambdas for execution inside a Jet pipeline. This
 * base implementation is the identity: it is used for stages whose items
 * travel as their plain payloads, so no wrapping or unwrapping is needed.
 * The subclass {@code JetEventFunctionAdapter} overrides these methods for
 * stages whose items are wrapped in {@code JetEvent}s.
 */
public class FunctionAdapter {
    /** Returns the key-extracting function unchanged (identity adaptation). */
    @Nonnull
    public <T, K> FunctionEx<?, ? extends K> adaptKeyFn(@Nonnull FunctionEx<? super T, ? extends K> keyFn) {
        return keyFn;
    }

    /** Plain items carry no timestamp; every item reports Long.MIN_VALUE. */
    @Nonnull
    <T> ToLongFunctionEx<?> adaptTimestampFn() {
        return t -> Long.MIN_VALUE;
    }

    /** Returns the mapping function unchanged. */
    @Nonnull
    <T, R> FunctionEx<?, ?> adaptMapFn(@Nonnull FunctionEx<? super T, ? extends R> mapFn) {
        return mapFn;
    }

    /** Returns the filtering predicate unchanged. */
    @Nonnull
    <T> PredicateEx<?> adaptFilterFn(@Nonnull PredicateEx<? super T> filterFn) {
        return filterFn;
    }

    /** Returns the flat-mapping function unchanged. */
    @Nonnull
    <T, R> FunctionEx<?, ? extends Traverser<?>> adaptFlatMapFn(
            @Nonnull FunctionEx<? super T, ? extends Traverser<R>> flatMapFn
    ) {
        return flatMapFn;
    }

    /** Returns the stateful mapping function unchanged. */
    @Nonnull
    <S, K, T, R> TriFunction<? super S, ? super K, ?, ?> adaptStatefulMapFn(
            @Nonnull TriFunction<? super S, ? super K, ? super T, ? extends R> mapFn
    ) {
        return mapFn;
    }

    /** Returns the state-eviction callback unchanged. */
    @Nonnull
    <S, K, R> TriFunction<? super S, ? super K, ? super Long, ?> adaptOnEvictFn(
            @Nonnull TriFunction<? super S, ? super K, ? super Long, ? extends R> onEvictFn
    ) {
        return onEvictFn;
    }

    /** Returns the stateful flat-mapping function unchanged. */
    @Nonnull
    <S, K, T, R> TriFunction<? super S, ? super K, ?, ? extends Traverser<?>> adaptStatefulFlatMapFn(
            @Nonnull TriFunction<? super S, ? super K, ? super T, ? extends Traverser<R>> flatMapFn
    ) {
        return flatMapFn;
    }

    /** Returns the flat-mapping state-eviction callback unchanged. */
    @Nonnull
    <S, K, R> TriFunction<? super S, ? super K, ? super Long, ? extends Traverser<?>> adaptOnEvictFlatMapFn(
            @Nonnull TriFunction<? super S, ? super K, ? super Long, ? extends Traverser<R>> onEvictFn
    ) {
        return onEvictFn;
    }

    /** Returns the service-using mapping function unchanged. */
    @Nonnull
    <S, T, R> BiFunctionEx<? super S, ?, ?> adaptMapUsingServiceFn(
            @Nonnull BiFunctionEx<? super S, ? super T, ? extends R> mapFn
    ) {
        return mapFn;
    }

    /** Returns the service-using filter predicate unchanged. */
    @Nonnull
    <S, T> BiPredicateEx<? super S, ?> adaptFilterUsingServiceFn(
            @Nonnull BiPredicateEx<? super S, ? super T> filterFn
    ) {
        return filterFn;
    }

    /** Returns the service-using flat-mapping function unchanged. */
    @Nonnull
    <S, T, R> BiFunctionEx<? super S, ?, ? extends Traverser<?>> adaptFlatMapUsingServiceFn(
            @Nonnull BiFunctionEx<? super S, ? super T, ? extends Traverser<R>> flatMapFn
    ) {
        return flatMapFn;
    }

    /** Returns the async service-using flat-mapping function unchanged (raw cast only). */
    @Nonnull
    @SuppressWarnings({"unchecked", "rawtypes"})
    <S, T, R> BiFunctionEx<? super S, ?, ? extends CompletableFuture<Traverser<?>>> adaptFlatMapUsingServiceAsyncFn(
            @Nonnull BiFunctionEx<? super S, ? super T, ? extends CompletableFuture<Traverser<R>>> flatMapAsyncFn
    ) {
        return (BiFunctionEx) flatMapAsyncFn;
    }

    /**
     * Wraps the batched async flat-mapping function so that every output
     * batch is validated to have the same size as its input batch.
     */
    @Nonnull
    @SuppressWarnings({"unchecked", "rawtypes"})
    <S, T, R> BiFunctionEx<? super S, ? super List<?>, ? extends CompletableFuture<List<Traverser<?>>>>
    adaptFlatMapUsingServiceAsyncBatchedFn(
            @Nonnull BiFunctionEx<? super S, ? super List<T>, ? extends CompletableFuture<List<Traverser<R>>>>
                    flatMapAsyncBatchedFn
    ) {
        // there's no transformation here, only checking that the input and output list sizes match
        BiFunctionEx<S, List<T>, CompletableFuture<? extends List<?>>> fn =
                (svc, items) -> flatMapAsyncBatchedFn.apply(svc, items)
                                                    .thenApply(output -> requireSizeMatch(output, items));
        return (BiFunctionEx) fn;
    }

    /** Returns the stringifying function unchanged. */
    @Nonnull
    <T, R extends CharSequence> FunctionEx<?, ? extends R> adaptToStringFn(
            @Nonnull FunctionEx<? super T, ? extends R> toStringFn
    ) {
        return toStringFn;
    }

    /** Returns the hash-join clause unchanged. */
    @Nonnull
    public <K, T0, T1, T1_OUT> JoinClause<? extends K, ?, ? super T1, ? extends T1_OUT>
    adaptJoinClause(@Nonnull JoinClause<? extends K, ? super T0, ? super T1, ? extends T1_OUT> joinClause) {
        return joinClause;
    }

    /** Returns the two-way hash-join output function unchanged. */
    @Nonnull
    public <T, T1, R> BiFunctionEx<?, ? super T1, ?> adaptHashJoinOutputFn(
            @Nonnull BiFunctionEx<? super T, ? super T1, ? extends R> mapToOutputFn
    ) {
        return mapToOutputFn;
    }

    /** Returns the three-way hash-join output function unchanged. */
    @Nonnull
    <T, T1, T2, R> TriFunction<?, ? super T1, ? super T2, ?> adaptHashJoinOutputFn(
            @Nonnull TriFunction<? super T, ? super T1, ? super T2, ? extends R> mapToOutputFn
    ) {
        return mapToOutputFn;
    }

    /** Returns the aggregate operation unchanged. */
    @Nonnull
    <A, R> AggregateOperation<A, ? extends R> adaptAggregateOperation(
            @Nonnull AggregateOperation<A, ? extends R> aggrOp
    ) {
        return aggrOp;
    }

    /** Returns the single-input aggregate operation unchanged. */
    @Nonnull
    <T, A, R> AggregateOperation1<?, A, ? extends R> adaptAggregateOperation1(
            @Nonnull AggregateOperation1<? super T, A, ? extends R> aggrOp
    ) {
        return aggrOp;
    }

    /**
     * Wraps the given meta-supplier so that each created processor adapts
     * (unwraps JetEvents on) the inbound ordinals listed in {@code ordinalsToAdapt}.
     */
    @Nonnull
    public static ProcessorMetaSupplier adaptingMetaSupplier(ProcessorMetaSupplier metaSup, int[] ordinalsToAdapt) {
        return new WrappingProcessorMetaSupplier(metaSup, p -> new AdaptingProcessor(p, ordinalsToAdapt));
    }

    /**
     * Asserts that an async batched stage produced exactly one output item
     * per input item; throws {@link JetException} on a size mismatch.
     */
    static <EI, EO> List<EO> requireSizeMatch(List<EO> output, List<EI> input) {
        if (input.size() != output.size()) {
            throw new JetException(String.format(
                    "Output batch size %,d is not the same as input batch size %,d",
                    output.size(), input.size()));
        }
        return output;
    }

    /**
     * Processor wrapper that substitutes an {@link AdaptingInbox} for the
     * real inbox on the ordinals that need JetEvent payload unwrapping.
     */
    private static final class AdaptingProcessor extends ProcessorWrapper {
        private final AdaptingInbox adaptingInbox = new AdaptingInbox();
        // Bit i is set iff items arriving on ordinal i must be unwrapped.
        private final BitSet shouldAdaptOrdinal = new BitSet();

        AdaptingProcessor(Processor wrapped, int[] ordinalsToAdapt) {
            super(wrapped);
            for (int ordinal : ordinalsToAdapt) {
                shouldAdaptOrdinal.set(ordinal);
            }
        }

        @Override
        public void process(int ordinal, @Nonnull Inbox inbox) {
            Inbox inboxToUse;
            if (shouldAdaptOrdinal.get(ordinal)) {
                // Route this ordinal through the unwrapping view of the inbox.
                inboxToUse = adaptingInbox;
                adaptingInbox.setWrappedInbox(inbox);
            } else {
                inboxToUse = inbox;
            }
            super.process(ordinal, inboxToUse);
        }
    }

    /**
     * An Inbox view that presents each wrapped {@code JetEvent}'s payload
     * instead of the event itself. Removal operations delegate unchanged.
     */
    private static final class AdaptingInbox implements Inbox {
        private Inbox wrapped;

        void setWrappedInbox(@Nonnull Inbox wrapped) {
            this.wrapped = wrapped;
        }

        @Override
        public boolean isEmpty() {
            return wrapped.isEmpty();
        }

        @Nonnull @Override
        public Iterator<Object> iterator() {
            Iterator<Object> iterator = wrapped.iterator();
            return new Iterator<Object>() {
                @Override
                public boolean hasNext() {
                    return iterator.hasNext();
                }

                @Override
                public Object next() {
                    return unwrapPayload(iterator.next());
                }
            };
        }

        @Override
        public Object peek() {
            return unwrapPayload(wrapped.peek());
        }

        @Override
        public Object poll() {
            return unwrapPayload(wrapped.poll());
        }

        @Override
        public void remove() {
            wrapped.remove();
        }

        @Override
        public void clear() {
            wrapped.clear();
        }

        @Override
        public int size() {
            return wrapped.size();
        }

        // Null-safe: a null slot (empty inbox) stays null.
        private static Object unwrapPayload(Object jetEvent) {
            return jetEvent != null ? ((JetEvent) jetEvent).payload() : null;
        }
    }
}
/**
 * {@link FunctionAdapter} specialization for stream stages whose items are
 * wrapped in {@code JetEvent}s: every adapted function unwraps the payload
 * before calling the user's lambda and re-wraps results with the source
 * event's timestamp.
 * <p>
 * Fix: {@code adaptOnEvictFn} and {@code adaptOnEvictFlatMapFn} override
 * superclass methods but were missing {@code @Override}, unlike every
 * sibling method; the annotation is now present for consistency and
 * compile-time override checking.
 */
class JetEventFunctionAdapter extends FunctionAdapter {
    /** Extracts the key from the event's payload. */
    @Nonnull @Override
    public <T, K> FunctionEx<? super JetEvent<T>, ? extends K> adaptKeyFn(
            @Nonnull FunctionEx<? super T, ? extends K> keyFn
    ) {
        return e -> keyFn.apply(e.payload());
    }

    /** Reports the event's own timestamp. */
    @Nonnull @Override
    <T> ToLongFunctionEx<? super JetEvent<T>> adaptTimestampFn() {
        return JetEvent::timestamp;
    }

    /** Maps the payload, preserving the event's timestamp. */
    @Nonnull @Override
    <T, R> FunctionEx<? super JetEvent<T>, ?> adaptMapFn(
            @Nonnull FunctionEx<? super T, ? extends R> mapFn
    ) {
        return e -> jetEvent(e.timestamp(), mapFn.apply(e.payload()));
    }

    /** Tests the payload. */
    @Nonnull @Override
    <T> PredicateEx<? super JetEvent<T>> adaptFilterFn(@Nonnull PredicateEx<? super T> filterFn) {
        return e -> filterFn.test(e.payload());
    }

    /** Flat-maps the payload; each result inherits the source timestamp. */
    @Nonnull @Override
    <T, R> FunctionEx<? super JetEvent<T>, ? extends Traverser<JetEvent<R>>> adaptFlatMapFn(
            @Nonnull FunctionEx<? super T, ? extends Traverser<R>> flatMapFn
    ) {
        return e -> flatMapFn.apply(e.payload()).map(r -> jetEvent(e.timestamp(), r));
    }

    /** Stateful map on the payload; the result inherits the source timestamp. */
    @Nonnull @Override
    <S, K, T, R> TriFunction<? super S, ? super K, ? super JetEvent<T>, ? extends JetEvent<R>> adaptStatefulMapFn(
            @Nonnull TriFunction<? super S, ? super K, ? super T, ? extends R> mapFn
    ) {
        return (state, key, e) -> jetEvent(e.timestamp(), mapFn.apply(state, key, e.payload()));
    }

    /** Eviction result is stamped with the watermark that triggered it. */
    @Nonnull @Override
    <S, K, R> TriFunction<? super S, ? super K, ? super Long, ? extends JetEvent<R>> adaptOnEvictFn(
            @Nonnull TriFunction<? super S, ? super K, ? super Long, ? extends R> onEvictFn
    ) {
        return (s, k, wm) -> jetEvent(wm, onEvictFn.apply(s, k, wm));
    }

    /** Stateful flat-map on the payload; results inherit the source timestamp. */
    @Nonnull @Override
    <S, K, T, R> TriFunction<? super S, ? super K, ? super JetEvent<T>, ? extends Traverser<JetEvent<R>>>
    adaptStatefulFlatMapFn(
            @Nonnull TriFunction<? super S, ? super K, ? super T, ? extends Traverser<R>> flatMapFn
    ) {
        return (state, key, e) -> flatMapFn.apply(state, key, e.payload()).map(r -> jetEvent(e.timestamp(), r));
    }

    /** Eviction results are stamped with the watermark that triggered them. */
    @Nonnull @Override
    <S, K, R> TriFunction<? super S, ? super K, ? super Long, ? extends Traverser<JetEvent<R>>> adaptOnEvictFlatMapFn(
            @Nonnull TriFunction<? super S, ? super K, ? super Long, ? extends Traverser<R>> onEvictFn
    ) {
        return (s, k, wm) -> onEvictFn.apply(s, k, wm).map(r -> jetEvent(wm, r));
    }

    /** Service-using map on the payload; the result inherits the source timestamp. */
    @Nonnull @Override
    <S, T, R> BiFunctionEx<? super S, ? super JetEvent<T>, ? extends JetEvent<R>> adaptMapUsingServiceFn(
            @Nonnull BiFunctionEx<? super S, ? super T, ? extends R> mapFn
    ) {
        return (s, e) -> jetEvent(e.timestamp(), mapFn.apply(s, e.payload()));
    }

    /** Service-using filter on the payload. */
    @Nonnull @Override
    <S, T> BiPredicateEx<? super S, ? super JetEvent<T>> adaptFilterUsingServiceFn(
            @Nonnull BiPredicateEx<? super S, ? super T> filterFn
    ) {
        return (s, e) -> filterFn.test(s, e.payload());
    }

    /** Service-using flat-map on the payload; results inherit the source timestamp. */
    @Nonnull @Override
    <S, T, R> BiFunctionEx<? super S, ? super JetEvent<T>, ? extends Traverser<JetEvent<R>>>
    adaptFlatMapUsingServiceFn(
            @Nonnull BiFunctionEx<? super S, ? super T, ? extends Traverser<R>> flatMapFn
    ) {
        return (s, e) -> flatMapFn.apply(s, e.payload()).map(r -> jetEvent(e.timestamp(), r));
    }

    /** Async service-using flat-map; each result re-wrapped with the source timestamp. */
    @Nonnull @Override
    <S, T, R> BiFunctionEx<? super S, ?, ? extends CompletableFuture<Traverser<?>>>
    adaptFlatMapUsingServiceAsyncFn(
            @Nonnull BiFunctionEx<? super S, ? super T, ? extends CompletableFuture<Traverser<R>>> flatMapAsyncFn
    ) {
        return (S s, JetEvent<T> e) ->
                flatMapAsyncFn.apply(s, e.payload()).thenApply(trav -> trav.map(re -> jetEvent(e.timestamp(), re)));
    }

    /**
     * Batched async service-using flat-map: unwraps the input payloads,
     * validates the output batch size and re-stamps each output traverser
     * with its corresponding input event's timestamp.
     */
    @Nonnull @Override
    @SuppressWarnings({"unchecked", "rawtypes"})
    <S, T, R> BiFunctionEx<? super S, ? super List<?>, ? extends CompletableFuture<List<Traverser<?>>>>
    adaptFlatMapUsingServiceAsyncBatchedFn(@Nonnull BiFunctionEx<? super S, ? super List<T>,
            ? extends CompletableFuture<List<Traverser<R>>>> flatMapAsyncBatchedFn
    ) {
        BiFunctionEx<S, List<JetEvent<T>>, CompletableFuture<List<Traverser<?>>>> fn =
                (S s, List<JetEvent<T>> input) -> flatMapAsyncBatchedFn
                        .apply(s, toList(input, JetEvent::payload))
                        .thenApply(travList -> {
                            List<Traverser<?>> output = (List) travList;
                            requireSizeMatch(output, input);
                            // Output item i corresponds to input item i; restore its timestamp.
                            for (int i = 0; i < output.size(); i++) {
                                long timestamp = input.get(i).timestamp();
                                output.set(i, output.get(i).map(r -> jetEvent(timestamp, r)));
                            }
                            return output;
                        });
        return (BiFunctionEx) fn;
    }

    /** Stringifies the payload. */
    @Nonnull @Override
    <T, STR extends CharSequence> FunctionEx<? super JetEvent<T>, ? extends STR> adaptToStringFn(
            @Nonnull FunctionEx<? super T, ? extends STR> toStringFn
    ) {
        return e -> toStringFn.apply(e.payload());
    }

    /** Rebuilds the join clause with a left-key extractor that unwraps the event. */
    @Nonnull @Override
    public <K, T0, T1, T1_OUT> JoinClause<? extends K, ? super JetEvent<T0>, ? super T1, ? extends T1_OUT>
    adaptJoinClause(
            @Nonnull JoinClause<? extends K, ? super T0, ? super T1, ? extends T1_OUT> joinClause
    ) {
        return JoinClause.<K, JetEvent<T0>, T1>onKeys(adaptKeyFn(joinClause.leftKeyFn()), joinClause.rightKeyFn())
                         .projecting(joinClause.rightProjectFn());
    }

    /** Two-way hash-join output; the result inherits the left event's timestamp. */
    @Nonnull @Override
    public <T, T1, R> BiFunctionEx<? super JetEvent<T>, ? super T1, ?> adaptHashJoinOutputFn(
            @Nonnull BiFunctionEx<? super T, ? super T1, ? extends R> mapToOutputFn
    ) {
        return (e, t1) -> jetEvent(e.timestamp(), mapToOutputFn.apply(e.payload(), t1));
    }

    /** Three-way hash-join output; the result inherits the left event's timestamp. */
    @Nonnull @Override
    <T, T1, T2, R> TriFunction<? super JetEvent<T>, ? super T1, ? super T2, ?> adaptHashJoinOutputFn(
            @Nonnull TriFunction<? super T, ? super T1, ? super T2, ? extends R> mapToOutputFn
    ) {
        return (e, t1, t2) -> jetEvent(e.timestamp(), mapToOutputFn.apply(e.payload(), t1, t2));
    }

    /**
     * Adapts every accumulate function of the operation to unwrap JetEvents,
     * dispatching to the arity-specific adapter where one exists.
     */
    @Nonnull @Override
    @SuppressWarnings({"unchecked", "rawtypes"})
    <A, R> AggregateOperation<A, ? extends R> adaptAggregateOperation(
            @Nonnull AggregateOperation<A, ? extends R> aggrOp
    ) {
        if (aggrOp instanceof AggregateOperation1) {
            return adaptAggregateOperation1((AggregateOperation1) aggrOp);
        } else if (aggrOp instanceof AggregateOperation2) {
            return adaptAggregateOperation2((AggregateOperation2) aggrOp);
        } else if (aggrOp instanceof AggregateOperation3) {
            return adaptAggregateOperation3((AggregateOperation3) aggrOp);
        } else {
            // Arbitrary arity: adapt each accumulate function individually.
            BiConsumerEx[] adaptedAccFns = new BiConsumerEx[aggrOp.arity()];
            Arrays.setAll(adaptedAccFns, i -> adaptAccumulateFn((BiConsumerEx) aggrOp.accumulateFn(i)));
            return aggrOp.withAccumulateFns(adaptedAccFns);
        }
    }

    /** Single-input aggregate operation with an unwrapping accumulate function. */
    @Nonnull @Override
    <T, A, R> AggregateOperation1<? super JetEvent<T>, A, ? extends R> adaptAggregateOperation1(
            @Nonnull AggregateOperation1<? super T, A, ? extends R> aggrOp
    ) {
        return aggrOp.withAccumulateFn(adaptAccumulateFn(aggrOp.accumulateFn()));
    }

    /** Two-input aggregate operation with unwrapping accumulate functions. */
    @Nonnull
    static <T0, T1, A, R> AggregateOperation2<? super JetEvent<T0>, ? super JetEvent<T1>, A, ? extends R>
    adaptAggregateOperation2(@Nonnull AggregateOperation2<? super T0, ? super T1, A, ? extends R> aggrOp) {
        return aggrOp
                .<JetEvent<T0>>withAccumulateFn0(adaptAccumulateFn(aggrOp.accumulateFn0()))
                .withAccumulateFn1(adaptAccumulateFn(aggrOp.accumulateFn1()));
    }

    /** Three-input aggregate operation with unwrapping accumulate functions. */
    @Nonnull
    static <T0, T1, T2, A, R>
    AggregateOperation3<? super JetEvent<T0>, ? super JetEvent<T1>, ? super JetEvent<T2>, A, ? extends R>
    adaptAggregateOperation3(@Nonnull AggregateOperation3<? super T0, ? super T1, ? super T2, A, ? extends R> aggrOp) {
        return aggrOp
                .<JetEvent<T0>>withAccumulateFn0(adaptAccumulateFn(aggrOp.accumulateFn0()))
                .<JetEvent<T1>>withAccumulateFn1(adaptAccumulateFn(aggrOp.accumulateFn1()))
                .withAccumulateFn2(adaptAccumulateFn(aggrOp.accumulateFn2()));
    }

    /** Wraps an accumulate function so it receives the event's payload. */
    @Nonnull
    private static <A, T> BiConsumerEx<? super A, ? super JetEvent<T>> adaptAccumulateFn(
            @Nonnull BiConsumerEx<? super A, ? super T> accumulateFn
    ) {
        return (acc, t) -> accumulateFn.accept(acc, t.payload());
    }
}
| |
/**
* Copyright 2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io;
import java.io.*;
import java.util.*;
import java.util.zip.*;
import java.util.logging.*;
import java.net.InetAddress;
import java.rmi.server.UID;
import java.security.MessageDigest;
import org.apache.lucene.util.PriorityQueue;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.util.LogFormatter;
/** Support for flat files of binary key/value pairs. */
public class SequenceFile {
// Logger for this class (java.util.logging, obtained via Hadoop's LogFormatter).
public static final Logger LOG =
    LogFormatter.getLogger("org.apache.hadoop.io.SequenceFile");

private SequenceFile() {} // no public ctor — static container for Writer/Reader/Sorter

// On-disk magic number: the bytes 'S','E','Q' followed by the format version (3).
private static byte[] VERSION = new byte[] {
    (byte)'S', (byte)'E', (byte)'Q', 3
};

private static final int SYNC_ESCAPE = -1; // "length" of sync entries
private static final int SYNC_HASH_SIZE = 16; // number of bytes in hash
private static final int SYNC_SIZE = 4+SYNC_HASH_SIZE; // escape + hash

/** The number of bytes between sync points.*/
public static final int SYNC_INTERVAL = 100*SYNC_SIZE;
/** Write key/value pairs to a sequence-format file. */
public static class Writer {
    private FSDataOutputStream out;    // underlying output stream
    private DataOutputBuffer buffer = new DataOutputBuffer(); // per-record staging buffer
    private FileSystem fs = null;
    private Path target = null;
    private Class keyClass;            // class every appended key must have
    private Class valClass;            // class every appended value must have
    private boolean deflateValues;     // true if values are DEFLATE-compressed
    private Deflater deflater = new Deflater(Deflater.BEST_SPEED);
    private DeflaterOutputStream deflateFilter =
        new DeflaterOutputStream(buffer, deflater);
    private DataOutputStream deflateOut =
        new DataOutputStream(new BufferedOutputStream(deflateFilter));

    // Insert a globally unique 16-byte value every few entries, so that one
    // can seek into the middle of a file and then synchronize with record
    // starts and ends by scanning for this value.
    private long lastSyncPos;          // position of last sync
    private byte[] sync;               // 16 random bytes

    // Instance initializer: derive the sync marker from the MD5 hash of a
    // fresh UID plus the local host, making it (practically) unique per writer.
    {
        try { // use hash of uid + host
            MessageDigest digester = MessageDigest.getInstance("MD5");
            digester.update((new UID()+"@"+InetAddress.getLocalHost()).getBytes());
            sync = digester.digest();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** @deprecated Call {@link #Writer(FileSystem,Path,Class,Class)}. */
    public Writer(FileSystem fs, String name, Class keyClass, Class valClass)
        throws IOException {
        this(fs, new Path(name), keyClass, valClass, false);
    }

    /** Create the named file (values uncompressed). */
    public Writer(FileSystem fs, Path name, Class keyClass, Class valClass)
        throws IOException {
        this(fs, name, keyClass, valClass, false);
    }

    /** Create the named file.
     * @param compress if true, values are compressed.
     */
    public Writer(FileSystem fs, Path name,
                  Class keyClass, Class valClass, boolean compress)
        throws IOException {
        this.fs = fs;
        this.target = name;
        init(fs.create(target), keyClass, valClass, compress);
    }

    /** Write to an arbitrary stream using a specified buffer size. */
    private Writer(FSDataOutputStream out,
                   Class keyClass, Class valClass, boolean compress)
        throws IOException {
        init(out, keyClass, valClass, compress);
    }

    /** Write and flush the file header.
     * Header layout (order is the on-disk contract): VERSION magic, key
     * class name, value class name, compression flag, sync marker. */
    private void init(FSDataOutputStream out,
                      Class keyClass, Class valClass,
                      boolean compress) throws IOException {
        this.out = out;
        this.out.write(VERSION);
        this.keyClass = keyClass;
        this.valClass = valClass;
        this.deflateValues = compress;
        new UTF8(WritableName.getName(keyClass)).write(this.out);
        new UTF8(WritableName.getName(valClass)).write(this.out);
        this.out.writeBoolean(deflateValues);
        out.write(sync);                       // write the sync bytes
        this.out.flush();                      // flush header
    }

    /** Returns the class of keys in this file. */
    public Class getKeyClass() { return keyClass; }

    /** Returns the class of values in this file. */
    public Class getValueClass() { return valClass; }

    /** Close the file. Safe to call more than once. */
    public synchronized void close() throws IOException {
        if (out != null) {
            out.close();
            out = null;
        }
    }

    /** Append a key/value pair.
     * Serializes both into the staging buffer (compressing the value if
     * enabled), then delegates to the raw-bytes append.
     * @throws IOException if key/value classes mismatch or the key is empty. */
    public synchronized void append(Writable key, Writable val)
        throws IOException {
        if (key.getClass() != keyClass)
            throw new IOException("wrong key class: "+key+" is not "+keyClass);
        if (val.getClass() != valClass)
            throw new IOException("wrong value class: "+val+" is not "+valClass);

        buffer.reset();
        key.write(buffer);
        int keyLength = buffer.getLength();
        if (keyLength == 0)
            throw new IOException("zero length keys not allowed: " + key);

        if (deflateValues) {
            deflater.reset();
            val.write(deflateOut);
            deflateOut.flush();
            deflateFilter.finish();
        } else {
            val.write(buffer);
        }

        append(buffer.getData(), 0, buffer.getLength(), keyLength);
    }

    /** Append a key/value pair already serialized as raw bytes.
     * Emits a sync entry (escape + 16-byte marker) whenever SYNC_INTERVAL
     * bytes have been written since the previous sync. Record layout:
     * total length, key length, then the key+value bytes. */
    public synchronized void append(byte[] data, int start, int length,
                                    int keyLength) throws IOException {
        if (keyLength == 0)
            throw new IOException("zero length keys not allowed");

        if (sync != null &&
            out.getPos() >= lastSyncPos+SYNC_INTERVAL) { // time to emit sync
            lastSyncPos = out.getPos();                  // update lastSyncPos
            //LOG.info("sync@"+lastSyncPos);
            out.writeInt(SYNC_ESCAPE);                   // escape it
            out.write(sync);                             // write sync
        }

        out.writeInt(length);                            // total record length
        out.writeInt(keyLength);                         // key portion length
        out.write(data, start, length);                  // data
    }

    /** Returns the current length of the output file. */
    public synchronized long getLength() throws IOException {
        return out.getPos();
    }
}
/** Reads key/value pairs from a sequence-format file. */
public static class Reader {
    private Path file;
    private FSDataInputStream in;
    private DataOutputBuffer outBuf = new DataOutputBuffer(); // raw record bytes
    private DataInputBuffer inBuf = new DataInputBuffer();    // deserialization view
    private FileSystem fs = null;
    private byte[] version = new byte[VERSION.length];        // header magic read from file
    private Class keyClass;
    private Class valClass;
    private byte[] sync = new byte[SYNC_HASH_SIZE];           // sync marker from header
    private byte[] syncCheck = new byte[SYNC_HASH_SIZE];      // scratch for marker comparison
    private boolean syncSeen;   // did the last next() consume a sync entry?
    private long end;           // offset past which no more records are read
    private int keyLength;      // key length of the most recently read record

    private boolean inflateValues;
    private byte[] inflateIn = new byte[8192];
    private DataOutputBuffer inflateOut = new DataOutputBuffer();
    private Inflater inflater = new Inflater();
    private Configuration conf;

    /** @deprecated Call {@link #Reader(FileSystem,Path,Configuration)}.*/
    public Reader(FileSystem fs, String file, Configuration conf)
        throws IOException {
        this(fs, new Path(file), conf);
    }

    /** Open the named file. */
    public Reader(FileSystem fs, Path file, Configuration conf)
        throws IOException {
        this(fs, file, conf.getInt("io.file.buffer.size", 4096));
        this.conf = conf;
    }

    private Reader(FileSystem fs, Path name, int bufferSize) throws IOException {
        this.fs = fs;
        this.file = name;
        this.in = fs.open(file, bufferSize);
        this.end = fs.getLength(file);
        init();
    }

    // NOTE(review): this constructor never assigns this.conf, so a Reader built
    // this way would NPE in handleChecksumException()/next(key,val) — confirm
    // all callers set conf afterwards or never hit those paths.
    private Reader(FileSystem fs, Path file, int bufferSize, long start, long length)
        throws IOException {
        this.fs = fs;
        this.file = file;
        this.in = fs.open(file, bufferSize);
        seek(start);
        init();
        this.end = in.getPos() + length;
    }

    /** Read and validate the header: magic/version, key and value class
     * names, and (version-dependent) compression flag and sync marker. */
    private void init() throws IOException {
        in.readFully(version);
        if ((version[0] != VERSION[0]) ||
            (version[1] != VERSION[1]) ||
            (version[2] != VERSION[2]))
            throw new IOException(file + " not a SequenceFile");
        if (version[3] > VERSION[3])
            throw new VersionMismatchException(VERSION[3], version[3]);

        UTF8 className = new UTF8();

        className.readFields(in);                         // read key class name
        this.keyClass = WritableName.getClass(className.toString());

        className.readFields(in);                         // read val class name
        this.valClass = WritableName.getClass(className.toString());

        if (version[3] > 2) {                             // if version > 2
            this.inflateValues = in.readBoolean();        // is compressed?
        }

        if (version[3] > 1) {                             // if version > 1
            in.readFully(sync);                           // read sync bytes
        }
    }

    /** Close the file. */
    public synchronized void close() throws IOException {
        in.close();
    }

    /** Returns the class of keys in this file. */
    public Class getKeyClass() { return keyClass; }

    /** Returns the class of values in this file. */
    public Class getValueClass() { return valClass; }

    /** Returns true if values are compressed. */
    public boolean isCompressed() { return inflateValues; }

    /** Read the next key in the file into <code>key</code>, skipping its
     * value.  True if another entry exists, and false at end of file. */
    public synchronized boolean next(Writable key) throws IOException {
        if (key.getClass() != keyClass)
            throw new IOException("wrong key class: "+key+" is not "+keyClass);
        // The whole record (key + value) lands in outBuf; only the key
        // portion is deserialized here. keyLength is remembered for next(key,val).
        outBuf.reset();

        keyLength = next(outBuf);
        if (keyLength < 0)
            return false;

        inBuf.reset(outBuf.getData(), outBuf.getLength());
        key.readFields(inBuf);

        if (inBuf.getPosition() != keyLength)
            throw new IOException(key + " read " + inBuf.getPosition()
                                  + " bytes, should read " + keyLength);

        return true;
    }

    /** Read the next key/value pair in the file into <code>key</code> and
     * <code>val</code>.  Returns true if such a pair exists and false when at
     * end of file */
    public synchronized boolean next(Writable key, Writable val)
        throws IOException {
        if (val.getClass() != valClass)
            throw new IOException("wrong value class: "+val+" is not "+valClass);

        boolean more = next(key);

        if (more) {
            if (inflateValues) {
                // Decompress the value portion of outBuf into inflateOut,
                // then point inBuf at the decompressed bytes.
                inflater.reset();
                inflater.setInput(outBuf.getData(), keyLength,
                                  outBuf.getLength()-keyLength);
                inflateOut.reset();
                while (!inflater.finished()) {
                    try {
                        int count = inflater.inflate(inflateIn);
                        inflateOut.write(inflateIn, 0, count);
                    } catch (DataFormatException e) {
                        throw new IOException (e.toString());
                    }
                }
                inBuf.reset(inflateOut.getData(), inflateOut.getLength());
            }
            if(val instanceof Configurable) {
                ((Configurable) val).setConf(this.conf);
            }
            val.readFields(inBuf);

            if (inBuf.getPosition() != inBuf.getLength())
                throw new IOException(val+" read "+(inBuf.getPosition()-keyLength)
                                      + " bytes, should read " +
                                      (inBuf.getLength()-keyLength));
        }

        return more;
    }

    /** Read the next key/value pair in the file into <code>buffer</code>.
     * Returns the length of the key read, or -1 if at end of file.  The length
     * of the value may be computed by calling buffer.getLength() before and
     * after calls to this method. */
    public synchronized int next(DataOutputBuffer buffer) throws IOException {
        if (in.getPos() >= end)
            return -1;

        try {
            int length = in.readInt();
            // A length of SYNC_ESCAPE marks a sync entry: verify the 16-byte
            // marker against the one from the header, then read the real length.
            if (version[3] > 1 && sync != null &&
                length == SYNC_ESCAPE) {              // process a sync entry
                //LOG.info("sync@"+in.getPos());
                in.readFully(syncCheck);              // read syncCheck
                if (!Arrays.equals(sync, syncCheck))  // check it
                    throw new IOException("File is corrupt!");
                syncSeen = true;
                length = in.readInt();                // re-read length
            } else {
                syncSeen = false;
            }

            int keyLength = in.readInt();
            buffer.write(in, length);
            return keyLength;
        } catch (ChecksumException e) {               // checksum failure
            handleChecksumException(e);
            return next(buffer);                      // retry from the next sync point
        }
    }

    /** Either skip past the bad region (when configured to) or rethrow. */
    private void handleChecksumException(ChecksumException e)
        throws IOException {
        if (this.conf.getBoolean("io.skip.checksum.errors", false)) {
            LOG.warning("Bad checksum at "+getPosition()+". Skipping entries.");
            sync(getPosition()+this.conf.getInt("io.bytes.per.checksum", 512));
        } else {
            throw e;
        }
    }

    /** Set the current byte position in the input file. */
    public synchronized void seek(long position) throws IOException {
        in.seek(position);
    }

    /** Seek to the next sync mark past a given position.*/
    public synchronized void sync(long position) throws IOException {
        if (position+SYNC_SIZE >= end) {
            seek(end);
            return;
        }

        try {
            seek(position+4);                         // skip escape
            in.readFully(syncCheck);
            int syncLen = sync.length;
            // Slide a circular window over the stream until it matches the
            // sync marker, then back up so the caller reads the sync entry.
            for (int i = 0; in.getPos() < end; i++) {
                int j = 0;
                for (; j < syncLen; j++) {
                    if (sync[j] != syncCheck[(i+j)%syncLen])
                        break;
                }
                if (j == syncLen) {
                    in.seek(in.getPos() - SYNC_SIZE); // position before sync
                    return;
                }
                syncCheck[i%syncLen] = in.readByte();
            }
        } catch (ChecksumException e) {               // checksum failure
            handleChecksumException(e);
        }
    }

    /** Returns true iff the previous call to next passed a sync mark.*/
    public boolean syncSeen() { return syncSeen; }

    /** Return the current byte position in the input file. */
    public synchronized long getPosition() throws IOException {
        return in.getPos();
    }

    /** Returns the name of the file. */
    public String toString() {
        return file.toString();
    }
}
/** Sorts key/value pairs in a sequence-format file.
*
* <p>For best performance, applications should make sure that the {@link
* Writable#readFields(DataInput)} implementation of their keys is
* very efficient. In particular, it should avoid allocating memory.
*/
public static class Sorter {
private WritableComparator comparator; // defines the key ordering

private Path inFile;        // when sorting
private Path[] inFiles;     // when merging

private Path outFile;

private int memory;         // in-memory sort buffer budget, bytes
private int factor;         // streams merged per pass

private FileSystem fs = null;

private Class keyClass;
private Class valClass;

private Configuration conf;
/** Sort and merge files containing the named classes. */
public Sorter(FileSystem fs, Class keyClass, Class valClass, Configuration conf) {
    // Delegate, ordering keys with the key class's default WritableComparator.
    this(fs, new WritableComparator(keyClass), valClass, conf);
}
/** Sort and merge using an arbitrary {@link WritableComparator}. */
public Sorter(FileSystem fs, WritableComparator comparator, Class valClass, Configuration conf) {
    this.fs = fs;
    this.conf = conf;
    this.comparator = comparator;
    // The key class is dictated by the comparator; the value class is explicit.
    this.keyClass = comparator.getKeyClass();
    this.valClass = valClass;
    // Buffer budget (bytes) and merge fan-in come from the configuration.
    this.memory = conf.getInt("io.sort.mb", 100) * 1024 * 1024;
    this.factor = conf.getInt("io.sort.factor", 100);
}
/** Set the number of streams to merge at once (the merge fan-in).*/
public void setFactor(int factor) { this.factor = factor; }

/** Get the number of streams to merge at once.*/
public int getFactor() { return factor; }

/** Set the total amount of buffer memory, in bytes.*/
public void setMemory(int memory) { this.memory = memory; }

/** Get the total amount of buffer memory, in bytes.*/
public int getMemory() { return memory; }
/** Perform a file sort.*/
public void sort(Path inFile, Path outFile) throws IOException {
    if (fs.exists(outFile)) {
        throw new IOException("already exists: " + outFile);
    }

    this.inFile = inFile;
    this.outFile = outFile;

    // First sort in-memory chunks into segments, then repeatedly merge
    // passes until a single sorted segment remains. A pass is the last one
    // when all remaining segments fit in one merge.
    int segments = sortPass();
    for (int pass = 1; segments > 1; pass++) {
        segments = mergePass(pass, segments <= factor);
    }
}
/** Runs the initial in-memory sort pass; returns the segment count. */
private int sortPass() throws IOException {
    LOG.fine("running sort pass");
    SortPass pass = new SortPass(this.conf);
    try {
        return pass.run();
    } finally {
        // Always release the reader and any open segment output.
        pass.close();
    }
}
/** One in-memory sort pass: fills a buffer with records, sorts an index
 * over them, and flushes each sorted chunk as a segment. */
private class SortPass {
    private int limit = memory/4;  // flush a chunk once the buffer holds this many bytes
    private DataOutputBuffer buffer = new DataOutputBuffer(); // raw records for one chunk
    private byte[] rawBuffer;      // snapshot of buffer's backing array for sort/flush

    // Parallel arrays, indexed per record in the current chunk:
    private int[] starts = new int[1024];                 // start offset in rawBuffer
    private int[] pointers = new int[starts.length];      // permutation being sorted
    private int[] pointersCopy = new int[starts.length];  // scratch for merge sort
    private int[] keyLengths = new int[starts.length];    // key length
    private int[] lengths = new int[starts.length];       // total record length

    private Reader in;             // source of unsorted records
    private FSDataOutputStream out; // destination for segment(s)
    private Path outName;

    public SortPass(Configuration conf) throws IOException {
        in = new Reader(fs, inFile, conf);
    }

    /** Reads, sorts and flushes chunks until EOF; returns segment count. */
    public int run() throws IOException {
        int segments = 0;
        boolean atEof = false;
        while (!atEof) {
            int count = 0;
            buffer.reset();
            while (!atEof && buffer.getLength() < limit) {

                int start = buffer.getLength(); // read an entry into buffer
                int keyLength = in.next(buffer);
                int length = buffer.getLength() - start;

                if (keyLength == -1) {
                    atEof = true;
                    break;
                }

                if (count == starts.length)
                    grow();

                starts[count] = start;          // update pointers
                pointers[count] = count;
                lengths[count] = length;
                keyLengths[count] = keyLength;

                count++;
            }

            // buffer is full -- sort & flush it
            LOG.finer("flushing segment " + segments);
            rawBuffer = buffer.getData();
            sort(count);
            // Only the very first chunk that also hits EOF can be written
            // directly to outFile; otherwise it is an intermediate segment.
            flush(count, segments==0 && atEof);
            segments++;
        }
        return segments;
    }

    public void close() throws IOException {
        in.close();

        if (out != null) {
            out.close();
        }
    }

    /** Grows all parallel arrays by 1.5x when the chunk has more records
     * than slots. */
    private void grow() {
        int newLength = starts.length * 3 / 2;
        starts = grow(starts, newLength);
        pointers = grow(pointers, newLength);
        pointersCopy = new int[newLength];
        keyLengths = grow(keyLengths, newLength);
        lengths = grow(lengths, newLength);
    }

    private int[] grow(int[] old, int newLength) {
        int[] result = new int[newLength];
        System.arraycopy(old, 0, result, 0, old.length);
        return result;
    }

    /** Writes the current chunk's records in sorted order. Intermediate
     * segments are prefixed with their size and count and have the sync
     * marker disabled. */
    private void flush(int count, boolean done) throws IOException {
        if (out == null) {
            outName = done ? outFile : outFile.suffix(".0");
            out = fs.create(outName);
        }

        if (!done) {                           // an intermediate file

            long length = buffer.getLength();  // compute its size
            length += count*8;                 // allow for length/keyLength
            out.writeLong(length);             // write size

            out.writeLong(count);              // write count
        }

        Writer writer = new Writer(out, keyClass, valClass, in.isCompressed());
        if (!done) {
            writer.sync = null;                // disable sync on temp files
        }

        for (int i = 0; i < count; i++) {      // write in sorted order
            int p = pointers[i];
            writer.append(rawBuffer, starts[p], lengths[p], keyLengths[p]);
        }
    }

    /** Sorts the pointer permutation; record bytes are never moved. */
    private void sort(int count) {
        System.arraycopy(pointers, 0, pointersCopy, 0, count);
        mergeSort(pointersCopy, pointers, 0, count);
    }

    /** Compares records i and j by their raw key bytes. */
    private int compare(int i, int j) {
        return comparator.compare(rawBuffer, starts[i], keyLengths[i],
                                  rawBuffer, starts[j], keyLengths[j]);
    }

    /** Classic top-down stable merge sort over pointer indices, with an
     * insertion-sort cutoff and an already-sorted fast path. */
    private void mergeSort(int src[], int dest[], int low, int high) {
        int length = high - low;

        // Insertion sort on smallest arrays
        if (length < 7) {
            for (int i=low; i<high; i++)
                for (int j=i; j>low && compare(dest[j-1], dest[j])>0; j--)
                    swap(dest, j, j-1);
            return;
        }

        // Recursively sort halves of dest into src
        int mid = (low + high) >> 1;
        mergeSort(dest, src, low, mid);
        mergeSort(dest, src, mid, high);

        // If list is already sorted, just copy from src to dest.  This is an
        // optimization that results in faster sorts for nearly ordered lists.
        if (compare(src[mid-1], src[mid]) <= 0) {
            System.arraycopy(src, low, dest, low, length);
            return;
        }

        // Merge sorted halves (now in src) into dest
        for(int i = low, p = low, q = mid; i < high; i++) {
            if (q>=high || p<mid && compare(src[p], src[q]) <= 0)
                dest[i] = src[p++];
            else
                dest[i] = src[q++];
        }
    }

    private void swap(int x[], int a, int b) {
        int t = x[a];
        x[a] = x[b];
        x[b] = t;
    }
}
private int mergePass(int pass, boolean last) throws IOException {
LOG.fine("running merge pass=" + pass);
MergePass mergePass = new MergePass(pass, last);
try { // make a merge pass
return mergePass.run(); // run it
} finally {
mergePass.close(); // close it
}
}
    /**
     * One pass of the merge phase: repeatedly merges up to 'factor'
     * adjacent segments from the previous pass's intermediate file into
     * larger segments, written either to the next intermediate file or,
     * on the last pass, to the final output file.
     */
    private class MergePass {
      private int pass;              // zero-based index of this merge pass
      private boolean last;          // true when producing the final output
      private MergeQueue queue;      // priority queue performing the k-way merge
      private FSDataInputStream in;  // previous pass's intermediate file
      private Path inName;           // its name; deleted in close()
      public MergePass(int pass, boolean last) throws IOException {
        this.pass = pass;
        this.last = last;
        this.queue =
          new MergeQueue(factor, last?outFile:outFile.suffix("."+pass), last);
        this.inName = outFile.suffix("."+(pass-1));
        this.in = fs.open(inName);
      }
      public void close() throws IOException {
        in.close();                            // close and delete input
        fs.delete(inName);
        queue.close();                         // close queue
      }
      /**
       * Merges groups of up to 'factor' segments until the input file is
       * exhausted.
       * @return the number of merged segments produced
       */
      public int run() throws IOException {
        int segments = 0;
        long end = fs.getLength(inName);
        while (in.getPos() < end) {
          LOG.finer("merging segment " + segments);
          long totalLength = 0;
          long totalCount = 0;
          // Queue up to 'factor' segments; each segment is preceded by its
          // byte length and entry count, written by the previous pass.
          while (in.getPos() < end && queue.size() < factor) {
            long length = in.readLong();
            long count = in.readLong();
            totalLength += length;
            totalCount+= count;
            Reader reader = new Reader(fs, inName, memory/(factor+1),
                                       in.getPos(), length);
            reader.sync = null;                // disable sync on temp files
            MergeStream ms = new MergeStream(reader); // add segment to queue
            if (ms.next()) {
              queue.add(ms);
            }
            in.seek(reader.end);               // skip past this segment
          }
          if (!last) {                         // intermediate file
            queue.out.writeLong(totalLength);  // write size
            queue.out.writeLong(totalCount);   // write count
          }
          queue.merge();                       // do a merge
          segments++;
        }
        return segments;
      }
    }
/** Merge the provided files.*/
public void merge(Path[] inFiles, Path outFile) throws IOException {
this.inFiles = inFiles;
this.outFile = outFile;
this.factor = inFiles.length;
if (fs.exists(outFile)) {
throw new IOException("already exists: " + outFile);
}
MergeFiles mergeFiles = new MergeFiles();
try { // make a merge pass
mergeFiles.run(); // run it
} finally {
mergeFiles.close(); // close it
}
}
private class MergeFiles {
private MergeQueue queue;
public MergeFiles() throws IOException {
this.queue = new MergeQueue(factor, outFile, true);
}
public void close() throws IOException {
queue.close();
}
public void run() throws IOException {
LOG.finer("merging files=" + inFiles.length);
for (int i = 0; i < inFiles.length; i++) {
Path inFile = inFiles[i];
MergeStream ms =
new MergeStream(new Reader(fs, inFile, memory/(factor+1)));
if (ms.next())
queue.put(ms);
}
queue.merge();
}
}
private class MergeStream {
private Reader in;
private DataOutputBuffer buffer = new DataOutputBuffer();
private int keyLength;
public MergeStream(Reader reader) throws IOException {
if (reader.keyClass != keyClass)
throw new IOException("wrong key class: " + reader.getKeyClass() +
" is not " + keyClass);
if (reader.valClass != valClass)
throw new IOException("wrong value class: "+reader.getValueClass()+
" is not " + valClass);
this.in = reader;
}
public boolean next() throws IOException {
buffer.reset();
keyLength = in.next(buffer);
return keyLength >= 0;
}
}
    /**
     * A PriorityQueue of MergeStreams, ordered by their currently buffered
     * key, used to perform a k-way merge into a single output stream.
     */
    private class MergeQueue extends PriorityQueue {
      private FSDataOutputStream out;  // destination of the merged records
      private boolean done;            // true when writing the final output file
      private boolean compress;        // compression mode shared by all inputs
      /**
       * Adds a segment to the queue, recording the compression mode of the
       * first segment and rejecting any later segment whose mode differs.
       */
      public void add(MergeStream stream) throws IOException {
        if (size() == 0) {
          compress = stream.in.isCompressed();
        } else if (compress != stream.in.isCompressed()) {
          throw new IOException("All merged files must be compressed or not.");
        }
        put(stream);
      }
      public MergeQueue(int size, Path outName, boolean done)
        throws IOException {
        initialize(size);
        this.out = fs.create(outName, true, memory/(factor+1));
        this.done = done;
      }
      /** Orders queue elements by comparing their buffered raw keys. */
      protected boolean lessThan(Object a, Object b) {
        MergeStream msa = (MergeStream)a;
        MergeStream msb = (MergeStream)b;
        return comparator.compare(msa.buffer.getData(), 0, msa.keyLength,
                                  msb.buffer.getData(), 0, msb.keyLength) < 0;
      }
      /**
       * Drains the queue: repeatedly writes the smallest entry, then
       * advances the stream it came from (or retires it at end of stream).
       */
      public void merge() throws IOException {
        Writer writer = new Writer(out, keyClass, valClass, compress);
        if (!done) {
          writer.sync = null;                  // disable sync on temp files
        }
        while (size() != 0) {
          MergeStream ms = (MergeStream)top();
          DataOutputBuffer buffer = ms.buffer; // write top entry
          writer.append(buffer.getData(), 0, buffer.getLength(), ms.keyLength);
          if (ms.next()) {                     // has another entry
            adjustTop();
          } else {
            pop();                             // done with this file
            ms.in.close();
          }
        }
      }
      public void close() throws IOException {
        MergeStream ms;                        // close inputs
        while ((ms = (MergeStream)pop()) != null) {
          ms.in.close();
        }
        out.close();                           // close output
      }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.batik.apps.rasterizer;
import org.apache.batik.test.*;
import org.apache.batik.test.util.ImageCompareTest;
import org.apache.batik.transcoder.Transcoder;
import org.apache.batik.transcoder.image.ImageTranscoder;
import org.apache.batik.transcoder.image.JPEGTranscoder;
import org.apache.batik.transcoder.image.PNGTranscoder;
import java.awt.*;
import java.io.*;
import java.util.*;
import java.util.List;
/**
 * Validates the operation of the <code>SVGRasterizer</code>.
 * It validates the option setting and the manipulation
 * of source and destination sources.
 *
 * @author <a href="mailto:vhardy@apache.org">Vincent Hardy</a>
 * @version $Id$
 */
public class SVGConverterTest extends DefaultTestSuite {
    public SVGConverterTest(){
        ///////////////////////////////////////////////////////////////////////
        // Add configuration tests
        ///////////////////////////////////////////////////////////////////////
        AbstractTest t = null;

        //
        // Test Trancoder usage
        //
        t = new TranscoderConfigTest(DestinationType.PNG,
                                     org.apache.batik.transcoder.image.PNGTranscoder.class);
        addTest(t);
        t.setId("TranscoderConfigTest.PNG");

        t = new TranscoderConfigTest(DestinationType.JPEG,
                                     org.apache.batik.transcoder.image.JPEGTranscoder.class);
        addTest(t);
        t.setId("TranscoderConfigTest.JPEG");

        t = new TranscoderConfigTest(DestinationType.TIFF,
                                     org.apache.batik.transcoder.image.TIFFTranscoder.class);
        addTest(t);
        t.setId("TranscoderConfigTest.TIFF");

        // The PDF transcoder lives in FOP, an optional dependency: register
        // the PDF config test only when that class is on the classpath.
        try {
            Class pdfClass = Class.forName("org.apache.fop.svg.PDFTranscoder");
            t = new TranscoderConfigTest(DestinationType.PDF, pdfClass);
            t.setId("TranscoderConfigTest.PDF");
            addTest(t);
        } catch (Exception e) {
            // FOP is not available -- silently skip the PDF transcoder test.
        }

        //
        // Checks that the proper hints are used
        //
        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_AOI, new Rectangle(40, 50, 40, 80)}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setArea(new Rectangle(40, 50, 40, 80));
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_AOI");

        t = new HintsConfigTest(new Object[][]{
            {JPEGTranscoder.KEY_QUALITY, new Float(.5)}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setQuality(.5f);
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_QUALITY");

        t = new HintsConfigTest(new Object[][]{
            {PNGTranscoder.KEY_INDEXED, new Integer(8)}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setIndexed(8);
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_INDEXED");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_BACKGROUND_COLOR, Color.pink}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setBackgroundColor(Color.pink);
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_BACKGROUND_COLOR");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_HEIGHT, new Float(50)}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setHeight(50);
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_HEIGHT");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_WIDTH, new Float(50)}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setWidth(50);
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_WIDTH");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_MAX_HEIGHT, new Float(50)}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setMaxHeight(50);
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_MAX_HEIGHT");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_MAX_WIDTH, new Float(50)}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setMaxWidth(50);
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_MAX_WIDTH");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_MEDIA, "print"}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setMediaType("print");
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_MEDIA");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_DEFAULT_FONT_FAMILY, "Times"}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setDefaultFontFamily("Times");
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_DEFAULT_FONT_FAMILY");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_ALTERNATE_STYLESHEET, "myStyleSheet"}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setAlternateStylesheet("myStyleSheet");
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_ALTERNATE_STYLESHEET");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_USER_STYLESHEET_URI, "userStylesheet.css"}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setUserStylesheet("userStylesheet.css");
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_USER_STYLESHEET_URI");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_LANGUAGE, "fr"}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setLanguage("fr");
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_LANGUAGE");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_PIXEL_UNIT_TO_MILLIMETER, new Float(.5f)}}){
                protected void deltaConfigure(SVGConverter c){
                    c.setPixelUnitToMillimeter(.5f);
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_PIXEL_UNIT_TO_MILLIMETER");

        t = new HintsConfigTest(new Object[][]{
            {ImageTranscoder.KEY_XML_PARSER_VALIDATING, Boolean.TRUE }}){
                protected void deltaConfigure(SVGConverter c){
                    c.setValidate(true);
                }
            };
        addTest(t);
        t.setId("HintsConfigTest.KEY_XML_PARSER_VALIDATING");

        //
        // Check sources
        //
        t = new SourcesConfigTest(new String[] { "samples/anne", "samples/batikFX", "samples/tests/spec/styling/smiley" }){
                protected void setSources(SVGConverter c){
                    c.setSources(new String[] {"samples/anne.svg", "samples/batikFX.svg", "samples/tests/spec/styling/smiley.svg"});
                }
            };
        addTest(t);
        t.setId("SourcesConfigTest.SimpleList");

        //
        // Check destination
        //
        t = new DestConfigTest(new String[] { "samples/anne.svg" },
                               new String[] { "test-reports/anne.png"}){
                protected void setDestination(SVGConverter c){
                    c.setDst(new File("test-reports/anne.png"));
                }
            };
        addTest(t);
        t.setId("DestConfigTest.DstFile");

        t = new DestConfigTest(new String[] { "samples/anne.svg", "samples/tests/spec/styling/smiley.svg" },
                               new String[] { "test-resources/anne.png", "test-resources/smiley.png"}){
                protected void setDestination(SVGConverter c){
                    c.setDst(new File("test-resources"));
                }
            };
        addTest(t);
        t.setId("DestConfigTest.DstDir");

        //
        // Check that complete process goes without error
        //
        t = new OperationTest(){
                protected void configure(SVGConverter c){
                    c.setSources(new String[]{"samples/anne.svg"});
                    c.setDst(new File("anne.png"));
                    File file = new File("anne.png");
                    file.deleteOnExit();
                }
            };
        addTest(t);
        t.setId("OperationTest.Bug4888");

        t = new OperationTest(){
                protected void configure(SVGConverter c){
                    c.setDestinationType(DestinationType.PDF);
                    c.setSources(new String[]{"samples/anne.svg"});
                }
            };
        addTest(t);
        // Fixed: was "Operationtest.PDFTranscoding" (inconsistent casing
        // with the other OperationTest ids).
        t.setId("OperationTest.PDFTranscoding");

        ///////////////////////////////////////////////////////////////////////
        // Add configuration error test. These tests check that the expected
        // error gets reported for a given mis-configuration
        ///////////////////////////////////////////////////////////////////////
        t = new ConfigErrorTest(SVGConverter.ERROR_NO_SOURCES_SPECIFIED) {
                protected void configure(SVGConverter c){
                    c.setSources(null);
                }
            };
        addTest(t);
        t.setId("ConfigErrorTest.ERROR_NO_SOURCES_SPECIFIED");

        t = new ConfigErrorTest(SVGConverter.ERROR_CANNOT_COMPUTE_DESTINATION){
                protected void configure(SVGConverter c){
                    // Do not set destination file or destination directory
                    c.setSources(new String[]{"http://xml.apache.org/batik/dummy.svg"});
                }
            };
        addTest(t);
        t.setId("ConfigErrorTest.ERROR_CANNOT_COMPUTE_DESTINATION");

        t = new ConfigErrorTest(SVGConverter.ERROR_CANNOT_USE_DST_FILE){
                protected void configure(SVGConverter c){
                    File dummy = null;
                    try {
                        dummy = File.createTempFile("dummyPNG", ".png");
                    } catch(IOException ioEx ){
                        throw new Error( ioEx.getMessage() );
                    }
                    c.setSources(new String[]{"samples/anne.svg", "samples/batikFX.svg"});
                    c.setDst(dummy);
                    dummy.deleteOnExit();
                }
            };
        addTest(t);
        t.setId("ConfigErrorTest.ERROR_CANNOT_USE_DST_FILE");

        t = new ConfigErrorTest(SVGConverter.ERROR_SOURCE_SAME_AS_DESTINATION){
                protected void configure(SVGConverter c){
                    c.setSources(new String[]{ "samples/anne.svg" });
                    c.setDst(new File("samples/anne.svg"));
                }
            };
        addTest(t);
        t.setId("ConfigErrorTest.ERROR_SOURCE_SAME_AS_DESTINATION");

        t = new ConfigErrorTest(SVGConverter.ERROR_CANNOT_READ_SOURCE){
                protected void configure(SVGConverter c){
                    c.setSources(new String[]{ "test-resources/org/apache/batik/apps/rasterizer/notReadable.svg" });
                    c.setDst(new File("test-reports"));
                }

                public boolean proceedWithSourceTranscoding(SVGConverterSource source,
                                                            File dest){
                    // Big hack to simulate a non-readable SVG file
                    File hackedFile = new File(((SVGConverterFileSource)source).file.getPath()){
                            public boolean canRead(){
                                System.out.println("Yahoooooooo! In canRead");
                                return false;
                            }
                        };
                    ((SVGConverterFileSource)source).file = hackedFile;
                    return true;
                }
            };
        addTest(t);
        t.setId("ConfigErrorTest.ERROR_CANNOT_READ_SOURCE");

        t = new ConfigErrorTest(SVGConverter.ERROR_CANNOT_OPEN_SOURCE){
                protected void configure(SVGConverter c){
                    c.setSources(new String[]{ "test-resources/org/apache/batik/apps/rasterizer/notReadable.svg" });
                }
                public boolean proceedWithComputedTask(Transcoder transcoder,
                                                       Map hints,
                                                       List sources,
                                                       List dest){
                    System.out.println("==================> Hacked Starting to process Task <=========================");
                    SVGConverterFileSource source = (SVGConverterFileSource)sources.get(0);
                    source = new SVGConverterFileSource(source.file){
                            public InputStream openStream() throws FileNotFoundException {
                                throw new FileNotFoundException("Simulated FileNotFoundException");
                            }
                        };
                    sources.set(0, source );
                    return true;
                }
            };
        addTest(t);
        t.setId("ConfigErrorTest.ERROR_CANNOT_OPEN_SOURCE");

        t = new ConfigErrorTest(SVGConverter.ERROR_OUTPUT_NOT_WRITEABLE){
                protected void configure(SVGConverter c){
                    c.setSources(new String[]{ "samples/anne.svg" });
                    File o = new File("test-resources/org/apache/batik/apps/rasterizer/readOnly.png");
                    o.setReadOnly();
                    c.setDst(o);
                }
            };
        addTest(t);
        t.setId("ConfigErrorTest.ERROR_OUTPUT_NOT_WRITEABLE");

        t = new ConfigErrorTest(SVGConverter.ERROR_UNABLE_TO_CREATE_OUTPUT_DIR){
                protected void configure(SVGConverter c){
                    c.setDst(new File("ZYZ::/cannotCreate\000"));
                }
            };
        addTest(t);
        t.setId("ConfigErrorTest.ERROR_UNABLE_TO_CREATE_OUTPUT_DIR");

        t = new ConfigErrorTest(SVGConverter.ERROR_WHILE_RASTERIZING_FILE){
                protected void configure(SVGConverter c){
                    c.setSources(new String[]{ "test-resources/org/apache/batik/apps/rasterizer/invalidSVG.svg"});
                }
            };
        addTest(t);
        // Fixed: was the malformed id
        // "ConfigErrorTest(SVGConverter.ERROR_WHILE_RASTERIZING_FILE".
        t.setId("ConfigErrorTest.ERROR_WHILE_RASTERIZING_FILE");

        //
        // Test that files are created as expected and are producing the
        // expected result.
        //
        // Plain file
        t = new ConverterOutputTest("samples/anne.svg",       // File to convert
                                    "test-reports/anne.png",  // Output
                                    "test-references/samples/anne.png"); // reference
        addTest(t);
        t.setId("OutputTest.plain");

        // File with reference
        t = new ConverterOutputTest("samples/anne.svg#svgView(viewBox(0,0,100,200))", // File to convert
                                    "test-reports/anne.png",  // Output
                                    "test-references/samples/anneViewBox1.png"); // reference
        addTest(t);
        t.setId("OutputTest.reference");
    }
}
/**
 * A ConfigTest builds an SVGConverter, configures it,
 * sets itself as the SVGConverterController and checks that
 * the computed task is as expected (i.e., right set of
 * hints).
 */
abstract class AbstractConfigTest extends AbstractTest implements SVGConverterController {
    /**
     * The 'proceedWithComputedTask' handler was not called
     */
    public static final String ERROR_NO_COMPUTED_TASK
        = "ConfigTest.error.no.computed.task";

    /**
     * The transcoderClass is not the one expected.
     */
    public static final String ERROR_UNEXPECTED_TRANSCODER_CLASS
        = "ConfigTest.error.unexpected.transcoder.class";

    public static final String ENTRY_KEY_EXPECTED_TRANSCODER_CLASS
        = "ConfigTest.entry.key.expected.transcoder.class";

    public static final String ENTRY_KEY_COMPUTED_TRANSCODER_CLASS
        = "ConfigTest.entry.key.computed.trancoder.class";

    /**
     * Error if the hints do not match
     */
    public static final String ERROR_UNEXPECTED_NUMBER_OF_HINTS
        = "ConfigTest.error.unexpected.number.of.hints";

    public static final String ENTRY_KEY_EXPECTED_NUMBER_OF_HINTS
        = "ConfigTest.entry.key.expected.number.of.hints";

    public static final String ENTRY_KEY_COMPUTED_NUMBER_OF_HINTS
        = "ConfigTest.entry.key.computed.number.of.hints";

    public static final String ENTRY_KEY_EXPECTED_HINTS
        = "ConfigTest.entry.key.expected.hints";

    public static final String ENTRY_KEY_COMPUTED_HINTS
        = "ConfigTest.entry.key.computed.hints";

    public static final String ERROR_UNEXPECTED_TRANSCODING_HINT
        = "ConfigTest.error.unexpected.transcoding.hint";

    public static final String ENTRY_KEY_EXPECTED_HINT_KEY
        = "ConfigTest.entry.key.expected.hint.key";

    public static final String ENTRY_KEY_COMPUTED_HINT_VALUE
        = "ConfigTest.entry.key.computed.hint.value";

    public static final String ENTRY_KEY_EXPECTED_HINT_VALUE
        = "ConfigTest.entry.key.expected.hint.value";

    /**
     * Error if the sources do not match
     */
    public static final String ERROR_UNEXPECTED_SOURCES_LIST
        = "ConfigTest.error.unexpected.sources.list";

    public static final String ENTRY_KEY_EXPECTED_NUMBER_OF_SOURCES
        = "ConfigTest.entry.key.expected.number.of.sources";

    public static final String ENTRY_KEY_COMPUTED_NUMBER_OF_SOURCES
        = "ConfigTest.entry.key.computed.number.of.sources";

    public static final String ENTRY_KEY_EXPECTED_SOURCE_AT_INDEX
        = "ConfigTest.entry.key.expected.source.at.index";

    public static final String ENTRY_KEY_COMPUTED_SOURCE_AT_INDEX
        = "ConfigTest.entry.key.computed.source.at.index";

    public static final String ENTRY_KEY_COMPUTED_SOURCES_LIST
        = "ConfigTest.entry.key.computed.sources.list";

    public static final String ENTRY_KEY_EXPECTED_SOURCES_LIST
        = "ConfigTest.entry.key.expected.sources.list";

    /**
     * Error if the dest do not match
     */
    public static final String ERROR_UNEXPECTED_DEST_LIST
        = "ConfigTest.error.unexpected.dest.list";

    public static final String ENTRY_KEY_EXPECTED_NUMBER_OF_DEST
        = "ConfigTest.entry.key.expected.number.of.dest";

    public static final String ENTRY_KEY_COMPUTED_NUMBER_OF_DEST
        = "ConfigTest.entry.key.computed.number.of.dest";

    public static final String ENTRY_KEY_EXPECTED_DEST_AT_INDEX
        = "ConfigTest.entry.key.expected.dest.at.index";

    public static final String ENTRY_KEY_COMPUTED_DEST_AT_INDEX
        = "ConfigTest.entry.key.computed.dest.at.index";

    public static final String ENTRY_KEY_COMPUTED_DEST_LIST
        = "ConfigTest.entry.key.computed.dest.list";

    public static final String ENTRY_KEY_EXPECTED_DEST_LIST
        = "ConfigTest.entry.key.expected.dest.list";

    /**
     * Configuration Description: the transcoder class, transcoding hints,
     * sources and destinations that a converter run is expected to compute.
     */
    static class Config {
        Class transcoderClass;
        HashMap hints;
        List sources;
        List dest;
    }

    protected Config expectedConfig;
    protected Config computedConfig;

    protected AbstractConfigTest(){
    }

    protected void setExpectedConfig(Config expectedConfig){
        this.expectedConfig = expectedConfig;
    }

    /** Subclasses configure the converter under test here. */
    protected abstract void configure(SVGConverter c);

    /** Concatenates the string form of all list elements (for reports). */
    protected String makeSourceList(List v){
        int n = v.size();
        StringBuffer sb = new StringBuffer( n * 8 );
        for (int i=0; i<n; i++){
            sb.append( v.get(i).toString() );
        }

        return sb.toString();
    }

    /** Renders a hints map as "key(value) -- key(value) -- " (for reports). */
    protected String makeHintsString( Map map){
        Iterator iter = map.keySet().iterator();
        StringBuffer sb = new StringBuffer();
        while (iter.hasNext()){
            Object key = iter.next();
            sb.append(key.toString());
            sb.append( '(' );
            sb.append(map.get(key).toString());
            sb.append(") -- ");
        }

        return sb.toString();
    }


    public String getName(){
        return getId();
    }

    /**
     * Runs the converter and compares the computed task (captured in
     * proceedWithComputedTask) against the expected configuration.
     */
    public TestReport runImpl() throws Exception {
        SVGConverter c = new SVGConverter(this);
        configure(c);
        c.execute();

        //
        // Now, check that the expectedConfig and the
        // computedConfig are identical
        //
        if (computedConfig == null){
            return reportError(ERROR_NO_COMPUTED_TASK);
        }

        if (!expectedConfig.transcoderClass.equals
            (computedConfig.transcoderClass)){
            TestReport report = reportError(ERROR_UNEXPECTED_TRANSCODER_CLASS);
            report.addDescriptionEntry(ENTRY_KEY_EXPECTED_TRANSCODER_CLASS,
                                       expectedConfig.transcoderClass);
            report.addDescriptionEntry(ENTRY_KEY_COMPUTED_TRANSCODER_CLASS,
                                       computedConfig.transcoderClass);

            return report;
        }

        // Compare sources
        int en = expectedConfig.sources.size();
        int cn = computedConfig.sources.size();

        if (en != cn){
            TestReport report = reportError(ERROR_UNEXPECTED_SOURCES_LIST);
            report.addDescriptionEntry(ENTRY_KEY_EXPECTED_NUMBER_OF_SOURCES,
                                       "" + en);
            report.addDescriptionEntry(ENTRY_KEY_COMPUTED_NUMBER_OF_SOURCES,
                                       "" + cn);
            report.addDescriptionEntry(ENTRY_KEY_EXPECTED_SOURCES_LIST,
                                       makeSourceList(expectedConfig.sources));
            report.addDescriptionEntry(ENTRY_KEY_COMPUTED_SOURCES_LIST,
                                       makeSourceList(computedConfig.sources));

            return report;
        }

        // Order-insensitive check: each expected source must appear
        // somewhere in the computed list.
        for (int i=0; i<en; i++){
            Object es = expectedConfig.sources.get(i);
            Object cs = computedConfig.sources.get(i);
            if (!computedConfig.sources.contains(es)){
                TestReport report = reportError(ERROR_UNEXPECTED_SOURCES_LIST);
                report.addDescriptionEntry(ENTRY_KEY_EXPECTED_SOURCE_AT_INDEX,
                                           "[" + i + "] = -" + es + "- (" + es.getClass().getName() + ")");
                // Fixed: the computed entry previously reported
                // es.getClass() instead of cs.getClass().
                report.addDescriptionEntry(ENTRY_KEY_COMPUTED_SOURCE_AT_INDEX,
                                           "[" + i + "] = -" + cs + "- (" + cs.getClass().getName() + ")");
                report.addDescriptionEntry(ENTRY_KEY_EXPECTED_SOURCES_LIST,
                                           makeSourceList(expectedConfig.sources));
                report.addDescriptionEntry(ENTRY_KEY_COMPUTED_SOURCES_LIST,
                                           makeSourceList(computedConfig.sources));

                return report;
            }
        }

        // Compare dest
        en = expectedConfig.dest.size();
        cn = computedConfig.dest.size();

        if (en != cn){
            TestReport report = reportError(ERROR_UNEXPECTED_DEST_LIST);
            report.addDescriptionEntry(ENTRY_KEY_EXPECTED_NUMBER_OF_DEST,
                                       "" + en);
            report.addDescriptionEntry(ENTRY_KEY_COMPUTED_NUMBER_OF_DEST,
                                       "" + cn);
            report.addDescriptionEntry(ENTRY_KEY_EXPECTED_DEST_LIST,
                                       makeSourceList(expectedConfig.dest));
            report.addDescriptionEntry(ENTRY_KEY_COMPUTED_DEST_LIST,
                                       makeSourceList(computedConfig.dest));

            return report;
        }

        for (int i=0; i<en; i++){
            Object es = expectedConfig.dest.get(i);
            Object cs = computedConfig.dest.get(i);
            // Fixed: previously tested computedConfig.dest.contains(cs),
            // which is always true because cs comes from that very list --
            // destination mismatches were never reported. The expected
            // entry (es) must be present in the computed list.
            if (!computedConfig.dest.contains(es)){
                TestReport report = reportError(ERROR_UNEXPECTED_DEST_LIST);
                report.addDescriptionEntry(ENTRY_KEY_EXPECTED_DEST_AT_INDEX,
                                           "[" + i + "] = " + es);
                report.addDescriptionEntry(ENTRY_KEY_COMPUTED_DEST_AT_INDEX,
                                           "[" + i + "] = " + cs);
                report.addDescriptionEntry(ENTRY_KEY_EXPECTED_DEST_LIST,
                                           makeSourceList(expectedConfig.dest));
                report.addDescriptionEntry(ENTRY_KEY_COMPUTED_DEST_LIST,
                                           makeSourceList(computedConfig.dest));

                return report;
            }
        }

        //
        // Finally, check the hints
        //
        en = expectedConfig.hints.size();
        cn = computedConfig.hints.size();

        if (en != cn){
            TestReport report = reportError(ERROR_UNEXPECTED_NUMBER_OF_HINTS);
            report.addDescriptionEntry(ENTRY_KEY_EXPECTED_NUMBER_OF_HINTS,
                                       "" + en);
            report.addDescriptionEntry(ENTRY_KEY_COMPUTED_NUMBER_OF_HINTS,
                                       "" + cn);
            report.addDescriptionEntry(ENTRY_KEY_EXPECTED_HINTS,
                                       makeHintsString(expectedConfig.hints));
            report.addDescriptionEntry(ENTRY_KEY_COMPUTED_HINTS,
                                       makeHintsString(computedConfig.hints));

            return report;
        }

        Iterator iter = expectedConfig.hints.keySet().iterator();
        while (iter.hasNext()){
            Object hintKey = iter.next();
            Object expectedHintValue = expectedConfig.hints.get(hintKey);

            Object computedHintValue = computedConfig.hints.get(hintKey);

            if (!expectedHintValue.equals(computedHintValue)){
                TestReport report = reportError(ERROR_UNEXPECTED_TRANSCODING_HINT);
                report.addDescriptionEntry(ENTRY_KEY_EXPECTED_HINT_KEY,
                                           hintKey.toString());
                report.addDescriptionEntry(ENTRY_KEY_EXPECTED_HINT_VALUE,
                                           expectedHintValue.toString());
                report.addDescriptionEntry(ENTRY_KEY_COMPUTED_HINT_VALUE,
                                           "" + computedHintValue);
                report.addDescriptionEntry(ENTRY_KEY_EXPECTED_HINTS,
                                           makeHintsString(expectedConfig.hints));
                report.addDescriptionEntry(ENTRY_KEY_COMPUTED_HINTS,
                                           makeHintsString(computedConfig.hints));

                return report;
            }
        }

        return reportSuccess();
    }

    /**
     * Captures the computed task for later comparison and stops the
     * conversion (this test only validates the configuration).
     */
    public boolean proceedWithComputedTask(Transcoder transcoder,
                                           Map hints,
                                           List sources,
                                           List dest){
        computedConfig = new Config();
        computedConfig.transcoderClass = transcoder.getClass();
        computedConfig.sources = new ArrayList( sources );
        computedConfig.dest = new ArrayList( dest );
        computedConfig.hints = new HashMap(hints);
        return false; // Do not proceed with the convertion process,
                      // we are only checking the config in this test.
    }

    public boolean proceedWithSourceTranscoding(SVGConverterSource source,
                                                File dest) {
        return true;
    }

    public boolean proceedOnSourceTranscodingFailure(SVGConverterSource source,
                                                     File dest,
                                                     String errorCode){
        return true;
    }

    public void onSourceTranscodingSuccess(SVGConverterSource source,
                                           File dest){
    }
}
/**
 * Checks that a complete conversion task runs without throwing
 * an exception.
 */
class OperationTest extends AbstractTest{
    public TestReport runImpl() throws Exception {
        SVGConverter converter = new SVGConverter();
        configure(converter);
        converter.execute();
        return reportSuccess();
    }

    /** Hook for subclasses to set up the converter; default does nothing. */
    protected void configure(SVGConverter c){
    }
}
/**
 * Checks that the expected Transcoder class is selected for a given
 * destination (mime) type.
 */
class TranscoderConfigTest extends AbstractConfigTest {
    static final String SOURCE_FILE = "samples/anne.svg";
    static final String DEST_FILE_NAME = "samples/anne";

    /** Type of the result image. */
    protected DestinationType dstType;

    /**
     * @param dstType type of result image
     * @param expectedTranscoderClass class for the Transcoder expected to
     *        perform the conversion.
     */
    public TranscoderConfigTest(DestinationType dstType,
                                Class expectedTranscoderClass){
        this.dstType = dstType;

        Config config = new Config();
        config.transcoderClass = expectedTranscoderClass;

        List sources = new ArrayList();
        sources.add(new SVGConverterFileSource(new File(SOURCE_FILE)));
        config.sources = sources;

        List dest = new ArrayList();
        dest.add(new File(DEST_FILE_NAME + dstType.getExtension()));
        config.dest = dest;

        config.hints = new HashMap();   // no transcoding hints expected

        setExpectedConfig(config);
    }

    /** Points the converter at the sample source and the requested type. */
    public void configure(SVGConverter c){
        c.setSources(new String[] { SOURCE_FILE });
        c.setDst(new File(DEST_FILE_NAME + dstType.getExtension()));
        c.setDestinationType(dstType);
    }
}
/**
 * Checks that a given set of transcoding hints is produced by the
 * corresponding SVGConverter option calls (made in deltaConfigure).
 */
class HintsConfigTest extends AbstractConfigTest {
    static final String SOURCE_FILE = "samples/anne.svg";
    static final String DEST_FILE_NAME = "samples/anne";
    static final Class EXPECTED_TRANSCODER_CLASS = org.apache.batik.transcoder.image.PNGTranscoder.class;
    static final DestinationType DST_TYPE = DestinationType.PNG;

    /**
     * @param hintsMap array of {hintKey, hintValue} pairs expected in the
     *        computed transcoding hints.
     */
    public HintsConfigTest(Object[][] hintsMap){
        Config config = new Config();
        config.transcoderClass = EXPECTED_TRANSCODER_CLASS;

        List sources = new ArrayList();
        sources.add(new SVGConverterFileSource(new File(SOURCE_FILE)));
        config.sources = sources;

        List dest = new ArrayList();
        dest.add(new File(DEST_FILE_NAME + DST_TYPE.getExtension()));
        config.dest = dest;

        //
        // Expected hints come straight from the constructor argument.
        //
        HashMap hints = new HashMap();
        for (int j = 0; j < hintsMap.length; j++){
            hints.put(hintsMap[j][0], hintsMap[j][1]);
        }
        config.hints = hints;

        setExpectedConfig(config);
    }

    /** Standard PNG configuration plus the subclass-specific delta. */
    public void configure(SVGConverter c){
        c.setSources(new String[] { SOURCE_FILE });
        c.setDst(new File(DEST_FILE_NAME + DST_TYPE.getExtension()));
        c.setDestinationType(DST_TYPE);
        deltaConfigure(c);
    }

    /** Hook for subclasses to set the single option under test. */
    protected void deltaConfigure(SVGConverter c){
    }
}
/**
 * Checks that a given source configuration produces the expected list
 * of sources. The constructor receives the expected file names (without
 * the ".svg" extension); setSources should configure the converter so
 * that exactly those sources are computed.
 */
class SourcesConfigTest extends AbstractConfigTest {
    static final Class EXPECTED_TRANSCODER_CLASS = org.apache.batik.transcoder.image.PNGTranscoder.class;
    static final DestinationType DST_TYPE = DestinationType.PNG;
    static final String SVG_EXTENSION = ".svg";

    /**
     * @param expectedSources file names (no ".svg" extension) that the
     *        converter is expected to resolve.
     */
    public SourcesConfigTest(Object[] expectedSources){
        Config config = new Config();
        config.transcoderClass = EXPECTED_TRANSCODER_CLASS;

        List sources = new ArrayList();
        List dest = new ArrayList();
        // Each expected base name yields one .svg source and one
        // destination with the PNG extension.
        for (int k = 0; k < expectedSources.length; k++){
            Object base = expectedSources[k];
            sources.add(new SVGConverterFileSource(new File(base + SVG_EXTENSION)));
            dest.add(new File(base + DST_TYPE.getExtension()));
        }
        config.sources = sources;
        config.dest = dest;

        config.hints = new HashMap();   // no hints expected

        setExpectedConfig(config);
    }

    /** Fixes the destination type, then delegates source setup. */
    public void configure(SVGConverter c){
        c.setDestinationType(DST_TYPE);
        setSources(c);
    }

    /** Hook for subclasses to set the converter's sources. */
    protected void setSources(SVGConverter c){
    }
}
/**
 * Checks that a given destination configuration produces the expected
 * list of destination files. The constructor receives the sources and
 * the destinations expected to result from the setDestination call.
 */
class DestConfigTest extends AbstractConfigTest {
    static final Class EXPECTED_TRANSCODER_CLASS = org.apache.batik.transcoder.image.PNGTranscoder.class;
    static final DestinationType DST_TYPE = DestinationType.PNG;
    String[] sourcesStrings;

    /**
     * @param sourcesStrings sources passed to the converter
     * @param expectedDest destination files expected to be computed
     */
    public DestConfigTest(String[] sourcesStrings,
                          String[] expectedDest){
        this.sourcesStrings = sourcesStrings;

        Config config = new Config();
        config.transcoderClass = EXPECTED_TRANSCODER_CLASS;

        List sources = new ArrayList();
        for (int k = 0; k < sourcesStrings.length; k++){
            sources.add(new SVGConverterFileSource(new File(sourcesStrings[k])));
        }
        config.sources = sources;

        List dest = new ArrayList();
        for (int k = 0; k < expectedDest.length; k++){
            dest.add(new File(expectedDest[k]));
        }
        config.dest = dest;

        config.hints = new HashMap();   // no hints expected

        setExpectedConfig(config);
    }

    /** Fixes type and sources, then delegates destination setup. */
    public void configure(SVGConverter c){
        c.setDestinationType(DST_TYPE);
        c.setSources(sourcesStrings);
        setDestination(c);
    }

    /** Hook for subclasses to set the converter's destination. */
    protected void setDestination(SVGConverter c){
    }
}
/**
 * This test loosely checks that errors are reported as expected. It
 * checks that the error code given at construction time is reported
 * either by an exception thrown from the execute method or during the
 * processing of single files in the SVGConverterController handler.
 */
class ConfigErrorTest extends AbstractTest implements SVGConverterController{
String errorCode;
String foundErrorCode = null;
public static final String ERROR_DID_NOT_GET_ERROR
= "ConfigErrorTest.error.did.not.get.error";
public static final String ERROR_UNEXPECTED_ERROR_CODE
= "ConfigErrorTest.error.unexpected.error.code";
public static final String ENTRY_KEY_EXPECTED_ERROR_CODE
= "ConfigErrorTest.entry.key.expected.error.code";
public static final String ENTRY_KEY_GOT_ERROR_CODE
= "ConfigErrorTest.entry.key.got.error.code";
public ConfigErrorTest(String expectedErrorCode){
this.errorCode = expectedErrorCode;
}
public String getName(){
return getId();
}
public TestReport runImpl() throws Exception {
SVGConverter c = new SVGConverter(this);
c.setDestinationType(DestinationType.PNG);
c.setSources(new String[]{ "samples/anne.svg" });
configure(c);
try {
c.execute();
} catch(SVGConverterException e){
e.printStackTrace();
foundErrorCode = e.getErrorCode();
}
if (foundErrorCode == null){
TestReport report = reportError(ERROR_DID_NOT_GET_ERROR);
report.addDescriptionEntry(ENTRY_KEY_EXPECTED_ERROR_CODE,
errorCode);
return report;
}
if (foundErrorCode.equals(errorCode)){
return reportSuccess();
}
TestReport report = reportError(ERROR_UNEXPECTED_ERROR_CODE);
report.addDescriptionEntry(ENTRY_KEY_EXPECTED_ERROR_CODE,
errorCode);
report.addDescriptionEntry(ENTRY_KEY_GOT_ERROR_CODE,
foundErrorCode);
return report;
}
protected void configure(SVGConverter c){
}
public boolean proceedWithComputedTask(Transcoder transcoder,
Map hints,
List sources,
List dest){
System.out.println("==================> Starting to process Task <=========================");
return true;
}
public boolean proceedWithSourceTranscoding(SVGConverterSource source,
File dest) {
System.out.print("Transcoding " + source + " to " + dest + " ... ");
return true;
}
public boolean proceedOnSourceTranscodingFailure(SVGConverterSource source,
File dest,
String errorCode){
System.out.println(" ... FAILURE");
foundErrorCode = errorCode;
return true;
}
public void onSourceTranscodingSuccess(SVGConverterSource source,
File dest){
System.out.println(" ... SUCCESS");
}
}
/**
* This test checks that a file is indeed created and that it is identical to
* an expected reference.
*/
class ConverterOutputTest extends AbstractTest {
String svgSource;
String pngDest;
String pngRef;
public ConverterOutputTest(String svgSource,
String pngDest,
String pngRef){
this.svgSource = svgSource;
this.pngDest = pngDest;
this.pngRef = pngRef;
}
public TestReport runImpl() throws Exception {
SVGConverter c = new SVGConverter();
System.out.println("Converting : " + svgSource);
c.setSources(new String[]{svgSource});
c.setDst(new File(pngDest));
c.setDestinationType(DestinationType.PNG);
c.execute();
ImageCompareTest t = new ImageCompareTest(pngDest,
pngRef);
TestReport r = t.run();
(new File(pngDest)).delete();
return r;
}
}
| |
/*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.catalog.models;
import org.opencb.opencga.core.common.TimeUtils;
import java.net.URI;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* Created by jacobo on 11/09/14.
*/
public class File {
private int id;
/**
* File name.
*/
private String name;
/**
* Formats: file, folder, index
*/
private Type type;
/**
* Formats: txt, executable, image, ...
*/
private Format format;
/**
* BAM, VCF, ...
*/
private Bioformat bioformat;
/**
* Optional external file location. If null, file is inside its study.
*/
private URI uri;
private String path;
private String ownerId;
private String creationDate;
private String modificationDate;
private String description;
private Status status;
private long diskUsage;
//private int studyId;
private int experimentId;
private List<Integer> sampleIds;
/**
* This field values -1 when file has been uploaded.
*/
private int jobId;
private List<Acl> acl;
private Index index;
private Map<String, Object> stats;
private Map<String, Object> attributes;
/* Status */
public enum Status {
STAGE,
READY,
MISSING,
TRASHED,
DELETED
}
public enum Type {
FOLDER,
FILE
}
/**
* General format of the file, such as text, or binary, etc.
*/
public enum Format {
VCF,
BCF,
GVCF,
TBI,
SAM,
BAM,
BAI,
CRAM,
FASTQ,
PED,
TAB_SEPARATED_VALUES, COMMA_SEPARATED_VALUES, XML, PROTOCOL_BUFFER, JSON, AVRO, PARQUET, //Serialization formats
IMAGE,
PLAIN,
BINARY,
EXECUTABLE,
@Deprecated GZIP,
UNKNOWN,
}
public enum Compression {
GZIP,
BGZIP,
ZIP,
SNAPPY,
NONE,
}
/**
* Specific format of the biological file, such as variant, alignment, pedigree, etc.
*/
public enum Bioformat {
MICROARRAY_EXPRESSION_ONECHANNEL_AGILENT,
MICROARRAY_EXPRESSION_ONECHANNEL_AFFYMETRIX,
MICROARRAY_EXPRESSION_ONECHANNEL_GENEPIX,
MICROARRAY_EXPRESSION_TWOCHANNELS_AGILENT,
MICROARRAY_EXPRESSION_TWOCHANNELS_GENEPIX,
DATAMATRIX_EXPRESSION,
// DATAMATRIX_SNP,
// IDLIST_GENE,
// IDLIST_TRANSCRIPT,
// IDLIST_PROTEIN,
// IDLIST_SNP,
// IDLIST_FUNCTIONALTERMS,
// IDLIST_RANKED,
IDLIST,
IDLIST_RANKED,
ANNOTATION_GENEVSANNOTATION,
OTHER_NEWICK,
OTHER_BLAST,
OTHER_INTERACTION,
OTHER_GENOTYPE,
OTHER_PLINK,
OTHER_VCF,
OTHER_PED,
@Deprecated VCF4,
VARIANT,
ALIGNMENT,
SEQUENCE,
PEDIGREE,
NONE
}
/* Attributes known values */
public static final String DELETE_DATE = "deleteDate"; //Long
/**
* To think:
* ACL, url, responsible, extended source ??
*/
public File() {
}
public File(String name, Type type, Format format, Bioformat bioformat, String path, String ownerId,
String description, Status status, long diskUsage) {
this(-1, name, type, format, bioformat, path, ownerId, TimeUtils.getTime(), description, status, diskUsage,
-1, new LinkedList<Integer>(), -1, new LinkedList<Acl>(), new HashMap<String, Object>(),
new HashMap<String, Object>());
}
public File(String name, Type type, Format format, Bioformat bioformat, String path, String ownerId,
String creationDate, String description, Status status, long diskUsage) {
this(-1, name, type, format, bioformat, path, ownerId, creationDate, description, status, diskUsage,
-1, new LinkedList<Integer>(), -1, new LinkedList<Acl>(), new HashMap<String, Object>(),
new HashMap<String, Object>());
}
public File(int id, String name, Type type, Format format, Bioformat bioformat, String path, String ownerId,
String creationDate, String description, Status status, long diskUsage, int experimentId,
List<Integer> sampleIds, int jobId, List<Acl> acl, Map<String, Object> stats,
Map<String, Object> attributes) {
this.id = id;
this.name = name;
this.type = type;
this.format = format;
this.bioformat = bioformat;
this.uri = null;
this.path = path;
this.ownerId = ownerId;
this.creationDate = creationDate;
this.modificationDate = creationDate;
this.description = description;
this.status = status;
this.diskUsage = diskUsage;
this.experimentId = experimentId;
this.sampleIds = sampleIds;
this.jobId = jobId;
this.acl = acl;
this.index = null;
this.stats = stats;
this.attributes = attributes;
}
@Override
public String toString() {
return "File {" + "\n\t" +
"id:" + id + "\n\t" +
", name:'" + name + '\'' + "\n\t" +
", type:'" + type + '\'' + "\n\t" +
", format:'" + format + '\'' + "\n\t" +
", bioformat:'" + bioformat + '\'' + "\n\t" +
// ", uriScheme:'" + uriScheme + '\'' + "\n\t" +
", path:'" + path + '\'' + "\n\t" +
", ownerId:'" + ownerId + '\'' + "\n\t" +
", creationDate:'" + creationDate + '\'' + "\n\t" +
", modificationDate:'" + modificationDate + '\'' + "\n\t" +
", description:'" + description + '\'' + "\n\t" +
", status:'" + status + '\'' + "\n\t" +
", diskUsage:" + diskUsage + "\n\t" +
", experimentId:" + experimentId + "\n\t" +
", sampleIds:" + sampleIds + "\n\t" +
", jobId:" + jobId + "\n\t" +
", acl:" + acl + "\n\t" +
", stats:" + stats + "\n\t" +
", attributes:" + attributes + "\n\t" +
// ", indices:" + indices + "\n" +
'}';
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Type getType() {
return type;
}
public void setType(Type type) {
this.type = type;
}
public Format getFormat() {
return format;
}
public void setFormat(Format format) {
this.format = format;
}
public Bioformat getBioformat() {
return bioformat;
}
public void setBioformat(Bioformat bioformat) {
this.bioformat = bioformat;
}
public URI getUri() {
return uri;
}
public void setUri(URI uri) {
this.uri = uri;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
public String getOwnerId() {
return ownerId;
}
public void setOwnerId(String ownerId) {
this.ownerId = ownerId;
}
public String getCreationDate() {
return creationDate;
}
public void setCreationDate(String creationDate) {
this.creationDate = creationDate;
}
public String getModificationDate() {
return modificationDate;
}
public void setModificationDate(String modificationDate) {
this.modificationDate = modificationDate;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Status getStatus() {
return status;
}
public void setStatus(Status status) {
this.status = status;
}
public long getDiskUsage() {
return diskUsage;
}
public void setDiskUsage(long diskUsage) {
this.diskUsage = diskUsage;
}
public int getExperimentId() {
return experimentId;
}
public void setExperimentId(int experimentId) {
this.experimentId = experimentId;
}
public List<Integer> getSampleIds() {
return sampleIds;
}
public void setSampleIds(List<Integer> sampleIds) {
this.sampleIds = sampleIds;
}
public int getJobId() {
return jobId;
}
public void setJobId(int jobId) {
this.jobId = jobId;
}
public List<Acl> getAcl() {
return acl;
}
public void setAcl(List<Acl> acl) {
this.acl = acl;
}
public Index getIndex() {
return index;
}
public void setIndex(Index index) {
this.index = index;
}
public Map<String, Object> getStats() {
return stats;
}
public void setStats(Map<String, Object> stats) {
this.stats = stats;
}
public Map<String, Object> getAttributes() {
return attributes;
}
public void setAttributes(Map<String, Object> attributes) {
this.attributes = attributes;
}
}
| |
/*
===========================================================================
Copyright (c) 2010 BrickRed Technologies Limited
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sub-license, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
===========================================================================
*/
package org.brickred.socialauth.provider;
import java.io.InputStream;
import java.io.Serializable;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
import javax.json.Json;
import javax.json.JsonObject;
import org.brickred.socialauth.AbstractProvider;
import org.brickred.socialauth.AuthProvider;
import org.brickred.socialauth.Contact;
import org.brickred.socialauth.Permission;
import org.brickred.socialauth.Profile;
import org.brickred.socialauth.exception.AccessTokenExpireException;
import org.brickred.socialauth.exception.SocialAuthException;
import org.brickred.socialauth.exception.UserDeniedPermissionException;
import org.brickred.socialauth.oauthstrategy.OAuth2;
import org.brickred.socialauth.oauthstrategy.OAuthStrategyBase;
import org.brickred.socialauth.util.AccessGrant;
import org.brickred.socialauth.util.Constants;
import org.brickred.socialauth.util.MethodType;
import org.brickred.socialauth.util.OAuthConfig;
import org.brickred.socialauth.util.Response;
/**
* Provider implementation for SalesForce
*
*
*/
public class SalesForceImpl extends AbstractProvider implements AuthProvider,
Serializable {
private static final long serialVersionUID = 6929330230703360670L;
private static final Map<String, String> ENDPOINTS;
private final Logger LOG = Logger.getLogger(SalesForceImpl.class.getName());
private OAuthConfig config;
private Permission scope;
private AccessGrant accessGrant;
private Profile userProfile;
private String profileURL;
private OAuthStrategyBase authenticationStrategy;
// set this to the list of extended permissions you want
private static final String AllPerms = new String("full");
private static final String AuthPerms = new String("api");
static {
ENDPOINTS = new HashMap<String, String>();
ENDPOINTS.put(Constants.OAUTH_AUTHORIZATION_URL,
"https://login.salesforce.com/services/oauth2/authorize");
ENDPOINTS.put(Constants.OAUTH_ACCESS_TOKEN_URL,
"https://login.salesforce.com/services/oauth2/token");
}
/**
* Stores configuration for the provider
*
* @param providerConfig
* It contains the configuration of application like consumer key
* and consumer secret
* @throws Exception
*/
public SalesForceImpl(final OAuthConfig providerConfig) throws Exception {
config = providerConfig;
if (config.getCustomPermissions() != null) {
scope = Permission.CUSTOM;
}
if (config.getAuthenticationUrl() != null) {
ENDPOINTS.put(Constants.OAUTH_AUTHORIZATION_URL,
config.getAuthenticationUrl());
} else {
config.setAuthenticationUrl(ENDPOINTS
.get(Constants.OAUTH_AUTHORIZATION_URL));
}
if (config.getAccessTokenUrl() != null) {
ENDPOINTS.put(Constants.OAUTH_ACCESS_TOKEN_URL,
config.getAccessTokenUrl());
} else {
config.setAccessTokenUrl(ENDPOINTS
.get(Constants.OAUTH_ACCESS_TOKEN_URL));
}
authenticationStrategy = new OAuth2(config, ENDPOINTS);
authenticationStrategy.setPermission(scope);
authenticationStrategy.setScope(getScope());
}
/**
* Stores access grant for the provider
*
* @param accessGrant
* It contains the access token and other information
* @throws AccessTokenExpireException
*/
@Override
public void setAccessGrant(final AccessGrant accessGrant)
throws AccessTokenExpireException {
this.accessGrant = accessGrant;
authenticationStrategy.setAccessGrant(accessGrant);
}
/**
* This is the most important action. It redirects the browser to an
* appropriate URL which will be used for authentication with the provider
* that has been set using setId()
*
* @throws Exception
*/
@Override
public String getLoginRedirectURL(final String successUrl) throws Exception {
LOG.info("Determining URL for redirection");
if (!successUrl.startsWith("https")) {
throw new SocialAuthException(
"To implement SalesForce provider your web application should run on a secure port. Please use an https URL instead of http.");
}
return authenticationStrategy.getLoginRedirectURL(successUrl);
}
/**
* Verifies the user when the external provider redirects back to our
* application.
*
*
* @param requestParams
* request parameters, received from the provider
* @return Profile object containing the profile information
* @throws Exception
*/
@Override
public Profile verifyResponse(final Map<String, String> requestParams)
throws Exception {
return doVerifyResponse(requestParams);
}
/**
* @param requestParams
* @return
* @throws Exception
*/
private Profile doVerifyResponse(final Map<String, String> requestParams)
throws Exception {
LOG.info("Retrieving Access Token in verify response function");
if (requestParams.get("error") != null
&& "access_denied".equals(requestParams.get("error"))) {
throw new UserDeniedPermissionException();
}
accessGrant = authenticationStrategy.verifyResponse(requestParams,
MethodType.POST.toString());
if (accessGrant != null) {
LOG.fine("Obtaining user profile");
return getProfile();
} else {
throw new SocialAuthException("Access token not found");
}
}
/**
* Gets the list of contacts of the user. this may not be available for all
* providers.
*
* @return List of contact objects representing Contacts. Only name will be
* available
*/
@Override
public List<Contact> getContactList() throws Exception {
LOG.warning("WARNING: Not implemented for SalesForce");
throw new SocialAuthException(
"Retrieving contacts is not implemented for SalesForce");
}
/**
* Updates the status on the chosen provider if available. This may not be
* implemented for all providers.
*
* @param msg
* Message to be shown as user's status
* @throws Exception
*/
@Override
public Response updateStatus(final String msg) throws Exception {
LOG.warning("WARNING: Not implemented for SalesForce");
throw new SocialAuthException(
"Update Status is not implemented for SalesForce");
}
/**
* Logout
*/
@Override
public void logout() {
accessGrant = null;
authenticationStrategy.logout();
}
/**
* @return
* @throws Exception
*/
private Profile getProfile() throws Exception {
if (accessGrant.getAttribute("id") != null) {
profileURL = (String) accessGrant.getAttribute("id");
}
LOG.fine("Profile URL : " + profileURL);
Profile p = new Profile();
Map<String, String> headerParam = new HashMap<String, String>();
headerParam.put("Authorization", "OAuth " + accessGrant.getKey());
headerParam.put("Content-Type", "application/json");
headerParam.put("Accept", "application/json");
Response serviceResponse;
try {
serviceResponse = authenticationStrategy.executeFeed(profileURL,
MethodType.GET.toString(), null, headerParam, null);
// HttpUtil.doHttpRequest(profileURL, "GET", null, headerParam);
} catch (Exception e) {
throw new SocialAuthException(
"Failed to retrieve the user profile from " + profileURL,
e);
}
String result;
try {
result = serviceResponse
.getResponseBodyAsString(Constants.ENCODING);
LOG.fine("User Profile :" + result);
} catch (Exception e) {
throw new SocialAuthException("Failed to read response from "
+ profileURL, e);
}
try {
JsonObject resp = Json.createReader(new StringReader(result)).readObject();
if (resp.containsKey("user_id")) {
p.setValidatedId(resp.getString("user_id"));
}
if (resp.containsKey("first_name")) {
p.setFirstName(resp.getString("first_name"));
}
if (resp.containsKey("last_name")) {
p.setLastName(resp.getString("last_name"));
}
p.setDisplayName(resp.getString("display_name"));
p.setEmail(resp.getString("email"));
String locale = resp.getString("locale");
if (locale != null) {
String a[] = locale.split("_");
p.setLanguage(a[0]);
p.setCountry(a[1]);
}
if (resp.containsKey("photos")) {
JsonObject photosResp = resp.getJsonObject("photos");
if (p.getProfileImageURL() == null
|| p.getProfileImageURL().length() <= 0) {
p.setProfileImageURL(photosResp.getString("thumbnail"));
}
}
serviceResponse.close();
p.setProviderId(getProviderId());
userProfile = p;
return p;
} catch (Exception e) {
throw new SocialAuthException(
"Failed to parse the user profile json : " + result, e);
}
}
/**
*
* @param p
* Permission object which can be Permission.AUHTHENTICATE_ONLY,
* Permission.ALL, Permission.DEFAULT
*/
@Override
public void setPermission(final Permission p) {
LOG.fine("Permission requested : " + p.toString());
this.scope = p;
authenticationStrategy.setPermission(scope);
authenticationStrategy.setScope(getScope());
}
/**
* Makes HTTP request to a given URL.It attaches access token in URL.
*
* @param url
* URL to make HTTP request.
* @param methodType
* Method type can be GET, POST or PUT
* @param params
* @param headerParams
* Parameters need to pass as Header Parameters
* @param body
* Request Body
* @return Response object
* @throws Exception
*/
@Override
public Response api(final String url, final String methodType,
final Map<String, String> params,
final Map<String, String> headerParams, final String body)
throws Exception {
LOG.info("Calling api function for url : " + url);
Response response = null;
try {
response = authenticationStrategy.executeFeed(url, methodType,
params, headerParams, body);
} catch (Exception e) {
throw new SocialAuthException(
"Error while making request to URL : " + url, e);
}
return response;
}
/**
* Retrieves the user profile.
*
* @return Profile object containing the profile information.
*/
@Override
public Profile getUserProfile() throws Exception {
if (userProfile == null && accessGrant != null) {
getProfile();
}
return userProfile;
}
@Override
public AccessGrant getAccessGrant() {
return accessGrant;
}
@Override
public String getProviderId() {
return config.getId();
}
@Override
public Response uploadImage(final String message, final String fileName,
final InputStream inputStream) throws Exception {
LOG.warning("WARNING: Not implemented for SalesForce");
throw new SocialAuthException(
"Upload Image is not implemented for SalesForce");
}
private String getScope() {
String scopeStr = null;
if (Permission.AUTHENTICATE_ONLY.equals(scope)) {
scopeStr = AuthPerms;
} else if (Permission.CUSTOM.equals(scope)) {
scopeStr = config.getCustomPermissions();
} else {
scopeStr = AllPerms;
}
return scopeStr;
}
@Override
protected List<String> getPluginsList() {
List<String> list = new ArrayList<String>();
if (config.getRegisteredPlugins() != null
&& config.getRegisteredPlugins().length > 0) {
list.addAll(Arrays.asList(config.getRegisteredPlugins()));
}
return list;
}
@Override
protected OAuthStrategyBase getOauthStrategy() {
return authenticationStrategy;
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui.components;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.ui.*;
import com.intellij.ui.components.fields.ExpandableTextField;
import com.intellij.ui.components.fields.ExtendableTextComponent;
import com.intellij.ui.components.fields.ExtendableTextField;
import com.intellij.ui.components.panels.HorizontalLayout;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;
public class TestTextFieldAction extends DumbAwareAction {
private JFrame frame;
@Override
public void actionPerformed(@NotNull AnActionEvent event) {
if (frame != null && frame.isVisible()) {
frame.dispose();
frame = null;
}
else {
if (frame == null) {
frame = new JFrame("Test Text Fields");
frame.add(new View());
frame.pack();
frame.setLocationRelativeTo(null);
}
frame.setVisible(true);
frame.toFront();
}
}
private enum Fill {None, Both, Horizontal, Vertical}
private static final class View extends JPanel {
private final JCheckBox columns = new JCheckBox("20 columns");
private final JCheckBox opaque = new JCheckBox("Opaque");
private final JCheckBox gradient = new JCheckBox("Gradient");
private final JComboBox fill = new ComboBox<>(Fill.values());
private final JPanel control = new JPanel(new HorizontalLayout(5));
private final JPanel center = new JPanel(new GridBagLayout()) {
@Override
protected void paintComponent(Graphics g) {
if (g instanceof Graphics2D && gradient.isSelected()) {
Graphics2D g2d = (Graphics2D)g;
Rectangle bounds = new Rectangle(getWidth(), getHeight());
g2d.setPaint(new LinearGradientPaint(
bounds.x, bounds.y, bounds.width, bounds.height, new float[]{0, 1},
new Color[]{JBColor.LIGHT_GRAY, JBColor.DARK_GRAY}));
g2d.fillRect(bounds.x, bounds.y, bounds.width, bounds.height);
}
else {
super.paintComponent(g);
}
}
};
private final List<JTextField> fields = Arrays.asList(
new JBTextField(),
new JBTextField() {{
putClientProperty("JTextField.variant", "search");
}},
new SearchTextField(false).getTextEditor(),
new SearchTextField(true).getTextEditor(),
new ExpandableTextField(),
new ExtendableTextField() {{
setExtensions(
new ExtendableTextComponent.Extension() {
@Override
public Icon getIcon(boolean hovered) {
return AllIcons.General.GearPlain;
}
@Override
public String getTooltip() {
return "Settings";
}
@Override
public boolean isIconBeforeText() {
return true;
}
},
new ExtendableTextComponent.Extension() {
@Override
public Icon getIcon(boolean hovered) {
return hovered ? AllIcons.General.ContextHelp : AllIcons.General.Locate;
}
@Override
public String getTooltip() {
return "Locate";
}
@Override
public boolean isIconBeforeText() {
return true;
}
},
new ExtendableTextComponent.Extension() {
private final Icon icon = new AnimatedIcon.FS();
@Override
public Icon getIcon(boolean hovered) {
return !hovered ? icon : AllIcons.Process.FS.Step_passive;
}
@Override
public String getTooltip() {
return "Refresh";
}
},
new ExtendableTextComponent.Extension() {
private final Icon fading = new AnimatedIcon.Fading(AllIcons.Ide.FatalError);
private final Icon blinking = new AnimatedIcon.Blinking(AllIcons.Ide.FatalError);
@Override
public Icon getIcon(boolean hovered) {
return hovered ? fading : blinking;
}
},
new ExtendableTextComponent.Extension() {
private final TextIcon icon = new TextIcon("empty", null, null, 1);
@Override
public Icon getIcon(boolean hovered) {
if (null == getActionOnClick()) {
icon.setFont(RelativeFont.SMALL.derive(getFont()));
icon.setBackground(getForeground());
icon.setForeground(getBackground());
return icon;
}
return hovered ? AllIcons.Actions.CloseHovered : AllIcons.Actions.Close;
}
@Override
public String getTooltip() {
return "Clear";
}
@Override
public Runnable getActionOnClick() {
return getText().isEmpty() ? null : () -> setText(null);
}
});
}});
private View() {
super(new BorderLayout(10, 10));
setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));
add(BorderLayout.NORTH, control);
add(BorderLayout.CENTER, center);
control.add(columns);
columns.setSelected(true);
columns.addChangeListener(event -> updateColumns());
updateColumns();
control.add(opaque);
opaque.addChangeListener(event -> updateOpaque());
updateOpaque();
control.add(gradient);
gradient.addChangeListener(event -> updateGradient());
updateGradient();
control.add(new JLabel("Fill:"));
control.add(fill);
fill.addItemListener(event -> updateFill());
updateFill();
}
private void updateColumns() {
int amount = columns.isSelected() ? 20 : 0;
update(field -> field.setColumns(amount));
}
private void updateOpaque() {
boolean state = opaque.isSelected();
update(field -> field.setOpaque(state));
}
private void updateGradient() {
update(field -> {
});
}
private void updateFill() {
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridx = 1;
gbc.weightx = 1;
gbc.weighty = 1;
gbc.anchor = GridBagConstraints.CENTER;
gbc.fill = fill.getSelectedIndex();
if (gbc.fill < 0) gbc.fill = 0;
center.removeAll();
update(field -> center.add(field, gbc));
}
private void update(Consumer<? super JTextField> consumer) {
fields.forEach(consumer);
center.revalidate();
center.repaint();
}
}
}
| |
/*
* Copyright (C) 2013-2019 Timothy Baxendale
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package org.tbax.baxshops.serialization;
import org.bukkit.OfflinePlayer;
import org.bukkit.inventory.ItemStack;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.tbax.baxshops.BaxEntry;
import org.tbax.baxshops.BaxShop;
import org.tbax.baxshops.Format;
import java.text.ParseException;
import java.util.*;
@SuppressWarnings("unused")
@SuppressWarnings("unused")
public class SafeMap implements Map<String, Object>
{
    /** Backing store; every typed accessor and Map method delegates to it. */
    private final Map<String, Object> argMap;

    /**
     * Wraps an existing map. The map is NOT copied; mutations made through
     * this SafeMap are visible to the caller's map and vice versa.
     *
     * @param map the backing map
     */
    public SafeMap(Map<String, Object> map)
    {
        argMap = map;
    }

    /** Creates an empty SafeMap backed by a fresh {@link HashMap}. */
    public SafeMap()
    {
        argMap = new HashMap<>();
    }

    /** Same as {@link #getBoolean(String, boolean)} with {@code false} as default. */
    public boolean getBoolean(String key)
    {
        return getBoolean(key, false);
    }

    /**
     * Reads a boolean.
     *
     * @param key          map key
     * @param defaultValue returned when the key is absent or the stored value
     *                     is not a Boolean
     */
    public boolean getBoolean(String key, boolean defaultValue)
    {
        try {
            return (boolean) getOrDefault(key, defaultValue);
        }
        catch (ClassCastException e) {
            return defaultValue;
        }
    }

    public Object put(String key, boolean value)
    {
        return argMap.put(key, value);
    }

    /**
     * Reads the value for {@code key} as a {@link Number}, or null when the
     * key is absent or holds a non-numeric value (including null). Shared by
     * the int/long/double accessors so they all tolerate any boxed numeric
     * type that deserialization may have produced.
     */
    private Number getNumber(String key)
    {
        Object value = argMap.get(key);
        return value instanceof Number ? (Number) value : null;
    }

    /** Same as {@link #getInteger(String, int)} with 0 as default. */
    public int getInteger(String key)
    {
        return getInteger(key, 0);
    }

    /**
     * Reads an int. Any stored {@link Number} (Integer, Long, Double, ...) is
     * narrowed via {@link Number#intValue()}. Fixes the previous behavior
     * where a value boxed as another numeric type fell through to the default
     * and a stored null caused an uncaught NullPointerException during
     * unboxing (only ClassCastException was caught).
     *
     * @param key          map key
     * @param defaultValue returned when the key is absent or non-numeric
     */
    public int getInteger(String key, int defaultValue)
    {
        Number value = getNumber(key);
        return value == null ? defaultValue : value.intValue();
    }

    public Object put(String key, int value)
    {
        return argMap.put(key, value);
    }

    /** Same as {@link #getString(String, String)} with "" as default. */
    public String getString(String key)
    {
        return getString(key, "");
    }

    /**
     * Reads a String. Note: a stored null is returned as null, not as the
     * default (the key is present, so getOrDefault yields the stored value).
     */
    public String getString(String key, String defaultValue)
    {
        try {
            return (String) getOrDefault(key, defaultValue);
        }
        catch (ClassCastException e) {
            return defaultValue;
        }
    }

    /** Same as {@link #getDouble(String, double)} with 0 as default. */
    public double getDouble(String key)
    {
        return getDouble(key, 0);
    }

    /**
     * Reads a double. Any stored {@link Number} is converted via
     * {@link Number#doubleValue()}. Fixes the previous behavior where an
     * integer-typed stored value (common after deserialization) fell through
     * to the default instead of being converted, and a stored null caused an
     * uncaught NullPointerException during unboxing.
     *
     * @param key          map key
     * @param defaultValue returned when the key is absent or non-numeric
     */
    public double getDouble(String key, double defaultValue)
    {
        Number value = getNumber(key);
        return value == null ? defaultValue : value.doubleValue();
    }

    public Object put(String key, double value)
    {
        return argMap.put(key, value);
    }

    /** Same as {@link #getUUID(String, UUID)} with null as default. */
    public UUID getUUID(String key)
    {
        return getUUID(key, null);
    }

    /**
     * Reads a UUID stored in string form (see {@link #put(String, UUID)}).
     * Absent keys resolve to "" via getString, which fails to parse, so the
     * default is returned for absent or unparseable values.
     */
    public UUID getUUID(String key, UUID defaultValue)
    {
        try {
            return UUID.fromString(getString(key));
        }
        catch (IllegalArgumentException e) {
            return defaultValue;
        }
    }

    /** Stores a UUID as its string form, or null. */
    public Object put(String key, UUID value)
    {
        return argMap.put(key, value == null ? null : value.toString());
    }

    /** Stores a player by unique id (string form), or null. */
    public Object put(String key, OfflinePlayer value)
    {
        if (value == null) {
            return argMap.put(key, null);
        }
        else {
            return put(key, value.getUniqueId());
        }
    }

    /** Same as {@link #getList(String, List)} with an empty list as default. */
    public <E> List<E> getList(String key)
    {
        return getList(key, new ArrayList<>());
    }

    /**
     * Reads a List. The element type is unchecked; callers must know what was
     * stored. Note: a stored null is returned as null, not as the default.
     */
    @SuppressWarnings("unchecked")
    public <E> List<E> getList(String key, List<E> defaultValue)
    {
        try {
            return (List<E>)getOrDefault(key, defaultValue);
        }
        catch (ClassCastException e) {
            return defaultValue;
        }
    }

    /** Same as {@link #getSet(String, Set)} with an empty set as default. */
    public <E> Set<E> getSet(String key)
    {
        return getSet(key, new HashSet<>());
    }

    /**
     * Reads a Set that was stored as a List (see {@link #put(String, Set)}).
     * Returns {@code defaultValue} when the key is absent, holds null, or
     * holds a non-List value. Fixes the previous uncaught
     * NullPointerException when a null had been stored under the key.
     */
    @SuppressWarnings("unchecked")
    public <E> Set<E> getSet(String key, Set<E> defaultValue)
    {
        Object value = argMap.get(key);
        if (value instanceof List) {
            return new HashSet<>((List<E>) value);
        }
        return defaultValue;
    }

    /** Stores a Set as a List copy (serialization-friendly); null becomes an empty list. */
    public <E> Object put(String key, Set<E> value)
    {
        return argMap.put(key, value == null ? new ArrayList<>() : new ArrayList<>(value));
    }

    /** Same as {@link #getDeque(String, Deque)} with an empty deque as default. */
    public <E> Deque<E> getDeque(String key)
    {
        return getDeque(key, new ArrayDeque<>());
    }

    /**
     * Reads a Deque that was stored as a List (see {@link #put(String, Deque)}).
     * Returns {@code defaultValue} when the key is absent, holds null, or
     * holds a non-List value. Fixes the previous uncaught
     * NullPointerException when a null had been stored under the key.
     */
    @SuppressWarnings("unchecked")
    public <E> Deque<E> getDeque(String key, Deque<E> defaultValue)
    {
        Object value = argMap.get(key);
        if (value instanceof List) {
            return new ArrayDeque<>((List<E>) value);
        }
        return defaultValue;
    }

    /** Stores a Deque as a List copy; null becomes an empty list. */
    public <E> Object put(String key, Deque<E> value)
    {
        return argMap.put(key, value == null ? new ArrayList<>() : new ArrayList<>(value));
    }

    /** Same as {@link #getItemStack(String, ItemStack)} with null as default. */
    public ItemStack getItemStack(String key)
    {
        return getItemStack(key, null);
    }

    /** Reads an ItemStack; absent or mistyped values yield the default. */
    public ItemStack getItemStack(String key, ItemStack defaultValue)
    {
        try {
            return (ItemStack)getOrDefault(key, defaultValue);
        }
        catch (ClassCastException e) {
            return defaultValue;
        }
    }

    public Object put(String key, ItemStack value)
    {
        return argMap.put(key, value);
    }

    /** Same as {@link #getDate(String, Date)} with null as default. */
    public Date getDate(String key)
    {
        return getDate(key, null);
    }

    /**
     * Reads a Date stored in Format.DATE_FORMAT string form (see
     * {@link #put(String, Date)}). Absent, mistyped, or unparseable values
     * yield the default.
     */
    public Date getDate(String key, Date defaultValue)
    {
        try {
            return Format.DATE_FORMAT.parse((String)get(key));
        }
        catch (ClassCastException | ParseException | NullPointerException e) {
            return defaultValue;
        }
    }

    /** Stores a Date as a formatted string (or null). */
    public Object put(String key, Date value)
    {
        return argMap.put(key, value == null ? null : Format.date(value));
    }

    @Override
    public int size()
    {
        return argMap.size();
    }

    @Override
    public boolean isEmpty()
    {
        return argMap.isEmpty();
    }

    @Override
    public boolean containsKey(Object key)
    {
        return argMap.containsKey(key);
    }

    @Override
    public boolean containsValue(Object value)
    {
        return argMap.containsValue(value);
    }

    /**
     * {@inheritDoc}
     * Fixed to honor the Map contract: a key of a foreign (non-String) type
     * now yields null instead of throwing ClassCastException. A null key is
     * still looked up in the backing map as before.
     */
    @Override
    public Object get(Object key)
    {
        if (key == null || key instanceof String) {
            return get((String) key, null);
        }
        return null;
    }

    /** Returns the stored value (even if null) when the key is present, else the default. */
    public Object get(String key, Object defaultValue)
    {
        if (argMap.containsKey(key))
            return argMap.get(key);
        return defaultValue;
    }

    @Override
    public @Nullable Object put(String key, Object value)
    {
        return argMap.put(key, value);
    }

    @Override
    public Object remove(Object key)
    {
        return argMap.remove(key);
    }

    @Override
    public void putAll(@NotNull Map<? extends String, ?> m)
    {
        argMap.putAll(m);
    }

    @Override
    public void clear()
    {
        argMap.clear();
    }

    @Override
    public @NotNull Set<String> keySet()
    {
        return argMap.keySet();
    }

    @Override
    public @NotNull Collection<Object> values()
    {
        return argMap.values();
    }

    @Override
    public @NotNull Set<Entry<String, Object>> entrySet()
    {
        return argMap.entrySet();
    }

    /** Same as {@link #getBaxEntry(String, BaxEntry)} with null as default. */
    public BaxEntry getBaxEntry(String key)
    {
        return getBaxEntry(key, null);
    }

    /** Reads a BaxEntry; absent or mistyped values yield the default. */
    public BaxEntry getBaxEntry(String key, BaxEntry defaultValue)
    {
        try {
            return (BaxEntry)getOrDefault(key, defaultValue);
        }
        catch (ClassCastException e) {
            return defaultValue;
        }
    }

    /** Same as {@link #getBaxShop(String, BaxShop)} with null as default. */
    public BaxShop getBaxShop(String key)
    {
        return getBaxShop(key, null);
    }

    /** Reads a BaxShop; absent or mistyped values yield the default. */
    public BaxShop getBaxShop(String key, BaxShop defaultValue)
    {
        try {
            return (BaxShop)getOrDefault(key, defaultValue);
        }
        catch (ClassCastException e) {
            return defaultValue;
        }
    }

    public Object put(String key, BaxShop value)
    {
        return argMap.put(key, value);
    }

    /** Same as {@link #getLong(String, long)} with 0 as default. */
    public long getLong(String key)
    {
        return getLong(key, 0);
    }

    /**
     * Reads a long. Any stored {@link Number} is converted via
     * {@link Number#longValue()}; this subsumes (and generalizes) the
     * previous explicit long-then-int cast fallback and avoids the uncaught
     * NullPointerException on a stored null.
     */
    public long getLong(String key, long defaultValue)
    {
        Number value = getNumber(key);
        return value == null ? defaultValue : value.longValue();
    }

    public Object put(String key, long value)
    {
        return argMap.put(key, value);
    }
}
| |
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.config;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.annotation.PostConstruct;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.config.materials.ScmMaterialConfig;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterialConfig;
import com.thoughtworks.go.config.merge.MergeConfigOrigin;
import com.thoughtworks.go.config.merge.MergeEnvironmentConfig;
import com.thoughtworks.go.config.merge.MergePipelineConfigs;
import com.thoughtworks.go.config.preprocessor.SkipParameterResolution;
import com.thoughtworks.go.config.remote.*;
import com.thoughtworks.go.domain.ConfigErrors;
import com.thoughtworks.go.domain.JobConfigVisitor;
import com.thoughtworks.go.domain.NullTask;
import com.thoughtworks.go.domain.PipelineGroupVisitor;
import com.thoughtworks.go.domain.PipelineGroups;
import com.thoughtworks.go.domain.PiplineConfigVisitor;
import com.thoughtworks.go.domain.Task;
import com.thoughtworks.go.domain.TaskConfigVisitor;
import com.thoughtworks.go.domain.config.Admin;
import com.thoughtworks.go.domain.materials.MaterialConfig;
import com.thoughtworks.go.domain.packagerepository.PackageDefinition;
import com.thoughtworks.go.domain.packagerepository.PackageRepositories;
import com.thoughtworks.go.domain.packagerepository.PackageRepository;
import com.thoughtworks.go.domain.scm.SCM;
import com.thoughtworks.go.domain.scm.SCMs;
import com.thoughtworks.go.security.GoCipher;
import com.thoughtworks.go.util.GoConstants;
import com.thoughtworks.go.util.DFSCycleDetector;
import com.thoughtworks.go.util.Node;
import com.thoughtworks.go.util.Pair;
import static com.thoughtworks.go.util.ExceptionUtils.bomb;
import static com.thoughtworks.go.util.ExceptionUtils.bombIfNull;
/**
* @understands the configuration for cruise
*/
@ConfigTag("cruise")
public class BasicCruiseConfig implements CruiseConfig {
@ConfigSubtag @SkipParameterResolution private ServerConfig serverConfig = new ServerConfig();
@ConfigSubtag @SkipParameterResolution private com.thoughtworks.go.domain.packagerepository.PackageRepositories packageRepositories = new PackageRepositories();
@ConfigSubtag @SkipParameterResolution private SCMs scms = new SCMs();
@ConfigSubtag @SkipParameterResolution private ConfigReposConfig configRepos = new ConfigReposConfig();
@ConfigSubtag(label = "groups") private PipelineGroups groups = new PipelineGroups();
@ConfigSubtag(label = "templates") @SkipParameterResolution private TemplatesConfig templatesConfig = new TemplatesConfig();
@ConfigSubtag @SkipParameterResolution private EnvironmentsConfig environments = new EnvironmentsConfig();
@ConfigSubtag @SkipParameterResolution private Agents agents = new Agents();
private CruiseStrategy strategy;
//This is set reflective by the MagicalGoConfigXmlLoader
private String md5;
private ConfigErrors errors = new ConfigErrors();
private ConcurrentMap<CaseInsensitiveString, PipelineConfig> pipelineNameToConfigMap = new ConcurrentHashMap<CaseInsensitiveString, PipelineConfig>();
private List<PipelineConfig> allPipelineConfigs;
/** Creates an empty file-based configuration (single local config file). */
public BasicCruiseConfig() {
    strategy = new BasicStrategy();
}

/**
 * Creates a configuration merged from a main (local) config plus remote
 * partial configs, e.g. from configuration repositories.
 */
public BasicCruiseConfig(BasicCruiseConfig main, PartialConfig... parts){
    List<PartialConfig> partList = Arrays.asList(parts);
    createMergedConfig(main, partList);
}

/** List-based variant of the merging constructor. */
public BasicCruiseConfig(BasicCruiseConfig main,List<PartialConfig> parts)
{
    createMergedConfig(main, parts);
}

// Copies the non-mergeable sections from main by reference, then merges
// pipeline groups and environments from main plus all parts via MergeStrategy.
private void createMergedConfig(BasicCruiseConfig main, List<PartialConfig> partList) {
    this.serverConfig = main.serverConfig;
    this.packageRepositories = main.packageRepositories;
    this.scms = main.scms;
    this.templatesConfig = main.templatesConfig;
    this.agents = main.agents;
    this.configRepos = main.configRepos;
    MergeStrategy mergeStrategy = new MergeStrategy(main,partList);
    this.strategy = mergeStrategy;
    groups = mergeStrategy.mergePipelineConfigs();
    environments = mergeStrategy.mergeEnvironmentConfigs();
}

// for tests
public BasicCruiseConfig(PipelineConfigs... groups) {
    for (PipelineConfigs pipelineConfigs : groups) {
        this.groups.add(pipelineConfigs);
    }
    strategy = new BasicStrategy();
}

/** Ensures the server has a generated id; runs after construction (@PostConstruct). */
@Override
@PostConstruct
public void initializeServer() {
    serverConfig.ensureServerIdExists();
}
// Strategy for the operations whose semantics differ between a plain
// file-based config (BasicStrategy) and a config merged with remote partial
// configs (MergeStrategy).
private interface CruiseStrategy {
    void addEnvironment(BasicEnvironmentConfig config);
    void setEnvironments(EnvironmentsConfig environments);
    void setGroup(PipelineGroups pipelineGroups);
    void makePipelineUseTemplate(CaseInsensitiveString pipelineName, CaseInsensitiveString templateName);
    void updateGroup(PipelineConfigs pipelineConfigs, String groupName);
    ConfigOrigin getOrigin();
    void setOrigins(ConfigOrigin origins);
    String getMd5();
    // Returns the local (editable, file-backed) portion of the configuration.
    CruiseConfig getLocal();
    void addPipeline(String groupName, PipelineConfig pipelineConfig);
    void addPipelineWithoutValidation(String groupName, PipelineConfig pipelineConfig);
    void update(String groupName, String pipelineName, PipelineConfig pipeline);
}
// Strategy for a configuration loaded from a single local file.
private class BasicStrategy implements CruiseStrategy {
    // Origin of the whole configuration; file-based by construction.
    private ConfigOrigin origin;

    public BasicStrategy()
    {
        origin = new FileConfigOrigin();
    }

    @Override
    public void addEnvironment(BasicEnvironmentConfig config) {
        environments.add(config);
    }

    @Override
    public void setEnvironments(EnvironmentsConfig environments) {
        // Bug fix: the previous code read `environments = environments;`, a
        // self-assignment of the parameter that never updated the outer field.
        BasicCruiseConfig.this.environments = environments;
    }

    @Override
    public void setGroup(PipelineGroups pipelineGroups) {
        groups = pipelineGroups;
    }

    @Override
    public void makePipelineUseTemplate(CaseInsensitiveString pipelineName, CaseInsensitiveString templateName) {
        pipelineConfigByName(pipelineName).templatize(templateName);
    }

    @Override
    public void updateGroup(PipelineConfigs pipelineConfigs, String groupName) {
        // Replace the group at its existing index so group ordering is kept.
        PipelineConfigs old = groups.findGroup(groupName);
        int index = groups.indexOf(old);
        groups.set(index, pipelineConfigs);
    }

    @Override
    public ConfigOrigin getOrigin() {
        return origin;
    }

    @Override
    public void setOrigins(ConfigOrigin origins) {
        // Propagate the new origin to every environment and pipeline group.
        origin = origins;
        for(EnvironmentConfig env : environments)
        {
            env.setOrigins(origins);
        }
        for(PipelineConfigs pipes : groups)
        {
            pipes.setOrigins(origins);
        }
    }

    @Override
    public String getMd5() {
        return md5;
    }

    @Override
    public CruiseConfig getLocal() {
        // A file-based config is entirely local.
        return BasicCruiseConfig.this;
    }

    @Override
    public void addPipeline(String groupName, PipelineConfig pipelineConfig) {
        groups.addPipeline(groupName, pipelineConfig);
    }

    @Override
    public void addPipelineWithoutValidation(String groupName, PipelineConfig pipelineConfig) {
        groups.addPipelineWithoutValidation(sanitizedGroupName(groupName), pipelineConfig);
    }

    @Override
    public void update(String groupName, String pipelineName, PipelineConfig pipeline) {
        // Lazily create a default group when none exists yet.
        if (groups.isEmpty()) {
            PipelineConfigs configs = new BasicPipelineConfigs();
            configs.add(pipeline);
            groups.add(configs);
        }
        groups.update(groupName, pipelineName, pipeline);
    }
}
// Strategy for a configuration merged from a local main config plus remote
// partial configs (configuration repositories).
private class MergeStrategy implements CruiseStrategy {
    /*
    Skip validating main configuration when merged. For 2 reasons:
    - partial configurations may not be valid by themselves
    - to not duplicate errors copied in final cruise config (main has references
    to the same instances that are part of merged config - see the constructor)
    Main configuration is still validated within its own scope, explicitly, at the right moment,
    But that is done higher in services.
    */
    @IgnoreTraversal private BasicCruiseConfig main;
    private List<PartialConfig> parts = new ArrayList<PartialConfig>();

    public MergeStrategy(BasicCruiseConfig main,List<PartialConfig> parts) {
        this.main = main;
        this.parts.addAll(parts);
    }

    // Groups environments from main and all parts by environment name; a name
    // appearing in more than one source becomes a MergeEnvironmentConfig.
    private EnvironmentsConfig mergeEnvironmentConfigs() {
        EnvironmentsConfig environments = new EnvironmentsConfig();
        //first add environment configs from main
        List<EnvironmentConfig> allEnvConfigs = new ArrayList<EnvironmentConfig>();
        for(EnvironmentConfig envConfig : this.main.getEnvironments())
        {
            allEnvConfigs.add(envConfig);
        }
        // then add from each part
        for (PartialConfig part : this.parts) {
            for(EnvironmentConfig partPipesConf : part.getEnvironments())
            {
                allEnvConfigs.add(partPipesConf);
            }
        }
        // lets group them by environment name
        Map<CaseInsensitiveString, List<EnvironmentConfig>> map = new HashMap<CaseInsensitiveString, List<EnvironmentConfig>>();
        for(EnvironmentConfig env : allEnvConfigs)
        {
            CaseInsensitiveString key = env.name();
            if (map.get(key) == null) {
                map.put(key, new ArrayList<EnvironmentConfig>());
            }
            map.get(key).add(env);
        }
        for(List<EnvironmentConfig> oneEnv : map.values())
        {
            if(oneEnv.size() == 1)
                environments.add(oneEnv.get(0));
            else
                environments.add(new MergeEnvironmentConfig(oneEnv));
        }
        return environments;
    }

    // Groups pipeline groups from main and all parts by group name; a name
    // appearing in more than one source becomes a MergePipelineConfigs.
    private PipelineGroups mergePipelineConfigs() {
        PipelineGroups groups = new PipelineGroups();
        // first add pipeline configs from main part
        List<PipelineConfigs> allPipelineConfigs = new ArrayList<PipelineConfigs>();
        for(PipelineConfigs partPipesConf : this.main.getGroups())
        {
            allPipelineConfigs.add(partPipesConf);
        }
        // then add from each part
        for (PartialConfig part : this.parts) {
            for(PipelineConfigs partPipesConf : part.getGroups())
            {
                allPipelineConfigs.add(partPipesConf);
            }
        }
        //there may be duplicated names and conflicts in general in the PipelineConfigs
        // lets group them by 'pipeline group' name
        Map<String, List<PipelineConfigs>> map = new HashMap<String, List<PipelineConfigs>>();
        for (PipelineConfigs pipes : allPipelineConfigs) {
            String key = pipes.getGroup();
            if (map.get(key) == null) {
                map.put(key, new ArrayList<PipelineConfigs>());
            }
            map.get(key).add(pipes);
        }
        for(List<PipelineConfigs> oneGroup : map.values())
        {
            if(oneGroup.size() == 1)
                groups.add(oneGroup.get(0));
            else
                groups.add(new MergePipelineConfigs(oneGroup));
        }
        return groups;
    }

    @Override
    public void addEnvironment(BasicEnvironmentConfig config) {
        //validate at global scope
        environments.validateNotADuplicate(config);
        // but append to main config
        main.addEnvironment(config);
        //TODO add rather than reconstruct
        environments = mergeEnvironmentConfigs();
    }

    @Override
    public void setEnvironments(EnvironmentsConfig environments) {
        // this was called only from tests
        throw bomb("Cannot set environments in merged configuration");
    }

    @Override
    public void setGroup(PipelineGroups pipelineGroups) {
        // this was called only from tests
        throw bomb("Cannot set groups in merged configuration");
    }

    @Override
    public void makePipelineUseTemplate(CaseInsensitiveString pipelineName, CaseInsensitiveString templateName) {
        // Only pipelines defined locally can be templatized.
        PipelineConfig config = pipelineConfigByName(pipelineName);
        if(!config.isLocal())
            throw bomb("Cannot extract template from remote pipeline");
        this.main.makePipelineUseTemplate(pipelineName,templateName);
    }

    @Override
    public void updateGroup(PipelineConfigs pipelineConfigs, String groupName) {
        // this was called only from tests
        throw bomb("Cannot set group in merged configuration");
    }

    @Override
    public ConfigOrigin getOrigin() {
        // Merged origin: main's origin plus the origin of every part.
        MergeConfigOrigin origins = new MergeConfigOrigin(this.main.getOrigin());
        for(PartialConfig part : this.parts)
        {
            origins.add(part.getOrigin());
        }
        return origins;
    }

    @Override
    public void setOrigins(ConfigOrigin origins) {
        throw bomb("Cannot set origins on merged config");
    }

    @Override
    public String getMd5() {
        // Only the main (file-based) config carries an md5.
        return this.main.getMd5();
    }

    @Override
    public CruiseConfig getLocal() {
        return this.main;
    }

    @Override
    public void addPipeline(String groupName, PipelineConfig pipelineConfig) {
        // validate at global level
        this.verifyUniqueNameInParts(pipelineConfig);
        this.main.addPipeline(groupName,pipelineConfig);
        //TODO add rather than reconstruct
        groups = this.mergePipelineConfigs();
    }

    @Override
    public void addPipelineWithoutValidation(String groupName, PipelineConfig pipelineConfig) {
        this.verifyUniqueNameInParts(pipelineConfig);
        this.main.addPipelineWithoutValidation(groupName,pipelineConfig);
        //TODO add rather than reconstruct
        groups = this.mergePipelineConfigs();
    }

    // Rejects a pipeline whose name is already defined by any remote part.
    private void verifyUniqueNameInParts(PipelineConfig pipelineConfig) {
        for(PartialConfig part : this.parts)
        {
            for(PipelineConfigs partGroup : part.getGroups())
            {
                if(partGroup.hasPipeline(pipelineConfig.name())){
                    throw bomb("Pipeline called '" + pipelineConfig.name() +
                            "' is already defined in configuration repository " +
                            part.getOrigin().displayName());
                }
            }
        }
    }

    @Override
    public void update(String groupName, String pipelineName, PipelineConfig pipeline) {
        // this was called only from tests
        throw bomb("Cannot update pipeline group in merged configuration");
    }
}
/** Config-level validation: currently only checks for dependency cycles. */
@Override
public void validate(ValidationContext validationContext) {
    areThereCyclicDependencies();
}

/** Maps each pipeline name to a node describing its upstream dependencies. */
@Override
public Hashtable<CaseInsensitiveString, Node> getDependencyTable() {
    final Hashtable<CaseInsensitiveString, Node> hashtable = new Hashtable<CaseInsensitiveString, Node>();
    this.accept(new PiplineConfigVisitor() {
        public void visit(PipelineConfig pipelineConfig) {
            hashtable.put(pipelineConfig.name(), pipelineConfig.getDependenciesAsNode());
        }
    });
    return hashtable;
}

// Runs a DFS-based topological sort per pipeline; an exception from topoSort
// signals a dependency cycle, recorded on that pipeline's materials.
private void areThereCyclicDependencies() {
    final DFSCycleDetector dfsCycleDetector = new DFSCycleDetector();
    final Hashtable<CaseInsensitiveString, Node> dependencyTable = getDependencyTable();
    List<PipelineConfig> pipelineConfigs = this.getAllPipelineConfigs();
    for (PipelineConfig pipelineConfig : pipelineConfigs) {
        try {
            dfsCycleDetector.topoSort(pipelineConfig.name(), dependencyTable);
        } catch (Exception e) {
            addToErrorsBaseOnMaterialsIfDoesNotExist(e.getMessage(), pipelineConfig.materialConfigs(), pipelineConfigs);
        }
    }
}

// Adds the error to the given materials unless some pipeline already carries
// the identical message, avoiding duplicate reports of the same cycle.
private void addToErrorsBaseOnMaterialsIfDoesNotExist(String errorMessage, MaterialConfigs materialConfigs, List<PipelineConfig> pipelineConfigs) {
    for (PipelineConfig config : pipelineConfigs) {
        if (config.materialConfigs().errors().getAll().contains(errorMessage)) {
            return;
        }
    }
    materialConfigs.addError("base", errorMessage);
}
@Override
public ConfigErrors errors() {
    return errors;
}

@Override
public void addError(String fieldName, String message) {
    errors.add(fieldName, message);
}

// Fails (via StageNotFoundException.bombIfNull) when the stage is absent.
@Override
public StageConfig stageConfigByName(final CaseInsensitiveString pipelineName, final CaseInsensitiveString stageName) {
    StageConfig stageConfig = pipelineConfigByName(pipelineName).findBy(stageName);
    StageNotFoundException.bombIfNull(stageConfig, pipelineName, stageName);
    return stageConfig;
}

@Override
public JobConfig findJob(String pipelineName, String stageName, String jobName) {
    return pipelineConfigByName(new CaseInsensitiveString(pipelineName))
            .findBy(new CaseInsensitiveString(stageName))
            .jobConfigByConfigName(new CaseInsensitiveString(jobName));
}

/**
 * Looks up a pipeline by name, caching hits in pipelineNameToConfigMap.
 * NOTE(review): the cache is never invalidated within this class — presumably
 * a config instance is treated as immutable once loaded; verify before
 * mutating pipelines on a cached instance.
 *
 * @throws PipelineNotFoundException when no such pipeline exists
 */
@Override
public PipelineConfig pipelineConfigByName(final CaseInsensitiveString name) {
    if (pipelineNameToConfigMap.containsKey(name)) {
        return pipelineNameToConfigMap.get(name);
    }
    PipelineConfig pipelineConfig = getPipelineConfigByName(name);
    if (pipelineConfig == null) {
        throw new PipelineNotFoundException("Pipeline '" + name + "' not found.");
    }
    pipelineNameToConfigMap.putIfAbsent(pipelineConfig.name(), pipelineConfig);
    return pipelineConfig;
}
// Note: ignoreCase is accepted for interface compatibility but is not used in
// this lookup (findBy does the matching).
@Override
public boolean hasStageConfigNamed(final CaseInsensitiveString pipelineName, final CaseInsensitiveString stageName, boolean ignoreCase) {
    PipelineConfig pipelineConfig = getPipelineConfigByName(pipelineName);
    if (pipelineConfig == null) {
        return false;
    }
    return pipelineConfig.findBy(stageName) != null;
}

/** Null-safe lookup: returns null (rather than throwing) when absent. */
@Override
public PipelineConfig getPipelineConfigByName(CaseInsensitiveString pipelineName) {
    return pipelinesFromAllGroups().findBy(pipelineName);
}

@Override
public boolean hasPipelineNamed(final CaseInsensitiveString pipelineName) {
    PipelineConfig pipelineConfig = getPipelineConfigByName(pipelineName);
    return pipelineConfig != null;
}

/** True when a stage follows lastStageName in the given pipeline. */
@Override
public boolean hasNextStage(final CaseInsensitiveString pipelineName, final CaseInsensitiveString lastStageName) {
    PipelineConfig pipelineConfig = getPipelineConfigByName(pipelineName);
    if (pipelineConfig == null) {
        return false;
    }
    return pipelineConfig.nextStage(lastStageName) != null;
}

/** True when a stage precedes stageName in the given pipeline. */
@Override
public boolean hasPreviousStage(final CaseInsensitiveString pipelineName, final CaseInsensitiveString stageName) {
    PipelineConfig pipelineConfig = getPipelineConfigByName(pipelineName);
    if (pipelineConfig == null) {
        return false;
    }
    return pipelineConfig.previousStage(stageName) != null;
}

/** Returns the stage after lastStageName; fails (bombIfNull) when absent. */
@Override
public StageConfig nextStage(final CaseInsensitiveString pipelineName, final CaseInsensitiveString lastStageName) {
    StageConfig stageConfig = pipelineConfigByName(pipelineName).nextStage(lastStageName);
    bombIfNull(stageConfig, "Build stage after '" + lastStageName + "' not found.");
    return stageConfig;
}
/**
 * Returns the stage preceding lastStageName; fails (bombIfNull) when absent.
 * Bug fix: the error message said "after", copy-pasted from nextStage,
 * although this method looks up the stage BEFORE the given one.
 */
@Override
public StageConfig previousStage(final CaseInsensitiveString pipelineName, final CaseInsensitiveString lastStageName) {
    StageConfig stageConfig = pipelineConfigByName(pipelineName).previousStage(lastStageName);
    bombIfNull(stageConfig, "Build stage before '" + lastStageName + "' not found.");
    return stageConfig;
}
// Resolves a job by instance name; fails (bombIfNull) with a descriptive
// message when the job does not exist in the given pipeline/stage.
@Override
public JobConfig jobConfigByName(String pipelineName, String stageName, String jobInstanceName, boolean ignoreCase) {
    JobConfig jobConfig = stageConfigByName(new CaseInsensitiveString(pipelineName), new CaseInsensitiveString(stageName)).jobConfigByInstanceName(jobInstanceName,
            ignoreCase);
    bombIfNull(jobConfig,
            String.format("Job [%s] is not found in pipeline [%s] stage [%s].", jobInstanceName,
                    pipelineName, stageName));
    return jobConfig;
}
@Override
public Agents agents() {
    return agents;
}

@Override
public ServerConfig server() {
    return serverConfig;
}

@Override
public MailHost mailHost() {
    return serverConfig.mailHost();
}

@Override
public EnvironmentsConfig getEnvironments() {
    return environments;
}

// Flattens every group into one synthetic PipelineConfigs collection.
private PipelineConfigs pipelinesFromAllGroups() {
    //#2388 - hack to flatten all pipelines. We need a "pipelineGroup" model
    return new BasicPipelineConfigs(allPipelines().toArray(new PipelineConfig[0]));
}
/**
 * Maps group name to the privileges the given role holds on that group;
 * groups where the role has no privileges are omitted.
 */
@Override
public Map<String, List<Authorization.PrivilegeType>> groupsAffectedByDeletionOfRole(final String roleName) {
    Map<String, List<Authorization.PrivilegeType>> result = new HashMap<String, List<Authorization.PrivilegeType>>();
    for (PipelineConfigs group : groups) {
        final List<Authorization.PrivilegeType> privileges = group.getAuthorization().privilagesOfRole(new CaseInsensitiveString(roleName));
        if (privileges.size() > 0) {
            result.put(group.getGroup(), privileges);
        }
    }
    return result;
}

/** Collects every (pipeline, stage) pair on which the role has permissions. */
@Override
public Set<Pair<PipelineConfig, StageConfig>> stagesWithPermissionForRole(final String roleName) {
    Set<Pair<PipelineConfig, StageConfig>> result = new HashSet<Pair<PipelineConfig, StageConfig>>();
    for (PipelineConfig pipelineConfig : allPipelines()) {
        result.addAll(pipelineConfig.stagesWithPermissionForRole(new CaseInsensitiveString(roleName)));
    }
    return result;
}

// Removes the role from the admin config (if listed there), from every
// group's usages, and finally from the security config itself.
@Override
public void removeRole(Role roleToDelete) {
    if (doesAdminConfigContainRole(roleToDelete.getName().toString())) {
        server().security().adminsConfig().removeRole(roleToDelete);
    }
    for (PipelineConfigs group : this.getGroups()) {
        group.cleanupAllUsagesOfRole(roleToDelete);
    }
    server().security().deleteRole(roleToDelete);
}

/** True when the named role exists and is listed as a server admin role. */
@Override
public boolean doesAdminConfigContainRole(String roleToDelete) {
    SecurityConfig security = server().security();
    Role role = security.roleNamed(roleToDelete);
    if (role == null) {
        return false;
    }
    return security.adminsConfig().isAdminRole(Arrays.asList(role));
}
/** Flattens every pipeline group into a single list of pipeline configs. */
@Override
public List<PipelineConfig> allPipelines() {
    List<PipelineConfig> result = new ArrayList<PipelineConfig>();
    for (PipelineConfigs pipelineGroup : groups) {
        for (PipelineConfig pipelineConfig : pipelineGroup) {
            result.add(pipelineConfig);
        }
    }
    return result;
}
/**
 * Returns the pipeline group with the given name.
 * Bug fix: previously threw {@code RuntimeException("")} with an empty,
 * undiagnosable message when the group was absent; the message now names
 * the missing group.
 *
 * @throws RuntimeException when no group with that name exists
 */
@Override
public PipelineConfigs pipelines(String groupName) {
    PipelineGroups pipelineGroups = this.getGroups();
    for (PipelineConfigs pipelineGroup : pipelineGroups) {
        if (pipelineGroup.isNamed(groupName)) {
            return pipelineGroup;
        }
    }
    throw new RuntimeException("Pipeline group '" + groupName + "' does not exist.");
}
// TODO - #2491 - rename jobConfig to job
/** True when the given pipeline/stage/job combination exists. */
@Override
public boolean hasBuildPlan(final CaseInsensitiveString pipelineName, final CaseInsensitiveString stageName, String buildName, boolean ignoreCase) {
    if (!hasStageConfigNamed(pipelineName, stageName, ignoreCase)) {
        return false;
    }
    StageConfig stageConfig = stageConfigByName(pipelineName, stageName);
    return stageConfig != null && stageConfig.jobConfigByInstanceName(buildName, ignoreCase) != null;
}

@Override
public int schemaVersion() {
    return GoConstants.CONFIG_SCHEMA_VERSION;
}

/** The local (editable, file-backed) part of this config; see the strategies. */
@Override
public CruiseConfig getLocal() {
    return strategy.getLocal();
}

@Override
public ConfigReposConfig getConfigRepos() {
    return configRepos;
}

@Override
public void setConfigRepos(ConfigReposConfig repos) {
    configRepos = repos;
}
/** True when the given stage exists and requires manual approval. */
@Override
public boolean requiresApproval(final CaseInsensitiveString pipelineName, final CaseInsensitiveString stageName) {
    PipelineConfig pipelineConfig = getPipelineConfigByName(pipelineName);
    if (pipelineConfig == null) {
        return false;
    }
    final StageConfig stageConfig = pipelineConfig.findBy(stageName);
    return stageConfig != null && stageConfig.requiresApproval();
}

/** Visits every job of every stage of every pipeline. */
@Override
public void accept(JobConfigVisitor visitor) {
    for (PipelineConfig pipelineConfig : pipelinesFromAllGroups()) {
        for (StageConfig stageConfig : pipelineConfig) {
            for (JobConfig jobConfig : stageConfig.allBuildPlans()) {
                visitor.visit(pipelineConfig, stageConfig, jobConfig);
            }
        }
    }
}

/** Visits every task of every job; NullTask placeholders are skipped. */
@Override
public void accept(TaskConfigVisitor visitor) {
    for (PipelineConfig pipelineConfig : pipelinesFromAllGroups()) {
        for (StageConfig stageConfig : pipelineConfig) {
            for (JobConfig jobConfig : stageConfig.allBuildPlans()) {
                for (Task task : jobConfig.tasks()) {
                    if (!(task instanceof NullTask)) {
                        visitor.visit(pipelineConfig, stageConfig, jobConfig, task);
                    }
                }
            }
        }
    }
}

/** Visits every pipeline, group by group. */
@Override
public void accept(final PiplineConfigVisitor visitor) {
    accept(new PipelineGroupVisitor() {
        public void visit(PipelineConfigs group) {
            group.accept(visitor);
        }
    });
}
@Override
public void setGroup(PipelineGroups pipelineGroups) {
    this.strategy.setGroup(pipelineGroups);
}

@Override
public PipelineGroups getGroups() {
    return groups;
}

// when adding pipelines, groups or environments we must make sure that both merged and basic scopes are updated
@Override
public void addPipeline(String groupName, PipelineConfig pipelineConfig) {
    this.strategy.addPipeline(groupName, pipelineConfig);
}

@Override
public void addPipelineWithoutValidation(String groupName, PipelineConfig pipelineConfig) {
    this.strategy.addPipelineWithoutValidation(groupName, pipelineConfig);
}

@Override
public void update(String groupName, String pipelineName, PipelineConfig pipeline) {
    this.strategy.update(groupName,pipelineName,pipeline);
}

/** True when a pipeline exists at the given flat (cross-group) index. */
@Override
public boolean exist(int pipelineIndex) {
    return pipelineIndex < pipelinesFromAllGroups().size();
}

// NOTE(review): despite the name, this returns isEmpty() — i.e. true when
// there are NO pipelines. Confirm the intended semantics with callers before
// relying on it or "fixing" the apparent inversion.
@Override
public boolean hasPipeline() {
    return pipelinesFromAllGroups().isEmpty();
}
@Override
public PipelineConfig find(String groupName, int pipelineIndex) {
    return groups.findPipeline(groupName, pipelineIndex);
}

//only for test
@Override
public int numberOfPipelines() {
    return pipelinesFromAllGroups().size();
}

@Override
public int numbersOfPipeline(String groupName) {
    return pipelines(groupName).size();
}

// NOTE(review): delegates to PipelineConfigs.add(List) for each group —
// presumably accumulating group names into allGroup; verify against the
// PipelineConfigs implementation, the behavior is not visible here.
@Override
public void groups(List<String> allGroup) {
    for (PipelineConfigs group : groups) {
        group.add(allGroup);
    }
}

// True when the named group contains the named pipeline. NOTE(review): if
// findGroup yields null for a missing group this would NPE — confirm
// findGroup's contract before passing unknown group names.
@Override
public boolean exist(String groupName, String pipelineName) {
    PipelineConfigs configs = groups.findGroup(groupName);
    PipelineConfig pipelineConfig = configs.findBy(new CaseInsensitiveString(pipelineName));
    return pipelineConfig != null;
}
}
@Override
public List<Task> tasksForJob(String pipelineName, String stageName, String jobName) {
return jobConfigByName(pipelineName, stageName, jobName, true).tasks();
}
@Override
public boolean isSmtpEnabled() {
MailHost mailHost = server().mailHost();
return mailHost != null && !mailHost.equals(new MailHost(new GoCipher()));
}
@Override
public boolean isInFirstGroup(final CaseInsensitiveString pipelineName) {
if (groups.isEmpty()) {
throw new IllegalStateException("No pipeline group defined yet!");
}
return groups.first().hasPipeline(pipelineName);
}
@Override
public boolean hasMultiplePipelineGroups() {
return groups.size() > 1;
}
@Override
public void accept(PipelineGroupVisitor visitor) {
groups.accept(visitor);
}
@Override
public boolean isSecurityEnabled() {
return server().isSecurityEnabled();
}
@Override
public void setServerConfig(ServerConfig serverConfig) {
this.serverConfig = serverConfig;
}
@Override
public String adminEmail() {
return server().mailHost().getAdminMail();
}
@Override
public boolean hasPipelineGroup(String groupName) {
return groups.hasGroup(groupName);
}
@Override
public PipelineConfigs findGroup(String groupName) {
return groups.findGroup(groupName);
}
@Override
public void updateGroup(PipelineConfigs pipelineConfigs, String groupName) {
this.strategy.updateGroup(pipelineConfigs, groupName);
}
@Override
public boolean isMailHostConfigured() {
return !new MailHost(new GoCipher()).equals(mailHost());
}
/**
 * Lazily builds and caches the flat list of all pipeline configs.
 * NOTE(review): not synchronized and never invalidated here — assumes the
 * config is effectively immutable once this is first called; confirm before
 * reusing an instance across config edits.
 */
@Override
public List<PipelineConfig> getAllPipelineConfigs() {
    if (allPipelineConfigs == null) {
        List<PipelineConfig> configs = new ArrayList<PipelineConfig>();
        PipelineGroups groups = getGroups();
        for (PipelineConfigs group : groups) {
            for(PipelineConfig pipelineConfig : group)
            {
                configs.add(pipelineConfig);
            }
        }
        allPipelineConfigs = configs;
    }
    return allPipelineConfigs;
}
/** Returns the names of all pipelines, in the order the configs are listed. */
@Override
public List<CaseInsensitiveString> getAllPipelineNames() {
    List<CaseInsensitiveString> pipelineNames = new ArrayList<CaseInsensitiveString>();
    for (PipelineConfig pipelineConfig : getAllPipelineConfigs()) {
        pipelineNames.add(pipelineConfig.name());
    }
    return pipelineNames;
}
// Equality considers agents, groups, serverConfig, environments and
// templatesConfig; md5, errors, strategy and caches are deliberately excluded.
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    BasicCruiseConfig config = (BasicCruiseConfig) o;
    if (agents != null ? !agents.equals(config.agents) : config.agents != null) {
        return false;
    }
    if (groups != null ? !groups.equals(config.groups) : config.groups != null) {
        return false;
    }
    if (serverConfig != null ? !serverConfig.equals(config.serverConfig) : config.serverConfig != null) {
        return false;
    }
    if (environments != null ? !environments.equals(config.environments) : config.environments != null) {
        return false;
    }
    if (templatesConfig != null ? !templatesConfig.equals(config.templatesConfig) : config.templatesConfig != null) {
        return false;
    }
    return true;
}

// Must stay consistent with equals(): the same five fields contribute.
public int hashCode() {
    int result;
    result = (serverConfig != null ? serverConfig.hashCode() : 0);
    result = 31 * result + (groups != null ? groups.hashCode() : 0);
    result = 31 * result + (agents != null ? agents.hashCode() : 0);
    result = 31 * result + (environments != null ? environments.hashCode() : 0);
    result = 31 * result + (templatesConfig != null ? templatesConfig.hashCode() : 0);
    return result;
}
/** True when the given username has server admin privileges. */
@Override
public boolean isAdministrator(String username) {
    return hasAdminPrivileges(new AdminUser(new CaseInsensitiveString(username)));
}

/** True when the given role grants server admin privileges. */
@Override
public boolean doesRoleHaveAdminPrivileges(String rolename) {
    return hasAdminPrivileges(new AdminRole(new CaseInsensitiveString(rolename)));
}

private boolean hasAdminPrivileges(Admin admin) {
    return server().security().isAdmin(admin);
}

// For tests
@Override
public void setEnvironments(EnvironmentsConfig environments) {
    this.strategy.setEnvironments(environments);
}
// Unique materials of auto-triggered pipelines only; config-repo materials excluded.
@Override
public Set<MaterialConfig> getAllUniqueMaterialsBelongingToAutoPipelines() {
return getUniqueMaterials(true,true);
}
// As above, but config-repo materials are included as well.
@Override
public Set<MaterialConfig> getAllUniqueMaterialsBelongingToAutoPipelinesAndConfigRepos() {
return getUniqueMaterials(true,false);
}
// Every unique pipeline material, manual or auto; config-repo materials excluded.
@Override
public Set<MaterialConfig> getAllUniqueMaterials() {
return getUniqueMaterials(false,true);
}
/**
 * Collects materials from all pipelines, de-duplicated by their SQL criteria.
 *
 * @param ignoreManualPipelines skip non-dependency materials that have auto-update disabled
 * @param ignoreConfigRepos     when false, config-repository materials are added too
 */
private Set<MaterialConfig> getUniqueMaterials(boolean ignoreManualPipelines,boolean ignoreConfigRepos) {
Set<MaterialConfig> materialConfigs = new HashSet<MaterialConfig>();
// NOTE(review): raw Set<Map> — getSqlCriteria() appears to return a Map used purely
// as a de-duplication key; confirm its type parameters before tightening this.
Set<Map> uniqueMaterials = new HashSet<Map>();
for (PipelineConfig pipelineConfig : pipelinesFromAllGroups()) {
for (MaterialConfig materialConfig : pipelineConfig.materialConfigs()) {
if (!uniqueMaterials.contains(materialConfig.getSqlCriteria())) {
// Dependency materials are always kept; non-dependency (SCM/package) materials
// with polling disabled are skipped when ignoreManualPipelines is set.
boolean shouldSkipPolling = !materialConfig.isAutoUpdate();
boolean scmOrPackageMaterial = !(materialConfig instanceof DependencyMaterialConfig);
if (ignoreManualPipelines && scmOrPackageMaterial && shouldSkipPolling) {
continue;
}
materialConfigs.add(materialConfig);
uniqueMaterials.add(materialConfig.getSqlCriteria());
}
}
}
if(!ignoreConfigRepos)
{
for(ConfigRepoConfig configRepo : this.configRepos)
{
MaterialConfig materialConfig = configRepo.getMaterialConfig();
if (!uniqueMaterials.contains(materialConfig.getSqlCriteria())) {
materialConfigs.add(materialConfig);
uniqueMaterials.add(materialConfig.getSqlCriteria());
}
}
}
return materialConfigs;
}
/**
 * Near-duplicate of {@link #getUniqueMaterials(boolean, boolean)} without the
 * config-repo branch; here only ScmMaterialConfig instances (not package materials)
 * are subject to the manual-pipeline filter.
 */
private Set<MaterialConfig> getUniqueMaterialConfigs(boolean ignoreManualPipelines) {
Set<MaterialConfig> materialConfigs = new HashSet<MaterialConfig>();
// De-duplication key set; see NOTE in getUniqueMaterials about the raw Map type.
Set<Map> uniqueMaterials = new HashSet<Map>();
for (PipelineConfig pipelineConfig : pipelinesFromAllGroups()) {
for (MaterialConfig materialConfig : pipelineConfig.materialConfigs()) {
if (!uniqueMaterials.contains(materialConfig.getSqlCriteria())) {
// Skip SCM materials with polling disabled when requested.
if (ignoreManualPipelines && !materialConfig.isAutoUpdate() && materialConfig instanceof ScmMaterialConfig) {
continue;
}
materialConfigs.add(materialConfig);
uniqueMaterials.add(materialConfig.getSqlCriteria());
}
}
}
return materialConfigs;
}
/**
 * Returns the stages of the given pipeline that other pipelines consume as
 * dependency materials.
 */
@Override
public Set<StageConfig> getStagesUsedAsMaterials(PipelineConfig pipelineConfig) {
// Keys are "pipelineName|stageName" strings derived from every dependency material.
Set<String> stagesUsedAsMaterials = new HashSet<String>();
for (MaterialConfig materialConfig : getAllUniqueMaterials()) {
if (materialConfig instanceof DependencyMaterialConfig) {
DependencyMaterialConfig dep = (DependencyMaterialConfig) materialConfig;
stagesUsedAsMaterials.add(dep.getPipelineName() + "|" + dep.getStageName());
}
}
// Keep only the stages of this pipeline whose composite key appears above.
Set<StageConfig> stages = new HashSet<StageConfig>();
for (StageConfig stage : pipelineConfig) {
if (stagesUsedAsMaterials.contains(pipelineConfig.name() + "|" + stage.name())) {
stages.add(stage);
}
}
return stages;
}
/** Creates, registers and returns a new environment with the given name. */
@Override
public EnvironmentConfig addEnvironment(String environmentName) {
BasicEnvironmentConfig environmentConfig = new BasicEnvironmentConfig(new CaseInsensitiveString(environmentName));
this.addEnvironment(environmentConfig);
return environmentConfig;
}
/** Registers an existing environment config; delegates to the strategy. */
@Override
public void addEnvironment(BasicEnvironmentConfig config) {
this.strategy.addEnvironment(config);
}
/**
 * Whether the named pipeline is locked: its explicit lock flag when one is
 * declared, otherwise false.
 */
@Override
public Boolean isPipelineLocked(String pipelineName) {
    PipelineConfig config = pipelineConfigByName(new CaseInsensitiveString(pipelineName));
    if (!config.hasExplicitLock()) {
        return false;
    }
    return config.explicitLock();
}
/**
 * Union of all resources declared on jobs (via a config-tree visitor) and on agents.
 */
@Override
public Set<Resource> getAllResources() {
final HashSet<Resource> resources = new HashSet<Resource>();
accept(new JobConfigVisitor() {
public void visit(PipelineConfig pipelineConfig, StageConfig stageConfig, JobConfig jobConfig) {
resources.addAll(jobConfig.resources());
}
});
for (AgentConfig agent : agents) {
resources.addAll(agent.getResources());
}
return resources;
}
/** All pipeline templates defined in this configuration. */
@Override
public TemplatesConfig getTemplates() {
return templatesConfig;
}
/** Returns the template with the given name, or null if none matches. */
@Override
public PipelineTemplateConfig findTemplate(CaseInsensitiveString templateName) {
for (PipelineTemplateConfig config : templatesConfig) {
if (templateName.equals(config.name())) {
return config;
}
}
return null;
}
/** Registers a new pipeline template. */
@Override
public void addTemplate(PipelineTemplateConfig pipelineTemplate) {
templatesConfig.add(pipelineTemplate);
}
/**
 * Returns the template with the given name, failing loudly (bomb) when absent —
 * unlike {@link #findTemplate(CaseInsensitiveString)}, which returns null.
 */
@Override
public PipelineTemplateConfig getTemplateByName(CaseInsensitiveString pipeline) {
PipelineTemplateConfig template = getTemplates().templateByName(pipeline);
if (template == null) {
throw bomb(String.format("Template %s was not found.", pipeline));
}
return template;
}
/** Replaces the whole template collection. */
@Override
public void setTemplates(TemplatesConfig templates) {
this.templatesConfig = templates;
}
/** Rewires the named pipeline to extract from the named template; delegates to the strategy. */
@Override
public void makePipelineUseTemplate(CaseInsensitiveString pipelineName, CaseInsensitiveString templateName) {
this.strategy.makePipelineUseTemplate(pipelineName, templateName);
}
/**
 * Finds every pipeline that declares a dependency on the given pipeline.
 *
 * @param pipelineName name of the upstream pipeline
 * @return all pipelines that directly depend on {@code pipelineName}
 */
@Override
public Iterable<PipelineConfig> getDownstreamPipelines(String pipelineName) {
    ArrayList<PipelineConfig> configs = new ArrayList<PipelineConfig>();
    // Loop-invariant lookup key: build it once instead of once per pipeline.
    CaseInsensitiveString upstreamName = new CaseInsensitiveString(pipelineName);
    for (PipelineConfig pipelineConfig : pipelinesFromAllGroups()) {
        if (pipelineConfig.dependsOn(upstreamName)) {
            configs.add(pipelineConfig);
        }
    }
    return configs;
}
/**
 * True when the named variable is visible to the given pipeline, either via its
 * environment (when it belongs to one) or via the pipeline's own variable scope.
 */
@Override
public boolean hasVariableInScope(String pipelineName, String variableName) {
    // Build the key once; the original constructed it twice.
    CaseInsensitiveString name = new CaseInsensitiveString(pipelineName);
    EnvironmentConfig environmentConfig = environments.findEnvironmentForPipeline(name);
    if (environmentConfig != null && environmentConfig.hasVariable(variableName)) {
        return true;
    }
    return pipelineConfigByName(name).hasVariableInScope(variableName);
}
/**
 * Effective environment variables for a pipeline: the environment's variables
 * overridden by the pipeline's own, or just the pipeline's when it belongs to
 * no environment.
 */
@Override
public EnvironmentVariablesConfig variablesFor(String pipelineName) {
    // Build the key once; the original constructed it twice.
    CaseInsensitiveString name = new CaseInsensitiveString(pipelineName);
    EnvironmentVariablesConfig pipelineVariables = pipelineConfigByName(name).getVariables();
    EnvironmentConfig environment = this.environments.findEnvironmentForPipeline(name);
    return environment != null ? environment.getVariables().overrideWith(pipelineVariables) : pipelineVariables;
}
/** True if the user (directly or via one of their roles) administers at least one pipeline group. */
@Override
public boolean isGroupAdministrator(final CaseInsensitiveString userName) {
final List<Role> roles = server().security().memberRoleFor(userName);
FindPipelineGroupAdminstrator finder = new FindPipelineGroupAdminstrator(userName, roles);
groups.accept(finder);
return finder.isGroupAdmin;
}
/** MD5 of the underlying configuration, as tracked by the strategy. */
@Override
public String getMd5() {
return this.strategy.getMd5();
}
/** All validation errors already recorded anywhere in this config tree. */
@Override
public List<ConfigErrors> getAllErrors() {
return getAllErrors(this);
}
// Walks the subtree rooted at v and collects pre-existing errors without re-validating.
private List<ConfigErrors> getAllErrors(Validatable v) {
final List<ConfigErrors> allErrors = new ArrayList<ConfigErrors>();
new GoConfigGraphWalker(v).walk(new ErrorCollectingHandler(allErrors) {
@Override
public void handleValidation(Validatable validatable, ValidationContext context) {
// do nothing here
}
});
return allErrors;
}
/** All recorded errors except those found under the given subtree. */
@Override
public List<ConfigErrors> getAllErrorsExceptFor(Validatable skipValidatable) {
List<ConfigErrors> all = getAllErrors();
if (skipValidatable != null) {
all.removeAll(getAllErrors(skipValidatable));
}
return all;
}
/** Re-runs validation over the whole tree and returns every error produced. */
@Override
public List<ConfigErrors> validateAfterPreprocess() {
final List<ConfigErrors> allErrors = new ArrayList<ConfigErrors>();
new GoConfigGraphWalker(this).walk(new ErrorCollectingHandler(allErrors) {
@Override
public void handleValidation(Validatable validatable, ValidationContext context) {
validatable.validate(context);
}
});
return allErrors;
}
/** Copies this config's errors onto the corresponding nodes of another config. */
@Override
public void copyErrorsTo(CruiseConfig to) {
copyErrors(this, to);
}
// Walks both object graphs in parallel, copying errors node-by-node.
public static void copyErrors(Object from, Object to) {
GoConfigParallelGraphWalker walker = new GoConfigParallelGraphWalker(from, to);
walker.walk(new GoConfigParallelGraphWalker.Handler() {
public void handle(Validatable rawObject, Validatable objectWithErrors) {
rawObject.errors().addAll(objectWithErrors.errors());
}
});
}
/** Returns the pipeline group that owns the given pipeline. */
@Override
public PipelineConfigs findGroupOfPipeline(PipelineConfig pipelineConfig) {
String groupName = getGroups().findGroupNameByPipeline(pipelineConfig.name());
return findGroup(groupName);
}
/**
 * Returns the first pipeline that declares the given pipeline as a dependency
 * material, or null when no pipeline depends on it.
 */
@Override
public PipelineConfig findPipelineUsingThisPipelineAsADependency(String pipelineName) {
    // Loop-invariant lookup key: build it once instead of once per pipeline.
    CaseInsensitiveString dependencyName = new CaseInsensitiveString(pipelineName);
    List<PipelineConfig> configs = getAllPipelineConfigs();
    for (PipelineConfig config : configs) {
        DependencyMaterialConfig materialConfig = config.materialConfigs().findDependencyMaterial(dependencyName);
        if (materialConfig != null) {
            return config;
        }
    }
    return null;
}
/**
 * Builds a map from each pipeline name to the list of pipelines it directly
 * triggers (its downstream dependents). Every pipeline appears as a key, even
 * with an empty list.
 */
@Override
public Map<String, List<PipelineConfig>> generatePipelineVsDownstreamMap() {
List<PipelineConfig> pipelineConfigs = getAllPipelineConfigs();
Map<String, List<PipelineConfig>> result = new HashMap<String, List<PipelineConfig>>();
for (PipelineConfig currentPipeline : pipelineConfigs) {
String currentPipelineName = currentPipeline.name().toString();
if (!result.containsKey(currentPipelineName)) {
result.put(currentPipelineName, new ArrayList<PipelineConfig>());
}
// Register this pipeline as downstream of each pipeline it depends on.
for (MaterialConfig materialConfig : currentPipeline.materialConfigs()) {
if (materialConfig instanceof DependencyMaterialConfig) {
String pipelineWhichTriggersMe = ((DependencyMaterialConfig) materialConfig).getPipelineName().toString();
if (!result.containsKey(pipelineWhichTriggersMe)) {
result.put(pipelineWhichTriggersMe, new ArrayList<PipelineConfig>());
}
result.get(pipelineWhichTriggersMe).add(currentPipeline);
}
}
}
return result;
}
/**
 * Pipelines whose artifacts the named pipeline may fetch: its first-level
 * upstream pipelines plus itself.
 */
@Override
public List<PipelineConfig> pipelinesForFetchArtifacts(String pipelineName) {
PipelineConfig currentPipeline = pipelineConfigByName(new CaseInsensitiveString(pipelineName));
List<PipelineConfig> pipelinesForFetchArtifact = currentPipeline.allFirstLevelUpstreamPipelines(this);
pipelinesForFetchArtifact.add(currentPipeline);
return pipelinesForFetchArtifact;
}
/**
 * Maps each template the user may administer (server admins see all) to the
 * names of pipelines built from it.
 *
 * @param username the user whose visibility is being computed
 */
@Override
public Map<CaseInsensitiveString, List<CaseInsensitiveString>> templatesWithPipelinesForUser(String username) {
    HashMap<CaseInsensitiveString, List<CaseInsensitiveString>> templateToPipelines = new HashMap<CaseInsensitiveString, List<CaseInsensitiveString>>();
    // Both values are loop-invariant per user; the original recomputed them for every template.
    boolean isAdmin = isAdministrator(username);
    AdminUser adminUser = new AdminUser(new CaseInsensitiveString(username));
    for (PipelineTemplateConfig template : getTemplates()) {
        if (isAdmin || template.getAuthorization().getAdminsConfig().isAdmin(adminUser, null)) {
            templateToPipelines.put(template.name(), new ArrayList<CaseInsensitiveString>());
        }
    }
    for (PipelineConfig pipelineConfig : getAllPipelineConfigs()) {
        CaseInsensitiveString name = pipelineConfig.getTemplateName();
        if (pipelineConfig.hasTemplate() && templateToPipelines.containsKey(name)) {
            templateToPipelines.get(name).add(pipelineConfig.name());
        }
    }
    return templateToPipelines;
}
/**
 * Whether artifact cleanup is prohibited for the given stage; false when the
 * stage does not exist.
 */
@Override
public boolean isArtifactCleanupProhibited(String pipelineName, String stageName) {
    // Build each key once; the original constructed four CaseInsensitiveStrings.
    CaseInsensitiveString pipeline = new CaseInsensitiveString(pipelineName);
    CaseInsensitiveString stage = new CaseInsensitiveString(stageName);
    if (!hasStageConfigNamed(pipeline, stage, true)) {
        return false;
    }
    return stageConfigByName(pipeline, stage).isArtifactCleanupProhibited();
}
/**
 * Looks up a material configuration by its fingerprint across all pipelines.
 *
 * @return the matching material, or null when no material has that fingerprint
 */
@Override
public MaterialConfig materialConfigFor(String fingerprint) {
    for (MaterialConfig candidate : getUniqueMaterialConfigs(false)) {
        boolean matches = candidate.getFingerprint().equals(fingerprint);
        if (matches) {
            return candidate;
        }
    }
    return null;
}
/** Delegates group-name sanitization to BasicPipelineConfigs. */
@Override
public String sanitizedGroupName(String name) {
return BasicPipelineConfigs.sanitizedGroupName(name);
}
/** Removes the package repository with the given id. */
@Override
public void removePackageRepository(String id) {
packageRepositories.removePackageRepository(id);
}
/** All configured package repositories. */
@Override
public PackageRepositories getPackageRepositories() {
return packageRepositories;
}
/**
 * Creates or updates a package repository. New repositories are assigned a
 * random UUID; existing ones have name, plugin and configuration replaced.
 */
@Override
public void savePackageRepository(final PackageRepository packageRepository) {
packageRepository.clearEmptyConfigurations();
if (packageRepository.isNew()) {
packageRepository.setId(UUID.randomUUID().toString());
packageRepositories.add(packageRepository);
} else {
// NOTE(review): find() could return null for an unknown repo id, which would NPE
// below — confirm callers guarantee the repository exists.
PackageRepository existingPackageRepository = packageRepositories.find(packageRepository.getRepoId());
existingPackageRepository.setName(packageRepository.getName());
existingPackageRepository.setPluginConfiguration(packageRepository.getPluginConfiguration());
existingPackageRepository.setConfiguration(packageRepository.getConfiguration());
}
}
/**
 * Adds a package definition (with a fresh UUID id) to its owning repository.
 */
@Override
public void savePackageDefinition(PackageDefinition packageDefinition) {
packageDefinition.clearEmptyConfigurations();
// NOTE(review): find() could return null if the owning repository is missing — confirm.
PackageRepository packageRepository = packageRepositories.find(packageDefinition.getRepository().getId());
packageDefinition.setId(UUID.randomUUID().toString());
packageRepository.addPackage(packageDefinition);
}
/** Replaces the whole package-repository collection. */
@Override
public void setPackageRepositories(PackageRepositories packageRepositories) {
this.packageRepositories = packageRepositories;
}
/** All configured pluggable SCMs. */
@Override
public SCMs getSCMs() {
return scms;
}
/** Replaces the whole pluggable-SCM collection. */
@Override
public void setSCMs(SCMs scms) {
this.scms = scms;
}
/** True when no pipeline group still references the repository. */
@Override
public boolean canDeletePackageRepository(PackageRepository repository) {
return groups.canDeletePackageRepository(repository);
}
/** True when no pipeline group still references the pluggable SCM. */
@Override
public boolean canDeletePluggableSCMMaterial(SCM scmConfig) {
return groups.canDeletePluggableSCMMaterial(scmConfig);
}
/** Origin of this configuration (e.g. file vs. config repo), per the strategy. */
@Override
public ConfigOrigin getOrigin() {
return strategy.getOrigin();
}
/** Sets the configuration origin; delegates to the strategy. */
@Override
public void setOrigins(ConfigOrigin origins) {
this.strategy.setOrigins(origins);
}
/**
 * Visitor that records whether the given user (directly or via a role) is an
 * admin of at least one visited pipeline group.
 */
private static class FindPipelineGroupAdminstrator implements PipelineGroupVisitor {
    private final CaseInsensitiveString username;
    private final List<Role> roles;
    // Latches to true once any visited group grants admin rights.
    private boolean isGroupAdmin;

    public FindPipelineGroupAdminstrator(CaseInsensitiveString username, List<Role> roles) {
        this.username = username;
        this.roles = roles;
    }

    public void visit(PipelineConfigs pipelineConfigs) {
        boolean adminOfThisGroup = pipelineConfigs.getAuthorization().isUserAnAdmin(username, roles);
        if (adminOfThisGroup) {
            isGroupAdmin = true;
        }
    }
}
/**
 * Graph-walker handler that first lets a subclass run its validation hook and
 * then gathers any errors the visited node carries into a shared list.
 */
private static abstract class ErrorCollectingHandler implements GoConfigGraphWalker.Handler {
    private final List<ConfigErrors> allErrors;

    public ErrorCollectingHandler(List<ConfigErrors> allErrors) {
        this.allErrors = allErrors;
    }

    public void handle(Validatable validatable, ValidationContext context) {
        handleValidation(validatable, context);
        collectErrorsFrom(validatable);
    }

    // Records the node's errors, skipping nodes with none.
    private void collectErrorsFrom(Validatable validatable) {
        ConfigErrors errors = validatable.errors();
        if (errors.isEmpty()) {
            return;
        }
        allErrors.add(errors);
    }

    // Subclass hook: validate (or ignore) the visited node.
    public abstract void handleValidation(Validatable validatable, ValidationContext context);
}
}
| |
/*
* Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.jms.connection;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.QueueConnection;
import javax.jms.QueueConnectionFactory;
import javax.jms.TopicConnection;
import javax.jms.TopicConnectionFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.core.NamedThreadLocal;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
/**
* An adapter for a target JMS {@link javax.jms.ConnectionFactory}, applying the
* given user credentials to every standard {@code createConnection()} call,
* that is, implicitly invoking {@code createConnection(username, password)}
* on the target. All other methods simply delegate to the corresponding methods
* of the target ConnectionFactory.
*
* <p>Can be used to proxy a target JNDI ConnectionFactory that does not have user
* credentials configured. Client code can work with the ConnectionFactory without
* passing in username and password on every {@code createConnection()} call.
*
* <p>In the following example, client code can simply transparently work
* with the preconfigured "myConnectionFactory", implicitly accessing
* "myTargetConnectionFactory" with the specified user credentials.
*
* <pre class="code">
* <bean id="myTargetConnectionFactory" class="org.springframework.jndi.JndiObjectFactoryBean">
* <property name="jndiName" value="java:comp/env/jms/mycf"/>
* </bean>
*
* <bean id="myConnectionFactory" class="org.springframework.jms.connection.UserCredentialsConnectionFactoryAdapter">
* <property name="targetConnectionFactory" ref="myTargetConnectionFactory"/>
* <property name="username" value="myusername"/>
* <property name="password" value="mypassword"/>
* </bean></pre>
*
* <p>If the "username" is empty, this proxy will simply delegate to the standard
* {@code createConnection()} method of the target ConnectionFactory.
* This can be used to keep a UserCredentialsConnectionFactoryAdapter bean
* definition just for the <i>option</i> of implicitly passing in user credentials
* if the particular target ConnectionFactory requires it.
*
* @author Juergen Hoeller
* @since 1.2
* @see #createConnection
* @see #createQueueConnection
* @see #createTopicConnection
*/
public class UserCredentialsConnectionFactoryAdapter
implements ConnectionFactory, QueueConnectionFactory, TopicConnectionFactory, InitializingBean {
private ConnectionFactory targetConnectionFactory;
private String username;
private String password;
private final ThreadLocal<JmsUserCredentials> threadBoundCredentials =
new NamedThreadLocal<JmsUserCredentials>("Current JMS user credentials");
/**
* Set the target ConnectionFactory that this ConnectionFactory should delegate to.
*/
public void setTargetConnectionFactory(ConnectionFactory targetConnectionFactory) {
Assert.notNull(targetConnectionFactory, "'targetConnectionFactory' must not be null");
this.targetConnectionFactory = targetConnectionFactory;
}
/**
* Set the username that this adapter should use for retrieving Connections.
* Default is no specific user.
*/
public void setUsername(String username) {
this.username = username;
}
/**
* Set the password that this adapter should use for retrieving Connections.
* Default is no specific password.
*/
public void setPassword(String password) {
this.password = password;
}
@Override
public void afterPropertiesSet() {
if (this.targetConnectionFactory == null) {
throw new IllegalArgumentException("Property 'targetConnectionFactory' is required");
}
}
/**
* Set user credententials for this proxy and the current thread.
* The given username and password will be applied to all subsequent
* {@code createConnection()} calls on this ConnectionFactory proxy.
* <p>This will override any statically specified user credentials,
* that is, values of the "username" and "password" bean properties.
* @param username the username to apply
* @param password the password to apply
* @see #removeCredentialsFromCurrentThread
*/
public void setCredentialsForCurrentThread(String username, String password) {
this.threadBoundCredentials.set(new JmsUserCredentials(username, password));
}
/**
* Remove any user credentials for this proxy from the current thread.
* Statically specified user credentials apply again afterwards.
* @see #setCredentialsForCurrentThread
*/
public void removeCredentialsFromCurrentThread() {
this.threadBoundCredentials.remove();
}
/**
* Determine whether there are currently thread-bound credentials,
* using them if available, falling back to the statically specified
* username and password (i.e. values of the bean properties) else.
* @see #doCreateConnection
*/
@Override
public final Connection createConnection() throws JMSException {
JmsUserCredentials threadCredentials = this.threadBoundCredentials.get();
if (threadCredentials != null) {
return doCreateConnection(threadCredentials.username, threadCredentials.password);
}
else {
return doCreateConnection(this.username, this.password);
}
}
/**
* Delegate the call straight to the target ConnectionFactory.
*/
@Override
public Connection createConnection(String username, String password) throws JMSException {
return doCreateConnection(username, password);
}
/**
* This implementation delegates to the {@code createConnection(username, password)}
* method of the target ConnectionFactory, passing in the specified user credentials.
* If the specified username is empty, it will simply delegate to the standard
* {@code createConnection()} method of the target ConnectionFactory.
* @param username the username to use
* @param password the password to use
* @return the Connection
* @see javax.jms.ConnectionFactory#createConnection(String, String)
* @see javax.jms.ConnectionFactory#createConnection()
*/
protected Connection doCreateConnection(String username, String password) throws JMSException {
Assert.state(this.targetConnectionFactory != null, "'targetConnectionFactory' is required");
if (StringUtils.hasLength(username)) {
return this.targetConnectionFactory.createConnection(username, password);
}
else {
return this.targetConnectionFactory.createConnection();
}
}
/**
* Determine whether there are currently thread-bound credentials,
* using them if available, falling back to the statically specified
* username and password (i.e. values of the bean properties) else.
* @see #doCreateQueueConnection
*/
@Override
public final QueueConnection createQueueConnection() throws JMSException {
JmsUserCredentials threadCredentials = this.threadBoundCredentials.get();
if (threadCredentials != null) {
return doCreateQueueConnection(threadCredentials.username, threadCredentials.password);
}
else {
return doCreateQueueConnection(this.username, this.password);
}
}
/**
* Delegate the call straight to the target QueueConnectionFactory.
*/
@Override
public QueueConnection createQueueConnection(String username, String password) throws JMSException {
return doCreateQueueConnection(username, password);
}
/**
* This implementation delegates to the {@code createQueueConnection(username, password)}
* method of the target QueueConnectionFactory, passing in the specified user credentials.
* If the specified username is empty, it will simply delegate to the standard
* {@code createQueueConnection()} method of the target ConnectionFactory.
* @param username the username to use
* @param password the password to use
* @return the Connection
* @see javax.jms.QueueConnectionFactory#createQueueConnection(String, String)
* @see javax.jms.QueueConnectionFactory#createQueueConnection()
*/
protected QueueConnection doCreateQueueConnection(String username, String password) throws JMSException {
Assert.state(this.targetConnectionFactory != null, "'targetConnectionFactory' is required");
if (!(this.targetConnectionFactory instanceof QueueConnectionFactory)) {
throw new javax.jms.IllegalStateException("'targetConnectionFactory' is not a QueueConnectionFactory");
}
QueueConnectionFactory queueFactory = (QueueConnectionFactory) this.targetConnectionFactory;
if (StringUtils.hasLength(username)) {
return queueFactory.createQueueConnection(username, password);
}
else {
return queueFactory.createQueueConnection();
}
}
/**
* Determine whether there are currently thread-bound credentials,
* using them if available, falling back to the statically specified
* username and password (i.e. values of the bean properties) else.
* @see #doCreateTopicConnection
*/
@Override
public final TopicConnection createTopicConnection() throws JMSException {
JmsUserCredentials threadCredentials = this.threadBoundCredentials.get();
if (threadCredentials != null) {
return doCreateTopicConnection(threadCredentials.username, threadCredentials.password);
}
else {
return doCreateTopicConnection(this.username, this.password);
}
}
/**
* Delegate the call straight to the target TopicConnectionFactory.
*/
@Override
public TopicConnection createTopicConnection(String username, String password) throws JMSException {
return doCreateTopicConnection(username, password);
}
/**
* This implementation delegates to the {@code createTopicConnection(username, password)}
* method of the target TopicConnectionFactory, passing in the specified user credentials.
* If the specified username is empty, it will simply delegate to the standard
* {@code createTopicConnection()} method of the target ConnectionFactory.
* @param username the username to use
* @param password the password to use
* @return the Connection
* @see javax.jms.TopicConnectionFactory#createTopicConnection(String, String)
* @see javax.jms.TopicConnectionFactory#createTopicConnection()
*/
protected TopicConnection doCreateTopicConnection(String username, String password) throws JMSException {
Assert.state(this.targetConnectionFactory != null, "'targetConnectionFactory' is required");
if (!(this.targetConnectionFactory instanceof TopicConnectionFactory)) {
throw new javax.jms.IllegalStateException("'targetConnectionFactory' is not a TopicConnectionFactory");
}
TopicConnectionFactory queueFactory = (TopicConnectionFactory) this.targetConnectionFactory;
if (StringUtils.hasLength(username)) {
return queueFactory.createTopicConnection(username, password);
}
else {
return queueFactory.createTopicConnection();
}
}
/**
* Inner class used as ThreadLocal value.
*/
private static class JmsUserCredentials {
public final String username;
public final String password;
private JmsUserCredentials(String username, String password) {
this.username = username;
this.password = password;
}
@Override
public String toString() {
return "JmsUserCredentials[username='" + this.username + "',password='" + this.password + "']";
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.nio.file.AccessDeniedException;
import java.util.Collection;
import java.util.List;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Queue;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Queues;
import com.google.common.io.BaseEncoding;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValue;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.deprecation.DeprecationUtils;
import org.apache.gobblin.util.executors.ScalingThreadPoolExecutor;
import org.apache.gobblin.writer.DataWriter;
/**
* A utility class for working with Hadoop.
*/
@Slf4j
public class HadoopUtils {
// Characters that are illegal in HDFS path tokens: whitespace, ':' and '\'.
public static final String HDFS_ILLEGAL_TOKEN_REGEX = "[\\s:\\\\]";
/**
 * A {@link Collection} of all known {@link FileSystem} schemes that do not support atomic renames or copies.
 *
 * <p>
 * The following important properties are useful to remember when writing code that is compatible with S3:
 * <ul>
 * <li>Renames are not atomic, and require copying the entire source file to the destination file</li>
 * <li>Writes to S3 using {@link FileSystem#create(Path)} will first go to the local filesystem, when the stream
 * is closed the local file will be uploaded to S3</li>
 * </ul>
 * </p>
 */
public static final Collection<String> FS_SCHEMES_NON_ATOMIC =
ImmutableSortedSet.orderedBy(String.CASE_INSENSITIVE_ORDER).add("s3").add("s3a").add("s3n").build();
// Config key throttling the maximum queries-per-second issued against a filesystem.
public static final String MAX_FILESYSTEM_QPS = "filesystem.throttling.max.filesystem.qps";
// Older key names still honored for MAX_FILESYSTEM_QPS, for backwards compatibility.
private static final List<String> DEPRECATED_KEYS = Lists.newArrayList("gobblin.copy.max.filesystem.qps");
// Number of attempts made before a rename is considered failed.
private static final int MAX_RENAME_TRIES = 3;
/**
 * Builds a Hadoop {@link Configuration} pre-populated with S3 credentials taken
 * from the environment (when both are present) and with Gobblin's lightweight
 * SFTP filesystem mapping.
 */
public static Configuration newConfiguration() {
    Configuration conf = new Configuration();

    // Explicitly check for S3 environment variables, so that Hadoop can access s3 and s3n URLs.
    // h/t https://github.com/apache/spark/blob/master/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
    String accessKeyId = System.getenv("AWS_ACCESS_KEY_ID");
    String secretAccessKey = System.getenv("AWS_SECRET_ACCESS_KEY");
    boolean s3CredentialsAvailable = (accessKeyId != null && secretAccessKey != null);
    if (s3CredentialsAvailable) {
        conf.set("fs.s3.awsAccessKeyId", accessKeyId);
        conf.set("fs.s3.awsSecretAccessKey", secretAccessKey);
        conf.set("fs.s3n.awsAccessKeyId", accessKeyId);
        conf.set("fs.s3n.awsSecretAccessKey", secretAccessKey);
    }

    // Add a new custom filesystem mapping (caching disabled for it).
    conf.set("fs.sftp.impl", "org.apache.gobblin.source.extractor.extract.sftp.SftpLightWeightFileSystem");
    conf.set("fs.sftp.impl.disable.cache", "true");
    return conf;
}
/**
 * Recursively lists the {@link FileStatus} of everything under {@code path}.
 *
 * @deprecated Use {@link FileListUtils#listFilesRecursively(FileSystem, Path)}.
 */
@Deprecated
public static List<FileStatus> listStatusRecursive(FileSystem fileSystem, Path path) throws IOException {
List<FileStatus> results = Lists.newArrayList();
// walk() (defined elsewhere in this class) accumulates statuses depth-first into results.
walk(results, fileSystem, path);
return results;
}
/**
 * Get the path as a string without scheme or authority.
 *
 * E.g. Converts sftp://user/data/file.txt to /user/data/file.txt
 */
public static String toUriPath(Path path) {
    URI uri = path.toUri();
    return uri.getPath();
}
/**
 * A wrapper around {@link FileSystem#delete(Path, boolean)} which throws {@link IOException} if the given
 * {@link Path} exists, and {@link FileSystem#delete(Path, boolean)} returns False.
 */
public static void deletePath(FileSystem fs, Path f, boolean recursive) throws IOException {
// NOTE(review): exists() + delete() is not atomic; a concurrent deletion between the two
// calls is silently treated as success here.
if (fs.exists(f) && !fs.delete(f, recursive)) {
throw new IOException("Failed to delete: " + f);
}
}
/**
 * Calls deletePath() on each directory in the given list of directories to delete.
 * If moveToTrash is set, it will be moved to trash according to the file system trash policy.
 * Note: when moveToTrash is set, the {@code recursive} flag is not consulted.
 */
public static void deleteDirectories(FileSystem fs, List<String> directoriesToDelete, boolean recursive, boolean moveToTrash) throws IOException {
for (String directory : directoriesToDelete) {
if (moveToTrash) {
moveToTrash(fs, new Path(directory));
} else {
deletePath(fs, new Path(directory), recursive);
}
}
}
/**
 * A wrapper around {@link FileSystem#delete(Path, boolean)} that only deletes a given {@link Path} if it is present
 * on the given {@link FileSystem}.
 */
public static void deleteIfExists(FileSystem fs, Path path, boolean recursive) throws IOException {
if (fs.exists(path)) {
deletePath(fs, path, recursive);
}
}
/**
 * Deletes the given {@link Path}, then walks up the ancestor chain deleting each parent
 * directory that is left empty, stopping at the first non-empty or missing ancestor.
 *
 * @param fs the {@link FileSystem} holding the path
 * @param f the {@link Path} to delete
 * @param recursive whether the initial deletion should be recursive
 */
public static void deletePathAndEmptyAncestors(FileSystem fs, Path f, boolean recursive) throws IOException {
  deletePath(fs, f, recursive);
  for (Path parent = f.getParent(); parent != null; parent = parent.getParent()) {
    if (!fs.exists(parent) || fs.listStatus(parent).length != 0) {
      break;
    }
    deletePath(fs, parent, true);
  }
}
/**
 * Delete all direct children of {@code path} whose file name matches the given regular expression.
 *
 * @param fs Filesystem object
 * @param path base path whose children are examined
 * @param regex regular expression a child's name must match to be deleted
 * @throws IOException on listing or deletion failure
 */
public static void deletePathByRegex(FileSystem fs, final Path path, final String regex) throws IOException {
  // Filter at list time so only matching children are returned.
  FileStatus[] matchingFiles = fs.listStatus(path, child -> child.getName().matches(regex));
  for (final FileStatus matchingFile : matchingFiles) {
    HadoopUtils.deletePath(fs, matchingFile.getPath(), true);
  }
}
/**
 * Moves the object to the filesystem trash according to the file system policy.
 *
 * @param fs FileSystem object
 * @param path Path to the object to be moved to trash.
 * @throws IOException
 */
public static void moveToTrash(FileSystem fs, Path path) throws IOException {
// NOTE(review): builds a fresh Configuration() rather than using fs.getConf(), so the trash
// policy/interval comes from default resources — confirm this is intended.
Trash trash = new Trash(fs, new Configuration());
trash.moveToTrash(path);
}
/**
 * Renames a src {@link Path} on fs {@link FileSystem} to a dst {@link Path}. If fs is a {@link LocalFileSystem} and
 * src is a directory then {@link File#renameTo} is called directly to avoid a directory rename race condition where
 * {@link org.apache.hadoop.fs.RawLocalFileSystem#rename} copies the conflicting src directory into dst resulting in
 * an extra nested level, such as /root/a/b/c/e/e where e is repeated.
 *
 * @param fs the {@link FileSystem} where the src {@link Path} exists
 * @param src the source {@link Path} which will be renamed
 * @param dst the {@link Path} to rename to
 * @return true if rename succeeded, false if rename failed.
 * @throws IOException if rename failed for reasons other than target exists.
 */
public static boolean renamePathHandleLocalFSRace(FileSystem fs, Path src, Path dst) throws IOException {
  // Resolve the undecorated file system once; the original resolved it twice.
  Object underlyingFs = DecoratorUtils.resolveUnderlyingObject(fs);
  if (underlyingFs instanceof LocalFileSystem && fs.isDirectory(src)) {
    LocalFileSystem localFs = (LocalFileSystem) underlyingFs;
    return localFs.pathToFile(src).renameTo(localFs.pathToFile(dst));
  }
  return fs.rename(src, dst);
}
/**
 * A wrapper around {@link FileContext#rename(Path, Path, Options.Rename...)}.
 * Delegates with {@code overwrite == false}.
 */
public static void renamePath(FileContext fc, Path oldName, Path newName) throws IOException {
renamePath(fc, oldName, newName, false);
}
/**
 * A wrapper around {@link FileContext#rename(Path, Path, Options.Rename...)} that maps the
 * boolean {@code overwrite} flag onto the corresponding {@link Options.Rename} option.
 */
public static void renamePath(FileContext fc, Path oldName, Path newName, boolean overwrite)
    throws IOException {
  if (overwrite) {
    fc.rename(oldName, newName, Options.Rename.OVERWRITE);
  } else {
    fc.rename(oldName, newName, Options.Rename.NONE);
  }
}
/**
 * A wrapper around {@link FileSystem#rename(Path, Path)} which throws {@link IOException} if
 * {@link FileSystem#rename(Path, Path)} returns False.
 * Delegates with {@code overwrite == false}.
 */
public static void renamePath(FileSystem fs, Path oldName, Path newName) throws IOException {
renamePath(fs, oldName, newName, false);
}
/**
 * A wrapper around {@link FileSystem#rename(Path, Path)} which throws {@link IOException} if
 * {@link FileSystem#rename(Path, Path)} returns False.
 *
 * <p>When {@code overwrite} is set and the destination exists, the destination is first moved
 * to trash before the rename is attempted.</p>
 */
public static void renamePath(FileSystem fs, Path oldName, Path newName, boolean overwrite) throws IOException {
  // The generic FileSystem implementation of rename-with-options throws when the parent dir of
  // the destination does not exist, which unintentionally fails some jobs; so the options-based
  // rename is only used for DistributedFileSystem to avoid that inconsistency.
  if (fs instanceof DistributedFileSystem) {
    Options.Rename renameOptions = overwrite ? Options.Rename.OVERWRITE : Options.Rename.NONE;
    ((DistributedFileSystem) fs).rename(oldName, newName, renameOptions);
    return;
  }
  if (!fs.exists(oldName)) {
    throw new FileNotFoundException(String.format("Failed to rename %s to %s: src not found", oldName, newName));
  }
  if (fs.exists(newName)) {
    if (!overwrite) {
      throw new FileAlreadyExistsException(String.format("Failed to rename %s to %s: dst already exists", oldName, newName));
    }
    HadoopUtils.moveToTrash(fs, newName);
  }
  if (!fs.rename(oldName, newName)) {
    throw new IOException(String.format("Failed to rename %s to %s", oldName, newName));
  }
}
/**
 * Moves a src {@link Path} from a srcFs {@link FileSystem} to a dst {@link Path} on a dstFs {@link FileSystem}. If
 * the srcFs and the dstFs have the same scheme, and neither of them are S3 schemes, then the {@link Path} is simply
 * renamed. Otherwise, the data is copied from the src {@link Path} to the dst {@link Path}. So this method can handle
 * moving data between different {@link FileSystem} implementations.
 *
 * @param srcFs the source {@link FileSystem} where the src {@link Path} exists
 * @param src the source {@link Path} which will be moved
 * @param dstFs the destination {@link FileSystem} where the dst {@link Path} should be created
 * @param dst the {@link Path} to move data to
 */
public static void movePath(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, Configuration conf)
    throws IOException {
movePath(srcFs, src, dstFs, dst, false, conf);
}
/**
 * Moves a src {@link Path} from a srcFs {@link FileSystem} to a dst {@link Path} on a dstFs {@link FileSystem}. If
 * the srcFs and the dstFs have the same scheme, and neither of them are S3 schemes, then the {@link Path} is simply
 * renamed. Otherwise, the data is copied from the src {@link Path} to the dst {@link Path}. So this method can handle
 * moving data between different {@link FileSystem} implementations.
 *
 * @param srcFs the source {@link FileSystem} where the src {@link Path} exists
 * @param src the source {@link Path} which will be moved
 * @param dstFs the destination {@link FileSystem} where the dst {@link Path} should be created
 * @param dst the {@link Path} to move data to
 * @param overwrite true if the destination should be overwritten; otherwise, false
 */
public static void movePath(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, boolean overwrite,
    Configuration conf) throws IOException {
  if (srcFs.getUri().getScheme().equals(dstFs.getUri().getScheme())
      && !FS_SCHEMES_NON_ATOMIC.contains(srcFs.getUri().getScheme())
      && !FS_SCHEMES_NON_ATOMIC.contains(dstFs.getUri().getScheme())) {
    // Bug fix: the overwrite flag was previously dropped on this branch, so a pre-existing dst
    // failed the rename even when the caller requested overwrite semantics.
    renamePath(srcFs, src, dst, overwrite);
  } else {
    copyPath(srcFs, src, dstFs, dst, true, overwrite, conf);
  }
}
/**
 * Copies data from a src {@link Path} to a dst {@link Path} without deleting the source and
 * without overwriting an existing destination.
 *
 * <p>
 * This method should be used in preference to
 * {@link FileUtil#copy(FileSystem, Path, FileSystem, Path, boolean, boolean, Configuration)}, which does not handle
 * clean up of incomplete files if there is an error while copying data.
 * </p>
 *
 * <p>
 * TODO this method does not handle cleaning up any local files leftover by writing to S3.
 * </p>
 *
 * @param srcFs the source {@link FileSystem} where the src {@link Path} exists
 * @param src the {@link Path} to copy from the source {@link FileSystem}
 * @param dstFs the destination {@link FileSystem} where the dst {@link Path} should be created
 * @param dst the {@link Path} to copy data to
 */
public static void copyPath(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, Configuration conf)
    throws IOException {
copyPath(srcFs, src, dstFs, dst, false, false, conf);
}
/**
 * Copies data from a src {@link Path} to a dst {@link Path} without deleting the source.
 *
 * <p>
 * This method should be used in preference to
 * {@link FileUtil#copy(FileSystem, Path, FileSystem, Path, boolean, boolean, Configuration)}, which does not handle
 * clean up of incomplete files if there is an error while copying data.
 * </p>
 *
 * <p>
 * TODO this method does not handle cleaning up any local files leftover by writing to S3.
 * </p>
 *
 * @param srcFs the source {@link FileSystem} where the src {@link Path} exists
 * @param src the {@link Path} to copy from the source {@link FileSystem}
 * @param dstFs the destination {@link FileSystem} where the dst {@link Path} should be created
 * @param dst the {@link Path} to copy data to
 * @param overwrite true if the destination should be overwritten; otherwise, false
 */
public static void copyPath(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, boolean overwrite,
    Configuration conf) throws IOException {
copyPath(srcFs, src, dstFs, dst, false, overwrite, conf);
}
/**
 * Shared implementation for the public copyPath variants: copies src to dst, optionally deleting
 * the source, and best-effort deletes a partially written destination on any failure.
 *
 * @param srcFs the source {@link FileSystem}
 * @param src the source {@link Path}; must exist
 * @param dstFs the destination {@link FileSystem}
 * @param dst the destination {@link Path}; must not exist unless {@code overwrite} is set
 * @param deleteSource whether to remove the source after a successful copy
 * @param overwrite whether an existing destination may be replaced
 */
private static void copyPath(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, boolean deleteSource,
    boolean overwrite, Configuration conf) throws IOException {
  Preconditions.checkArgument(srcFs.exists(src),
      String.format("Cannot copy from %s to %s because src does not exist", src, dst));
  Preconditions.checkArgument(overwrite || !dstFs.exists(dst),
      String.format("Cannot copy from %s to %s because dst exists", src, dst));
  try {
    if (srcFs instanceof LocalFileSystem || srcFs instanceof RawLocalFileSystem) {
      // Local sources go through copyFromLocalFile; wrap failures with src/dst context.
      try {
        dstFs.copyFromLocalFile(deleteSource, overwrite, src, dst);
      } catch (IOException e) {
        throw new IOException(String.format("Failed to copy %s to %s", src, dst), e);
      }
    } else if (!FileUtil.copy(srcFs, src, dstFs, dst, deleteSource, overwrite, conf)) {
      throw new IOException(String.format("Failed to copy %s to %s", src, dst));
    }
  } catch (Throwable t1) {
    try {
      // Best-effort cleanup of an incomplete destination; the original failure is what matters.
      deleteIfExists(dstFs, dst, true);
    } catch (Throwable t2) {
      // Deliberately ignored: surface t1 instead.
    }
    throw t1;
  }
}
/**
 * Copies a src {@link Path} from a srcFs {@link FileSystem} to a dst {@link Path} on a dstFs {@link FileSystem}. If
 * either the srcFs or dstFs are S3 {@link FileSystem}s (as dictated by {@link #FS_SCHEMES_NON_ATOMIC}) then data is directly
 * copied from the src to the dst. Otherwise data is first copied to a tmp {@link Path}, which is then renamed to the
 * dst.
 *
 * @param srcFs the source {@link FileSystem} where the src {@link Path} exists
 * @param src the {@link Path} to copy from the source {@link FileSystem}
 * @param dstFs the destination {@link FileSystem} where the dst {@link Path} should be created
 * @param dst the {@link Path} to copy data to
 * @param tmp the temporary {@link Path} to use when copying data
 * @param overwriteDst true if the destination and tmp path should should be overwritten, false otherwise
 */
public static void copyFile(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, Path tmp, boolean overwriteDst,
    Configuration conf) throws IOException {
Preconditions.checkArgument(srcFs.isFile(src),
    String.format("Cannot copy from %s to %s because src is not a file", src, dst));
// Non-atomic schemes (e.g. S3) gain nothing from a tmp+rename dance, so copy directly.
if (FS_SCHEMES_NON_ATOMIC.contains(srcFs.getUri().getScheme())
    || FS_SCHEMES_NON_ATOMIC.contains(dstFs.getUri().getScheme())) {
  copyFile(srcFs, src, dstFs, dst, overwriteDst, conf);
} else {
  // Copy to tmp first, then swing tmp into place so readers never see a partial dst.
  copyFile(srcFs, src, dstFs, tmp, overwriteDst, conf);
  try {
    boolean renamed = false;
    if (overwriteDst && dstFs.exists(dst)) {
      try {
        deletePath(dstFs, dst, true);
      } finally {
        // NOTE(review): the rename is attempted even when the delete above throws — the finally
        // block runs regardless. Confirm this best-effort behavior is intentional.
        renamePath(dstFs, tmp, dst);
        renamed = true;
      }
    }
    if (!renamed) {
      renamePath(dstFs, tmp, dst);
    }
  } finally {
    // Clean up the tmp file whether or not the rename succeeded.
    deletePath(dstFs, tmp, true);
  }
}
}
/**
 * Copy a file from a srcFs {@link FileSystem} to a dstFs {@link FileSystem}. The src {@link Path} must be a file,
 * that is {@link FileSystem#isFile(Path)} must return true for src.
 *
 * <p>
 * If overwrite is specified to true, this method may delete the dst directory even if the copy from src to dst fails.
 * </p>
 *
 * @param srcFs the src {@link FileSystem} to copy the file from
 * @param src the src {@link Path} to copy
 * @param dstFs the destination {@link FileSystem} to write to
 * @param dst the destination {@link Path} to write to
 * @param overwrite true if the dst {@link Path} should be overwritten, false otherwise
 */
public static void copyFile(FileSystem srcFs, Path src, FileSystem dstFs, Path dst, boolean overwrite,
    Configuration conf) throws IOException {
  Preconditions.checkArgument(srcFs.isFile(src),
      String.format("Cannot copy from %s to %s because src is not a file", src, dst));
  Preconditions.checkArgument(overwrite || !dstFs.exists(dst),
      String.format("Cannot copy from %s to %s because dst exists", src, dst));
  try (InputStream srcStream = srcFs.open(src); OutputStream dstStream = dstFs.create(dst, overwrite)) {
    IOUtils.copyBytes(srcStream, dstStream, conf, false);
  } catch (Throwable copyFailure) {
    // Best-effort removal of the incomplete destination before rethrowing the real failure.
    try {
      deleteIfExists(dstFs, dst, true);
    } catch (Throwable cleanupFailure) {
      // Deliberately ignored: copyFailure is the error worth surfacing.
    }
    throw copyFailure;
  }
}
/**
 * Depth-first traversal that accumulates every non-directory {@link FileStatus} under
 * {@code path} into {@code results}.
 */
private static void walk(List<FileStatus> results, FileSystem fileSystem, Path path) throws IOException {
  for (FileStatus status : fileSystem.listStatus(path)) {
    if (status.isDirectory()) {
      walk(results, fileSystem, status.getPath());
    } else {
      results.add(status);
    }
  }
}
/**
 * This method is an additive implementation of the {@link FileSystem#rename(Path, Path)} method. It moves all the
 * files/directories under 'from' path to the 'to' path without overwriting existing directories in the 'to' path.
 *
 * <p>
 * The rename operation happens at the first non-existent sub-directory. If a directory at destination path already
 * exists, it recursively tries to move sub-directories. If all the sub-directories also exist at the destination,
 * a file level move is done
 * </p>
 *
 * @param fileSystem on which the data needs to be moved
 * @param from path of the data to be moved
 * @param to path of the data to be moved
 */
public static void renameRecursively(FileSystem fileSystem, Path from, Path to) throws IOException {
log.info(String.format("Recursively renaming %s in %s to %s.", from, fileSystem.getUri(), to));
// Throttle FS calls to at most 10000 qps to avoid overwhelming the namenode.
FileSystem throttledFS = getOptionallyThrottledFileSystem(fileSystem, 10000);
ExecutorService executorService = ScalingThreadPoolExecutor.newScalingThreadPool(1, 100, 100,
    ExecutorsUtils.newThreadFactory(Optional.of(log), Optional.of("rename-thread-%d")));
Queue<Future<?>> futures = Queues.newConcurrentLinkedQueue();
try {
  if (!fileSystem.exists(from)) {
    throw new IOException("Trying to rename a path that does not exist! " + from);
  }
  futures.add(executorService
      .submit(new RenameRecursively(throttledFS, fileSystem.getFileStatus(from), to, executorService, futures)));
  int futuresUsed = 0;
  // Drain the queue; RenameRecursively tasks enqueue child futures before their own future
  // completes, so an empty queue means all work has finished.
  while (!futures.isEmpty()) {
    try {
      futures.poll().get();
      futuresUsed++;
    } catch (ExecutionException | InterruptedException ee) {
      throw new IOException(ee.getCause());
    }
  }
  log.info(String.format("Recursive renaming of %s to %s. (details: used %d futures)", from, to, futuresUsed));
} finally {
  ExecutorsUtils.shutdownExecutorService(executorService, Optional.of(log), 1, TimeUnit.SECONDS);
}
}
/**
 * Calls {@link #getOptionallyThrottledFileSystem(FileSystem, int)} parsing the qps from the input {@link State}
 * at key {@link #MAX_FILESYSTEM_QPS}; returns the input {@link FileSystem} unchanged when no qps is configured.
 * @throws IOException
 */
public static FileSystem getOptionallyThrottledFileSystem(FileSystem fs, State state) throws IOException {
  DeprecationUtils.renameDeprecatedKeys(state, MAX_FILESYSTEM_QPS, DEPRECATED_KEYS);
  return state.contains(MAX_FILESYSTEM_QPS)
      ? getOptionallyThrottledFileSystem(fs, state.getPropAsInt(MAX_FILESYSTEM_QPS))
      : fs;
}
/**
 * Get a throttled {@link FileSystem} that limits the number of queries per second to a {@link FileSystem}. If
 * the input qps is non-positive, no such throttling will be performed.
 * @throws IOException if the rate-controlled wrapper cannot be created
 */
public static FileSystem getOptionallyThrottledFileSystem(FileSystem fs, int qpsLimit) throws IOException {
  // If the file system is already rate controlled somewhere in its decorator chain, don't wrap again.
  if (fs instanceof Decorator) {
    for (Object decorator : DecoratorUtils.getDecoratorLineage(fs)) {
      if (decorator instanceof RateControlledFileSystem) {
        return fs;
      }
    }
  }
  if (qpsLimit <= 0) {
    return fs;
  }
  try {
    RateControlledFileSystem throttledFs = new RateControlledFileSystem(fs, qpsLimit);
    throttledFs.startRateControl();
    return throttledFs;
  } catch (ExecutionException ee) {
    throw new IOException("Could not create throttled FileSystem.", ee);
  }
}
/**
 * A task that moves one {@link FileStatus} to a target {@link Path}. If the target already
 * exists and the source is a directory, a child task is submitted for each entry so the two
 * trees are merged; existing files at the target are left untouched.
 */
@AllArgsConstructor
private static class RenameRecursively implements Runnable {
  private final FileSystem fileSystem;
  private final FileStatus from;
  private final Path to;
  private final ExecutorService executorService;
  private final Queue<Future<?>> futures;
  @Override
  public void run() {
    try {
      boolean moveSucceeded;
      // Directories use the synchronized rename (collisions on the target are likely); files use
      // the unsynchronized variant for performance since file collisions are much rarer.
      try {
        moveSucceeded = this.from.isDirectory()
            ? safeRenameIfNotExists(this.fileSystem, this.from.getPath(), this.to)
            : unsafeRenameIfNotExists(this.fileSystem, this.from.getPath(), this.to);
      } catch (AccessDeniedException e) {
        // If an AccessDeniedException occurs for a directory then assume that it exists and continue the
        // recursive renaming. If the error occurs for a file then re-raise the exception since the existence check
        // is required to determine whether to copy the file.
        if (!this.from.isDirectory()) {
          throw e;
        }
        moveSucceeded = false;
      }
      if (moveSucceeded) {
        return;
      }
      if (this.from.isDirectory()) {
        // Target directory already exists: recurse into each child, preserving its relative path.
        for (FileStatus fromFile : this.fileSystem.listStatus(this.from.getPath())) {
          Path relativeFilePath = new Path(StringUtils.substringAfter(fromFile.getPath().toString(),
              this.from.getPath().toString() + Path.SEPARATOR));
          Path toFilePath = new Path(this.to, relativeFilePath);
          this.futures.add(this.executorService.submit(
              new RenameRecursively(this.fileSystem, fromFile, toFilePath, this.executorService, this.futures)));
        }
      } else {
        log.info(String.format("File already exists %s. Will not rewrite", this.to));
      }
    } catch (IOException ioe) {
      throw new RuntimeException("Failed to rename " + this.from.getPath() + " to " + this.to, ioe);
    }
  }
}
/**
 * Renames from to to if to doesn't exist in a thread-safe way. This method is necessary because
 * {@link FileSystem#rename} is inconsistent across file system implementations, e.g. in some of them rename(foo, bar)
 * will create bar/foo if bar already existed, but it will only create bar if it didn't.
 *
 * <p>
 * The thread-safety is only guaranteed among calls to this method. An external modification to the relevant
 * target directory could still cause unexpected results in the renaming.
 * </p>
 *
 * @param fs filesystem where rename will be executed.
 * @param from origin {@link Path}.
 * @param to target {@link Path}.
 * @return true if rename succeeded, false if the target already exists.
 * @throws IOException if rename failed for reasons other than target exists.
 */
// The synchronized keyword (locking on the HadoopUtils class) is what distinguishes this from
// unsafeRenameIfNotExists; all the logic lives in the unsafe variant.
public synchronized static boolean safeRenameIfNotExists(FileSystem fs, Path from, Path to) throws IOException {
return unsafeRenameIfNotExists(fs, from, to);
}
/**
 * Renames from to to if to doesn't exist in a non-thread-safe way.
 *
 * @param fs filesystem where rename will be executed.
 * @param from origin {@link Path}.
 * @param to target {@link Path}.
 * @return true if rename succeeded, false if the target already exists.
 * @throws IOException if rename failed for reasons other than target exists.
 */
public static boolean unsafeRenameIfNotExists(FileSystem fs, Path from, Path to) throws IOException {
  if (fs.exists(to)) {
    return false;
  }
  if (!fs.exists(to.getParent())) {
    fs.mkdirs(to.getParent());
  }
  if (renamePathHandleLocalFSRace(fs, from, to)) {
    return true;
  }
  // The rename reported failure: if the target now exists, another renamer won the race;
  // otherwise the failure is genuine and must be surfaced.
  if (!fs.exists(to)) {
    throw new IOException(String.format("Failed to rename %s to %s.", from, to));
  }
  return false;
}
/**
 * A thread safe variation of {@link #renamePath(FileSystem, Path, Path)} which can be used in
 * multi-threaded/multi-mapper environment. The rename operation always happens at file level hence directories are
 * not overwritten under the 'to' path.
 *
 * <p>
 * If the contents of destination 'to' path is not expected to be modified concurrently, use
 * {@link #renamePath(FileSystem, Path, Path)} which is faster and more optimized
 * </p>
 *
 * <b>NOTE: This does not seem to be working for all {@link FileSystem} implementations. Use
 * {@link #renameRecursively(FileSystem, Path, Path)}</b>
 *
 * @param fileSystem on which the data needs to be moved
 * @param from path of the data to be moved
 * @param to path of the data to be moved
 *
 */
public static void safeRenameRecursively(FileSystem fileSystem, Path from, Path to) throws IOException {
for (FileStatus fromFile : FileListUtils.listFilesRecursively(fileSystem, from)) {
  // Rebase each file's path from the 'from' root onto the 'to' root.
  Path relativeFilePath =
      new Path(StringUtils.substringAfter(fromFile.getPath().toString(), from.toString() + Path.SEPARATOR));
  Path toFilePath = new Path(to, relativeFilePath);
  if (!fileSystem.exists(toFilePath)) {
    boolean renamed = false;
    // underlying file open can fail with file not found error due to some race condition
    // when the parent directory is created in another thread, so retry a few times
    for (int i = 0; !renamed && i < MAX_RENAME_TRIES; i++) {
      try {
        renamed = fileSystem.rename(fromFile.getPath(), toFilePath);
        // Only a FileNotFoundException triggers a retry; any other outcome exits the loop.
        break;
      } catch (FileNotFoundException e) {
        if (i + 1 >= MAX_RENAME_TRIES) {
          throw e;
        }
      }
    }
    if (!renamed) {
      throw new IOException(String.format("Failed to rename %s to %s.", fromFile.getPath(), toFilePath));
    }
    log.info(String.format("Renamed %s to %s", fromFile.getPath(), toFilePath));
  } else {
    log.info(String.format("File already exists %s. Will not rewrite", toFilePath));
  }
}
}
/**
 * Provides a Hadoop {@link Configuration} built from the given {@link State}, with no
 * encrypted-path resolution.
 */
public static Configuration getConfFromState(State state) {
return getConfFromState(state, Optional.<String> absent());
}
/**
 * Provides Hadoop configuration given state.
 * It also supports decrypting values on "encryptedPath".
 * Note that this encryptedPath path will be removed from full path of each config key and leaving only child path on the key(s).
 * If there's same config path as child path, the one stripped will have higher priority.
 *
 * e.g:
 * - encryptedPath: writer.fs.encrypted
 * before: writer.fs.encrypted.secret
 * after: secret
 *
 * Common use case for encryptedPath:
 * When there is an encrypted credential in the job properties and you'd like the Filesystem to get the decrypted value.
 *
 * @param state source {@link State} whose properties populate the configuration.
 * @param encryptedPath Optional. If provided, config that is on this path will be decrypted. @see ConfigUtils.resolveEncrypted
 * Note that config on encryptedPath will be included in the end result even it's not part of includeOnlyPath
 * @return Hadoop Configuration.
 */
public static Configuration getConfFromState(State state, Optional<String> encryptedPath) {
Config config = ConfigFactory.parseProperties(state.getProperties());
if (encryptedPath.isPresent()) {
  config = ConfigUtils.resolveEncrypted(config, encryptedPath);
}
Configuration conf = newConfiguration();
for (Entry<String, ConfigValue> entry : config.entrySet()) {
  conf.set(entry.getKey(), entry.getValue().unwrapped().toString());
}
return conf;
}
/**
 * Builds a Hadoop {@link Configuration} containing every entry of the given {@link Properties}.
 */
public static Configuration getConfFromProperties(Properties properties) {
  Configuration conf = newConfiguration();
  properties.stringPropertyNames().forEach(name -> conf.set(name, properties.getProperty(name)));
  return conf;
}
/**
 * Builds a {@link State} containing every key/value pair of the given Hadoop {@link Configuration}.
 */
public static State getStateFromConf(Configuration conf) {
  State state = new State();
  for (Entry<String, String> confEntry : conf) {
    state.setProp(confEntry.getKey(), confEntry.getValue());
  }
  return state;
}
/**
 * Set the group associated with a given path.
 *
 * @param fs the {@link FileSystem} instance used to perform the file operation
 * @param path the given path
 * @param group the group associated with the path
 * @throws IOException
 */
public static void setGroup(FileSystem fs, Path path, String group) throws IOException {
// Preserve the current owner; only the group is changed.
fs.setOwner(path, fs.getFileStatus(path).getOwner(), group);
}
/**
 * Serialize a {@link Writable} object into a base64-encoded string.
 *
 * @param writable the {@link Writable} object to be serialized
 * @return a string serialized from the {@link Writable} object
 * @throws IOException if there's something wrong with the serialization
 */
public static String serializeToString(Writable writable) throws IOException {
  try (ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
      DataOutputStream dataOut = new DataOutputStream(bytesOut)) {
    writable.write(dataOut);
    return BaseEncoding.base64().encode(bytesOut.toByteArray());
  }
}
/**
 * Deserialize a {@link Writable} object from a string, using a fresh default {@link Configuration}.
 *
 * @param writableClass the {@link Writable} implementation class
 * @param serializedWritableStr the string containing a serialized {@link Writable} object
 * @return a {@link Writable} deserialized from the string
 * @throws IOException if there's something wrong with the deserialization
 */
public static Writable deserializeFromString(Class<? extends Writable> writableClass, String serializedWritableStr)
    throws IOException {
return deserializeFromString(writableClass, serializedWritableStr, new Configuration());
}
/**
 * Deserialize a {@link Writable} object from a base64-encoded string.
 *
 * @param writableClass the {@link Writable} implementation class
 * @param serializedWritableStr the string containing a serialized {@link Writable} object
 * @param configuration a {@link Configuration} object containing Hadoop configuration properties
 * @return a {@link Writable} deserialized from the string
 * @throws IOException if there's something wrong with the deserialization
 */
public static Writable deserializeFromString(Class<? extends Writable> writableClass, String serializedWritableStr,
    Configuration configuration) throws IOException {
  byte[] decodedBytes = BaseEncoding.base64().decode(serializedWritableStr);
  try (ByteArrayInputStream bytesIn = new ByteArrayInputStream(decodedBytes);
      DataInputStream dataIn = new DataInputStream(bytesIn)) {
    // ReflectionUtils honors Configurable implementations by injecting the configuration.
    Writable instance = ReflectionUtils.newInstance(writableClass, configuration);
    instance.readFields(dataIn);
    return instance;
  }
}
/**
 * Given a {@link FsPermission} objects, set a key, value pair in the given {@link State} for the writer to
 * use when creating files. This method should be used in conjunction with {@link #deserializeWriterFilePermissions(State, int, int)}.
 */
public static void serializeWriterFilePermissions(State state, int numBranches, int branchId,
    FsPermission fsPermissions) {
// The branch-qualified key ensures per-branch permissions do not collide.
serializeFsPermissions(state,
    ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_PERMISSIONS, numBranches, branchId),
    fsPermissions);
}
/**
 * Given a {@link FsPermission} objects, set a key, value pair in the given {@link State} for the writer to
 * use when creating directories. This method should be used in conjunction with {@link #deserializeWriterDirPermissions(State, int, int)}.
 */
public static void serializeWriterDirPermissions(State state, int numBranches, int branchId,
    FsPermission fsPermissions) {
// The branch-qualified key ensures per-branch permissions do not collide.
serializeFsPermissions(state,
    ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_DIR_PERMISSIONS, numBranches, branchId),
    fsPermissions);
}
/**
 * Helper method that serializes a {@link FsPermission} object as a zero-padded four-digit
 * octal string (e.g. 0755) under the given key.
 */
private static void serializeFsPermissions(State state, String key, FsPermission fsPermissions) {
state.setProp(key, String.format("%04o", fsPermissions.toShort()));
}
/**
 * Given a {@link String} in octal notation, set a key, value pair in the given {@link State} for the writer to
 * use when creating files. This method should be used in conjunction with {@link #deserializeWriterFilePermissions(State, int, int)}.
 */
public static void setWriterFileOctalPermissions(State state, int numBranches, int branchId,
    String octalPermissions) {
state.setProp(
    ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_PERMISSIONS, numBranches, branchId),
    octalPermissions);
}
/**
 * Given a {@link String} in octal notation, set a key, value pair in the given {@link State} for the writer to
 * use when creating directories. This method should be used in conjunction with {@link #deserializeWriterDirPermissions(State, int, int)}.
 */
public static void setWriterDirOctalPermissions(State state, int numBranches, int branchId, String octalPermissions) {
state.setProp(
    ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_DIR_PERMISSIONS, numBranches, branchId),
    octalPermissions);
}
/**
 * Deserializes a {@link FsPermission}s object that should be used when a {@link DataWriter} is writing a file.
 * Falls back to {@link FsPermission#getDefault()} when the branch-qualified property is absent.
 */
public static FsPermission deserializeWriterFilePermissions(State state, int numBranches, int branchId) {
return new FsPermission(state.getPropAsShortWithRadix(
    ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_PERMISSIONS, numBranches, branchId),
    FsPermission.getDefault().toShort(), ConfigurationKeys.PERMISSION_PARSING_RADIX));
}
/**
 * Deserializes a {@link FsPermission}s object that should be used when a {@link DataWriter} is creating directories.
 * Falls back to {@link FsPermission#getDefault()} when the branch-qualified property is absent.
 */
public static FsPermission deserializeWriterDirPermissions(State state, int numBranches, int branchId) {
return new FsPermission(state.getPropAsShortWithRadix(
    ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_DIR_PERMISSIONS, numBranches, branchId),
    FsPermission.getDefault().toShort(), ConfigurationKeys.PERMISSION_PARSING_RADIX));
}
/**
 * Get {@link FsPermission} from a {@link State} object.
 *
 * @param props A {@link State} containing properties.
 * @param propName The property name for the permission. If not contained in the given state,
 * defaultPermission will be used.
 * @param defaultPermission default permission if propName is not contained in props.
 * @return An {@link FsPermission} object.
 */
public static FsPermission deserializeFsPermission(State props, String propName, FsPermission defaultPermission) {
  // Permission strings are stored in octal, so parse with the permission radix.
  return new FsPermission(props.getPropAsShortWithRadix(propName, defaultPermission.toShort(),
      ConfigurationKeys.PERMISSION_PARSING_RADIX));
}
/**
 * Remove illegal HDFS path characters from the given path. Illegal characters will be replaced
 * with the given substitute.
 *
 * @param path the path string to sanitize
 * @param substitute replacement text; must itself contain no illegal characters
 * @return the sanitized path string
 */
public static String sanitizePath(String path, String substitute) {
  // The substitute itself must not reintroduce illegal characters.
  boolean substituteIsClean = substitute.replaceAll(HDFS_ILLEGAL_TOKEN_REGEX, "").equals(substitute);
  Preconditions.checkArgument(substituteIsClean, "substitute contains illegal characters: " + substitute);
  return path.replaceAll(HDFS_ILLEGAL_TOKEN_REGEX, substitute);
}
/**
 * Remove illegal HDFS path characters from the given path. Illegal characters will be replaced
 * with the given substitute. Convenience overload for {@link Path} arguments.
 */
public static Path sanitizePath(Path path, String substitute) {
return new Path(sanitizePath(path.toString(), substitute));
}
/**
 * Try to set owner and permissions for the path, recursing into directories.
 * Does nothing unless both owner and group are present; never throws — IO failures are
 * logged as warnings instead.
 */
public static void setPermissions(Path location, Optional<String> owner, Optional<String> group, FileSystem fs,
    FsPermission permission) {
  try {
    if (!owner.isPresent() || !group.isPresent()) {
      return;
    }
    fs.setOwner(location, owner.get(), group.get());
    fs.setPermission(location, permission);
    if (fs.isDirectory(location)) {
      for (FileStatus child : fs.listStatus(location)) {
        setPermissions(child.getPath(), owner, group, fs, permission);
      }
    }
  } catch (IOException e) {
    log.warn("Exception occurred while trying to change permissions : " + e.getMessage());
  }
}
/**
 * Returns true if the given path is a file, or is a directory tree containing at least one file
 * anywhere beneath it; an empty directory tree yields false.
 */
public static boolean hasContent(FileSystem fs, Path path)
    throws IOException {
  if (!fs.isDirectory(path)) {
    return true;
  }
  for (FileStatus child : fs.listStatus(path)) {
    if (hasContent(fs, child.getPath())) {
      return true;
    }
  }
  return false;
}
/**
 * Add "gobblin-site.xml" as a {@link Configuration} resource.
 * Affects all subsequently created {@link Configuration} instances process-wide.
 */
public static void addGobblinSite() {
Configuration.addDefaultResource("gobblin-site.xml");
}
/**
 * Get a {@link FileSystem} object for the uri specified at {@link ConfigurationKeys#SOURCE_FILEBASED_FS_URI},
 * defaulting to the local file system, optionally rate-throttled per the state's qps setting.
 * @throws IOException
 */
public static FileSystem getSourceFileSystem(State state) throws IOException {
// Resolve encrypted credentials on the source-filebased path before building the FS config.
Configuration conf = HadoopUtils.getConfFromState(state, Optional.of(ConfigurationKeys.SOURCE_FILEBASED_ENCRYPTED_CONFIG_PATH));
String uri = state.getProp(ConfigurationKeys.SOURCE_FILEBASED_FS_URI, ConfigurationKeys.LOCAL_FS_URI);
return HadoopUtils.getOptionallyThrottledFileSystem(FileSystem.get(URI.create(uri), conf), state);
}
/**
 * Get a {@link FileSystem} object for the uri specified at {@link ConfigurationKeys#WRITER_FILE_SYSTEM_URI},
 * optionally rate-throttled per the state's qps setting.
 * @throws IOException
 */
public static FileSystem getWriterFileSystem(State state, int numBranches, int branchId)
    throws IOException {
return HadoopUtils.getOptionallyThrottledFileSystem(WriterUtils.getWriterFS(state, numBranches, branchId), state);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support.replication;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.UnavailableShardsException;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.replication.ReplicationOperation.ReplicaResponse;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ESAllocationTestCase;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.AllocationId;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardClosedException;
import org.elasticsearch.index.shard.ReplicationGroup;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardNotFoundException;
import org.elasticsearch.indices.IndexClosedException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.cluster.ClusterStateChanges;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.CapturingTransport;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.MockTcpTransport;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseOptions;
import org.elasticsearch.transport.TransportService;
import org.hamcrest.Matcher;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state;
import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.stateWithActivePrimary;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_WAIT_FOR_ACTIVE_SHARDS;
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class TransportReplicationActionTests extends ESTestCase {
/**
 * Takes a request that was sent by a {@link TransportReplicationAction} and captured
 * and returns the underlying request if it's wrapped or the original (cast to the expected type).
 *
 * This will throw a {@link ClassCastException} if the request is of the wrong type.
 */
public static <R extends ReplicationRequest> R resolveRequest(TransportRequest requestOrWrappedRequest) {
    // Concrete shard requests wrap the original replication request; unwrap before casting.
    if (requestOrWrappedRequest instanceof TransportReplicationAction.ConcreteShardRequest) {
        requestOrWrappedRequest = ((TransportReplicationAction.ConcreteShardRequest<?>)requestOrWrappedRequest).getRequest();
    }
    // Unchecked cast: callers are expected to know the concrete request type.
    return (R) requestOrWrappedRequest;
}
// Shared across all tests in the class; created once in beforeClass and torn down in afterClass.
private static ThreadPool threadPool;
// Per-test fixtures, (re)initialized in setUp.
private ClusterService clusterService;
private TransportService transportService;
private CapturingTransport transport;
private TestAction action;
private ShardStateAction shardStateAction;
/**
 * TransportReplicationAction needs an instance of IndexShard to count operations.
 * NOTE(review): this refers to an "indexShards" member reset to null before each test and
 * initialized on demand — not visible in this portion of the class; confirm it still exists.
 */
@BeforeClass
public static void beforeClass() {
    // One thread pool for the whole test class; terminated in afterClass.
    threadPool = new TestThreadPool("ShardReplicationTests");
}
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    // Order matters: the capturing transport and cluster service are built first, the transport
    // service on top of them must be started before the action under test is constructed.
    transport = new CapturingTransport();
    clusterService = createClusterService(threadPool);
    transportService = transport.createCapturingTransportService(clusterService.getSettings(), threadPool,
        TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet());
    transportService.start();
    transportService.acceptIncomingRequests();
    shardStateAction = new ShardStateAction(Settings.EMPTY, clusterService, transportService, null, null, threadPool);
    action = new TestAction(Settings.EMPTY, "internal:testAction", transportService, clusterService, shardStateAction, threadPool);
}
@Override
@After
public void tearDown() throws Exception {
    super.tearDown();
    // Only the per-test cluster service is closed here; the shared thread pool lives until afterClass.
    clusterService.close();
}
@AfterClass
public static void afterClass() {
    // Give in-flight tasks up to 30 seconds to finish before forcefully terminating.
    ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
    threadPool = null;
}
/**
 * Asserts that the given future has failed (or fails) with a cause of the given class;
 * fails the test with {@code msg} if the future completes normally.
 */
<T> void assertListenerThrows(String msg, PlainActionFuture<T> listener, Class<?> klass) throws InterruptedException {
    try {
        listener.get();
    } catch (ExecutionException ex) {
        // Expected path: the future failed; verify the cause type and we're done.
        assertThat(ex.getCause(), instanceOf(klass));
        return;
    }
    // The future completed normally — that's a test failure.
    fail(msg);
}
/**
 * Tests that global cluster blocks are honored by the reroute phase: a non-retryable block fails
 * the operation, a retryable block leads to a retry (or timeout), and an action that declares no
 * global block level skips the block check entirely.
 */
public void testBlocks() throws ExecutionException, InterruptedException {
    Request request = new Request();
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    ReplicationTask task = maybeTask();
    // Action that checks the WRITE global block level.
    TestAction action = new TestAction(Settings.EMPTY, "internal:testActionWithBlocks",
            transportService, clusterService, shardStateAction, threadPool) {
        @Override
        protected ClusterBlockLevel globalBlockLevel() {
            return ClusterBlockLevel.WRITE;
        }
    };
    // Non-retryable global block: the operation must fail immediately.
    ClusterBlocks.Builder block = ClusterBlocks.builder().addGlobalBlock(new ClusterBlock(1, "non retryable", false, true,
            false, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
    setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block));
    TestAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener);
    reroutePhase.run();
    assertListenerThrows("primary phase should fail operation", listener, ClusterBlockException.class);
    assertPhase(task, "failed");
    // Retryable global block with a short timeout: the operation must time out with a block exception.
    block = ClusterBlocks.builder()
            .addGlobalBlock(new ClusterBlock(1, "retryable", true, true, false, RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
    setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block));
    listener = new PlainActionFuture<>();
    reroutePhase = action.new ReroutePhase(task, new Request().timeout("5ms"), listener);
    reroutePhase.run();
    assertListenerThrows("failed to timeout on retryable block", listener, ClusterBlockException.class);
    assertPhase(task, "failed");
    assertFalse(request.isRetrySet.get());
    // Retryable block without a timeout: the operation must wait for a retry.
    listener = new PlainActionFuture<>();
    reroutePhase = action.new ReroutePhase(task, request = new Request(), listener);
    reroutePhase.run();
    assertFalse("primary phase should wait on retryable block", listener.isDone());
    assertPhase(task, "waiting_for_retry");
    assertTrue(request.isRetrySet.get());
    // Swapping in a non-retryable block must fail the waiting operation.
    block = ClusterBlocks.builder().addGlobalBlock(new ClusterBlock(1, "non retryable", false, true, false,
            RestStatus.SERVICE_UNAVAILABLE, ClusterBlockLevel.ALL));
    setState(clusterService, ClusterState.builder(clusterService.state()).blocks(block));
    assertListenerThrows("primary phase should fail operation when moving from a retryable block to a non-retryable one", listener,
            ClusterBlockException.class);
    assertIndexShardUninitialized();
    // An action that declares no global block level ignores blocks and proceeds to index resolution
    // (the index does not exist, hence the IndexNotFoundException below).
    action = new TestAction(Settings.EMPTY, "internal:testActionWithNoBlocks", transportService, clusterService, shardStateAction,
            threadPool) {
        @Override
        protected ClusterBlockLevel globalBlockLevel() {
            return null;
        }
    };
    listener = new PlainActionFuture<>();
    reroutePhase = action.new ReroutePhase(task, new Request().timeout("5ms"), listener);
    reroutePhase.run();
    assertListenerThrows("should fail with an IndexNotFoundException when no blocks checked", listener, IndexNotFoundException.class);
}
public void assertIndexShardUninitialized() {
    // `count` is declared elsewhere in this class; presumably it tracks acquired shard
    // operation permits — zero meaning no shard operation was ever started. TODO confirm.
    assertEquals(0, count.get());
}
/**
 * Tests that a request against a not-yet-started primary either times out or waits for a retry,
 * and is forwarded to the primary node once the primary is started.
 */
public void testNotStartedPrimary() throws InterruptedException, ExecutionException {
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    // no replicas in order to skip the replication part
    setState(clusterService, state(index, true,
        randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED));
    ReplicationTask task = maybeTask();
    logger.debug("--> using initial state:\n{}", clusterService.state());
    // With a 1ms timeout the unassigned primary must produce an UnavailableShardsException.
    Request request = new Request(shardId).timeout("1ms");
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    TestAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener);
    reroutePhase.run();
    assertListenerThrows("unassigned primary didn't cause a timeout", listener, UnavailableShardsException.class);
    assertPhase(task, "failed");
    assertTrue(request.isRetrySet.get());
    // Without a timeout the operation must stay pending, waiting for a retry.
    request = new Request(shardId);
    listener = new PlainActionFuture<>();
    reroutePhase = action.new ReroutePhase(task, request, listener);
    reroutePhase.run();
    assertFalse("unassigned primary didn't cause a retry", listener.isDone());
    assertPhase(task, "waiting_for_retry");
    assertTrue(request.isRetrySet.get());
    // Start the primary: the pending request must now be forwarded to the primary node.
    setState(clusterService, state(index, true, ShardRoutingState.STARTED));
    logger.debug("--> primary assigned state:\n{}", clusterService.state());
    final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id());
    final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId();
    final List<CapturingTransport.CapturedRequest> capturedRequests =
        transport.getCapturedRequestsByTargetNodeAndClear().get(primaryNodeId);
    assertThat(capturedRequests, notNullValue());
    assertThat(capturedRequests.size(), equalTo(1));
    assertThat(capturedRequests.get(0).action, equalTo("internal:testAction[p]"));
    assertIndexShardCounter(0);
}
/**
 * When relocating a primary shard, there is a cluster state update at the end of relocation where the active primary is switched from
 * the relocation source to the relocation target. If relocation source receives and processes this cluster state
 * before the relocation target, there is a time span where relocation source believes active primary to be on
 * relocation target and relocation target believes active primary to be on relocation source. This results in replication
 * requests being sent back and forth.
 * <p>
 * This test checks that replication request is not routed back from relocation target to relocation source in case of
 * stale index routing table on relocation target.
 */
public void testNoRerouteOnStaleClusterState() throws InterruptedException, ExecutionException {
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    ClusterState state = state(index, true, ShardRoutingState.RELOCATING);
    String relocationTargetNode = state.getRoutingTable().shardRoutingTable(shardId).primaryShard().relocatingNodeId();
    // Simulate running on the relocation target node.
    state = ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).localNodeId(relocationTargetNode)).build();
    setState(clusterService, state);
    logger.debug("--> relocation ongoing state:\n{}", clusterService.state());
    // The request demands a newer cluster state version than the local one, so with a 1ms timeout it must time out.
    Request request = new Request(shardId).timeout("1ms").routedBasedOnClusterVersion(clusterService.state().version() + 1);
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    TestAction.ReroutePhase reroutePhase = action.new ReroutePhase(null, request, listener);
    reroutePhase.run();
    assertListenerThrows("cluster state too old didn't cause a timeout", listener, UnavailableShardsException.class);
    assertTrue(request.isRetrySet.compareAndSet(true, false));
    // Without a timeout, the stale cluster state must lead to a retry, not a reroute back to the source.
    request = new Request(shardId).routedBasedOnClusterVersion(clusterService.state().version() + 1);
    listener = new PlainActionFuture<>();
    reroutePhase = action.new ReroutePhase(null, request, listener);
    reroutePhase.run();
    assertFalse("cluster state too old didn't cause a retry", listener.isDone());
    assertTrue(request.isRetrySet.get());
    // finish relocation
    ShardRouting relocationTarget = clusterService.state().getRoutingTable().shardRoutingTable(shardId)
        .shardsWithState(ShardRoutingState.INITIALIZING).get(0);
    AllocationService allocationService = ESAllocationTestCase.createAllocationService();
    ClusterState updatedState = allocationService.applyStartedShards(state, Collections.singletonList(relocationTarget));
    setState(clusterService, updatedState);
    logger.debug("--> relocation complete state:\n{}", clusterService.state());
    // After relocation completes, the pending request must be sent to the (new) primary node.
    IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id());
    final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId();
    final List<CapturingTransport.CapturedRequest> capturedRequests =
        transport.getCapturedRequestsByTargetNodeAndClear().get(primaryNodeId);
    assertThat(capturedRequests, notNullValue());
    assertThat(capturedRequests.size(), equalTo(1));
    assertThat(capturedRequests.get(0).action, equalTo("internal:testAction[p]"));
    assertIndexShardCounter(0);
}
/**
 * Tests that requests against an unknown index or an unknown shard id fail fast during reroute
 * with IndexNotFoundException / ShardNotFoundException respectively.
 */
public void testUnknownIndexOrShardOnReroute() throws InterruptedException {
    final String index = "test";
    // no replicas in order to skip the replication part
    setState(clusterService, state(index, true,
        randomBoolean() ? ShardRoutingState.INITIALIZING : ShardRoutingState.UNASSIGNED));
    logger.debug("--> using initial state:\n{}", clusterService.state());
    // Unknown index name.
    Request request = new Request(new ShardId("unknown_index", "_na_", 0)).timeout("1ms");
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    ReplicationTask task = maybeTask();
    TestAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener);
    reroutePhase.run();
    assertListenerThrows("must throw index not found exception", listener, IndexNotFoundException.class);
    assertPhase(task, "failed");
    assertTrue(request.isRetrySet.get());
    // Known index, non-existent shard id.
    request = new Request(new ShardId(index, "_na_", 10)).timeout("1ms");
    listener = new PlainActionFuture<>();
    reroutePhase = action.new ReroutePhase(null, request, listener);
    reroutePhase.run();
    assertListenerThrows("must throw shard not found exception", listener, ShardNotFoundException.class);
    assertFalse(request.isRetrySet.get()); //TODO I'd have expected this to be true but we fail too early?
}
/**
 * Tests routing against a closed index: if the action declares a WRITE index block level the
 * request fails with a ClusterBlockException, otherwise with an IndexClosedException.
 */
public void testClosedIndexOnReroute() throws InterruptedException {
    final String index = "test";
    // no replicas in order to skip the replication part
    setState(clusterService, new ClusterStateChanges(xContentRegistry(), threadPool).closeIndices(state(index, true,
        ShardRoutingState.UNASSIGNED), new CloseIndexRequest(index)));
    logger.debug("--> using initial state:\n{}", clusterService.state());
    Request request = new Request(new ShardId("test", "_na_", 0)).timeout("1ms");
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    ReplicationTask task = maybeTask();
    // Randomly exercise both the "checks index blocks" and "ignores index blocks" variants.
    ClusterBlockLevel indexBlockLevel = randomBoolean() ? ClusterBlockLevel.WRITE : null;
    TestAction action = new TestAction(Settings.EMPTY, "internal:testActionWithBlocks", transportService,
        clusterService, shardStateAction, threadPool) {
        @Override
        protected ClusterBlockLevel indexBlockLevel() {
            return indexBlockLevel;
        }
    };
    TestAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener);
    reroutePhase.run();
    if (indexBlockLevel == ClusterBlockLevel.WRITE) {
        assertListenerThrows("must throw block exception", listener, ClusterBlockException.class);
    } else {
        assertListenerThrows("must throw index closed exception", listener, IndexClosedException.class);
    }
    assertPhase(task, "failed");
    assertFalse(request.isRetrySet.get());
}
/**
 * Tests that retryable primary-side failures (shard closed, index deleted, etc.) cause the
 * reroute phase to retry the request rather than fail it immediately — both with and without
 * a request timeout.
 */
public void testStalePrimaryShardOnReroute() throws InterruptedException {
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    // no replicas in order to skip the replication part
    setState(clusterService, stateWithActivePrimary(index, true, randomInt(3)));
    logger.debug("--> using initial state:\n{}", clusterService.state());
    Request request = new Request(shardId);
    boolean timeout = randomBoolean();
    if (timeout) {
        request.timeout("0s");
    } else {
        request.timeout("1h");
    }
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    ReplicationTask task = maybeTask();
    TestAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener);
    reroutePhase.run();
    CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear();
    assertThat(capturedRequests, arrayWithSize(1));
    assertThat(capturedRequests[0].action, equalTo("internal:testAction[p]"));
    assertPhase(task, "waiting_on_primary");
    assertFalse(request.isRetrySet.get());
    // Simulate a retryable failure from the primary.
    transport.handleRemoteError(capturedRequests[0].requestId, randomRetryPrimaryException(shardId));
    if (timeout) {
        // we always try at least one more time on timeout
        assertThat(listener.isDone(), equalTo(false));
        capturedRequests = transport.getCapturedRequestsAndClear();
        assertThat(capturedRequests, arrayWithSize(1));
        assertThat(capturedRequests[0].action, equalTo("internal:testAction[p]"));
        assertPhase(task, "waiting_on_primary");
        // A second retryable failure after the timeout has elapsed fails the operation.
        transport.handleRemoteError(capturedRequests[0].requestId, randomRetryPrimaryException(shardId));
        assertListenerThrows("must throw index not found exception", listener, ElasticsearchException.class);
        assertPhase(task, "failed");
    } else {
        assertThat(listener.isDone(), equalTo(false));
        // generate a CS change
        setState(clusterService, clusterService.state());
        // The cluster state change triggers another attempt at the primary.
        capturedRequests = transport.getCapturedRequestsAndClear();
        assertThat(capturedRequests, arrayWithSize(1));
        assertThat(capturedRequests[0].action, equalTo("internal:testAction[p]"));
    }
}
/**
 * Picks a random exception of a type that the reroute phase treats as retryable on the primary.
 */
private Exception randomRetryPrimaryException(ShardId shardId) {
    final Exception shardNotFound = new ShardNotFoundException(shardId);
    final Exception indexNotFound = new IndexNotFoundException(shardId.getIndex());
    final Exception shardClosed = new IndexShardClosedException(shardId);
    final Exception alreadyClosed = new AlreadyClosedException(shardId + " primary is closed");
    final Exception retryOnPrimary = new ReplicationOperation.RetryOnPrimaryException(shardId, "hello");
    return randomFrom(shardNotFound, indexNotFound, shardClosed, alreadyClosed, retryOnPrimary);
}
/**
 * Tests that the reroute phase either executes the request locally (when this node holds the
 * primary) or forwards it to the primary's node.
 */
public void testRoutePhaseExecutesRequest() {
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    ReplicationTask task = maybeTask();
    setState(clusterService, stateWithActivePrimary(index, randomBoolean(), 3));
    logger.debug("using state: \n{}", clusterService.state());
    final IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().index(index).shard(shardId.id());
    final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId();
    Request request = new Request(shardId);
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    TestAction.ReroutePhase reroutePhase = action.new ReroutePhase(task, request, listener);
    reroutePhase.run();
    assertThat(request.shardId(), equalTo(shardId));
    logger.info("--> primary is assigned to [{}], checking request forwarded", primaryNodeId);
    final List<CapturingTransport.CapturedRequest> capturedRequests =
        transport.getCapturedRequestsByTargetNodeAndClear().get(primaryNodeId);
    assertThat(capturedRequests, notNullValue());
    assertThat(capturedRequests.size(), equalTo(1));
    // Local primary -> the [p] (primary) action is used; remote primary -> the plain action is forwarded.
    if (clusterService.state().nodes().getLocalNodeId().equals(primaryNodeId)) {
        assertThat(capturedRequests.get(0).action, equalTo("internal:testAction[p]"));
        assertPhase(task, "waiting_on_primary");
    } else {
        assertThat(capturedRequests.get(0).action, equalTo("internal:testAction"));
        assertPhase(task, "rerouted");
    }
    assertFalse(request.isRetrySet.get());
    assertIndexShardUninitialized();
}
/**
 * Tests the primary phase: when the local shard is the active primary the replication operation
 * is executed here; when the primary has already been marked as relocated, the request is
 * delegated to the relocation target (with the primary term attached).
 */
public void testPrimaryPhaseExecutesOrDelegatesRequestToRelocationTarget() throws Exception {
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    ClusterState state = stateWithActivePrimary(index, true, randomInt(5));
    setState(clusterService, state);
    Request request = new Request(shardId).timeout("1ms");
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    ReplicationTask task = maybeTask();
    AtomicBoolean executed = new AtomicBoolean();
    ShardRouting primaryShard = state.getRoutingTable().shardRoutingTable(shardId).primaryShard();
    final long primaryTerm = state.metaData().index(index).primaryTerm(shardId.id());
    boolean executeOnPrimary = true;
    // whether shard has been marked as relocated already (i.e. relocation completed)
    if (primaryShard.relocating() && randomBoolean()) {
        // `isRelocated` is a test fixture declared elsewhere in this class.
        isRelocated.set(true);
        executeOnPrimary = false;
    }
    // Run the primary action with a no-op replication operation that just records execution.
    action.new AsyncPrimaryAction(request, primaryShard.allocationId().getId(), primaryTerm, createTransportChannel(listener), task) {
        @Override
        protected ReplicationOperation<Request, Request, TransportReplicationAction.PrimaryResult<Request, TestResponse>>
        createReplicatedOperation(
                Request request,
                ActionListener<TransportReplicationAction.PrimaryResult<Request, TestResponse>> actionListener,
                TransportReplicationAction<Request, Request, TestResponse>.PrimaryShardReference primaryShardReference) {
            return new NoopReplicationOperation(request, actionListener) {
                @Override
                public void execute() throws Exception {
                    assertPhase(task, "primary");
                    assertFalse(executed.getAndSet(true));
                    super.execute();
                }
            };
        }
    }.run();
    if (executeOnPrimary) {
        assertTrue(executed.get());
        assertTrue(listener.isDone());
        listener.get();
        assertPhase(task, "finished");
        assertFalse(request.isRetrySet.get());
    } else {
        // Relocated: the operation must not run locally but be delegated to the relocation target.
        assertFalse(executed.get());
        assertIndexShardCounter(0); // it should have been freed.
        final List<CapturingTransport.CapturedRequest> requests =
            transport.capturedRequestsByTargetNode().get(primaryShard.relocatingNodeId());
        assertThat(requests, notNullValue());
        assertThat(requests.size(), equalTo(1));
        assertThat("primary request was not delegated to relocation target", requests.get(0).action, equalTo("internal:testAction[p]"));
        assertThat("primary term not properly set on primary delegation",
            ((TransportReplicationAction.ConcreteShardRequest<Request>)requests.get(0).request).getPrimaryTerm(), equalTo(primaryTerm));
        assertPhase(task, "primary_delegation");
        transport.handleResponse(requests.get(0).requestId, new TestResponse());
        assertTrue(listener.isDone());
        listener.get();
        assertPhase(task, "finished");
        assertFalse(request.isRetrySet.get());
    }
}
/**
 * Tests that a primary request delegated to the relocation target (addressed via the relocation
 * allocation id) is executed on that node.
 */
public void testPrimaryPhaseExecutesDelegatedRequestOnRelocationTarget() throws Exception {
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    ClusterState state = state(index, true, ShardRoutingState.RELOCATING);
    final ShardRouting primaryShard = state.getRoutingTable().shardRoutingTable(shardId).primaryShard();
    final long primaryTerm = state.metaData().index(index).primaryTerm(shardId.id());
    String primaryTargetNodeId = primaryShard.relocatingNodeId();
    // simulate execution of the primary phase on the relocation target node
    state = ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).localNodeId(primaryTargetNodeId)).build();
    setState(clusterService, state);
    Request request = new Request(shardId).timeout("1ms");
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    ReplicationTask task = maybeTask();
    AtomicBoolean executed = new AtomicBoolean();
    // Address the action via the relocation id, as the delegating source node would.
    action.new AsyncPrimaryAction(request, primaryShard.allocationId().getRelocationId(), primaryTerm,
        createTransportChannel(listener), task) {
        @Override
        protected ReplicationOperation<Request, Request, TransportReplicationAction.PrimaryResult<Request, TestResponse>>
        createReplicatedOperation(
                Request request,
                ActionListener<TransportReplicationAction.PrimaryResult<Request, TestResponse>> actionListener,
                TransportReplicationAction<Request, Request, TestResponse>.PrimaryShardReference primaryShardReference) {
            return new NoopReplicationOperation(request, actionListener) {
                @Override
                public void execute() throws Exception {
                    assertPhase(task, "primary");
                    assertFalse(executed.getAndSet(true));
                    super.execute();
                }
            };
        }
        @Override
        public void onFailure(Exception e) {
            // Fail the test loudly instead of swallowing unexpected failures.
            throw new RuntimeException(e);
        }
    }.run();
    assertThat(executed.get(), equalTo(true));
    assertPhase(task, "finished");
    assertFalse(request.isRetrySet.get());
}
/**
 * Tests the PrimaryShardReference wrapper: perform() produces a replica request of the expected
 * type, failShard() is forwarded to the underlying shard, and close() releases the operation
 * permit exactly once.
 */
public void testPrimaryReference() throws Exception {
    final IndexShard shard = mock(IndexShard.class);
    AtomicBoolean closed = new AtomicBoolean();
    Releasable releasable = () -> {
        if (closed.compareAndSet(false, true) == false) {
            fail("releasable is closed twice");
        }
    };
    TestAction.PrimaryShardReference primary = action.new PrimaryShardReference(shard, releasable);
    final Request request = new Request();
    // Originally an unused local with an implicit cast check; make the type check explicit.
    assertThat(primary.perform(request).replicaRequest, instanceOf(Request.class));
    final ElasticsearchException exception = new ElasticsearchException("testing");
    primary.failShard("test", exception);
    verify(shard).failShard("test", exception);
    primary.close();
    assertTrue(closed.get());
}
/**
 * Tests the replicas proxy: a request to a non-existent node fails fast, responses and errors on
 * a real replica are propagated to the listener, and failShardIfNeeded of a plain replication
 * action does not send a shard-failed request to the master.
 */
public void testReplicaProxy() throws InterruptedException, ExecutionException {
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    ClusterState state = stateWithActivePrimary(index, true, 1 + randomInt(3), randomInt(2));
    logger.info("using state: {}", state);
    setState(clusterService, state);
    ReplicationOperation.Replicas proxy = action.newReplicasProxy(state.metaData().index(index).primaryTerm(0));
    // check that a request to an unknown node fails
    PlainActionFuture<ReplicaResponse> listener = new PlainActionFuture<>();
    ShardRoutingState routingState = randomFrom(ShardRoutingState.INITIALIZING, ShardRoutingState.STARTED,
        ShardRoutingState.RELOCATING);
    proxy.performOn(
        TestShardRouting.newShardRouting(shardId, "NOT THERE",
            routingState == ShardRoutingState.RELOCATING ? state.nodes().iterator().next().getId() : null, false, routingState),
        new Request(),
        randomNonNegativeLong(),
        randomNonNegativeLong(),
        listener);
    assertTrue(listener.isDone());
    assertListenerThrows("non existent node should throw a NoNodeAvailableException", listener, NoNodeAvailableException.class);
    // Pick a real, assigned replica and send it a request; the request is captured by the mock transport.
    final IndexShardRoutingTable shardRoutings = state.routingTable().shardRoutingTable(shardId);
    final ShardRouting replica = randomFrom(shardRoutings.replicaShards().stream()
        .filter(ShardRouting::assignedToNode).collect(Collectors.toList()));
    listener = new PlainActionFuture<>();
    proxy.performOn(replica, new Request(), randomNonNegativeLong(), randomNonNegativeLong(), listener);
    assertFalse(listener.isDone());
    CapturingTransport.CapturedRequest[] captures = transport.getCapturedRequestsAndClear();
    assertThat(captures, arrayWithSize(1));
    // Randomly complete the captured request with a success, a remote error, or a transport error.
    if (randomBoolean()) {
        final TransportReplicationAction.ReplicaResponse response =
            new TransportReplicationAction.ReplicaResponse(randomLong(), randomLong());
        transport.handleResponse(captures[0].requestId, response);
        assertTrue(listener.isDone());
        assertThat(listener.get(), equalTo(response));
    } else if (randomBoolean()) {
        transport.handleRemoteError(captures[0].requestId, new ElasticsearchException("simulated"));
        assertTrue(listener.isDone());
        assertListenerThrows("listener should reflect remote error", listener, ElasticsearchException.class);
    } else {
        transport.handleError(captures[0].requestId, new TransportException("simulated"));
        assertTrue(listener.isDone());
        assertListenerThrows("listener should reflect remote error", listener, TransportException.class);
    }
    AtomicReference<Object> failure = new AtomicReference<>();
    AtomicReference<Object> ignoredFailure = new AtomicReference<>();
    AtomicBoolean success = new AtomicBoolean();
    proxy.failShardIfNeeded(replica, "test", new ElasticsearchException("simulated"),
        () -> success.set(true), failure::set, ignoredFailure::set
    );
    CapturingTransport.CapturedRequest[] shardFailedRequests = transport.getCapturedRequestsAndClear();
    // A plain replication action does not fail the shard on the master
    assertEquals(0, shardFailedRequests.length);
}
/**
 * Verifies that the primary term resolved from the cluster state is set on the concrete
 * shard request and forwarded to the replica when the primary phase runs.
 */
public void testSeqNoIsSetOnPrimary() throws Exception {
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    // we use one replica to check the primary term was set on the operation and sent to the replica
    setState(clusterService,
        state(index, true, ShardRoutingState.STARTED, randomFrom(ShardRoutingState.INITIALIZING, ShardRoutingState.STARTED)));
    logger.debug("--> using initial state:\n{}", clusterService.state());
    final ShardRouting routingEntry = clusterService.state().getRoutingTable().index("test").shard(0).primaryShard();
    final long primaryTerm = clusterService.state().metaData().index(index).primaryTerm(shardId.id());
    Request request = new Request(shardId);
    TransportReplicationAction.ConcreteShardRequest<Request> concreteShardRequest =
        new TransportReplicationAction.ConcreteShardRequest<>(request, routingEntry.allocationId().getId(), primaryTerm);
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();

    // Mock a primary shard whose pending primary term matches the term in the cluster state.
    final IndexShard shard = mock(IndexShard.class);
    when(shard.getPendingPrimaryTerm()).thenReturn(primaryTerm);
    when(shard.routingEntry()).thenReturn(routingEntry);
    when(shard.isRelocatedPrimary()).thenReturn(false);
    IndexShardRoutingTable shardRoutingTable = clusterService.state().routingTable().shardRoutingTable(shardId);
    Set<String> inSyncIds = randomBoolean() ? Collections.singleton(routingEntry.allocationId().getId()) :
        clusterService.state().metaData().index(index).inSyncAllocationIds(0);
    when(shard.getReplicationGroup()).thenReturn(
        new ReplicationGroup(shardRoutingTable,
            inSyncIds,
            shardRoutingTable.getAllAllocationIds()));
    // Grant the primary operation permit immediately with a no-op releasable.
    doAnswer(invocation -> {
        ((ActionListener<Releasable>)invocation.getArguments()[0]).onResponse(() -> {});
        return null;
    }).when(shard).acquirePrimaryOperationPermit(any(), anyString(), anyObject());

    TestAction action =
        new TestAction(Settings.EMPTY, "internal:testSeqNoIsSetOnPrimary", transportService, clusterService, shardStateAction,
            threadPool) {
            @Override
            protected IndexShard getIndexShard(ShardId shardId) {
                return shard;
            }
        };

    TransportReplicationAction<Request, Request, TestResponse>.PrimaryOperationTransportHandler primaryPhase =
        action.new PrimaryOperationTransportHandler();
    primaryPhase.messageReceived(concreteShardRequest, createTransportChannel(listener), null);
    // The request captured on its way to the replica must carry the same primary term.
    CapturingTransport.CapturedRequest[] requestsToReplicas = transport.capturedRequests();
    assertThat(requestsToReplicas, arrayWithSize(1));
    assertThat(((TransportReplicationAction.ConcreteShardRequest<Request>) requestsToReplicas[0].request).getPrimaryTerm(),
        equalTo(primaryTerm));
}
/**
 * Checks that the per-shard operation permit counter is held (exactly 1) while the primary
 * operation is created and executed, and is released again (back to 0) afterwards — whether
 * the operation succeeds, throws during creation, throws during execution, or reports an
 * error through the listener.
 */
public void testCounterOnPrimary() throws Exception {
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    // no replica, we only want to test on primary
    final ClusterState state = state(index, true, ShardRoutingState.STARTED);
    setState(clusterService, state);
    logger.debug("--> using initial state:\n{}", clusterService.state());
    final ShardRouting primaryShard = state.routingTable().shardRoutingTable(shardId).primaryShard();
    final long primaryTerm = state.metaData().index(index).primaryTerm(shardId.id());
    Request request = new Request(shardId);
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    ReplicationTask task = maybeTask();

    // Pick one of four outcomes: success (i == 0) or one of three failure modes.
    int i = randomInt(3);
    final boolean throwExceptionOnCreation = i == 1;
    final boolean throwExceptionOnRun = i == 2;
    final boolean respondWithError = i == 3;
    action.new AsyncPrimaryAction(request, primaryShard.allocationId().getId(), primaryTerm, createTransportChannel(listener), task) {
        @Override
        protected ReplicationOperation<Request, Request, TransportReplicationAction.PrimaryResult<Request, TestResponse>>
        createReplicatedOperation(
                Request request,
                ActionListener<TransportReplicationAction.PrimaryResult<Request, TestResponse>> actionListener,
                TransportReplicationAction<Request, Request, TestResponse>.PrimaryShardReference primaryShardReference) {
            // The permit must already be held when the replicated operation is created.
            assertIndexShardCounter(1);
            if (throwExceptionOnCreation) {
                throw new ElasticsearchException("simulated exception, during createReplicatedOperation");
            }
            return new NoopReplicationOperation(request, actionListener) {
                @Override
                public void execute() throws Exception {
                    // The permit must still be held while the operation executes.
                    assertIndexShardCounter(1);
                    assertPhase(task, "primary");
                    if (throwExceptionOnRun) {
                        throw new ElasticsearchException("simulated exception, during performOnPrimary");
                    } else if (respondWithError) {
                        this.resultListener.onFailure(new ElasticsearchException("simulated exception, as a response"));
                    } else {
                        super.execute();
                    }
                }
            };
        }
    }.run();
    // Regardless of the outcome, the permit must be released and the task finished.
    assertIndexShardCounter(0);
    assertTrue(listener.isDone());
    assertPhase(task, "finished");

    try {
        listener.get();
    } catch (ExecutionException e) {
        // Only the three failure modes are allowed to surface an exception, and it must be
        // one of the simulated ones.
        if (throwExceptionOnCreation || throwExceptionOnRun || respondWithError) {
            Throwable cause = e.getCause();
            assertThat(cause, instanceOf(ElasticsearchException.class));
            assertThat(cause.getMessage(), containsString("simulated"));
        } else {
            throw e;
        }
    }
}
/**
 * Checks that the replica operation holds the shard operation permit while it runs and
 * releases it when done, both on success and when the replica operation throws.
 */
public void testReplicasCounter() throws Exception {
    final ShardId shardId = new ShardId("test", "_na_", 0);
    final ClusterState state = state(shardId.getIndexName(), true, ShardRoutingState.STARTED, ShardRoutingState.STARTED);
    setState(clusterService, state);
    final ShardRouting replicaRouting = state.getRoutingTable().shardRoutingTable(shardId).replicaShards().get(0);
    boolean throwException = randomBoolean();
    final ReplicationTask task = maybeTask();
    TestAction action = new TestAction(Settings.EMPTY, "internal:testActionWithExceptions", transportService, clusterService,
        shardStateAction, threadPool) {
        @Override
        protected ReplicaResult shardOperationOnReplica(Request request, IndexShard replica) {
            // The permit must be held while the replica operation executes.
            assertIndexShardCounter(1);
            assertPhase(task, "replica");
            if (throwException) {
                throw new ElasticsearchException("simulated");
            }
            return new ReplicaResult();
        }
    };
    final TestAction.ReplicaOperationTransportHandler replicaOperationTransportHandler = action.new ReplicaOperationTransportHandler();
    try {
        replicaOperationTransportHandler.messageReceived(
            new TransportReplicationAction.ConcreteReplicaRequest<>(
                new Request().setShardId(shardId), replicaRouting.allocationId().getId(), randomNonNegativeLong(),
                randomNonNegativeLong(), randomNonNegativeLong()),
            createTransportChannel(new PlainActionFuture<>()), task);
    } catch (ElasticsearchException e) {
        // The simulated failure may only surface when we actually asked for it.
        assertThat(e.getMessage(), containsString("simulated"));
        assertTrue(throwException);
    }
    assertPhase(task, "finished");
    // operation should have finished and counter decreased because no outstanding replica requests
    assertIndexShardCounter(0);
}
/**
 * This test ensures that replication operations adhere to the {@link IndexMetaData#SETTING_WAIT_FOR_ACTIVE_SHARDS} setting
 * when the request is using the default value for waitForActiveShards.
 */
public void testDefaultWaitForActiveShardsUsesIndexSetting() throws Exception {
    final String indexName = "test";
    final ShardId shardId = new ShardId(indexName, "_na_", 0);

    // test wait_for_active_shards index setting used when the default is set on the request
    final int replicaCount = randomIntBetween(0, 5);
    final int indexSettingValue = randomIntBetween(0, replicaCount + 1);
    ClusterState state = stateWithActivePrimary(indexName, randomBoolean(), replicaCount);
    final IndexMetaData indexMetaData = state.metaData().index(indexName);
    final Settings updatedIndexSettings = Settings.builder()
        .put(indexMetaData.getSettings())
        .put(SETTING_WAIT_FOR_ACTIVE_SHARDS.getKey(), Integer.toString(indexSettingValue))
        .build();
    final MetaData.Builder updatedMetaData = MetaData.builder(state.metaData())
        .put(IndexMetaData.builder(indexMetaData).settings(updatedIndexSettings).build(), true);
    state = ClusterState.builder(state).metaData(updatedMetaData).build();
    setState(clusterService, state);

    // set to default so index settings are used
    Request request = new Request(shardId).waitForActiveShards(ActiveShardCount.DEFAULT);
    action.resolveRequest(state.metaData().index(indexName), request);
    assertEquals(ActiveShardCount.from(indexSettingValue), request.waitForActiveShards());

    // test wait_for_active_shards when default not set on the request (request value should be honored over index setting)
    final int explicitRequestValue = randomIntBetween(0, replicaCount + 1);
    request = new Request(shardId).waitForActiveShards(ActiveShardCount.from(explicitRequestValue));
    action.resolveRequest(state.metaData().index(indexName), request);
    assertEquals(ActiveShardCount.from(explicitRequestValue), request.waitForActiveShards());
}
/** test that a primary request is rejected if it arrives at a shard with a wrong allocation id or term */
public void testPrimaryActionRejectsWrongAidOrWrongTerm() throws Exception {
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    setState(clusterService, state(index, true, ShardRoutingState.STARTED));
    final ShardRouting primary = clusterService.state().routingTable().shardRoutingTable(shardId).primaryShard();
    final long primaryTerm = clusterService.state().metaData().index(shardId.getIndexName()).primaryTerm(shardId.id());
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    final boolean wrongAllocationId = randomBoolean();
    // If the allocation id is wrong the term may be correct or wrong; if the allocation id is
    // correct, the term must be wrong (strictly greater than current) to trigger the rejection.
    final long requestTerm = wrongAllocationId && randomBoolean() ? primaryTerm : primaryTerm + randomIntBetween(1, 10);
    Request request = new Request(shardId).timeout("1ms");
    action.new PrimaryOperationTransportHandler().messageReceived(
        new TransportReplicationAction.ConcreteShardRequest<>(request,
            wrongAllocationId ? "_not_a_valid_aid_" : primary.allocationId().getId(),
            requestTerm),
        createTransportChannel(listener), maybeTask()
    );
    try {
        listener.get();
        fail("using a wrong aid didn't fail the operation");
    } catch (ExecutionException execException) {
        Throwable throwable = execException.getCause();
        logger.debug("got exception:" , throwable);
        // The rejection must be retryable so the caller waits for a newer cluster state.
        assertTrue(throwable.getClass() + " is not a retry exception", action.retryPrimaryException(throwable));
        if (wrongAllocationId) {
            assertThat(throwable.getMessage(), containsString("expected aID [_not_a_valid_aid_] but found [" +
                primary.allocationId().getId() + "]"));
        } else {
            assertThat(throwable.getMessage(), containsString("expected aID [" + primary.allocationId().getId() + "] with term [" +
                requestTerm + "] but found [" + primaryTerm + "]"));
        }
    }
}
/** test that a replica request is rejected if it arrives at a shard with a wrong allocation id */
public void testReplicaActionRejectsWrongAid() throws Exception {
    final String index = "test";
    final ShardId shardId = new ShardId(index, "_na_", 0);
    ClusterState state = state(index, false, ShardRoutingState.STARTED, ShardRoutingState.STARTED);
    final ShardRouting replica = state.routingTable().shardRoutingTable(shardId).replicaShards().get(0);
    // simulate execution of the node holding the replica
    state = ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).localNodeId(replica.currentNodeId())).build();
    setState(clusterService, state);
    PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    Request request = new Request(shardId).timeout("1ms");
    action.new ReplicaOperationTransportHandler().messageReceived(
        new TransportReplicationAction.ConcreteReplicaRequest<>(request, "_not_a_valid_aid_", randomNonNegativeLong(),
            randomNonNegativeLong(), randomNonNegativeLong()),
        createTransportChannel(listener), maybeTask()
    );
    try {
        listener.get();
        fail("using a wrong aid didn't fail the operation");
    } catch (ExecutionException execException) {
        Throwable throwable = execException.getCause();
        // The rejection must be retryable so the operation can be re-attempted on a newer state.
        if (action.retryPrimaryException(throwable) == false) {
            throw new AssertionError("thrown exception is not retriable", throwable);
        }
        assertThat(throwable.getMessage(), containsString("_not_a_valid_aid_"));
    }
}
/**
 * test throwing a {@link org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException}
 * causes a retry
 */
public void testRetryOnReplica() throws Exception {
    final ShardId shardId = new ShardId("test", "_na_", 0);
    ClusterState state = state(shardId.getIndexName(), true, ShardRoutingState.STARTED, ShardRoutingState.STARTED);
    final ShardRouting replica = state.getRoutingTable().shardRoutingTable(shardId).replicaShards().get(0);
    final long primaryTerm = state.metaData().index(shardId.getIndexName()).primaryTerm(shardId.id());
    // simulate execution of the node holding the replica
    state = ClusterState.builder(state).nodes(DiscoveryNodes.builder(state.nodes()).localNodeId(replica.currentNodeId())).build();
    setState(clusterService, state);

    AtomicBoolean throwException = new AtomicBoolean(true);
    final ReplicationTask task = maybeTask();
    TestAction action = new TestAction(Settings.EMPTY, "internal:testActionWithExceptions", transportService, clusterService,
        shardStateAction, threadPool) {
        @Override
        protected ReplicaResult shardOperationOnReplica(Request request, IndexShard replica) {
            assertPhase(task, "replica");
            // The first attempt fails with a retryable exception; after the flag is cleared
            // the retried attempt succeeds.
            if (throwException.get()) {
                throw new RetryOnReplicaException(shardId, "simulation");
            }
            return new ReplicaResult();
        }
    };
    final TestAction.ReplicaOperationTransportHandler replicaOperationTransportHandler = action.new ReplicaOperationTransportHandler();
    final PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    final Request request = new Request().setShardId(shardId);
    final long checkpoint = randomNonNegativeLong();
    final long maxSeqNoOfUpdatesOrDeletes = randomNonNegativeLong();
    replicaOperationTransportHandler.messageReceived(
        new TransportReplicationAction.ConcreteReplicaRequest<>(request, replica.allocationId().getId(),
            primaryTerm, checkpoint, maxSeqNoOfUpdatesOrDeletes),
        createTransportChannel(listener), task);
    if (listener.isDone()) {
        listener.get(); // fail with the exception if there
        fail("listener shouldn't be done");
    }

    // no retry yet
    List<CapturingTransport.CapturedRequest> capturedRequests =
        transport.getCapturedRequestsByTargetNodeAndClear().get(replica.currentNodeId());
    assertThat(capturedRequests, nullValue());

    // release the waiting
    throwException.set(false);
    // publishing a cluster state change triggers the pending retry
    setState(clusterService, state);

    capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear().get(replica.currentNodeId());
    assertThat(capturedRequests, notNullValue());
    assertThat(capturedRequests.size(), equalTo(1));
    final CapturingTransport.CapturedRequest capturedRequest = capturedRequests.get(0);
    assertThat(capturedRequest.action, equalTo("internal:testActionWithExceptions[r]"));
    assertThat(capturedRequest.request, instanceOf(TransportReplicationAction.ConcreteReplicaRequest.class));
    // The retried request must preserve the global checkpoint and max-seq-no-of-updates values.
    assertThat(((TransportReplicationAction.ConcreteReplicaRequest) capturedRequest.request).getGlobalCheckpoint(),
        equalTo(checkpoint));
    assertThat(((TransportReplicationAction.ConcreteReplicaRequest) capturedRequest.request).getMaxSeqNoOfUpdatesOrDeletes(),
        equalTo(maxSeqNoOfUpdatesOrDeletes));
    assertConcreteShardRequest(capturedRequest.request, request, replica.allocationId());
}
/**
 * Same scenario as testRetryOnReplica, but exercised over a real (mock TCP) transport instead
 * of the capturing transport, so the retry goes through actual dispatch end to end.
 */
public void testRetryOnReplicaWithRealTransport() throws Exception {
    final ShardId shardId = new ShardId("test", "_na_", 0);
    final ClusterState initialState = state(shardId.getIndexName(), true, ShardRoutingState.STARTED, ShardRoutingState.STARTED);
    final ShardRouting replica = initialState.getRoutingTable().shardRoutingTable(shardId).replicaShards().get(0);
    final long primaryTerm = initialState.metaData().index(shardId.getIndexName()).primaryTerm(shardId.id());
    // simulate execution of the node holding the replica
    final ClusterState stateWithNodes = ClusterState.builder(initialState)
        .nodes(DiscoveryNodes.builder(initialState.nodes()).localNodeId(replica.currentNodeId())).build();
    setState(clusterService, stateWithNodes);

    AtomicBoolean throwException = new AtomicBoolean(true);
    final ReplicationTask task = maybeTask();
    // Replace the capturing transport service with one backed by a real mock TCP transport.
    NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList());
    final Transport transport = new MockTcpTransport(Settings.EMPTY, threadPool, BigArrays.NON_RECYCLING_INSTANCE,
        new NoneCircuitBreakerService(), namedWriteableRegistry, new NetworkService(Collections.emptyList()),
        Version.CURRENT);
    transportService = new MockTransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR,
        x -> clusterService.localNode(), null, Collections.emptySet());
    transportService.start();
    transportService.acceptIncomingRequests();

    AtomicBoolean calledSuccessfully = new AtomicBoolean(false);
    TestAction action = new TestAction(Settings.EMPTY, "internal:testActionWithExceptions", transportService, clusterService,
        shardStateAction, threadPool) {
        @Override
        protected ReplicaResult shardOperationOnReplica(Request request, IndexShard replica) {
            assertPhase(task, "replica");
            // First attempt fails with a retryable exception; the retried attempt succeeds.
            if (throwException.get()) {
                throw new RetryOnReplicaException(shardId, "simulation");
            }
            calledSuccessfully.set(true);
            return new ReplicaResult();
        }
    };
    final TestAction.ReplicaOperationTransportHandler replicaOperationTransportHandler = action.new ReplicaOperationTransportHandler();
    final PlainActionFuture<TestResponse> listener = new PlainActionFuture<>();
    final Request request = new Request().setShardId(shardId);
    final long checkpoint = randomNonNegativeLong();
    final long maxSeqNoOfUpdates = randomNonNegativeLong();
    replicaOperationTransportHandler.messageReceived(
        new TransportReplicationAction.ConcreteReplicaRequest<>(request, replica.allocationId().getId(),
            primaryTerm, checkpoint, maxSeqNoOfUpdates),
        createTransportChannel(listener), task);
    if (listener.isDone()) {
        listener.get(); // fail with the exception if there
        fail("listener shouldn't be done");
    }

    // release the waiting
    throwException.set(false);
    // publish a new state (same as the old state with the version incremented)
    setState(clusterService, stateWithNodes);

    // Assert that the request was retried, this time successful
    assertTrue("action should have been successfully called on retry but was not", calledSuccessfully.get());
    transportService.stop();
}
/**
 * Asserts that {@code capturedRequest} is a concrete shard request wrapping
 * {@code expectedRequest}, that the retry flag was set on the wrapped request, and that it
 * targets the expected allocation id.
 */
private void assertConcreteShardRequest(TransportRequest capturedRequest, Request expectedRequest, AllocationId expectedAllocationId) {
    final TransportReplicationAction.ConcreteShardRequest<?> shardRequest =
        (TransportReplicationAction.ConcreteShardRequest<?>) capturedRequest;
    final Request wrappedRequest = (Request) shardRequest.getRequest();
    assertThat(wrappedRequest, equalTo(expectedRequest));
    assertThat(wrappedRequest.isRetrySet.get(), equalTo(true));
    assertThat(shardRequest.getTargetAllocationID(), equalTo(expectedAllocationId.getId()));
}
/** Asserts the number of operation permits currently held on the mocked shards. */
private void assertIndexShardCounter(int expected) {
    assertThat(count.get(), equalTo(expected));
}

// Incremented when an operation permit is acquired on a mocked shard and decremented when the
// permit's releasable is closed (see mockIndexShard).
private final AtomicInteger count = new AtomicInteger(0);

// When true, the mocked shard reports itself as a relocated primary.
private final AtomicBoolean isRelocated = new AtomicBoolean(false);
/**
 * Sometimes builds a ReplicationTask for tracking the phase of the
 * TransportReplicationAction. Since TransportReplicationAction has to work with a null task
 * just as well as with a supplied one, this returns null half the time.
 */
private ReplicationTask maybeTask() {
    if (random().nextBoolean()) {
        return new ReplicationTask(0, null, null, null, null, null);
    }
    return null;
}
/**
 * If the task is non-null this asserts that the phase matches.
 */
private void assertPhase(@Nullable ReplicationTask task, String phase) {
    assertPhase(task, equalTo(phase));
}
/**
 * If the task is non-null this asserts that its current phase satisfies the given matcher.
 */
private void assertPhase(@Nullable ReplicationTask task, Matcher<String> phaseMatcher) {
    if (task == null) {
        return;
    }
    assertThat(task.getPhase(), phaseMatcher);
}
/**
 * Test request carrying bookkeeping flags so tests can observe where, and how often, it was
 * processed.
 */
public static class Request extends ReplicationRequest<Request> {
    // Set once the request has been executed on the primary (must happen at most once).
    public AtomicBoolean processedOnPrimary = new AtomicBoolean();
    // Counts how many times the request was executed on replicas.
    public AtomicInteger processedOnReplicas = new AtomicInteger();
    // Set when the transport layer retried the request (see onRetry()).
    public AtomicBoolean isRetrySet = new AtomicBoolean(false);

    public Request() {
    }

    Request(ShardId shardId) {
        this();
        this.shardId = shardId;
        this.index = shardId.getIndexName();
        this.waitForActiveShards = ActiveShardCount.NONE;
        // keep things simple
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
    }

    @Override
    public void onRetry() {
        super.onRetry();
        // Record the retry so assertConcreteShardRequest can verify it happened.
        isRetrySet.set(true);
    }

    @Override
    public String toString() {
        return "Request{}";
    }
}
/** Minimal response type; these tests only care about delivery, not the payload. */
static class TestResponse extends ReplicationResponse {
}
/**
 * Minimal concrete {@link TransportReplicationAction} used throughout these tests. The
 * primary and replica operations only record on the request's bookkeeping flags that they
 * ran.
 */
private class TestAction extends TransportReplicationAction<Request, Request, TestResponse> {

    TestAction(Settings settings, String actionName, TransportService transportService,
               ClusterService clusterService, ShardStateAction shardStateAction,
               ThreadPool threadPool) {
        super(settings, actionName, transportService, clusterService, mockIndicesService(clusterService), threadPool,
            shardStateAction,
            new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY),
            Request::new, Request::new, ThreadPool.Names.SAME);
    }

    // NOTE(review): the two boolean parameters are unused — this overload currently behaves
    // exactly like the one above; confirm whether document-failure simulation was intended.
    TestAction(Settings settings, String actionName, TransportService transportService,
               ClusterService clusterService, ShardStateAction shardStateAction,
               ThreadPool threadPool, boolean withDocumentFailureOnPrimary, boolean withDocumentFailureOnReplica) {
        super(settings, actionName, transportService, clusterService, mockIndicesService(clusterService), threadPool,
            shardStateAction,
            new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY),
            Request::new, Request::new, ThreadPool.Names.SAME);
    }

    @Override
    protected TestResponse newResponseInstance() {
        return new TestResponse();
    }

    @Override
    protected PrimaryResult shardOperationOnPrimary(Request shardRequest, IndexShard primary) throws Exception {
        boolean executedBefore = shardRequest.processedOnPrimary.getAndSet(true);
        // A request must never be executed on the primary more than once.
        assert executedBefore == false : "request has already been executed on the primary";
        return new PrimaryResult(shardRequest, new TestResponse());
    }

    @Override
    protected ReplicaResult shardOperationOnReplica(Request request, IndexShard replica) {
        request.processedOnReplicas.incrementAndGet();
        return new ReplicaResult();
    }

    @Override
    protected boolean resolveIndex() {
        return false;
    }
}
/**
 * Creates a mocked {@link IndicesService} that resolves index services on demand from the
 * current cluster state (see {@link #mockIndexService}).
 */
final IndicesService mockIndicesService(ClusterService clusterService) {
    final IndicesService indicesService = mock(IndicesService.class);
    when(indicesService.indexServiceSafe(any(Index.class))).then(invocation -> {
        Index index = (Index)invocation.getArguments()[0];
        final ClusterState state = clusterService.state();
        // getIndexSafe throws for unknown indices, mirroring the "safe" contract.
        final IndexMetaData indexSafe = state.metaData().getIndexSafe(index);
        return mockIndexService(indexSafe, clusterService);
    });
    when(indicesService.indexService(any(Index.class))).then(invocation -> {
        Index index = (Index) invocation.getArguments()[0];
        final ClusterState state = clusterService.state();
        // Unlike indexServiceSafe, indexService returns null for unknown indices.
        if (state.metaData().hasIndex(index.getName())) {
            return mockIndexService(clusterService.state().metaData().getIndexSafe(index), clusterService);
        } else {
            return null;
        }
    });
    return indicesService;
}
/**
 * Creates a mocked {@link IndexService} that serves mocked shards (see
 * {@link #mockIndexShard}) for valid shard ids and throws {@link ShardNotFoundException}
 * otherwise.
 */
final IndexService mockIndexService(final IndexMetaData indexMetaData, ClusterService clusterService) {
    final IndexService indexService = mock(IndexService.class);
    when(indexService.getShard(anyInt())).then(invocation -> {
        int shard = (Integer) invocation.getArguments()[0];
        final ShardId shardId = new ShardId(indexMetaData.getIndex(), shard);
        // Shard ids are zero-based, so any id >= numberOfShards does not exist. The previous
        // check used '>' and therefore wrongly accepted shard == numberOfShards.
        if (shard >= indexMetaData.getNumberOfShards()) {
            throw new ShardNotFoundException(shardId);
        }
        return mockIndexShard(shardId, clusterService);
    });
    return indexService;
}
/**
 * Creates a mocked {@link IndexShard} whose operation permits increment/decrement
 * {@link #count}, so tests can assert how many operations are in flight via
 * {@link #assertIndexShardCounter}.
 */
private IndexShard mockIndexShard(ShardId shardId, ClusterService clusterService) {
    final IndexShard indexShard = mock(IndexShard.class);
    // Primary permits: bump the counter and hand out a releasable that decrements it again.
    doAnswer(invocation -> {
        ActionListener<Releasable> callback = (ActionListener<Releasable>) invocation.getArguments()[0];
        count.incrementAndGet();
        callback.onResponse(count::decrementAndGet);
        return null;
    }).when(indexShard).acquirePrimaryOperationPermit(any(ActionListener.class), anyString(), anyObject());
    // Replica permits: same bookkeeping, but reject operations with a stale primary term.
    doAnswer(invocation -> {
        long term = (Long)invocation.getArguments()[0];
        ActionListener<Releasable> callback = (ActionListener<Releasable>) invocation.getArguments()[3];
        final long primaryTerm = indexShard.getPendingPrimaryTerm();
        if (term < primaryTerm) {
            throw new IllegalArgumentException(String.format(Locale.ROOT, "%s operation term [%d] is too old (current [%d])",
                shardId, term, primaryTerm));
        }
        count.incrementAndGet();
        callback.onResponse(count::decrementAndGet);
        return null;
    }).when(indexShard)
        .acquireReplicaOperationPermit(anyLong(), anyLong(), anyLong(), any(ActionListener.class), anyString(), anyObject());
    // The routing entry is looked up live from the cluster state of the local node.
    when(indexShard.routingEntry()).thenAnswer(invocationOnMock -> {
        final ClusterState state = clusterService.state();
        final RoutingNode node = state.getRoutingNodes().node(state.nodes().getLocalNodeId());
        final ShardRouting routing = node.getByShardId(shardId);
        if (routing == null) {
            throw new ShardNotFoundException(shardId, "shard is no longer assigned to current node");
        }
        return routing;
    });
    when(indexShard.isRelocatedPrimary()).thenAnswer(invocationOnMock -> isRelocated.get());
    // These tests never expect a shard to be failed locally.
    doThrow(new AssertionError("failed shard is not supported")).when(indexShard).failShard(anyString(), any(Exception.class));
    // The pending primary term always reflects the current cluster state.
    when(indexShard.getPendingPrimaryTerm()).thenAnswer(i ->
        clusterService.state().metaData().getIndexSafe(shardId.getIndex()).primaryTerm(shardId.id()));
    return indexShard;
}
/**
 * Replication operation that immediately responds with an empty primary result and never
 * contacts any replicas.
 */
class NoopReplicationOperation extends ReplicationOperation<Request, Request, TestAction.PrimaryResult<Request, TestResponse>> {

    NoopReplicationOperation(Request request, ActionListener<TestAction.PrimaryResult<Request, TestResponse>> listener) {
        super(request, null, listener, null, TransportReplicationActionTests.this.logger, "noop");
    }

    @Override
    public void execute() throws Exception {
        // Using the diamond operator (<>) prevents Eclipse from being able to compile this code
        this.resultListener.onResponse(new TransportReplicationAction.PrimaryResult<Request, TestResponse>(null, new TestResponse()));
    }
}
/**
 * Transport channel that is needed for replica operation testing. Responses and failures
 * sent through the channel are forwarded to the given listener.
 */
public TransportChannel createTransportChannel(final PlainActionFuture<TestResponse> listener) {
    return new TransportChannel() {

        @Override
        public String getProfileName() {
            return "";
        }

        @Override
        public void sendResponse(TransportResponse response) throws IOException {
            listener.onResponse(((TestResponse) response));
        }

        @Override
        public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException {
            listener.onResponse(((TestResponse) response));
        }

        @Override
        public void sendResponse(Exception exception) throws IOException {
            listener.onFailure(exception);
        }

        @Override
        public String getChannelType() {
            return "replica_test";
        }
    };
}
}
| |
/*
* Copyright 2015 herd contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finra.herd.core;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.XMLGregorianCalendar;
import org.apache.commons.lang3.time.DateUtils;
/**
* Provides additional utility methods for dates and times.
*/
public class HerdDateUtils extends DateUtils
{
/**
* Returns a calendar for the current day that does not have the time set.
*
* @return the current date.
*/
public static Calendar getCurrentCalendarNoTime()
{
// Get the current year, month, and day before we clear out the fields.
Calendar calendar = Calendar.getInstance();
int year = calendar.get(Calendar.YEAR);
int month = calendar.get(Calendar.MONTH);
int day = calendar.get(Calendar.DATE);
// Clear out ALL the fields of the calendar. If any are not cleared, we run the risk of something being set that we don't want.
calendar.clear();
// Update the calendar with just the year, month, and day.
calendar.set(year, month, day);
// Return the updated calendar.
return calendar;
}
/**
* Formats a "total milliseconds" duration.
*
* @param duration the duration in milliseconds to format.
*
* @return The formatted duration.
*/
public static String formatDuration(long duration)
{
// Initialize the result string.
StringBuilder result = new StringBuilder();
// Since we have to display the readable duration, append it initially.
long remainingDuration = duration;
// If the duration is 0, then just return 0 milliseconds.
if (remainingDuration == 0)
{
result.append("0 Milliseconds");
}
else
{
// Compute each duration separately.
remainingDuration = processDuration(remainingDuration, MILLIS_PER_DAY, "Day", result, false);
remainingDuration = processDuration(remainingDuration, MILLIS_PER_HOUR, "Hour", result, false);
remainingDuration = processDuration(remainingDuration, MILLIS_PER_MINUTE, "Minute", result, false);
remainingDuration = processDuration(remainingDuration, MILLIS_PER_SECOND, "Second", result, false);
// Compute the milliseconds.
long milliSeconds = remainingDuration;
if (milliSeconds > 0)
{
// Just display the final millisecond portion no matter what (i.e. the duration is 1).
processDuration(remainingDuration, 1, "Millisecond", result, true);
}
}
// Return the result.
return result.toString();
}
/**
* Process a single duration.
*
* @param remainingDuration the remaining duration in milliseconds.
* @param millisPerDuration the number of milliseconds per one duration (e.g. 1000 milliseconds in a single second).
* @param durationName the duration name (e.g. "Day").
* @param result the result string.
* @param displayZeroDuration Flag that indicates whether a "0" duration should be displayed or not.
*
* @return the new remaining duration in milliseconds after the current duration is substracted from the original remaining duration.
*/
private static long processDuration(long remainingDuration, long millisPerDuration, String durationName, StringBuilder result, boolean displayZeroDuration)
{
// Compute how many durations (e.g. "5" days).
long duration = remainingDuration / millisPerDuration;
// Compute the new remaining duration which is the previous remaining duration - the new duration in milliseconds.
long newRemainingDuration = remainingDuration - (duration * millisPerDuration);
// Only append the duration to the result if some time exists in the duration (e.g. we don't add "0 Days").
if (duration > 0 || displayZeroDuration)
{
// If the result previously had a value, add a comma to separate this duration from the previous durations (e.g. 5 days"," ...).
if (result.length() > 0)
{
result.append(", ");
}
// Append the duration along with the duration name (e.g. "5 day").
result.append(String.valueOf(duration)).append(' ').append(durationName);
// If the duration is not 1, then make it plural (e.g. 5 day"s").
if (duration != 1)
{
result.append('s');
}
}
// Return the new remaining duration so the calculation can continue.
return newRemainingDuration;
}
/**
* Gets the current date/time as an XMLGregorianCalendar with the default time zone in the default locale.
*
* @return the current date/time.
*/
public static XMLGregorianCalendar now()
{
return getXMLGregorianCalendarValue(null);
}
/**
* Gets an instance of XMLGregorianCalendar class initialized per the specified java.util.Date value. Returns the current date/time if date is null.
*
* @param date the java.util.Date value to be converted into XMLGregorianCalendar.
*
* @return the XMLGregorianCalendar instance initialized per specified date value
*/
public static XMLGregorianCalendar getXMLGregorianCalendarValue(Date date)
{
GregorianCalendar gregorianCalendar = new GregorianCalendar();
if (date != null)
{
gregorianCalendar.setTime(date);
}
try
{
return DatatypeFactory.newInstance().newXMLGregorianCalendar(gregorianCalendar);
}
catch (DatatypeConfigurationException ex)
{
throw new IllegalStateException("Failed to create a new instance of DataTypeFactory.", ex);
}
}
/**
 * Adds a number of days to a timestamp returning a new object. The original {@code Timestamp} is unchanged.
 *
 * @param timestamp the timestamp, not null
 * @param amount the amount to add, may be negative
 *
 * @return the new {@code Timestamp} with the amount added
 */
public static Timestamp addDays(Timestamp timestamp, int amount)
{
    // A Timestamp's millisecond value converts to a Date directly; the
    // intermediate Calendar the previous implementation used added nothing.
    // NOTE(review): nanosecond precision finer than one millisecond is dropped,
    // exactly as it was by the old Calendar-based conversion.
    return new Timestamp(addDays(new Date(timestamp.getTime()), amount).getTime());
}
/**
 * Adds a number of minutes to a timestamp returning a new object. The original {@code Timestamp} is unchanged.
 *
 * @param timestamp the timestamp, not null
 * @param amount the amount to add, may be negative
 *
 * @return the new {@code Timestamp} with the amount added
 */
public static Timestamp addMinutes(Timestamp timestamp, int amount)
{
    // A Timestamp's millisecond value converts to a Date directly; the
    // intermediate Calendar the previous implementation used added nothing.
    // NOTE(review): nanosecond precision finer than one millisecond is dropped,
    // exactly as it was by the old Calendar-based conversion.
    return new Timestamp(addMinutes(new Date(timestamp.getTime()), amount).getTime());
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.nested;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.sum.InternalSum;
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.DoubleStream;
/**
 * Tests for the {@code nested} aggregator: verifies that nested (block-joined)
 * child documents are counted against the right parent documents and that
 * metric sub-aggregations (max/sum) see exactly the nested values.
 */
public class NestedAggregatorTests extends AggregatorTestCase {

    private static final String VALUE_FIELD_NAME = "number";
    private static final String NESTED_OBJECT = "nested_object";
    private static final String NESTED_OBJECT2 = "nested_object2";
    private static final String NESTED_AGG = "nestedAgg";
    private static final String MAX_AGG_NAME = "maxAgg";
    private static final String SUM_AGG_NAME = "sumAgg";

    /** An empty index must yield a doc count of 0 and a max of -Infinity. */
    public void testNoDocs() throws IOException {
        try (Directory directory = newDirectory()) {
            try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
                // intentionally not writing any docs
            }
            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                    NESTED_OBJECT);
                MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME)
                    .field(VALUE_FIELD_NAME);
                nestedBuilder.subAggregation(maxAgg);
                MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
                    NumberFieldMapper.NumberType.LONG);
                fieldType.setName(VALUE_FIELD_NAME);

                Nested nested = search(newSearcher(indexReader, false, true),
                    new MatchAllDocsQuery(), nestedBuilder, fieldType);

                assertEquals(NESTED_AGG, nested.getName());
                assertEquals(0, nested.getDocCount());

                InternalMax max = (InternalMax)
                    ((InternalAggregation) nested).getProperty(MAX_AGG_NAME);
                assertEquals(MAX_AGG_NAME, max.getName());
                assertEquals(Double.NEGATIVE_INFINITY, max.getValue(), Double.MIN_VALUE);
            }
        }
    }

    /** Single nesting level: doc count and max must match the generated nested docs. */
    public void testSingleNestingMax() throws IOException {
        int numRootDocs = randomIntBetween(1, 20);
        int expectedNestedDocs = 0;
        double expectedMaxValue = Double.NEGATIVE_INFINITY;
        try (Directory directory = newDirectory()) {
            try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
                for (int i = 0; i < numRootDocs; i++) {
                    List<Document> documents = new ArrayList<>();
                    int numNestedDocs = randomIntBetween(0, 20);
                    expectedMaxValue = Math.max(expectedMaxValue,
                        generateMaxDocs(documents, numNestedDocs, i, NESTED_OBJECT, VALUE_FIELD_NAME));
                    expectedNestedDocs += numNestedDocs;

                    // The root (parent) document must be added last in the block.
                    Document document = new Document();
                    document.add(new Field(UidFieldMapper.NAME, "type#" + i,
                        UidFieldMapper.Defaults.FIELD_TYPE));
                    document.add(new Field(TypeFieldMapper.NAME, "test",
                        TypeFieldMapper.Defaults.FIELD_TYPE));
                    documents.add(document);
                    iw.addDocuments(documents);
                }
                iw.commit();
            }
            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                    NESTED_OBJECT);
                MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME)
                    .field(VALUE_FIELD_NAME);
                nestedBuilder.subAggregation(maxAgg);
                MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
                    NumberFieldMapper.NumberType.LONG);
                fieldType.setName(VALUE_FIELD_NAME);

                Nested nested = search(newSearcher(indexReader, false, true),
                    new MatchAllDocsQuery(), nestedBuilder, fieldType);

                assertEquals(NESTED_AGG, nested.getName());
                assertEquals(expectedNestedDocs, nested.getDocCount());

                InternalMax max = (InternalMax)
                    ((InternalAggregation) nested).getProperty(MAX_AGG_NAME);
                assertEquals(MAX_AGG_NAME, max.getName());
                assertEquals(expectedMaxValue, max.getValue(), Double.MIN_VALUE);
            }
        }
    }

    /** Two nesting levels ("a.b" path): doc count and max must still line up. */
    public void testDoubleNestingMax() throws IOException {
        int numRootDocs = randomIntBetween(1, 20);
        int expectedNestedDocs = 0;
        double expectedMaxValue = Double.NEGATIVE_INFINITY;
        try (Directory directory = newDirectory()) {
            try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
                for (int i = 0; i < numRootDocs; i++) {
                    List<Document> documents = new ArrayList<>();
                    int numNestedDocs = randomIntBetween(0, 20);
                    expectedMaxValue = Math.max(expectedMaxValue,
                        generateMaxDocs(documents, numNestedDocs, i, NESTED_OBJECT + "." + NESTED_OBJECT2, VALUE_FIELD_NAME));
                    expectedNestedDocs += numNestedDocs;

                    Document document = new Document();
                    document.add(new Field(UidFieldMapper.NAME, "type#" + i,
                        UidFieldMapper.Defaults.FIELD_TYPE));
                    document.add(new Field(TypeFieldMapper.NAME, "test",
                        TypeFieldMapper.Defaults.FIELD_TYPE));
                    documents.add(document);
                    iw.addDocuments(documents);
                }
                iw.commit();
            }
            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                    NESTED_OBJECT + "." + NESTED_OBJECT2);
                MaxAggregationBuilder maxAgg = new MaxAggregationBuilder(MAX_AGG_NAME)
                    .field(VALUE_FIELD_NAME);
                nestedBuilder.subAggregation(maxAgg);
                MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
                    NumberFieldMapper.NumberType.LONG);
                fieldType.setName(VALUE_FIELD_NAME);

                Nested nested = search(newSearcher(indexReader, false, true),
                    new MatchAllDocsQuery(), nestedBuilder, fieldType);

                assertEquals(NESTED_AGG, nested.getName());
                assertEquals(expectedNestedDocs, nested.getDocCount());

                InternalMax max = (InternalMax)
                    ((InternalAggregation) nested).getProperty(MAX_AGG_NAME);
                assertEquals(MAX_AGG_NAME, max.getName());
                assertEquals(expectedMaxValue, max.getValue(), Double.MIN_VALUE);
            }
        }
    }

    /** Nested docs indexed under a different path must not leak into the aggregation. */
    public void testOrphanedDocs() throws IOException {
        int numRootDocs = randomIntBetween(1, 20);
        int expectedNestedDocs = 0;
        double expectedSum = 0;
        try (Directory directory = newDirectory()) {
            try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
                for (int i = 0; i < numRootDocs; i++) {
                    List<Document> documents = new ArrayList<>();
                    int numNestedDocs = randomIntBetween(0, 20);
                    expectedSum += generateSumDocs(documents, numNestedDocs, i, NESTED_OBJECT, VALUE_FIELD_NAME);
                    expectedNestedDocs += numNestedDocs;

                    Document document = new Document();
                    document.add(new Field(UidFieldMapper.NAME, "type#" + i,
                        UidFieldMapper.Defaults.FIELD_TYPE));
                    document.add(new Field(TypeFieldMapper.NAME, "test",
                        TypeFieldMapper.Defaults.FIELD_TYPE));
                    documents.add(document);
                    iw.addDocuments(documents);
                }
                // add some random nested docs that don't belong
                List<Document> documents = new ArrayList<>();
                int numOrphanedDocs = randomIntBetween(0, 20);
                generateSumDocs(documents, numOrphanedDocs, 1234, "foo", VALUE_FIELD_NAME);
                iw.addDocuments(documents);
                iw.commit();
            }
            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                    NESTED_OBJECT);
                SumAggregationBuilder sumAgg = new SumAggregationBuilder(SUM_AGG_NAME)
                    .field(VALUE_FIELD_NAME);
                nestedBuilder.subAggregation(sumAgg);
                MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
                    NumberFieldMapper.NumberType.LONG);
                fieldType.setName(VALUE_FIELD_NAME);

                Nested nested = search(newSearcher(indexReader, false, true),
                    new MatchAllDocsQuery(), nestedBuilder, fieldType);

                assertEquals(NESTED_AGG, nested.getName());
                assertEquals(expectedNestedDocs, nested.getDocCount());

                InternalSum sum = (InternalSum)
                    ((InternalAggregation) nested).getProperty(SUM_AGG_NAME);
                assertEquals(SUM_AGG_NAME, sum.getName());
                assertEquals(expectedSum, sum.getValue(), Double.MIN_VALUE);
            }
        }
    }

    /**
     * Regression test: the aggregator must reset its current root doc id between
     * segments, otherwise child docs from a previous segment are matched again.
     */
    public void testResetRootDocId() throws Exception {
        IndexWriterConfig iwc = new IndexWriterConfig(null);
        // Keep each commit as its own segment so the cross-segment reset is exercised.
        iwc.setMergePolicy(NoMergePolicy.INSTANCE);
        try (Directory directory = newDirectory()) {
            try (RandomIndexWriter iw = new RandomIndexWriter(random(), directory, iwc)) {
                List<Document> documents = new ArrayList<>();

                // 1 segment with, 1 root document, with 3 nested sub docs
                Document document = new Document();
                document.add(new Field(UidFieldMapper.NAME, "type#1", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
                document.add(new Field(TypeFieldMapper.NAME, "__nested_field", TypeFieldMapper.Defaults.FIELD_TYPE));
                documents.add(document);
                document = new Document();
                document.add(new Field(UidFieldMapper.NAME, "type#1", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
                document.add(new Field(TypeFieldMapper.NAME, "__nested_field", TypeFieldMapper.Defaults.FIELD_TYPE));
                documents.add(document);
                document = new Document();
                document.add(new Field(UidFieldMapper.NAME, "type#1", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
                document.add(new Field(TypeFieldMapper.NAME, "__nested_field", TypeFieldMapper.Defaults.FIELD_TYPE));
                documents.add(document);
                document = new Document();
                document.add(new Field(UidFieldMapper.NAME, "type#1", UidFieldMapper.Defaults.FIELD_TYPE));
                document.add(new Field(TypeFieldMapper.NAME, "test", TypeFieldMapper.Defaults.FIELD_TYPE));
                documents.add(document);
                iw.addDocuments(documents);
                iw.commit();
                documents.clear();

                // 1 segment with:
                // 1 document, with 1 nested subdoc
                document = new Document();
                document.add(new Field(UidFieldMapper.NAME, "type#2", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
                document.add(new Field(TypeFieldMapper.NAME, "__nested_field", TypeFieldMapper.Defaults.FIELD_TYPE));
                documents.add(document);
                document = new Document();
                document.add(new Field(UidFieldMapper.NAME, "type#2", UidFieldMapper.Defaults.FIELD_TYPE));
                document.add(new Field(TypeFieldMapper.NAME, "test", TypeFieldMapper.Defaults.FIELD_TYPE));
                documents.add(document);
                iw.addDocuments(documents);
                documents.clear();
                // and 1 document, with 1 nested subdoc
                document = new Document();
                document.add(new Field(UidFieldMapper.NAME, "type#3", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
                document.add(new Field(TypeFieldMapper.NAME, "__nested_field", TypeFieldMapper.Defaults.FIELD_TYPE));
                documents.add(document);
                document = new Document();
                document.add(new Field(UidFieldMapper.NAME, "type#3", UidFieldMapper.Defaults.FIELD_TYPE));
                document.add(new Field(TypeFieldMapper.NAME, "test", TypeFieldMapper.Defaults.FIELD_TYPE));
                documents.add(document);
                iw.addDocuments(documents);
                iw.commit();
                // note: the try-with-resources block closes the writer; no explicit close needed
            }
            try (IndexReader indexReader = wrap(DirectoryReader.open(directory))) {
                NestedAggregationBuilder nestedBuilder = new NestedAggregationBuilder(NESTED_AGG,
                    "nested_field");
                MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
                    NumberFieldMapper.NumberType.LONG);
                fieldType.setName(VALUE_FIELD_NAME);

                // Exclude the "type#2" root document from the top-level query.
                BooleanQuery.Builder bq = new BooleanQuery.Builder();
                bq.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST);
                bq.add(new TermQuery(new Term(UidFieldMapper.NAME, "type#2")), BooleanClause.Occur.MUST_NOT);

                Nested nested = search(newSearcher(indexReader, false, true),
                    new ConstantScoreQuery(bq.build()), nestedBuilder, fieldType);

                assertEquals(NESTED_AGG, nested.getName());
                // The bug would manifest as 6 docs: without resetting currentRootDoc,
                // the child docs from the first segment would be emitted as hits too.
                assertEquals(4L, nested.getDocCount());
            }
        }
    }

    /** Generates nested docs and returns the maximum generated value (-Infinity if none). */
    private double generateMaxDocs(List<Document> documents, int numNestedDocs, int id, String path, String fieldName) {
        return DoubleStream.of(generateDocuments(documents, numNestedDocs, id, path, fieldName))
            .max().orElse(Double.NEGATIVE_INFINITY);
    }

    /** Generates nested docs and returns the sum of the generated values. */
    private double generateSumDocs(List<Document> documents, int numNestedDocs, int id, String path, String fieldName) {
        return DoubleStream.of(generateDocuments(documents, numNestedDocs, id, path, fieldName)).sum();
    }

    /**
     * Appends {@code numNestedDocs} nested documents (type "__" + path) for root id
     * {@code id} to {@code documents} and returns the random values that were indexed.
     */
    private double[] generateDocuments(List<Document> documents, int numNestedDocs, int id, String path, String fieldName) {
        double[] values = new double[numNestedDocs];
        for (int i = 0; i < numNestedDocs; i++) {
            Document document = new Document();
            document.add(new Field(UidFieldMapper.NAME, "type#" + id,
                UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
            document.add(new Field(TypeFieldMapper.NAME, "__" + path,
                TypeFieldMapper.Defaults.FIELD_TYPE));
            long value = randomNonNegativeLong() % 10000;
            document.add(new SortedNumericDocValuesField(fieldName, value));
            documents.add(document);
            values[i] = value;
        }
        return values;
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.snapshots;
import org.elasticsearch.repositories.ShardGeneration;
import org.elasticsearch.repositories.ShardSnapshotResult;
import org.elasticsearch.snapshots.AbortedSnapshotException;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Represents the status of a single shard snapshot, modelled as a small state
 * machine over {@link Stage} (INIT -> STARTED -> FINALIZE -> DONE, with FAILURE
 * and ABORTED as terminal side exits). All mutating transitions are
 * {@code synchronized}; the current stage is additionally held in an
 * {@link AtomicReference} so {@link #isAborted()} can be read without locking.
 */
public class IndexShardSnapshotStatus {

    /**
     * Snapshot stage
     */
    public enum Stage {
        /**
         * Snapshot hasn't started yet
         */
        INIT,
        /**
         * Index files are being copied
         */
        STARTED,
        /**
         * Snapshot metadata is being written
         */
        FINALIZE,
        /**
         * Snapshot completed successfully
         */
        DONE,
        /**
         * Snapshot failed
         */
        FAILURE,
        /**
         * Snapshot aborted
         */
        ABORTED
    }

    // Current stage; transitions use compareAndSet so concurrent moves race safely.
    private final AtomicReference<Stage> stage;
    // Shard generation; updated from the ShardSnapshotResult when the snapshot completes.
    private final AtomicReference<ShardGeneration> generation;
    private final AtomicReference<ShardSnapshotResult> shardSnapshotResult; // only set in stage DONE
    // The fields below are written only from the constructor and synchronized methods,
    // and are read under the same lock via asCopy().
    private long startTime;
    private long totalTime;
    private int incrementalFileCount;
    private int totalFileCount;
    private int processedFileCount;
    private long totalSize;
    private long incrementalSize;
    private long processedSize;
    private long indexVersion;
    private String failure;

    private IndexShardSnapshotStatus(
        final Stage stage,
        final long startTime,
        final long totalTime,
        final int incrementalFileCount,
        final int totalFileCount,
        final int processedFileCount,
        final long incrementalSize,
        final long totalSize,
        final long processedSize,
        final String failure,
        final ShardGeneration generation
    ) {
        this.stage = new AtomicReference<>(Objects.requireNonNull(stage));
        this.generation = new AtomicReference<>(generation);
        this.shardSnapshotResult = new AtomicReference<>();
        this.startTime = startTime;
        this.totalTime = totalTime;
        this.incrementalFileCount = incrementalFileCount;
        this.totalFileCount = totalFileCount;
        this.processedFileCount = processedFileCount;
        this.totalSize = totalSize;
        this.processedSize = processedSize;
        this.incrementalSize = incrementalSize;
        this.failure = failure;
    }

    /**
     * Transitions from {@code INIT} to {@code STARTED}, recording the start time and
     * the file counts/sizes the snapshot will process.
     *
     * @return a {@link Copy} of the updated status
     * @throws AbortedSnapshotException if the snapshot was aborted in the meantime
     * @throws IllegalStateException if the status was in any other stage
     */
    public synchronized Copy moveToStarted(
        final long startTime,
        final int incrementalFileCount,
        final int totalFileCount,
        final long incrementalSize,
        final long totalSize
    ) {
        if (stage.compareAndSet(Stage.INIT, Stage.STARTED)) {
            this.startTime = startTime;
            this.incrementalFileCount = incrementalFileCount;
            this.totalFileCount = totalFileCount;
            this.incrementalSize = incrementalSize;
            this.totalSize = totalSize;
        } else if (isAborted()) {
            throw new AbortedSnapshotException();
        } else {
            assert false : "Should not try to move stage [" + stage.get() + "] to [STARTED]";
            throw new IllegalStateException(
                "Unable to move the shard snapshot status to [STARTED]: " + "expecting [INIT] but got [" + stage.get() + "]"
            );
        }
        return asCopy();
    }

    /**
     * Transitions from {@code STARTED} to {@code FINALIZE}, recording the index version.
     *
     * @return a {@link Copy} of the updated status
     * @throws AbortedSnapshotException if the snapshot was aborted in the meantime
     * @throws IllegalStateException if the status was in any other stage
     */
    public synchronized Copy moveToFinalize(final long indexVersion) {
        if (stage.compareAndSet(Stage.STARTED, Stage.FINALIZE)) {
            this.indexVersion = indexVersion;
        } else if (isAborted()) {
            throw new AbortedSnapshotException();
        } else {
            assert false : "Should not try to move stage [" + stage.get() + "] to [FINALIZE]";
            throw new IllegalStateException(
                "Unable to move the shard snapshot status to [FINALIZE]: " + "expecting [STARTED] but got [" + stage.get() + "]"
            );
        }
        return asCopy();
    }

    /**
     * Transitions from {@code FINALIZE} to {@code DONE}, recording the total time
     * taken, the {@link ShardSnapshotResult} and its shard generation.
     *
     * @throws IllegalStateException if the status was in any other stage
     */
    public synchronized void moveToDone(final long endTime, final ShardSnapshotResult shardSnapshotResult) {
        assert shardSnapshotResult != null;
        assert shardSnapshotResult.getGeneration() != null;
        if (stage.compareAndSet(Stage.FINALIZE, Stage.DONE)) {
            // clamp to 0 in case of clock skew between start and end readings
            this.totalTime = Math.max(0L, endTime - startTime);
            this.shardSnapshotResult.set(shardSnapshotResult);
            this.generation.set(shardSnapshotResult.getGeneration());
        } else {
            assert false : "Should not try to move stage [" + stage.get() + "] to [DONE]";
            throw new IllegalStateException(
                "Unable to move the shard snapshot status to [DONE]: " + "expecting [FINALIZE] but got [" + stage.get() + "]"
            );
        }
    }

    /**
     * Moves to {@code ABORTED} with the given failure description, but only if the
     * snapshot has not progressed past {@code INIT} or {@code STARTED}; otherwise
     * this is a no-op.
     */
    public synchronized void abortIfNotCompleted(final String failure) {
        if (stage.compareAndSet(Stage.INIT, Stage.ABORTED) || stage.compareAndSet(Stage.STARTED, Stage.ABORTED)) {
            this.failure = failure;
        }
    }

    /**
     * Moves to {@code FAILURE} from any stage, recording the total time and failure
     * description unless the status was already in {@code FAILURE}.
     */
    public synchronized void moveToFailed(final long endTime, final String failure) {
        if (stage.getAndSet(Stage.FAILURE) != Stage.FAILURE) {
            this.totalTime = Math.max(0L, endTime - startTime);
            this.failure = failure;
        }
    }

    /** Returns the current shard generation (may lag behind a concurrent moveToDone). */
    public ShardGeneration generation() {
        return generation.get();
    }

    /** Returns the snapshot result; only valid once the stage is {@code DONE}. */
    public ShardSnapshotResult getShardSnapshotResult() {
        assert stage.get() == Stage.DONE : stage.get();
        return shardSnapshotResult.get();
    }

    /** Lock-free check whether the snapshot has been aborted. */
    public boolean isAborted() {
        return stage.get() == Stage.ABORTED;
    }

    /**
     * Increments number of processed files
     */
    public synchronized void addProcessedFile(long size) {
        processedFileCount++;
        processedSize += size;
    }

    /**
     * Returns a copy of the current {@link IndexShardSnapshotStatus}. This method is
     * intended to be used when a coherent state of {@link IndexShardSnapshotStatus} is needed.
     *
     * @return a {@link IndexShardSnapshotStatus.Copy}
     */
    public synchronized IndexShardSnapshotStatus.Copy asCopy() {
        return new IndexShardSnapshotStatus.Copy(
            stage.get(),
            startTime,
            totalTime,
            incrementalFileCount,
            totalFileCount,
            processedFileCount,
            incrementalSize,
            totalSize,
            processedSize,
            indexVersion,
            failure
        );
    }

    /** Creates a status in stage {@code INIT} with the given shard generation. */
    public static IndexShardSnapshotStatus newInitializing(ShardGeneration generation) {
        return new IndexShardSnapshotStatus(Stage.INIT, 0L, 0L, 0, 0, 0, 0, 0, 0, null, generation);
    }

    /** Creates a status directly in stage {@code FAILURE}; the description is mandatory. */
    public static IndexShardSnapshotStatus newFailed(final String failure) {
        assert failure != null : "expecting non null failure for a failed IndexShardSnapshotStatus";
        if (failure == null) {
            throw new IllegalArgumentException("A failure description is required for a failed IndexShardSnapshotStatus");
        }
        return new IndexShardSnapshotStatus(Stage.FAILURE, 0L, 0L, 0, 0, 0, 0, 0, 0, failure, null);
    }

    /** Creates a status directly in stage {@code DONE} from the final snapshot figures. */
    public static IndexShardSnapshotStatus newDone(
        final long startTime,
        final long totalTime,
        final int incrementalFileCount,
        final int fileCount,
        final long incrementalSize,
        final long size,
        ShardGeneration generation
    ) {
        // The snapshot is done which means the number of processed files is the same as total
        return new IndexShardSnapshotStatus(
            Stage.DONE,
            startTime,
            totalTime,
            incrementalFileCount,
            fileCount,
            incrementalFileCount,
            incrementalSize,
            size,
            incrementalSize,
            null,
            generation
        );
    }

    /**
     * Returns an immutable state of {@link IndexShardSnapshotStatus} at a given point in time.
     */
    public static class Copy {

        private final Stage stage;
        private final long startTime;
        private final long totalTime;
        private final int incrementalFileCount;
        private final int totalFileCount;
        private final int processedFileCount;
        private final long totalSize;
        private final long processedSize;
        private final long incrementalSize;
        private final long indexVersion;
        private final String failure;

        public Copy(
            final Stage stage,
            final long startTime,
            final long totalTime,
            final int incrementalFileCount,
            final int totalFileCount,
            final int processedFileCount,
            final long incrementalSize,
            final long totalSize,
            final long processedSize,
            final long indexVersion,
            final String failure
        ) {
            this.stage = stage;
            this.startTime = startTime;
            this.totalTime = totalTime;
            this.incrementalFileCount = incrementalFileCount;
            this.totalFileCount = totalFileCount;
            this.processedFileCount = processedFileCount;
            this.totalSize = totalSize;
            this.processedSize = processedSize;
            this.incrementalSize = incrementalSize;
            this.indexVersion = indexVersion;
            this.failure = failure;
        }

        public Stage getStage() {
            return stage;
        }

        public long getStartTime() {
            return startTime;
        }

        public long getTotalTime() {
            return totalTime;
        }

        public int getIncrementalFileCount() {
            return incrementalFileCount;
        }

        public int getTotalFileCount() {
            return totalFileCount;
        }

        public int getProcessedFileCount() {
            return processedFileCount;
        }

        public long getIncrementalSize() {
            return incrementalSize;
        }

        public long getTotalSize() {
            return totalSize;
        }

        public long getProcessedSize() {
            return processedSize;
        }

        public long getIndexVersion() {
            return indexVersion;
        }

        public String getFailure() {
            return failure;
        }

        @Override
        public String toString() {
            return "index shard snapshot status ("
                + "stage="
                + stage
                + ", startTime="
                + startTime
                + ", totalTime="
                + totalTime
                + ", incrementalFileCount="
                + incrementalFileCount
                + ", totalFileCount="
                + totalFileCount
                + ", processedFileCount="
                + processedFileCount
                + ", incrementalSize="
                + incrementalSize
                + ", totalSize="
                + totalSize
                + ", processedSize="
                + processedSize
                + ", indexVersion="
                + indexVersion
                + ", failure='"
                + failure
                + '\''
                + ')';
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.type;
import io.prestosql.operator.scalar.AbstractTestFunctions;
import org.testng.annotations.Test;
import static io.prestosql.spi.StandardErrorCode.DIVISION_BY_ZERO;
import static io.prestosql.spi.StandardErrorCode.INVALID_CAST_ARGUMENT;
import static io.prestosql.spi.function.OperatorType.INDETERMINATE;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.BooleanType.BOOLEAN;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.IntegerType.INTEGER;
import static io.prestosql.spi.type.RealType.REAL;
import static io.prestosql.spi.type.SmallintType.SMALLINT;
import static io.prestosql.spi.type.TinyintType.TINYINT;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static java.lang.String.format;
public class TestSmallintOperators
extends AbstractTestFunctions
{
@Test
public void testLiteral()
{
assertFunction("SMALLINT'37'", SMALLINT, (short) 37);
assertFunction("SMALLINT'17'", SMALLINT, (short) 17);
assertInvalidCast("SMALLINT'" + ((long) Short.MAX_VALUE + 1L) + "'");
}
@Test
public void testUnaryPlus()
{
assertFunction("+SMALLINT'37'", SMALLINT, (short) 37);
assertFunction("+SMALLINT'17'", SMALLINT, (short) 17);
}
@Test
public void testUnaryMinus()
{
assertFunction("SMALLINT'-37'", SMALLINT, (short) -37);
assertFunction("SMALLINT'-17'", SMALLINT, (short) -17);
assertInvalidFunction("SMALLINT'-" + Short.MIN_VALUE + "'", INVALID_CAST_ARGUMENT);
}
@Test
public void testAdd()
{
assertFunction("SMALLINT'37' + SMALLINT'37'", SMALLINT, (short) (37 + 37));
assertFunction("SMALLINT'37' + SMALLINT'17'", SMALLINT, (short) (37 + 17));
assertFunction("SMALLINT'17' + SMALLINT'37'", SMALLINT, (short) (17 + 37));
assertFunction("SMALLINT'17' + SMALLINT'17'", SMALLINT, (short) (17 + 17));
assertNumericOverflow(format("SMALLINT'%s' + SMALLINT'1'", Short.MAX_VALUE), "smallint addition overflow: 32767 + 1");
}
@Test
public void testSubtract()
{
assertFunction("SMALLINT'37' - SMALLINT'37'", SMALLINT, (short) 0);
assertFunction("SMALLINT'37' - SMALLINT'17'", SMALLINT, (short) (37 - 17));
assertFunction("SMALLINT'17' - SMALLINT'37'", SMALLINT, (short) (17 - 37));
assertFunction("SMALLINT'17' - SMALLINT'17'", SMALLINT, (short) 0);
assertNumericOverflow(format("SMALLINT'%s' - SMALLINT'1'", Short.MIN_VALUE), "smallint subtraction overflow: -32768 - 1");
}
@Test
public void testMultiply()
{
assertFunction("SMALLINT'37' * SMALLINT'37'", SMALLINT, (short) (37 * 37));
assertFunction("SMALLINT'37' * SMALLINT'17'", SMALLINT, (short) (37 * 17));
assertFunction("SMALLINT'17' * SMALLINT'37'", SMALLINT, (short) (17 * 37));
assertFunction("SMALLINT'17' * SMALLINT'17'", SMALLINT, (short) (17 * 17));
assertNumericOverflow(format("SMALLINT'%s' * SMALLINT'2'", Short.MAX_VALUE), "smallint multiplication overflow: 32767 * 2");
}
@Test
public void testDivide()
{
assertFunction("SMALLINT'37' / SMALLINT'37'", SMALLINT, (short) 1);
assertFunction("SMALLINT'37' / SMALLINT'17'", SMALLINT, (short) (37 / 17));
assertFunction("SMALLINT'17' / SMALLINT'37'", SMALLINT, (short) (17 / 37));
assertFunction("SMALLINT'17' / SMALLINT'17'", SMALLINT, (short) 1);
assertInvalidFunction("SMALLINT'17' / SMALLINT'0'", DIVISION_BY_ZERO);
}
@Test
public void testModulus()
{
assertFunction("SMALLINT'37' % SMALLINT'37'", SMALLINT, (short) 0);
assertFunction("SMALLINT'37' % SMALLINT'17'", SMALLINT, (short) (37 % 17));
assertFunction("SMALLINT'17' % SMALLINT'37'", SMALLINT, (short) (17 % 37));
assertFunction("SMALLINT'17' % SMALLINT'17'", SMALLINT, (short) 0);
assertInvalidFunction("SMALLINT'17' % SMALLINT'0'", DIVISION_BY_ZERO);
}
@Test
public void testNegation()
{
assertFunction("-(SMALLINT'37')", SMALLINT, (short) -37);
assertFunction("-(SMALLINT'17')", SMALLINT, (short) -17);
assertFunction("-(SMALLINT'" + Short.MAX_VALUE + "')", SMALLINT, (short) (Short.MIN_VALUE + 1));
assertNumericOverflow(format("-(SMALLINT'%s')", Short.MIN_VALUE), "smallint negation overflow: -32768");
}
@Test
public void testEqual()
{
assertFunction("SMALLINT'37' = SMALLINT'37'", BOOLEAN, true);
assertFunction("SMALLINT'37' = SMALLINT'17'", BOOLEAN, false);
assertFunction("SMALLINT'17' = SMALLINT'37'", BOOLEAN, false);
assertFunction("SMALLINT'17' = SMALLINT'17'", BOOLEAN, true);
}
@Test
public void testNotEqual()
{
assertFunction("SMALLINT'37' <> SMALLINT'37'", BOOLEAN, false);
assertFunction("SMALLINT'37' <> SMALLINT'17'", BOOLEAN, true);
assertFunction("SMALLINT'17' <> SMALLINT'37'", BOOLEAN, true);
assertFunction("SMALLINT'17' <> SMALLINT'17'", BOOLEAN, false);
}
@Test
public void testLessThan()
{
assertFunction("SMALLINT'37' < SMALLINT'37'", BOOLEAN, false);
assertFunction("SMALLINT'37' < SMALLINT'17'", BOOLEAN, false);
assertFunction("SMALLINT'17' < SMALLINT'37'", BOOLEAN, true);
assertFunction("SMALLINT'17' < SMALLINT'17'", BOOLEAN, false);
}
@Test
public void testLessThanOrEqual()
{
assertFunction("SMALLINT'37' <= SMALLINT'37'", BOOLEAN, true);
assertFunction("SMALLINT'37' <= SMALLINT'17'", BOOLEAN, false);
assertFunction("SMALLINT'17' <= SMALLINT'37'", BOOLEAN, true);
assertFunction("SMALLINT'17' <= SMALLINT'17'", BOOLEAN, true);
}
@Test
public void testGreaterThan()
{
assertFunction("SMALLINT'37' > SMALLINT'37'", BOOLEAN, false);
assertFunction("SMALLINT'37' > SMALLINT'17'", BOOLEAN, true);
assertFunction("SMALLINT'17' > SMALLINT'37'", BOOLEAN, false);
assertFunction("SMALLINT'17' > SMALLINT'17'", BOOLEAN, false);
}
@Test
public void testGreaterThanOrEqual()
{
assertFunction("SMALLINT'37' >= SMALLINT'37'", BOOLEAN, true);
assertFunction("SMALLINT'37' >= SMALLINT'17'", BOOLEAN, true);
assertFunction("SMALLINT'17' >= SMALLINT'37'", BOOLEAN, false);
assertFunction("SMALLINT'17' >= SMALLINT'17'", BOOLEAN, true);
}
@Test
public void testBetween()
{
assertFunction("SMALLINT'37' BETWEEN SMALLINT'37' AND SMALLINT'37'", BOOLEAN, true);
assertFunction("SMALLINT'37' BETWEEN SMALLINT'37' AND SMALLINT'17'", BOOLEAN, false);
assertFunction("SMALLINT'37' BETWEEN SMALLINT'17' AND SMALLINT'37'", BOOLEAN, true);
assertFunction("SMALLINT'37' BETWEEN SMALLINT'17' AND SMALLINT'17'", BOOLEAN, false);
assertFunction("SMALLINT'17' BETWEEN SMALLINT'37' AND SMALLINT'37'", BOOLEAN, false);
assertFunction("SMALLINT'17' BETWEEN SMALLINT'37' AND SMALLINT'17'", BOOLEAN, false);
assertFunction("SMALLINT'17' BETWEEN SMALLINT'17' AND SMALLINT'37'", BOOLEAN, true);
assertFunction("SMALLINT'17' BETWEEN SMALLINT'17' AND SMALLINT'17'", BOOLEAN, true);
}
@Test
public void testCastToBigint()
{
assertFunction("cast(SMALLINT'37' as bigint)", BIGINT, 37L);
assertFunction("cast(SMALLINT'17' as bigint)", BIGINT, 17L);
}
@Test
public void testCastToInteger()
{
assertFunction("cast(SMALLINT'37' as integer)", INTEGER, 37);
assertFunction("cast(SMALLINT'17' as integer)", INTEGER, 17);
}
@Test
public void testCastToTinyint()
{
assertFunction("cast(SMALLINT'37' as tinyint)", TINYINT, (byte) 37);
assertFunction("cast(SMALLINT'17' as tinyint)", TINYINT, (byte) 17);
}
@Test
public void testCastToVarchar()
{
assertFunction("cast(SMALLINT'37' as varchar)", VARCHAR, "37");
assertFunction("cast(SMALLINT'17' as varchar)", VARCHAR, "17");
}
@Test
public void testCastToDouble()
{
assertFunction("cast(SMALLINT'37' as double)", DOUBLE, 37.0);
assertFunction("cast(SMALLINT'17' as double)", DOUBLE, 17.0);
}
@Test
public void testCastToFloat()
{
assertFunction("cast(SMALLINT'37' as real)", REAL, 37.0f);
assertFunction("cast(SMALLINT'-32768' as real)", REAL, -32768.0f);
assertFunction("cast(SMALLINT'0' as real)", REAL, 0.0f);
}
@Test
public void testCastToBoolean()
{
assertFunction("cast(SMALLINT'37' as boolean)", BOOLEAN, true);
assertFunction("cast(SMALLINT'17' as boolean)", BOOLEAN, true);
assertFunction("cast(SMALLINT'0' as boolean)", BOOLEAN, false);
}
@Test
public void testCastFromVarchar()
{
assertFunction("cast('37' as smallint)", SMALLINT, (short) 37);
assertFunction("cast('17' as smallint)", SMALLINT, (short) 17);
}
    @Test
    public void testIsDistinctFrom()
    {
        // IS DISTINCT FROM is the null-safe inequality test: two NULLs compare as
        // "not distinct" (false), while NULL vs. any concrete value is "distinct" (true).
        assertFunction("CAST(NULL AS SMALLINT) IS DISTINCT FROM CAST(NULL AS SMALLINT)", BOOLEAN, false);
        assertFunction("SMALLINT'37' IS DISTINCT FROM SMALLINT'37'", BOOLEAN, false);
        assertFunction("SMALLINT'37' IS DISTINCT FROM SMALLINT'38'", BOOLEAN, true);
        assertFunction("NULL IS DISTINCT FROM SMALLINT'37'", BOOLEAN, true);
        assertFunction("SMALLINT'37' IS DISTINCT FROM NULL", BOOLEAN, true);
    }
@Test
public void testIndeterminate()
throws Exception
{
assertOperator(INDETERMINATE, "cast(null as smallint)", BOOLEAN, true);
assertOperator(INDETERMINATE, "cast(12 as smallint)", BOOLEAN, false);
assertOperator(INDETERMINATE, "cast(0 as smallint)", BOOLEAN, false);
assertOperator(INDETERMINATE, "cast(-23 as smallint)", BOOLEAN, false);
assertOperator(INDETERMINATE, "cast(1.4 as smallint)", BOOLEAN, false);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.expression;
import com.google.common.collect.ImmutableMap;
import org.apache.calcite.rex.RexNode;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.builtin.IPv4AddressMatchOperatorConversion;
import org.junit.Before;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class IPv4AddressMatchExpressionTest extends ExpressionTestBase
{
  private static final String IPV4 = "192.168.0.1";
  private static final long IPV4_LONG = 3232235521L;
  private static final String IPV4_UINT = "3232235521";
  private static final String IPV4_NETWORK = "192.168.0.0";
  private static final String IPV4_BROADCAST = "192.168.255.255";
  private static final String IPV6_COMPATIBLE = "::192.168.0.1";
  private static final String IPV6_MAPPED = "::ffff:192.168.0.1";
  private static final String SUBNET_192_168 = "192.168.0.0/16";
  private static final String SUBNET_10 = "10.0.0.0/8";
  private static final Object IGNORE_EXPECTED_RESULT = null;
  private static final long MATCH = 1L;
  private static final long NO_MATCH = 0L;
  private static final String VAR = "s";
  private static final RowSignature ROW_SIGNATURE = RowSignature.builder().add(VAR, ColumnType.STRING).build();
  private static final Map<String, Object> BINDINGS = ImmutableMap.of(VAR, "foo");

  private IPv4AddressMatchOperatorConversion target;
  private ExpressionTestHelper testHelper;

  @Before
  public void setUp()
  {
    // Fresh conversion + helper per test so state cannot leak between cases.
    target = new IPv4AddressMatchOperatorConversion();
    testHelper = new ExpressionTestHelper(ROW_SIGNATURE, BINDINGS);
  }

  @Test
  public void testTooFewArgs()
  {
    // Zero arguments must be rejected before any expression is evaluated.
    expectException(IllegalArgumentException.class, "must have 2 arguments");
    testExpression(Collections.emptyList(), buildExpectedExpression(), IGNORE_EXPECTED_RESULT);
  }

  @Test
  public void testTooManyArgs()
  {
    // Three arguments must be rejected as well.
    expectException(IllegalArgumentException.class, "must have 2 arguments");
    testExpression(
        Arrays.asList(
            testHelper.makeLiteral(IPV4),
            testHelper.makeLiteral(SUBNET_192_168),
            testHelper.makeLiteral(IPV4)),
        buildExpectedExpression(IPV4, SUBNET_192_168, IPV4),
        IGNORE_EXPECTED_RESULT);
  }

  @Test
  public void testSubnetArgNotLiteral()
  {
    // The subnet argument may not be a column reference.
    expectException(IllegalArgumentException.class, "subnet arg must be a literal");
    testExpression(
        Arrays.asList(testHelper.makeLiteral(IPV4), testHelper.makeInputRef(VAR)),
        buildExpectedExpression(IPV4, testHelper.makeVariable(VAR)),
        IGNORE_EXPECTED_RESULT);
  }

  @Test
  public void testSubnetArgInvalid()
  {
    // A malformed CIDR string must be rejected.
    expectException(IllegalArgumentException.class, "subnet arg has an invalid format");
    String badSubnet = "192.168.0.1/invalid";
    testExpression(
        Arrays.asList(testHelper.makeLiteral(IPV4), testHelper.makeLiteral(badSubnet)),
        buildExpectedExpression(IPV4, badSubnet),
        IGNORE_EXPECTED_RESULT);
  }

  @Test
  public void testNullArg()
  {
    // A null address never matches.
    testExpression(
        Arrays.asList(testHelper.getConstantNull(), testHelper.makeLiteral(SUBNET_192_168)),
        buildExpectedExpression(null, SUBNET_192_168),
        NO_MATCH);
  }

  @Test
  public void testInvalidArgType()
  {
    // A non-address column value ("foo" per BINDINGS) never matches.
    testExpression(
        Arrays.asList(testHelper.makeInputRef(VAR), testHelper.makeLiteral(SUBNET_192_168)),
        buildExpectedExpression(testHelper.makeVariable(VAR), SUBNET_192_168),
        NO_MATCH);
  }

  @Test
  public void testMatchingStringArgIPv4()
  {
    testExpression(IPV4, SUBNET_192_168, MATCH);
  }

  @Test
  public void testNotMatchingStringArgIPv4()
  {
    testExpression(IPV4, SUBNET_10, NO_MATCH);
  }

  @Test
  public void testMatchingStringArgIPv6Mapped()
  {
    // IPv6-mapped notation is not treated as an IPv4 address, so no match even
    // for the "matching" subnet.
    testExpression(IPV6_MAPPED, SUBNET_192_168, NO_MATCH);
  }

  @Test
  public void testNotMatchingStringArgIPv6Mapped()
  {
    testExpression(IPV6_MAPPED, SUBNET_10, NO_MATCH);
  }

  @Test
  public void testMatchingStringArgIPv6Compatible()
  {
    // IPv6-compatible notation is likewise not treated as IPv4.
    testExpression(IPV6_COMPATIBLE, SUBNET_192_168, NO_MATCH);
  }

  @Test
  public void testNotMatchingStringArgIPv6Compatible()
  {
    testExpression(IPV6_COMPATIBLE, SUBNET_10, NO_MATCH);
  }

  @Test
  public void testNotIpAddress()
  {
    // Arbitrary hostnames never match.
    testExpression("druid.apache.org", SUBNET_192_168, NO_MATCH);
  }

  @Test
  public void testMatchingLongArg()
  {
    // Long-typed addresses (unsigned int representation) are supported.
    testExpression(IPV4_LONG, SUBNET_192_168, MATCH);
  }

  @Test
  public void testNotMatchingLongArg()
  {
    testExpression(IPV4_LONG, SUBNET_10, NO_MATCH);
  }

  @Test
  public void testMatchingStringArgUnsignedInt()
  {
    // The unsigned-int form as a *string* is not interpreted as an address.
    testExpression(IPV4_UINT, SUBNET_192_168, NO_MATCH);
  }

  @Test
  public void testNotMatchingStringArgUnsignedInt()
  {
    testExpression(IPV4_UINT, SUBNET_10, NO_MATCH);
  }

  @Test
  public void testInclusive()
  {
    // The subnet match is inclusive of both the network and broadcast addresses.
    testExpression(IPV4_NETWORK, SUBNET_192_168, MATCH);
    testExpression(IPV4, SUBNET_192_168, MATCH);
    testExpression(IPV4_BROADCAST, SUBNET_192_168, MATCH);
  }

  /** Runs a string-address match and checks the expected 0/1 result. */
  private void testExpression(String ip, String cidr, long expectedMatch)
  {
    testExpression(
        Arrays.asList(testHelper.makeLiteral(ip), testHelper.makeLiteral(cidr)),
        buildExpectedExpression(ip, cidr),
        expectedMatch);
  }

  /** Runs a long-address match and checks the expected 0/1 result. */
  private void testExpression(long ip, String cidr, long expectedMatch)
  {
    testExpression(
        Arrays.asList(testHelper.makeLiteral(ip), testHelper.makeLiteral(cidr)),
        buildExpectedExpression(ip, cidr),
        expectedMatch);
  }

  /** Delegates to the helper with this test's operator conversion. */
  private void testExpression(List<? extends RexNode> exprs, DruidExpression expectedExpression, Object expectedResult)
  {
    testHelper.testExpressionString(target.calciteOperator(), exprs, expectedExpression, expectedResult);
  }

  /** Builds the expected Druid expression for the operator under test. */
  private DruidExpression buildExpectedExpression(Object... args)
  {
    return testHelper.buildExpectedExpression(target.getDruidFunctionName(), args);
  }
}
| |
/**
* Copyright (C) 2015 Bruno Candido Volpato da Cunha (brunocvcunha@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.brunocvcunha.taskerbox.web.resources;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.apache.log4j.Logger;
import org.brunocvcunha.taskerbox.Taskerbox;
import org.brunocvcunha.taskerbox.core.TaskerboxChannel;
import org.brunocvcunha.taskerbox.core.annotation.TaskerboxField;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
/**
* Resource that is used to control Channels
*
* @author Bruno Candido Volpato da Cunha
*
*/
@Path("/channels")
@Produces(MediaType.APPLICATION_JSON)
public class ChannelsResource {

  /**
   * Logger
   */
  private static final Logger log = Logger.getLogger(ChannelsResource.class.getSimpleName());

  private Taskerbox taskerbox;

  /**
   * @param taskerbox the Taskerbox instance whose channels this resource exposes
   */
  public ChannelsResource(Taskerbox taskerbox) {
    super();
    this.taskerbox = taskerbox;
  }

  /**
   * Lists all channels as a JSON array.
   *
   * @return 200 with the serialized channels, or 500 on any failure
   */
  @GET
  public Response handleRequest() {
    log.info("Listing...");
    try {
      JsonArray array = new JsonArray();
      for (TaskerboxChannel<?> channel : this.taskerbox.getChannels()) {
        JsonObject jsonChannel = getChannelJson(channel);
        log.info("Added Channel to Return: " + jsonChannel.toString());
        array.add(jsonChannel);
      }
      return Response.ok().entity(array.toString()).build();
    } catch (Exception e) {
      log.warn("Error in request: " + e.getMessage());
      return Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage()).build();
    }
  }

  /**
   * Pauses the given channel.
   *
   * @param channelName the channel id (case-insensitive)
   */
  @GET
  @Path("/{channel}/pause")
  public Response handlePause(@PathParam("channel") String channelName) {
    log.info("Pausing channel " + channelName);
    return handlePauseStatus(channelName, true);
  }

  /**
   * Unpauses the given channel.
   *
   * @param channelName the channel id (case-insensitive)
   */
  @GET
  @Path("/{channel}/unpause")
  public Response handleUnpause(@PathParam("channel") String channelName) {
    // Fixed log message typo ("Unausing" -> "Unpausing").
    log.info("Unpausing channel " + channelName);
    return handlePauseStatus(channelName, false);
  }

  /**
   * Returns a single channel as JSON.
   *
   * @param channelName the channel id (case-insensitive)
   * @return 200 with the channel, 400 when unknown, 500 on failure
   */
  @GET
  @Path("/{channel}")
  public Response handleChannel(@PathParam("channel") String channelName) {
    log.info("Getting channel " + channelName);
    try {
      TaskerboxChannel<?> channel = findChannel(channelName);
      if (channel != null) {
        return Response.ok().entity(getChannelJson(channel).toString()).build();
      }
      return Response.status(Status.BAD_REQUEST).entity("Not found channel " + channelName).build();
    } catch (Exception e) {
      log.warn("Error in request: " + e.getMessage());
      return Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage()).build();
    }
  }

  /**
   * Forces an immediate check of the given channel.
   *
   * @param channelName the channel id (case-insensitive)
   * @return 200 with the channel, 400 when unknown, 500 on failure
   */
  @GET
  @Path("/{channel}/force")
  public Response handleForce(@PathParam("channel") String channelName) {
    log.info("Forcing channel " + channelName);
    try {
      TaskerboxChannel<?> channel = findChannel(channelName);
      if (channel != null) {
        channel.check(channel.isPaused());
        return Response.ok().entity(getChannelJson(channel).toString()).build();
      }
      return Response.status(Status.BAD_REQUEST).entity("Not found channel " + channelName).build();
    } catch (Exception e) {
      log.warn("Error in request: " + e.getMessage());
      return Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage()).build();
    }
  }

  /**
   * Sets the paused flag on the given channel.
   *
   * @param channelName the channel id (case-insensitive)
   * @param paused the new paused state
   * @return 200 with the channel, 400 when unknown, 500 on failure
   */
  private Response handlePauseStatus(String channelName, boolean paused) {
    try {
      TaskerboxChannel<?> channel = findChannel(channelName);
      if (channel != null) {
        channel.setPaused(paused);
        return Response.ok().entity(getChannelJson(channel).toString()).build();
      }
      return Response.status(Status.BAD_REQUEST).entity("Not found channel " + channelName).build();
    } catch (Exception e) {
      log.warn("Error in request: " + e.getMessage());
      return Response.status(Status.INTERNAL_SERVER_ERROR).entity(e.getMessage()).build();
    }
  }

  /**
   * Finds a channel by its id, or returns {@code null} when no channel matches.
   * Extracted to remove the lookup loop duplicated across the handlers above.
   *
   * @param channelName the channel id (case-insensitive)
   */
  private TaskerboxChannel<?> findChannel(String channelName) {
    for (TaskerboxChannel<?> channel : this.taskerbox.getChannels()) {
      if (channel.getId().equalsIgnoreCase(channelName)) {
        return channel;
      }
    }
    return null;
  }

  /**
   * Serializes a channel to JSON, including any field annotated with
   * {@link TaskerboxField} under the "data" object.
   *
   * @param channel the channel to serialize
   * @throws IntrospectionException if a bean property cannot be resolved
   * @throws InvocationTargetException if a getter throws
   */
  private JsonObject getChannelJson(TaskerboxChannel<?> channel) throws IntrospectionException,
      IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    JsonObject jsonChannel = new JsonObject();
    jsonChannel.addProperty("id", channel.getId());
    jsonChannel.addProperty("displayName", channel.getDisplayName());
    jsonChannel.addProperty("daemon", channel.isDaemon());
    jsonChannel.addProperty("running", channel.isRunning());
    jsonChannel.addProperty("paused", channel.isPaused());
    jsonChannel.addProperty("checkCount", channel.getCheckCount());
    jsonChannel.addProperty("every", channel.getEvery());
    jsonChannel.addProperty("timeout", channel.getTimeout());
    jsonChannel.addProperty("class", channel.getClass().getSimpleName());

    JsonObject channelData = new JsonObject();
    for (Field field : channel.getClass().getDeclaredFields()) {
      if (field.isAnnotationPresent(TaskerboxField.class)) {
        PropertyDescriptor descriptor = new PropertyDescriptor(field.getName(), channel.getClass());
        Method readMethod = descriptor.getReadMethod();
        Object dataVal = readMethod.invoke(channel);
        if (dataVal != null) {
          if (dataVal instanceof String) {
            channelData.addProperty(field.getName(), (String) dataVal);
          } else if (dataVal instanceof Character) {
            // BUG FIX: a Character cannot be cast to String (the old combined branch threw
            // ClassCastException); use the Character overload instead.
            channelData.addProperty(field.getName(), (Character) dataVal);
          } else if (dataVal instanceof Number) {
            channelData.addProperty(field.getName(), (Number) dataVal);
          } else if (dataVal instanceof Boolean) {
            channelData.addProperty(field.getName(), (Boolean) dataVal);
          } else if (dataVal.getClass().isArray()) {
            JsonArray dataArray = new JsonArray();
            for (Object obj : (Object[]) dataVal) {
              dataArray.add(new JsonPrimitive(obj.toString()));
            }
            channelData.add(field.getName(), dataArray);
          } else {
            // Fallback: serialize anything else via toString().
            channelData.addProperty(field.getName(), dataVal.toString());
          }
        }
      }
    }
    jsonChannel.add("data", channelData);
    return jsonChannel;
  }
}
| |
/*
* Copyright (C) 2014 Michell Bak
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nmj.nmjmanager;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.Cursor;
import android.graphics.Typeface;
import android.os.Bundle;
import android.support.v4.content.LocalBroadcastManager;
import android.text.method.PasswordTransformationMethod;
import android.view.View;
import android.view.WindowManager;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.EditText;
import android.widget.Toast;
import com.nmj.db.DbAdapterSources;
import com.nmj.functions.FileSource;
import com.nmj.functions.NMJLib;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.TreeSet;
import static com.nmj.functions.NMJLib.DOMAIN;
import static com.nmj.functions.NMJLib.FILESOURCE;
import static com.nmj.functions.NMJLib.MOVIE;
import static com.nmj.functions.NMJLib.PASSWORD;
import static com.nmj.functions.NMJLib.SERVER;
import static com.nmj.functions.NMJLib.TV_SHOW;
import static com.nmj.functions.NMJLib.TYPE;
import static com.nmj.functions.NMJLib.USER;
public class AddNMJFilesourceDialog extends Activity {

    private EditText server, display_name, port;
    private String mDomain, mUser, mPass, mServer;
    private boolean isMovie = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
        setContentView(R.layout.nmj_input);
        server = (EditText) findViewById(R.id.ip_address);
        display_name = (EditText) findViewById(R.id.display_name);
        // Listen for the address chosen by the network share scanner (SearchForNetworkShares).
        LocalBroadcastManager.getInstance(this).registerReceiver(mMessageReceiver, new IntentFilter("NMJManager-network-search"));
    }

    // Receives the IP selected in the network scan and fills the server field with it.
    private BroadcastReceiver mMessageReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            String ip = intent.getExtras().getString("ip");
            server.setText(ip);
        }
    };

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Unregister since the activity is about to be closed.
        LocalBroadcastManager.getInstance(this).unregisterReceiver(mMessageReceiver);
    }

    @SuppressLint("UseSparseArrays")
    public void search(View v) {
        // BUG FIX: the sources list was previously created but never populated (the adapter
        // was obtained and ignored), so the dialog only ever offered the "scan" entry.
        final ArrayList<FileSource> sources = fetchAllSources();

        TreeSet<String> uniqueSources = new TreeSet<String>();
        int count = sources.size();
        for (int i = 0; i < count; i++) {
            // Extract the host part of the smb:// URL.
            // NOTE(review): assumes every stored filepath looks like smb://host/share — confirm.
            String temp = sources.get(i).getFilepath().replace("smb://", "");
            temp = temp.substring(0, temp.indexOf("/"));
            uniqueSources.add(temp);
        }

        // One entry per known host, plus a trailing "scan for sources" action.
        final CharSequence[] items = new CharSequence[uniqueSources.size() + 1];
        count = 0;
        Iterator<String> it = uniqueSources.iterator();
        while (it.hasNext()) {
            items[count] = it.next();
            count++;
        }
        items[items.length - 1] = getString(R.string.scanForSources);

        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle(getString(R.string.browseSources));
        builder.setItems(items, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int which) {
                if (which == (items.length - 1)) {
                    // Last entry launches the network scanner.
                    Intent intent = new Intent();
                    intent.setClass(getApplicationContext(), SearchForNetworkShares.class);
                    startActivity(intent);
                } else {
                    showUserDialog(items, which);
                }
            }});
        builder.show();
    }

    private void showUserDialog(final CharSequence[] items, final int which) {
        final ArrayList<FileSource> sources = fetchAllSources();

        // Collect the credentials previously used for the selected host.
        HashMap<String, String> userPass = new HashMap<String, String>();
        int count = sources.size();
        for (int i = 0; i < count; i++) {
            String temp = sources.get(i).getFilepath().replace("smb://", "");
            temp = temp.substring(0, temp.indexOf("/"));
            if (temp.equals(items[which])) {
                userPass.put((sources.get(i).getUser().isEmpty() ? getString(R.string.anonymous) : sources.get(i).getUser()), sources.get(i).getPassword());
            }
        }

        if (userPass.size() == 1) {
            // Only one known login: use the host directly.
            server.setText(items[which]);
        } else {
            final CharSequence[] usernames = new CharSequence[userPass.size()];
            final CharSequence[] passwords = new CharSequence[userPass.size()];
            int i = 0;
            Iterator<String> it = userPass.keySet().iterator();
            while (it.hasNext()) {
                String s = it.next();
                usernames[i] = s;
                passwords[i] = userPass.get(s);
                i++;
            }
            AlertDialog.Builder builder2 = new AlertDialog.Builder(this);
            builder2.setTitle(getString(R.string.selectLogin));
            builder2.setItems(usernames, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int choice) {
                    server.setText(items[which]);
                }
            });
            builder2.show();
        }
    }

    /**
     * Loads every stored file source from the database. Extracted so that both
     * {@link #search(View)} and {@link #showUserDialog(CharSequence[], int)} share
     * the same fetch logic; the cursor is always closed.
     */
    private ArrayList<FileSource> fetchAllSources() {
        ArrayList<FileSource> sources = new ArrayList<FileSource>();
        DbAdapterSources dbHelper = NMJManagerApplication.getSourcesAdapter();
        Cursor cursor = dbHelper.fetchAllSources();
        try {
            while (cursor.moveToNext()) {
                sources.add(new FileSource(
                        cursor.getLong(cursor.getColumnIndex(DbAdapterSources.KEY_ROWID)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterSources.KEY_FILEPATH)),
                        cursor.getInt(cursor.getColumnIndex(DbAdapterSources.KEY_FILESOURCE_TYPE)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterSources.KEY_USER)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterSources.KEY_PASSWORD)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterSources.KEY_DOMAIN)),
                        cursor.getString(cursor.getColumnIndex(DbAdapterSources.KEY_TYPE))
                ));
            }
        } finally {
            // Always release the cursor, even if a column lookup fails.
            cursor.close();
        }
        return sources;
    }

    public void cancel(View v) {
        finish();
    }

    public void ok(View v) {
        if (server.getText().toString().isEmpty()) {
            Toast.makeText(AddNMJFilesourceDialog.this, getString(R.string.enterNetworkAddress), Toast.LENGTH_LONG).show();
            return;
        }
        if (NMJLib.isWifiConnected(this)) {
            mDomain = display_name.getText().toString().trim();
            mServer = server.getText().toString().trim();
            attemptLogin();
        } else
            Toast.makeText(AddNMJFilesourceDialog.this, getString(R.string.noConnection), Toast.LENGTH_LONG).show();
    }

    // Hands the collected connection details to the file source browser and closes this dialog.
    private void attemptLogin() {
        Intent intent = new Intent();
        intent.setClass(getApplicationContext(), FileSourceBrowser.class);
        intent.putExtra(USER, mUser);
        intent.putExtra(PASSWORD, mPass);
        intent.putExtra(DOMAIN, mDomain);
        intent.putExtra(SERVER, mServer);
        intent.putExtra(TYPE, isMovie ? MOVIE : TV_SHOW);
        intent.putExtra(FILESOURCE, FileSource.SMB);
        startActivity(intent);
        finish();
    }
}
| |
/*
* Copyright 2014 BitPOS Pty Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.dogecoin.store;
import com.google.dogecoin.core.*;
import com.google.dogecoin.script.Script;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.sql.*;
import java.util.*;
/**
 * <p>A full pruned block store using the Postgres database engine. As an added bonus an address index is calculated,
 * so you can use {@link #calculateBalanceForAddress(com.google.dogecoin.core.Address)} to quickly look up
 * the quantity of coins controlled by that address.</p>
 */
public class PostgresFullPrunedBlockStore implements FullPrunedBlockStore {
private static final Logger log = LoggerFactory.getLogger(PostgresFullPrunedBlockStore.class);
private static final String POSTGRES_DUPLICATE_KEY_ERROR_CODE = "23505";
private Sha256Hash chainHeadHash;
private StoredBlock chainHeadBlock;
private Sha256Hash verifiedChainHeadHash;
private StoredBlock verifiedChainHeadBlock;
private NetworkParameters params;
private ThreadLocal<Connection> conn;
private List<Connection> allConnections;
private String connectionURL;
private int fullStoreDepth;
private String username;
private String password;
private String schemaName;
private static final String driver = "org.postgresql.Driver";
private static final String CREATE_SETTINGS_TABLE = "CREATE TABLE settings (\n" +
" name character varying(32) NOT NULL,\n" +
" value bytea\n" +
");";
private static final String CHAIN_HEAD_SETTING = "chainhead";
private static final String VERIFIED_CHAIN_HEAD_SETTING = "verifiedchainhead";
private static final String VERSION_SETTING = "version";
private static final String CREATE_HEADERS_TABLE = "CREATE TABLE headers (" +
" hash bytea NOT NULL," +
" chainwork bytea NOT NULL," +
" height integer NOT NULL," +
" header bytea NOT NULL," +
" wasundoable boolean NOT NULL" +
");";
private static final String CREATE_UNDOABLE_TABLE = "CREATE TABLE undoableblocks (" +
" hash bytea NOT NULL," +
" height integer NOT NULL," +
" txoutchanges bytea," +
" transactions bytea" +
");";
private static final String CREATE_OPEN_OUTPUT_TABLE = "CREATE TABLE openoutputs (" +
" hash bytea NOT NULL," +
" index integer NOT NULL," +
" height integer NOT NULL," +
" value bytea NOT NULL," +
" scriptbytes bytea NOT NULL," +
" toaddress character varying(35)," +
" addresstargetable integer" +
");";
private static final String CREATE_UNDOABLE_TABLE_INDEX = "CREATE INDEX heightIndex ON undoableBlocks (height)";
// Some indexes to speed up inserts
private static final String CREATE_HEADERS_HASH_INDEX = "CREATE INDEX headershashindex ON headers USING btree (hash);";
private static final String CREATE_OUTPUTS_ADDRESS_INDEX = "CREATE INDEX idx_address ON openoutputs USING btree (hash, index, height, toaddress);";
private static final String CREATE_OUTPUT_ADDRESS_TYPE_INDEX = "CREATE INDEX idx_addresstargetable ON openoutputs USING btree (addresstargetable);";
private static final String CREATE_OUTPUTS_HASH_INDEX = "CREATE INDEX openoutputshash ON openoutputs USING btree (hash);";
private static final String CREATE_OUTPUTS_HASH_INDEX_INDEX = "CREATE INDEX openoutputshashindex ON openoutputs USING btree (hash, index);";
private static final String CREATE_UNDOABLE_HASH_INDEX = "CREATE INDEX undoableblockshashindex ON undoableblocks USING btree (hash);";
/**
* Creates a new PostgresFullPrunedBlockStore.
*
* @param params A copy of the NetworkParameters used
* @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe)
* @param hostname The hostname of the database to connect to
* @param dbName The database to connect to
* @param username The database username
* @param password The password to the database
* @throws BlockStoreException if the database fails to open for any reason
*/
    public PostgresFullPrunedBlockStore(NetworkParameters params, int fullStoreDepth, String hostname, String dbName,
                                        String username, String password) throws BlockStoreException {
        // Convenience overload: builds the JDBC URL from host/db and uses the default schema (null).
        this(params, "jdbc:postgresql://" + hostname + "/" + dbName, fullStoreDepth, username, password, null);
    }
/**
* <p>Create a new PostgresFullPrunedBlockStore, storing the tables in the schema specified. You may want to
* specify a schema to avoid name collisions, or just to keep the database better organized. The schema is not
* required, and if one is not provided than the default schema for the username will be used. See
* <a href="http://www.postgres.org/docs/9.3/static/ddl-schemas.html">the postgres schema docs</a> for more on
* schemas.</p>
*
* @param params A copy of the NetworkParameters used.
* @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe).
* @param hostname The hostname of the database to connect to.
* @param dbName The database to connect to.
* @param username The database username.
* @param password The password to the database.
* @param schemaName The name of the schema to put the tables in. May be null if no schema is being used.
* @throws BlockStoreException If the database fails to open for any reason.
*/
    public PostgresFullPrunedBlockStore(NetworkParameters params, int fullStoreDepth, String hostname, String dbName,
                                        String username, String password, @Nullable String schemaName) throws BlockStoreException {
        // Convenience overload: builds the JDBC URL from host/db and forwards the optional schema.
        this(params, "jdbc:postgresql://" + hostname + "/" + dbName, fullStoreDepth, username, password, schemaName);
    }
/**
* <p>Create a new PostgresFullPrunedBlockStore, using the full connection URL instead of a hostname and password,
* and optionally allowing a schema to be specified.</p>
*
* <p>The connection URL will be passed to the database driver, and should look like
* "jdbc:postrgresql://host[:port]/databasename". You can use this to change the port, or specify additional
* parameters. See <a href="http://jdbc.postgresql.org/documentation/head/connect.html#connection-parameters">
* the PostgreSQL JDBC documentation</a> for more on the connection URL.</p>
*
* <p>This constructor also accepts a schema name to use, which can be used to avoid name collisions, or to keep the
* database organized. If no schema is provided the default schema for the username will be used. See
* <a href="http://www.postgres.org/docs/9.3/static/ddl-schemas.html">the postgres schema docs</a> for more on
* schemas.</p>
*
*
* @param params A copy of the NetworkParameters used.
* @param connectionURL The jdbc url to connect to the database.
* @param fullStoreDepth The number of blocks of history stored in full (something like 1000 is pretty safe).
* @param username The database username.
* @param password The password to the database.
* @param schemaName The name of the schema to put the tables in. May be null if no schema is being used.
* @throws BlockStoreException If the database fails to open for any reason.
*/
    public PostgresFullPrunedBlockStore(NetworkParameters params, String connectionURL, int fullStoreDepth,
                                        String username, String password, @Nullable String schemaName) throws BlockStoreException {
        // Field assignment must precede maybeConnect(), which reads connectionURL/username/
        // password/schemaName; maybeConnect() must precede the table check, which needs a
        // live connection.
        this.params = params;
        this.fullStoreDepth = fullStoreDepth;
        this.connectionURL = connectionURL;
        this.schemaName = schemaName;
        this.username = username;
        this.password = password;
        conn = new ThreadLocal<Connection>();
        allConnections = new LinkedList<Connection>();
        try {
            Class.forName(driver);
            log.info(driver + " loaded. ");
        } catch (java.lang.ClassNotFoundException e) {
            // Best-effort: the failure is only logged here; a truly missing driver will
            // surface later as a connection failure in maybeConnect().
            log.error("check CLASSPATH for Postgres jar ", e);
        }
        maybeConnect();
        try {
            // Create tables if needed
            if (!tableExists("settings"))
                createTables();
            initFromDatabase();
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    }
private synchronized void maybeConnect() throws BlockStoreException {
try {
if (conn.get() != null)
return;
Properties props = new Properties();
props.setProperty("user", this.username);
props.setProperty("password", this.password);
conn.set(DriverManager.getConnection(connectionURL, props));
Connection connection = conn.get();
// set the schema if one is needed
if(schemaName != null) {
Statement s = connection.createStatement();
s.execute("CREATE SCHEMA IF NOT EXISTS " + schemaName + ";");
s.execute("set search_path to '" + schemaName +"';");
}
allConnections.add(conn.get());
log.info("Made a new connection to database " + connectionURL);
} catch (SQLException ex) {
throw new BlockStoreException(ex);
}
}
public synchronized void close() {
for (Connection conn : allConnections) {
try {
if(!conn.getAutoCommit()) {
conn.rollback();
}
} catch (SQLException ex) {
throw new RuntimeException(ex);
}
}
allConnections.clear();
}
public void resetStore() throws BlockStoreException {
maybeConnect();
try {
Statement s = conn.get().createStatement();
s.execute("DROP TABLE settings");
s.execute("DROP TABLE headers");
s.execute("DROP TABLE undoableBlocks");
s.execute("DROP TABLE openOutputs");
s.close();
createTables();
initFromDatabase();
} catch (SQLException ex) {
throw new RuntimeException(ex);
}
}
    private void createTables() throws SQLException, BlockStoreException {
        // Creates the full schema: tables first, then indexes, then the three settings
        // rows, and finally seeds the genesis block via createNewStore(). Must only be
        // called when the tables do not already exist.
        Statement s = conn.get().createStatement();
        if (log.isDebugEnabled())
            log.debug("PostgresFullPrunedBlockStore : CREATE headers table");
        s.executeUpdate(CREATE_HEADERS_TABLE);
        if (log.isDebugEnabled())
            log.debug("PostgresFullPrunedBlockStore : CREATE settings table");
        s.executeUpdate(CREATE_SETTINGS_TABLE);
        if (log.isDebugEnabled())
            log.debug("PostgresFullPrunedBlockStore : CREATE undoable block table");
        s.executeUpdate(CREATE_UNDOABLE_TABLE);
        if (log.isDebugEnabled())
            log.debug("PostgresFullPrunedBlockStore : CREATE undoable block index");
        s.executeUpdate(CREATE_UNDOABLE_TABLE_INDEX);
        if (log.isDebugEnabled())
            log.debug("PostgresFullPrunedBlockStore : CREATE open output table");
        s.executeUpdate(CREATE_OPEN_OUTPUT_TABLE);
        // Create indexes..
        s.executeUpdate(CREATE_HEADERS_HASH_INDEX);
        s.executeUpdate(CREATE_OUTPUT_ADDRESS_TYPE_INDEX);
        s.executeUpdate(CREATE_OUTPUTS_ADDRESS_INDEX);
        s.executeUpdate(CREATE_OUTPUTS_HASH_INDEX);
        s.executeUpdate(CREATE_OUTPUTS_HASH_INDEX_INDEX);
        s.executeUpdate(CREATE_UNDOABLE_HASH_INDEX);
        // The chain-head pointers start out NULL; createNewStore() fills them in below.
        s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + CHAIN_HEAD_SETTING + "', NULL)");
        s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + VERIFIED_CHAIN_HEAD_SETTING + "', NULL)");
        s.executeUpdate("INSERT INTO settings(name, value) VALUES('" + VERSION_SETTING + "', '03')");
        s.close();
        createNewStore(params);
    }
private void initFromDatabase() throws SQLException, BlockStoreException {
Statement s = conn.get().createStatement();
ResultSet rs;
rs = s.executeQuery("SELECT value FROM settings WHERE name = '" + CHAIN_HEAD_SETTING + "'");
if (!rs.next()) {
throw new BlockStoreException("corrupt Postgres block store - no chain head pointer");
}
Sha256Hash hash = new Sha256Hash(rs.getBytes(1));
rs.close();
this.chainHeadBlock = get(hash);
this.chainHeadHash = hash;
if (this.chainHeadBlock == null) {
throw new BlockStoreException("corrupt Postgres block store - head block not found");
}
rs = s.executeQuery("SELECT value FROM settings WHERE name = '" + VERIFIED_CHAIN_HEAD_SETTING + "'");
if (!rs.next()) {
throw new BlockStoreException("corrupt Postgres block store - no verified chain head pointer");
}
hash = new Sha256Hash(rs.getBytes(1));
rs.close();
s.close();
this.verifiedChainHeadBlock = get(hash);
this.verifiedChainHeadHash = hash;
if (this.verifiedChainHeadBlock == null) {
throw new BlockStoreException("corrupt Postgres block store - verified head block not found");
}
}
private void createNewStore(NetworkParameters params) throws BlockStoreException {
try {
// Set up the genesis block. When we start out fresh, it is by
// definition the top of the chain.
StoredBlock storedGenesisHeader = new StoredBlock(params.getGenesisBlock().cloneAsHeader(), params.getGenesisBlock().getWork(), 0);
// The coinbase in the genesis block is not spendable. This is because of how the reference client inits
// its database - the genesis transaction isn't actually in the db so its spent flags can never be updated.
List<Transaction> genesisTransactions = Lists.newLinkedList();
StoredUndoableBlock storedGenesis = new StoredUndoableBlock(params.getGenesisBlock().getHash(), genesisTransactions);
put(storedGenesisHeader, storedGenesis);
setChainHead(storedGenesisHeader);
setVerifiedChainHead(storedGenesisHeader);
} catch (VerificationException e) {
throw new RuntimeException(e); // Cannot happen.
}
}
private boolean tableExists(String table) throws SQLException {
Statement s = conn.get().createStatement();
try {
ResultSet results = s.executeQuery("SELECT * FROM " + table + " WHERE 1 = 2");
results.close();
return true;
} catch (SQLException ex) {
return false;
} finally {
s.close();
}
}
/**
* Dumps information about the size of actual data in the database to standard output
* The only truly useless data counted is printed in the form "N in id indexes"
* This does not take database indexes into account
*/
public void dumpSizes() throws SQLException, BlockStoreException {
maybeConnect();
Statement s = conn.get().createStatement();
long size = 0;
long totalSize = 0;
int count = 0;
ResultSet rs = s.executeQuery("SELECT name, value FROM settings");
while (rs.next()) {
size += rs.getString(1).length();
size += rs.getBytes(2).length;
count++;
}
rs.close();
System.out.printf("Settings size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
totalSize += size; size = 0; count = 0;
rs = s.executeQuery("SELECT chainWork, header FROM headers");
while (rs.next()) {
size += 28; // hash
size += rs.getBytes(1).length;
size += 4; // height
size += rs.getBytes(2).length;
count++;
}
rs.close();
System.out.printf("Headers size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
totalSize += size; size = 0; count = 0;
rs = s.executeQuery("SELECT txOutChanges, transactions FROM undoableBlocks");
while (rs.next()) {
size += 28; // hash
size += 4; // height
byte[] txOutChanges = rs.getBytes(1);
byte[] transactions = rs.getBytes(2);
if (txOutChanges == null)
size += transactions.length;
else
size += txOutChanges.length;
// size += the space to represent NULL
count++;
}
rs.close();
System.out.printf("Undoable Blocks size: %d, count: %d, average size: %f%n", size, count, (double)size/count);
totalSize += size; size = 0; count = 0;
long scriptSize = 0;
rs = s.executeQuery("SELECT value, scriptBytes FROM openOutputs");
while (rs.next()) {
size += 32; // hash
size += 4; // index
size += 4; // height
size += rs.getBytes(1).length;
size += rs.getBytes(2).length;
scriptSize += rs.getBytes(2).length;
count++;
}
rs.close();
System.out.printf("Open Outputs size: %d, count: %d, average size: %f, average script size: %f (%d in id indexes)%n",
size, count, (double)size/count, (double)scriptSize/count, count * 8);
totalSize += size;
System.out.println("Total Size: " + totalSize);
s.close();
}
    /**
     * Inserts a header row for the given block, recording whether an undoable
     * entry exists for it. If the row already exists (duplicate key) and
     * wasUndoable is true, the existing row is updated to set the flag
     * instead; any other SQLException propagates to the caller.
     *
     * @param storedBlock the header (plus chain work/height) to persist
     * @param wasUndoable whether an undoableBlocks entry exists for this block
     * @throws SQLException on any database error other than the handled
     *         duplicate-key-with-wasUndoable case
     */
    private void putUpdateStoredBlock(StoredBlock storedBlock, boolean wasUndoable) throws SQLException {
        try {
            PreparedStatement s =
                    conn.get().prepareStatement("INSERT INTO headers(hash, chainWork, height, header, wasUndoable)"
                            + " VALUES(?, ?, ?, ?, ?)");
            // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
            // NOTE(review): the copy actually starts at offset 3 and takes 28 of the
            // 32 hash bytes (dropping bytes 0-2 and 31), so "first 4 bytes" is not
            // literally what happens - confirm against the headers table schema
            // before changing either side.
            byte[] hashBytes = new byte[28];
            System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
            s.setBytes(1, hashBytes);
            s.setBytes(2, storedBlock.getChainWork().toByteArray());
            s.setInt(3, storedBlock.getHeight());
            s.setBytes(4, storedBlock.getHeader().unsafeBitcoinSerialize());
            s.setBoolean(5, wasUndoable);
            s.executeUpdate();
            s.close();
        } catch (SQLException e) {
            // It is possible we try to add a duplicate StoredBlock if we upgraded
            // In that case, we just update the entry to mark it wasUndoable
            if (!(e.getSQLState().equals(POSTGRES_DUPLICATE_KEY_ERROR_CODE)) || !wasUndoable)
                throw e;
            PreparedStatement s = conn.get().prepareStatement("UPDATE headers SET wasUndoable=? WHERE hash=?");
            s.setBoolean(1, true);
            // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
            byte[] hashBytes = new byte[28];
            System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
            s.setBytes(2, hashBytes);
            s.executeUpdate();
            s.close();
        }
    }
    /**
     * Stores a block header that has no undoable data, wrapping any
     * SQLException in a BlockStoreException.
     *
     * @param storedBlock the header (plus chain work/height) to persist
     * @throws BlockStoreException wrapping any underlying SQLException
     */
    public void put(StoredBlock storedBlock) throws BlockStoreException {
        maybeConnect();
        try {
            putUpdateStoredBlock(storedBlock, false);
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    }
public void put(StoredBlock storedBlock, StoredUndoableBlock undoableBlock) throws BlockStoreException {
maybeConnect();
// We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
byte[] hashBytes = new byte[28];
System.arraycopy(storedBlock.getHeader().getHash().getBytes(), 3, hashBytes, 0, 28);
int height = storedBlock.getHeight();
byte[] transactions = null;
byte[] txOutChanges = null;
try {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
if (undoableBlock.getTxOutChanges() != null) {
undoableBlock.getTxOutChanges().serializeToStream(bos);
txOutChanges = bos.toByteArray();
} else {
int numTxn = undoableBlock.getTransactions().size();
bos.write((int) (0xFF & (numTxn >> 0)));
bos.write((int) (0xFF & (numTxn >> 8)));
bos.write((int) (0xFF & (numTxn >> 16)));
bos.write((int) (0xFF & (numTxn >> 24)));
for (Transaction tx : undoableBlock.getTransactions())
tx.bitcoinSerialize(bos);
transactions = bos.toByteArray();
}
bos.close();
} catch (IOException e) {
throw new BlockStoreException(e);
}
try {
if (log.isDebugEnabled())
log.debug("Looking for undoable block with hash: " + Utils.bytesToHexString(hashBytes));
PreparedStatement findS = conn.get().prepareStatement("select 1 from undoableBlocks where hash = ?");
findS.setBytes(1, hashBytes);
ResultSet rs = findS.executeQuery();
if (rs.next())
{
// We already have this output, update it.
findS.close();
// Postgres insert-or-updates are very complex (and finnicky). This level of transaction isolation
// seems to work for bitcoinj
PreparedStatement s =
conn.get().prepareStatement("UPDATE undoableBlocks SET txOutChanges=?, transactions=?"
+ " WHERE hash = ?");
s.setBytes(3, hashBytes);
if (log.isDebugEnabled())
log.debug("Updating undoable block with hash: " + Utils.bytesToHexString(hashBytes));
if (transactions == null) {
s.setBytes(1, txOutChanges);
s.setNull(2, Types.BINARY);
} else {
s.setNull(1, Types.BINARY);
s.setBytes(2, transactions);
}
s.executeUpdate();
s.close();
return;
}
PreparedStatement s =
conn.get().prepareStatement("INSERT INTO undoableBlocks(hash, height, txOutChanges, transactions)"
+ " VALUES(?, ?, ?, ?)");
s.setBytes(1, hashBytes);
s.setInt(2, height);
if (log.isDebugEnabled())
log.debug("Inserting undoable block with hash: " + Utils.bytesToHexString(hashBytes) + " at height " + height);
if (transactions == null) {
s.setBytes(3, txOutChanges);
s.setNull(4, Types.BINARY);
} else {
s.setNull(3, Types.BINARY);
s.setBytes(4, transactions);
}
s.executeUpdate();
s.close();
try {
putUpdateStoredBlock(storedBlock, true);
} catch (SQLException e) {
throw new BlockStoreException(e);
}
} catch (SQLException e) {
if (!e.getSQLState().equals(POSTGRES_DUPLICATE_KEY_ERROR_CODE))
throw new BlockStoreException(e);
}
}
    /**
     * Looks up a block header by hash, serving the two cached chain heads
     * without touching the database.
     *
     * @param hash block hash to look up
     * @param wasUndoableOnly if true, only return the block if its header row
     *        is flagged wasUndoable
     * @return the stored block, or null if absent (or not flagged undoable
     *         when wasUndoableOnly is set)
     * @throws BlockStoreException on database, protocol or verification errors
     */
    public StoredBlock get(Sha256Hash hash, boolean wasUndoableOnly) throws BlockStoreException {
        // Optimize for chain head
        if (chainHeadHash != null && chainHeadHash.equals(hash))
            return chainHeadBlock;
        if (verifiedChainHeadHash != null && verifiedChainHeadHash.equals(hash))
            return verifiedChainHeadBlock;
        maybeConnect();
        PreparedStatement s = null;
        try {
            s = conn.get()
                    .prepareStatement("SELECT chainWork, height, header, wasUndoable FROM headers WHERE hash = ?");
            // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
            // NOTE(review): the copy starts at offset 3 for 28 bytes - this must
            // match the truncation used when rows were written.
            byte[] hashBytes = new byte[28];
            System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28);
            s.setBytes(1, hashBytes);
            ResultSet results = s.executeQuery();
            if (!results.next()) {
                return null;
            }
            // Parse it.
            if (wasUndoableOnly && !results.getBoolean(4))
                return null;
            BigInteger chainWork = new BigInteger(results.getBytes(1));
            int height = results.getInt(2);
            Block b = new Block(params, results.getBytes(3));
            b.verifyHeader();
            StoredBlock stored = new StoredBlock(b, chainWork, height);
            return stored;
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        } catch (ProtocolException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (VerificationException e) {
            // Should not be able to happen unless the database contains bad
            // blocks.
            throw new BlockStoreException(e);
        } finally {
            // Closing the statement also releases its open ResultSet.
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
        }
    }
    /** Looks up a block header by hash; the wasUndoable flag is not required. */
    public StoredBlock get(Sha256Hash hash) throws BlockStoreException {
        return get(hash, false);
    }
    /** Looks up a block header by hash, but only if it was stored with undo data. */
    public StoredBlock getOnceUndoableStoredBlock(Sha256Hash hash) throws BlockStoreException {
        return get(hash, true);
    }
    /**
     * Loads the undo data for a block: either a deserialized
     * TransactionOutputChanges object or a list of transactions, depending on
     * which of the two columns is populated (mirrors the encoding written by
     * put(StoredBlock, StoredUndoableBlock)).
     *
     * @param hash block hash to look up
     * @return the undoable block, or null if no row exists for this hash
     * @throws BlockStoreException on database error or corrupted stored data
     */
    public StoredUndoableBlock getUndoBlock(Sha256Hash hash) throws BlockStoreException {
        maybeConnect();
        PreparedStatement s = null;
        try {
            s = conn.get()
                    .prepareStatement("SELECT txOutChanges, transactions FROM undoableBlocks WHERE hash = ?");
            // We skip the first 4 bytes because (on prodnet) the minimum target has 4 0-bytes
            byte[] hashBytes = new byte[28];
            System.arraycopy(hash.getBytes(), 3, hashBytes, 0, 28);
            s.setBytes(1, hashBytes);
            ResultSet results = s.executeQuery();
            if (!results.next()) {
                return null;
            }
            // Parse it.
            byte[] txOutChanges = results.getBytes(1);
            byte[] transactions = results.getBytes(2);
            StoredUndoableBlock block;
            if (txOutChanges == null) {
                // Transaction-list encoding: a 4-byte little-endian count
                // followed by the serialized transactions back-to-back.
                int offset = 0;
                int numTxn = ((transactions[offset++] & 0xFF) << 0) |
                        ((transactions[offset++] & 0xFF) << 8) |
                        ((transactions[offset++] & 0xFF) << 16) |
                        ((transactions[offset++] & 0xFF) << 24);
                List<Transaction> transactionList = new LinkedList<Transaction>();
                for (int i = 0; i < numTxn; i++) {
                    Transaction tx = new Transaction(params, transactions, offset);
                    transactionList.add(tx);
                    offset += tx.getMessageSize();
                }
                block = new StoredUndoableBlock(hash, transactionList);
            } else {
                TransactionOutputChanges outChangesObject =
                        new TransactionOutputChanges(new ByteArrayInputStream(txOutChanges));
                block = new StoredUndoableBlock(hash, outChangesObject);
            }
            return block;
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        } catch (NullPointerException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (ClassCastException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (ProtocolException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } catch (IOException e) {
            // Corrupted database.
            throw new BlockStoreException(e);
        } finally {
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
        }
    }
    /** Returns the cached best-chain head (may be ahead of the verified head). */
    public StoredBlock getChainHead() throws BlockStoreException {
        return chainHeadBlock;
    }
    /**
     * Records the given block as the chain head, both in the in-memory cache
     * and in the settings table.
     *
     * @throws BlockStoreException wrapping any underlying SQLException
     */
    public void setChainHead(StoredBlock chainHead) throws BlockStoreException {
        Sha256Hash hash = chainHead.getHeader().getHash();
        this.chainHeadHash = hash;
        this.chainHeadBlock = chainHead;
        maybeConnect();
        try {
            PreparedStatement s = conn.get()
                    .prepareStatement("UPDATE settings SET value = ? WHERE name = ?");
            s.setString(2, CHAIN_HEAD_SETTING);
            s.setBytes(1, hash.getBytes());
            s.executeUpdate();
            s.close();
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        }
    }
    /** Returns the cached fully-verified chain head. */
    public StoredBlock getVerifiedChainHead() throws BlockStoreException {
        return verifiedChainHeadBlock;
    }
    /**
     * Records the verified chain head (cache + settings table), advances the
     * plain chain head if it is behind, and prunes undoable blocks that fall
     * outside the fullStoreDepth window.
     *
     * @throws BlockStoreException wrapping any underlying SQLException
     */
    public void setVerifiedChainHead(StoredBlock chainHead) throws BlockStoreException {
        Sha256Hash hash = chainHead.getHeader().getHash();
        this.verifiedChainHeadHash = hash;
        this.verifiedChainHeadBlock = chainHead;
        maybeConnect();
        try {
            PreparedStatement s = conn.get()
                    .prepareStatement("UPDATE settings SET value = ? WHERE name = ?");
            s.setString(2, VERIFIED_CHAIN_HEAD_SETTING);
            s.setBytes(1, hash.getBytes());
            s.executeUpdate();
            s.close();
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        }
        // Keep the plain chain head at least as far along as the verified one.
        if (this.chainHeadBlock.getHeight() < chainHead.getHeight())
            setChainHead(chainHead);
        removeUndoableBlocksWhereHeightIsLessThan(chainHead.getHeight() - fullStoreDepth);
    }
private void removeUndoableBlocksWhereHeightIsLessThan(int height) throws BlockStoreException {
try {
PreparedStatement s = conn.get()
.prepareStatement("DELETE FROM undoableBlocks WHERE height <= ?");
s.setInt(1, height);
if (log.isDebugEnabled())
log.debug("Deleting undoable undoable block with height <= " + height);
s.executeUpdate();
s.close();
} catch (SQLException ex) {
throw new BlockStoreException(ex);
}
}
    /**
     * Looks up an unspent transaction output by transaction hash and output
     * index.
     *
     * @param hash transaction hash
     * @param index output index (stored as a signed int; actually unsigned)
     * @return the stored output, or null if no matching row exists
     * @throws BlockStoreException wrapping any underlying SQLException
     */
    public StoredTransactionOutput getTransactionOutput(Sha256Hash hash, long index) throws BlockStoreException {
        maybeConnect();
        PreparedStatement s = null;
        try {
            s = conn.get()
                    .prepareStatement("SELECT height, value, scriptBytes FROM openOutputs " +
                            "WHERE hash = ? AND index = ?");
            s.setBytes(1, hash.getBytes());
            // index is actually an unsigned int
            s.setInt(2, (int)index);
            ResultSet results = s.executeQuery();
            if (!results.next()) {
                return null;
            }
            // Parse it.
            int height = results.getInt(1);
            BigInteger value = new BigInteger(results.getBytes(2));
            // Tell the StoredTransactionOutput that we are a coinbase, as that is encoded in height
            StoredTransactionOutput txout = new StoredTransactionOutput(hash, index, value, height, true, results.getBytes(3));
            return txout;
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        } finally {
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
        }
    }
    /**
     * Inserts an unspent output row, additionally deriving a target address
     * and an address-type code from the output script when possible:
     * 1 = pay-to-address, 2 = raw public key, 3 = pay-to-script-hash;
     * 0 with an empty address when the script is unparseable or of another
     * form. Duplicate-key errors are silently ignored.
     *
     * @param out the unspent output to store
     * @throws BlockStoreException on any database error other than duplicate key
     */
    public void addUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException {
        maybeConnect();
        PreparedStatement s = null;
        // Calculate the toAddress (if any)
        String dbAddress = "";
        int type = 0;
        Script outputScript = null;
        try
        {
            outputScript = new Script(out.getScriptBytes());
        }
        catch (ScriptException e)
        {
            // Unparseable, but this isn't an error - it's an output not containing an address
            log.info("Could not parse script for output: " + out.getHash().toString());
        }
        if (outputScript != null && (outputScript.isSentToAddress()
                || outputScript.isSentToRawPubKey()
                || outputScript.isPayToScriptHash()))
        {
            if (outputScript.isSentToAddress())
            {
                Address targetAddr = new Address(params, outputScript.getPubKeyHash());
                dbAddress = targetAddr.toString();
                type = 1;
            }
            else if (outputScript.isSentToRawPubKey())
            {
                /*
                 * Note we use the deprecated getFromAddress here. Coinbase outputs seem to have the target address
                 * in the pubkey of the script - perhaps we can rename this function?
                 */
                dbAddress = outputScript.getFromAddress(params).toString();
                type = 2;
            } else if (outputScript.isPayToScriptHash())
            {
                dbAddress = Address.fromP2SHHash(params, outputScript.getPubKeyHash()).toString();
                type = 3;
            }
        }
        try {
            s = conn.get().prepareStatement("INSERT INTO openOutputs (hash, index, height, value, scriptBytes, toAddress, addressTargetable) " +
                    "VALUES (?, ?, ?, ?, ?, ?, ?)");
            s.setBytes(1, out.getHash().getBytes());
            // index is actually an unsigned int
            s.setInt(2, (int)out.getIndex());
            s.setInt(3, out.getHeight());
            s.setBytes(4, out.getValue().toByteArray());
            s.setBytes(5, out.getScriptBytes());
            s.setString(6, dbAddress);
            s.setInt(7, type);
            s.executeUpdate();
            s.close();
        } catch (SQLException e) {
            // A duplicate key is benign: the output is already recorded.
            if (!(e.getSQLState().equals(POSTGRES_DUPLICATE_KEY_ERROR_CODE)))
                throw new BlockStoreException(e);
        } finally {
            // On the success path this is a second close(), which JDBC
            // defines as a no-op.
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) { throw new BlockStoreException(e); }
        }
    }
    /**
     * Deletes an unspent-output row, first verifying (with a separate query)
     * that the row exists.
     *
     * @param out the output to remove
     * @throws BlockStoreException if the output is absent or on any database error
     */
    public void removeUnspentTransactionOutput(StoredTransactionOutput out) throws BlockStoreException {
        maybeConnect();
        // TODO: This should only need one query (maybe a stored procedure)
        // NOTE(review): this check-then-delete is not atomic; a concurrent
        // deleter could win the race between the two statements.
        if (getTransactionOutput(out.getHash(), out.getIndex()) == null)
            throw new BlockStoreException("Tried to remove a StoredTransactionOutput from PostgresFullPrunedBlockStore that it didn't have!");
        try {
            PreparedStatement s = conn.get()
                    .prepareStatement("DELETE FROM openOutputs WHERE hash = ? AND index = ?");
            s.setBytes(1, out.getHash().getBytes());
            // index is actually an unsigned int
            s.setInt(2, (int)out.getIndex());
            s.executeUpdate();
            s.close();
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    }
    /**
     * Starts a batch write by disabling auto-commit on this thread's
     * connection; subsequent writes accumulate until commit or abort.
     *
     * @throws BlockStoreException wrapping any underlying SQLException
     */
    public void beginDatabaseBatchWrite() throws BlockStoreException {
        maybeConnect();
        if (log.isDebugEnabled())
            log.debug("Starting database batch write with connection: " + conn.get().toString());
        try {
            conn.get().setAutoCommit(false);
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    }
    /**
     * Commits the current batch write and restores auto-commit mode.
     *
     * @throws BlockStoreException wrapping any underlying SQLException
     */
    public void commitDatabaseBatchWrite() throws BlockStoreException {
        maybeConnect();
        if (log.isDebugEnabled())
            log.debug("Committing database batch write with connection: " + conn.get().toString());
        try {
            conn.get().commit();
            conn.get().setAutoCommit(true);
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    }
    /**
     * Rolls back the current batch write and restores auto-commit mode.
     * A no-op (with a warning) when no batch write is in progress.
     *
     * @throws BlockStoreException wrapping any underlying SQLException
     */
    public void abortDatabaseBatchWrite() throws BlockStoreException {
        maybeConnect();
        if (log.isDebugEnabled())
            log.debug("Rollback database batch write with connection: " + conn.get().toString());
        try {
            if (!conn.get().getAutoCommit()) {
                conn.get().rollback();
                conn.get().setAutoCommit(true);
            } else {
                log.warn("Warning: Rollback attempt without transaction");
            }
        } catch (SQLException e) {
            throw new BlockStoreException(e);
        }
    }
public boolean hasUnspentOutputs(Sha256Hash hash, int numOutputs) throws BlockStoreException {
maybeConnect();
PreparedStatement s = null;
try {
s = conn.get()
.prepareStatement("SELECT COUNT(*) FROM openOutputs WHERE hash = ?");
s.setBytes(1, hash.getBytes());
ResultSet results = s.executeQuery();
if (!results.next()) {
throw new BlockStoreException("Got no results from a COUNT(*) query");
}
int count = results.getInt(1);
return count != 0;
} catch (SQLException ex) {
throw new BlockStoreException(ex);
} finally {
if (s != null)
try {
s.close();
} catch (SQLException e) { throw new BlockStoreException("Failed to close PreparedStatement"); }
}
}
    /**
     * Calculate the balance for a coinbase, to-address, or p2sh address.
     * @param address The address to calculate the balance of
     * @return The balance of the address supplied. If the address has not been seen, or there are no outputs open for this
     * address, the return value is 0
     * @throws BlockStoreException
     */
    public BigInteger calculateBalanceForAddress(Address address) throws BlockStoreException {
        maybeConnect();
        PreparedStatement s = null;
        try {
            // Postgres-specific: the value column's textual form ("\x...") is
            // stripped of its prefix, left-padded to 16 hex digits, and cast
            // via bit(64) to a bigint so SUM can run server-side.
            // NOTE(review): lpad truncates to 16 hex chars - assumes stored
            // values fit in 8 bytes; confirm against the schema.
            s = conn.get().prepareStatement("select sum(('x'||lpad(substr(value::text, 3, 50),16,'0'))::bit(64)::bigint) "
                    + "from openoutputs where toaddress = ?");
            s.setString(1, address.toString());
            ResultSet rs = s.executeQuery();
            if (rs.next()) {
                // SUM over zero rows yields SQL NULL, which getLong maps to 0.
                return BigInteger.valueOf(rs.getLong(1));
            } else {
                throw new BlockStoreException("Failed to execute balance lookup");
            }
        } catch (SQLException ex) {
            throw new BlockStoreException(ex);
        } finally {
            if (s != null)
                try {
                    s.close();
                } catch (SQLException e) {
                    throw new BlockStoreException("Could not close statement");
                }
        }
    }
}
| |
package org.robolectric.shadows;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import org.robolectric.Robolectric;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.annotation.RealObject;
import org.robolectric.bytecode.RobolectricInternals;
import org.robolectric.util.Join;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import static android.content.Intent.*;
import static org.robolectric.internal.ReflectionHelpers.ClassParameter;
import static org.robolectric.Robolectric.shadowOf;
@SuppressWarnings({"UnusedDeclaration"})
@Implements(Intent.class)
public class ShadowIntent {
    // The real Intent instance this shadow is bound to by Robolectric.
    @RealObject private Intent realIntent;
    // Backing store for every putExtra/get*Extra call.
    private final Bundle extras = new Bundle();
    private String action;
    private ComponentName componentName;
    private String type;
    private Uri data;
    private int flags;
    // The target class, when the intent was built with an explicit Class.
    private Class<?> intentClass;
    private String packageName;
    private final Set<String> categories = new HashSet<String>();
    /**
     * Shadow for Intent(String, Uri, Context, Class): records the component,
     * data URI and target class, then invokes the real constructor.
     */
    public void __constructor__(String action, Uri uri, Context packageContext, Class cls) {
        componentName = new ComponentName(packageContext, cls);
        data = uri;
        intentClass = cls;
        RobolectricInternals.invokeConstructor(Intent.class, realIntent, new ClassParameter(String.class, action),
                new ClassParameter(Uri.class, uri), new ClassParameter(Context.class, packageContext), new ClassParameter(Class.class, cls));
    }
    /** Shadow for Intent(Context, Class): records component and target class. */
    public void __constructor__(Context packageContext, Class cls) {
        componentName = new ComponentName(packageContext, cls);
        intentClass = cls;
        RobolectricInternals.invokeConstructor(Intent.class, realIntent, new ClassParameter(Context.class, packageContext),
                new ClassParameter(Class.class, cls));
    }
    /** Shadow for Intent(String, Uri): records action and data URI. */
    public void __constructor__(String action, Uri uri) {
        this.action = action;
        data = uri;
        RobolectricInternals.invokeConstructor(Intent.class, realIntent, new ClassParameter(String.class, action),
                new ClassParameter(Uri.class, uri));
    }
    /** Shadow for Intent(String): delegates to the (String, Uri) shadow with a null URI. */
    public void __constructor__(String action) {
        __constructor__(action, null);
        RobolectricInternals.invokeConstructor(Intent.class, realIntent, new ClassParameter(String.class, action));
    }
    /**
     * Shadow for Intent(Parcel): reads fields in the exact order written by
     * writeToParcel below (action, data, type, flags, package, component,
     * category count + categories, extras bundle).
     */
    public void __constructor__(Parcel in) {
        __constructor__(in.readString());
        data = Uri.CREATOR.createFromParcel(in);
        type = in.readString();
        flags = in.readInt();
        packageName = in.readString();
        componentName = ComponentName.readFromParcel(in);
        int N = in.readInt();
        if (N > 0) {
            int i;
            for (i=0; i<N; i++) {
                categories.add(in.readString());
            }
        }
        extras.putAll(in.readBundle());
    }
    /**
     * Serializes the shadow's state to a Parcel; the field order must stay in
     * sync with the Parcel constructor above.
     */
    @Implementation
    public void writeToParcel(Parcel out, int flags) {
        out.writeString(action);
        Uri.writeToParcel(out, data);
        out.writeString(type);
        out.writeInt(this.flags);
        out.writeString(packageName);
        ComponentName.writeToParcel(componentName, out);
        // categories is a final field initialised at declaration, so this
        // null check can never be false in practice.
        if (categories != null) {
            final int N = categories.size();
            out.writeInt(N);
            for (String s : categories) {
                out.writeString(s);
            }
        } else {
            out.writeInt(0);
        }
        out.writeBundle(extras);
    }
    /**
     * Shadow copy-constructor: duplicates all shadowed state (extras, action,
     * component, type, data, flags, class, package, categories) from the
     * other intent's shadow before invoking the real constructor.
     */
    public void __constructor__(Intent intent) {
        ShadowIntent other = shadowOf(intent);
        extras.putAll(other.extras);
        action = other.action;
        componentName = other.componentName;
        type = other.type;
        data = other.data;
        flags = other.flags;
        intentClass = other.intentClass;
        packageName = other.packageName;
        categories.addAll(other.categories);
        RobolectricInternals.invokeConstructor(Intent.class, realIntent, new ClassParameter(Intent.class, intent));
    }
@Implementation
public static Intent createChooser(Intent target, CharSequence title) {
Intent intent = new Intent(Intent.ACTION_CHOOSER);
intent.putExtra(Intent.EXTRA_INTENT, target);
if (title != null) {
intent.putExtra(Intent.EXTRA_TITLE, title);
}
return intent;
}
    // --- Accessors mirroring Intent's action/type/data/flags/extras state. ---
    @Implementation
    public Intent setAction(String action) {
        this.action = action;
        return realIntent;
    }
    @Implementation
    public String getAction() {
        return action;
    }
    // Matches Android's contract: setting the type clears the data URI.
    @Implementation
    public Intent setType(String type) {
        this.type = type;
        this.data = null;
        return realIntent;
    }
    // Sets both type and data without clearing either.
    @Implementation
    public Intent setDataAndType(Uri data, String type) {
        this.data = data;
        this.type = type;
        return realIntent;
    }
    @Implementation
    public String getType() {
        return type;
    }
    @Implementation
    public Intent addCategory(String category) {
        categories.add(category);
        return realIntent;
    }
    @Implementation
    public void removeCategory(String category) {
        categories.remove(category);
    }
    @Implementation
    public boolean hasCategory(String category) {
        return categories.contains(category);
    }
    // Returns the live (mutable) category set, not a copy.
    @Implementation
    public Set<String> getCategories() {
        return categories;
    }
    @Implementation
    public Intent setPackage(String packageName) {
        this.packageName = packageName;
        return realIntent;
    }
    @Implementation
    public String getPackage() {
        return packageName;
    }
    @Implementation
    public Uri getData() {
        return data;
    }
    @Implementation
    public String getScheme() {
        return data != null ? data.getScheme() : null;
    }
    @Implementation
    public String getDataString() {
        if (data != null) {
            return data.toString();
        }
        return null;
    }
    @Implementation
    public Intent setClass(Context packageContext, Class<?> cls) {
        componentName = new ComponentName(packageContext, cls);
        this.intentClass = cls;
        return realIntent;
    }
    // Also tries to resolve the class object; silently keeps a null
    // intentClass if the class is not on the test classpath.
    @Implementation
    public Intent setClassName(String packageName, String className) {
        componentName = new ComponentName(packageName, className);
        try {
            this.intentClass = Class.forName(className);
        } catch (ClassNotFoundException e) {
            // ignore
        }
        return realIntent;
    }
    @Implementation
    public Intent setClassName(Context packageContext, String className) {
        componentName = new ComponentName(packageContext.getPackageName(), className);
        return realIntent;
    }
    // Matches Android's contract: setting the data URI clears the type.
    @Implementation
    public Intent setData(Uri data) {
        this.data = data;
        this.type = null;
        return realIntent;
    }
    @Implementation
    public int getFlags() {
        return flags;
    }
    @Implementation
    public Intent setFlags(int flags) {
        this.flags = flags;
        return realIntent;
    }
    // OR-combines with existing flags rather than replacing them.
    @Implementation
    public Intent addFlags(int flags) {
        this.flags |= flags;
        return realIntent;
    }
    @Implementation
    public Intent putExtras(Bundle src) {
        extras.putAll(src);
        return realIntent;
    }
    @Implementation
    public Intent putExtras(Intent src) {
        ShadowIntent srcShadowIntent = shadowOf(src);
        extras.putAll(srcShadowIntent.extras);
        return realIntent;
    }
    // Matches Android's contract: null when empty, otherwise a defensive copy.
    @Implementation
    public Bundle getExtras() {
        return extras.isEmpty() ? null : new Bundle(extras);
    }
    // --- putExtra overloads: each delegates to the matching Bundle setter
    // on the backing extras bundle and returns the real Intent for chaining. ---
    @Implementation
    public Intent putExtra(String key, char value) {
        extras.putChar(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, byte value) {
        extras.putByte(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, int value) {
        extras.putInt(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, double value) {
        extras.putDouble(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, short value) {
        extras.putShort(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, float value) {
        extras.putFloat(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, long value) {
        extras.putLong(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, Serializable value) {
        extras.putSerializable(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, Parcelable value) {
        extras.putParcelable(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, Parcelable[] value) {
        extras.putParcelableArray(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, String value) {
        extras.putString(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, String[] value) {
        extras.putStringArray(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, Bundle value) {
        extras.putBundle(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, boolean value) {
        extras.putBoolean(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, boolean[] value) {
        extras.putBooleanArray(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, char[] value) {
        extras.putCharArray(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, int[] value) {
        extras.putIntArray(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, long[] value) {
        extras.putLongArray(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, float[] value) {
        extras.putFloatArray(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, double[] value) {
        extras.putDoubleArray(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, short[] value) {
        extras.putShortArray(key, value);
        return realIntent;
    }
    // --- Typed extra getters: each delegates to the matching Bundle getter. ---
    @Implementation
    public char[] getCharArrayExtra(String name) {
        return extras.getCharArray(name);
    }
    @Implementation
    public boolean[] getBooleanArrayExtra(String name) {
        return extras.getBooleanArray(name);
    }
    @Implementation
    public int[] getIntArrayExtra(String name) {
        return extras.getIntArray(name);
    }
    @Implementation
    public long[] getLongArrayExtra(String name) {
        return extras.getLongArray(name);
    }
    @Implementation
    public float[] getFloatArrayExtra(String name) {
        return extras.getFloatArray(name);
    }
    @Implementation
    public double[] getDoubleArrayExtra(String name) {
        return extras.getDoubleArray(name);
    }
    @Implementation
    public short[] getShortArrayExtra(String name) {
        return extras.getShortArray(name);
    }
    @Implementation
    public boolean getBooleanExtra(String name, boolean defaultValue) {
        return extras.getBoolean(name, defaultValue);
    }
    @Implementation
    public String[] getStringArrayExtra(String name) {
        return extras.getStringArray(name);
    }
    @Implementation
    public CharSequence[] getCharSequenceArrayExtra(String name) {
        return extras.getCharSequenceArray(name);
    }
    @Implementation
    public ArrayList<CharSequence> getCharSequenceArrayListExtra(String name) {
        return extras.getCharSequenceArrayList(name);
    }
    // --- CharSequence, byte[] and ArrayList extras, delegating to the bundle. ---
    @Implementation
    public Intent putExtra(String key, CharSequence value) {
        extras.putCharSequence(key, value);
        return realIntent;
    }
    @Implementation
    public CharSequence getCharSequenceExtra(String name) {
        return extras.getCharSequence(name);
    }
    @Implementation
    public Intent putExtra(String key, byte[] value) {
        extras.putByteArray(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putStringArrayListExtra(String key, ArrayList<String> value) {
        extras.putStringArrayList(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putExtra(String key, CharSequence[] value) {
        extras.putCharSequenceArray(key, value);
        return realIntent;
    }
    @Implementation
    public Intent putCharSequenceArrayListExtra(String key, ArrayList<CharSequence> value) {
        extras.putCharSequenceArrayList(key, value);
        return realIntent;
    }
    @Implementation
    public ArrayList<String> getStringArrayListExtra(String name) {
        return extras.getStringArrayList(name);
    }
    @Implementation
    public Intent putIntegerArrayListExtra(String key, ArrayList<Integer> value) {
        extras.putIntegerArrayList(key, value);
        return realIntent;
    }
    @Implementation
    public ArrayList<Integer> getIntegerArrayListExtra(String name) {
        return extras.getIntegerArrayList(name);
    }
    @Implementation
    public Intent putParcelableArrayListExtra(String key, ArrayList<Parcelable> value) {
        extras.putParcelableArrayList(key, value);
        return realIntent;
    }
    @Implementation
    public ArrayList<Parcelable> getParcelableArrayListExtra(String key) {
        return extras.getParcelableArrayList(key);
    }
@Implementation
public boolean hasExtra(String name) {
return extras.containsKey(name);
}
@Implementation
public String getStringExtra(String name) {
return extras.getString(name);
}
@Implementation
public Parcelable getParcelableExtra(String name) {
return extras.getParcelable(name);
}
@Implementation
public Parcelable[] getParcelableArrayExtra(String name) {
return extras.getParcelableArray(name);
}
@Implementation
public byte getByteExtra(String name, byte defaultValue) {
return extras.getByte(name, defaultValue);
}
@Implementation
public char getCharExtra(String name, char defaultValue) {
return extras.getChar(name, defaultValue);
}
@Implementation
public int getIntExtra(String name, int defaultValue) {
return extras.getInt(name, defaultValue);
}
@Implementation
public long getLongExtra(String name, long defaultValue) {
return extras.getLong(name, defaultValue);
}
@Implementation
public double getDoubleExtra(String name, double defaultValue) {
return extras.getDouble(name, defaultValue);
}
@Implementation
public short getShortExtra(String name, short defaultValue) {
return extras.getShort(name, defaultValue);
}
@Implementation
public Bundle getBundleExtra(String name) {
return extras.getBundle(name);
}
@Implementation
public float getFloatExtra(String name, float defaultValue) {
return extras.getFloat(name, defaultValue);
}
@Implementation
public byte[] getByteArrayExtra(String name) {
return extras.getByteArray(name);
}
@Implementation
public Serializable getSerializableExtra(String name) {
return extras.getSerializable(name);
}
@Implementation
public void removeExtra(String name) {
extras.remove(name);
}
@Implementation
public Intent setComponent(ComponentName componentName) {
this.componentName = componentName;
return realIntent;
}
@Implementation
public ComponentName getComponent() {
return componentName;
}
@Implementation
/**
 * Shadow simplification: the framework's {@code Intent#toURI()} serializes the
 * whole intent (action, categories, extras, ...); this shadow renders only the
 * data URI. Guarded so an intent without a data URI yields an empty string
 * instead of the NullPointerException the previous implementation threw
 * (the framework method never throws here).
 */
public String toURI() {
    return data == null ? "" : data.toString();
}
/**
 * Shadow of {@code Intent#fillIn}: copies fields from {@code otherIntent} into
 * this intent. A field is copied when it is set on the other intent and either
 * unset here or forced via the corresponding FILL_IN_* bit in {@code flags}.
 *
 * @return a bitmask of the FILL_IN_* fields that were actually copied
 */
@Implementation
public int fillIn(Intent otherIntent, int flags) {
    int changes = 0;
    ShadowIntent other = shadowOf(otherIntent);
    if (other.action != null && (action == null || (flags & FILL_IN_ACTION) != 0)) {
        action = other.action;
        changes |= FILL_IN_ACTION;
    }
    // data and type travel together, mirroring setDataAndType().
    if ((other.data != null || other.type != null)
            && ((data == null && type == null) || (flags & FILL_IN_DATA) != 0)) {
        data = other.data;
        type = other.type;
        changes |= FILL_IN_DATA;
    }
    // Categories are merged into the existing set, not replaced.
    if (!other.categories.isEmpty()
            && (categories.isEmpty() || (flags & FILL_IN_CATEGORIES) != 0)) {
        categories.addAll(other.categories);
        changes |= FILL_IN_CATEGORIES;
    }
    if (other.packageName != null
            && (packageName == null || (flags & FILL_IN_PACKAGE) != 0)) {
        packageName = other.packageName;
        changes |= FILL_IN_PACKAGE;
    }
    // Unlike the other fields, the component is only copied when explicitly
    // requested via FILL_IN_COMPONENT.
    if (other.componentName != null && (flags & FILL_IN_COMPONENT) != 0) {
        componentName = other.componentName;
        changes |= FILL_IN_COMPONENT;
    }
    // Extras are unconditionally merged; this is not reflected in the mask.
    extras.putAll(other.extras);
    return changes;
}
/**
 * Shadow of {@code Intent#filterEquals}: two intents match for intent-filter
 * resolution when action, data, type, package, component and categories are
 * all equal. Extras and flags are deliberately not compared. Kept structurally
 * identical to the AOSP implementation ("cribbed from Android source").
 */
@Implementation
// cribbed from Android source
public boolean filterEquals(Intent other) {
    if (other == null) {
        return false;
    }
    // Per field: fast path on reference equality (also covers both-null),
    // then a null-safe equals() check from whichever side is non-null.
    if (getAction() != other.getAction()) {
        if (getAction() != null) {
            if (!getAction().equals(other.getAction())) {
                return false;
            }
        } else {
            if (!other.getAction().equals(getAction())) {
                return false;
            }
        }
    }
    if (getData() != other.getData()) {
        if (getData() != null) {
            if (!getData().equals(other.getData())) {
                return false;
            }
        } else {
            if (!other.getData().equals(getData())) {
                return false;
            }
        }
    }
    if (getType() != other.getType()) {
        if (getType() != null) {
            if (!getType().equals(other.getType())) {
                return false;
            }
        } else {
            if (!other.getType().equals(getType())) {
                return false;
            }
        }
    }
    if (getPackage() != other.getPackage()) {
        if (getPackage() != null) {
            if (!getPackage().equals(other.getPackage())) {
                return false;
            }
        } else {
            if (!other.getPackage().equals(getPackage())) {
                return false;
            }
        }
    }
    if (getComponent() != other.getComponent()) {
        if (getComponent() != null) {
            if (!getComponent().equals(other.getComponent())) {
                return false;
            }
        } else {
            if (!other.getComponent().equals(getComponent())) {
                return false;
            }
        }
    }
    if (getCategories() != other.getCategories()) {
        if (getCategories() != null) {
            if (!getCategories().equals(other.getCategories())) {
                return false;
            }
        } else {
            if (!other.getCategories().equals(getCategories())) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Compares an {@code Intent} with a {@code ShadowIntent} (obtained via a call to
 * {@link Robolectric#shadowOf(android.content.Intent)}) field by field:
 * action, package, component, data, extras, type and categories. Note that
 * flags are NOT part of the comparison.
 *
 * @param o a {@code ShadowIntent}
 * @return whether they are equivalent
 */
@Deprecated
public boolean realIntentEquals(ShadowIntent o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    // Null-safe field-by-field comparison.
    if (action != null ? !action.equals(o.action) : o.action != null) return false;
    if (packageName != null ? !packageName.equals(o.packageName) : o.packageName != null)
        return false;
    if (componentName != null ? !componentName.equals(o.componentName) : o.componentName != null)
        return false;
    if (data != null ? !data.equals(o.data) : o.data != null) return false;
    if (extras != null ? !extras.equals(o.extras) : o.extras != null) return false;
    if (type != null ? !type.equals(o.type) : o.type != null) return false;
    if (categories != null ? !categories.equals(o.categories) : o.categories != null) return false;
    return true;
}
/**
 * Hash over the same fields that {@link #realIntentEquals(ShadowIntent)} (and
 * therefore {@link #equals(Object)}) compares: extras, action, package,
 * component, data, type and categories.
 */
@Override
@Implementation
public int hashCode() {
    int result = extras != null ? extras.hashCode() : 0;
    result = 31 * result + (action != null ? action.hashCode() : 0);
    result = 31 * result + (packageName != null ? packageName.hashCode() : 0);
    result = 31 * result + (componentName != null ? componentName.hashCode() : 0);
    result = 31 * result + (data != null ? data.hashCode() : 0);
    result = 31 * result + (type != null ? type.hashCode() : 0);
    result = 31 * result + (categories != null ? categories.hashCode() : 0);
    // "flags" is intentionally excluded: equals() (via realIntentEquals) does
    // not compare flags, so the previous "31 * result + flags" term violated
    // the equals/hashCode contract — two equal intents with different flags
    // hashed differently.
    return result;
}
/**
 * Equality delegates to {@link #realIntentEquals(ShadowIntent)} on the other
 * Intent's shadow, so two Intents are equal when action, package, component,
 * data, extras, type and categories all match. Non-Intent arguments
 * (including null) are never equal.
 */
@Override
@Implementation
public boolean equals(Object o) {
    if (!(o instanceof Intent)) return false;
    return realIntentEquals(shadowOf((Intent) o));
}
/**
 * Non-Android accessor returning the {@code Class} object previously set by
 * {@link #setClass(android.content.Context, Class)}; may be null if it was
 * never set.
 *
 * @return the target class of this intent, or null
 */
public Class<?> getIntentClass() {
    return intentClass;
}
/**
 * Debug representation listing only the fields that are present (see
 * {@link #ifWeHave}); absent fields contribute null entries that Join.join
 * is expected to drop.
 */
@Override
@Implementation
public String toString() {
    return "Intent{" +
            Join.join(
                    ", ",
                    ifWeHave(componentName, "componentName"),
                    ifWeHave(action, "action"),
                    ifWeHave(extras, "extras"),
                    ifWeHave(data, "data"),
                    ifWeHave(type, "type")
            ) +
            '}';
}
/**
 * Renders {@code name=o} for {@link #toString()}, or null when the value is
 * absent: null itself, or an empty {@code Map}.
 */
private String ifWeHave(Object o, String name) {
    if (o == null) return null;
    // Wildcard-parameterized cast instead of the raw Map type; raw types defeat
    // generic type checking and trigger unchecked warnings. isEmpty() needs no
    // knowledge of the key/value types.
    if (o instanceof Map && ((Map<?, ?>) o).isEmpty()) return null;
    return name + "=" + o;
}
/**
 * Non-Android setter that parses {@code uri} and stores the result as this
 * intent's data URI.
 *
 * @deprecated Use {@link ShadowIntent#setData(android.net.Uri)}.
 */
public void setURI(String uri) {
    this.data = Uri.parse(uri);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.service.client;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.service.SystemServiceManager;
import org.apache.hadoop.yarn.service.api.records.Service;
import org.apache.hadoop.yarn.service.api.records.ServiceState;
import org.apache.hadoop.yarn.service.conf.SliderExitCodes;
import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
import org.apache.hadoop.yarn.service.exceptions.SliderException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.apache.hadoop.yarn.service.utils.ServiceApiUtil.jsonSerDeser;
/**
* SystemServiceManager implementation.
 * Scans the configured system service path for service definitions.
*
* The service path structure is as follows:
* SYSTEM_SERVICE_DIR_PATH
* |---- sync
* | |--- user1
* | | |---- service1.yarnfile
* | | |---- service2.yarnfile
* | |--- user2
* | | |---- service1.yarnfile
* | | ....
* | |
* |---- async
* | |--- user3
* | | |---- service1.yarnfile
* | | |---- service2.yarnfile
* | |--- user4
* | | |---- service1.yarnfile
* | | ....
* | |
*
* sync: These services are launched at the time of service start synchronously.
* It is a blocking service start.
* async: These services are launched in separate thread without any delay after
* service start. Non-blocking service start.
*/
public class SystemServiceManagerImpl extends AbstractService
    implements SystemServiceManager {
  private static final Logger LOG =
      LoggerFactory.getLogger(SystemServiceManagerImpl.class);
  private static final String YARN_FILE_SUFFIX = ".yarnfile";
  private static final String SYNC = "sync";
  private static final String ASYNC = "async";
  private FileSystem fs;
  private Path systemServiceDir;
  // Checked by StoppableRemoteIterator so in-flight directory scans stop
  // promptly when the service is stopped.
  private AtomicBoolean stopExecutors = new AtomicBoolean(false);
  // user -> service definitions, filled by scanForUserServices().
  private Map<String, Set<Service>> syncUserServices = new HashMap<>();
  private Map<String, Set<Service>> asyncUserServices = new HashMap<>();
  // UGI used as the proxy principal when submitting on behalf of users.
  private UserGroupInformation loginUGI;
  // Background thread that submits the "async" services after start-up.
  // (Field renamed from the misspelled "serviceLaucher"; it is private, so the
  // rename is invisible outside this class.)
  private Thread serviceLauncher;
  @VisibleForTesting
  private int badFileNameExtensionSkipCounter;
  @VisibleForTesting
  private Map<String, Integer> ignoredUserServices =
      new HashMap<>();
  @VisibleForTesting
  private int badDirSkipCounter;
  public SystemServiceManagerImpl() {
    super(SystemServiceManagerImpl.class.getName());
  }
  /**
   * Reads the configured system service directory, resolves its FileSystem,
   * and picks the UGI (login user when secure, current user otherwise) used
   * to proxy submissions. No-op when the directory is not configured.
   */
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    String dirPath =
        conf.get(YarnServiceConf.YARN_SERVICES_SYSTEM_SERVICE_DIRECTORY);
    if (dirPath != null) {
      systemServiceDir = new Path(dirPath);
      LOG.info("System Service Directory is configured to {}",
          systemServiceDir);
      fs = systemServiceDir.getFileSystem(conf);
      this.loginUGI = UserGroupInformation.isSecurityEnabled() ?
          UserGroupInformation.getLoginUser() :
          UserGroupInformation.getCurrentUser();
      LOG.info("UserGroupInformation initialized to {}", loginUGI);
    }
  }
  @Override
  protected void serviceStart() throws Exception {
    scanForUserServices();
    // "sync" services deliberately block service start.
    launchUserService(syncUserServices);
    // Submit "async" services from a background thread so they do not delay
    // RM switch-over.
    serviceLauncher = new Thread(createRunnable());
    serviceLauncher.setName("System service launcher");
    serviceLauncher.start();
  }
  @Override
  protected void serviceStop() throws Exception {
    LOG.info("Stopping {}", getName());
    stopExecutors.set(true);
    if (serviceLauncher != null) {
      serviceLauncher.interrupt();
      try {
        serviceLauncher.join();
      } catch (InterruptedException ie) {
        // Restore the interrupt status for callers further up the stack.
        Thread.currentThread().interrupt();
        LOG.warn("Interrupted Exception while stopping", ie);
      }
    }
  }
  private Runnable createRunnable() {
    return new Runnable() {
      @Override
      public void run() {
        launchUserService(asyncUserServices);
      }
    };
  }
  /**
   * Submits, per user, every scanned service definition. Each user's services
   * are submitted through a dedicated ServiceClient running as the (proxied)
   * user; the client is always closed. Interruption aborts remaining users.
   */
  void launchUserService(Map<String, Set<Service>> userServices) {
    for (Map.Entry<String, Set<Service>> entry : userServices.entrySet()) {
      String user = entry.getKey();
      Set<Service> services = entry.getValue();
      if (services.isEmpty()) {
        continue;
      }
      ServiceClient serviceClient = null;
      try {
        UserGroupInformation userUgi = getProxyUser(user);
        serviceClient = createServiceClient(userUgi);
        for (Service service : services) {
          LOG.info("POST: createService = {} user = {}", service, userUgi);
          try {
            launchServices(userUgi, serviceClient, service);
          } catch (IOException | UndeclaredThrowableException e) {
            // A failure for one service must not stop the remaining ones.
            if (e.getCause() != null) {
              LOG.warn(e.getCause().getMessage());
            } else {
              String message =
                  "Failed to create service " + service.getName() + " : ";
              LOG.error(message, e);
            }
          }
        }
      } catch (InterruptedException e) {
        LOG.warn("System service launcher thread interrupted", e);
        // Re-assert the interrupt and stop processing further users.
        Thread.currentThread().interrupt();
        break;
      } catch (Exception e) {
        LOG.error("Error while submitting services for user " + user, e);
      } finally {
        if (serviceClient != null) {
          try {
            serviceClient.close();
          } catch (IOException e) {
            LOG.warn("Error while closing serviceClient for user {}", user);
          }
        }
      }
    }
  }
  // Builds and starts a ServiceClient as the given user.
  private ServiceClient createServiceClient(UserGroupInformation userUgi)
      throws IOException, InterruptedException {
    ServiceClient serviceClient =
        userUgi.doAs(new PrivilegedExceptionAction<ServiceClient>() {
          @Override public ServiceClient run()
              throws IOException, YarnException {
            ServiceClient sc = getServiceClient();
            sc.init(getConfig());
            sc.start();
            return sc;
          }
        });
    return serviceClient;
  }
  /**
   * Saves the service definition and, unless it is marked STOPPED, starts it.
   * An already-existing definition (EXIT_INSTANCE_EXISTS) is still started;
   * any other save failure suppresses the start attempt.
   */
  private void launchServices(UserGroupInformation userUgi,
      ServiceClient serviceClient, Service service)
      throws IOException, InterruptedException {
    if (service.getState() == ServiceState.STOPPED) {
      userUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override public Void run() throws IOException, YarnException {
          serviceClient.actionBuild(service);
          return null;
        }
      });
      LOG.info("Service {} version {} saved.", service.getName(),
          service.getVersion());
    } else {
      ApplicationId applicationId =
          userUgi.doAs(new PrivilegedExceptionAction<ApplicationId>() {
            @Override public ApplicationId run()
                throws IOException, YarnException {
              boolean tryStart = true;
              try {
                serviceClient.actionBuild(service);
              } catch (Exception e) {
                if (e instanceof SliderException && ((SliderException) e)
                    .getExitCode() == SliderExitCodes.EXIT_INSTANCE_EXISTS) {
                  LOG.info("Service {} already exists, will attempt to start " +
                      "service", service.getName());
                } else {
                  tryStart = false;
                  LOG.info("Got exception saving {}, will not attempt to " +
                      "start service", service.getName(), e);
                }
              }
              if (tryStart) {
                return serviceClient.actionStartAndGetId(service.getName());
              } else {
                return null;
              }
            }
          });
      if (applicationId != null) {
        LOG.info("Service {} submitted with Application ID: {}",
            service.getName(), applicationId);
      }
    }
  }
  // Package-private factory; overridden by tests to inject a mock client.
  ServiceClient getServiceClient() {
    return new ServiceClient();
  }
  private UserGroupInformation getProxyUser(String user) {
    UserGroupInformation ugi;
    if (UserGroupInformation.isSecurityEnabled()) {
      ugi = UserGroupInformation.createProxyUser(user, loginUGI);
    } else {
      ugi = UserGroupInformation.createRemoteUser(user);
    }
    return ugi;
  }
  /**
   * Scans the top level of the system service directory for the two launch
   * types (sync/async) and populates the corresponding user-service maps.
   * Unknown files/directories are counted and skipped.
   */
  void scanForUserServices() throws IOException {
    if (systemServiceDir == null) {
      return;
    }
    try {
      LOG.info("Scan for launch type on {}", systemServiceDir);
      RemoteIterator<FileStatus> iterLaunchType = list(systemServiceDir);
      while (iterLaunchType.hasNext()) {
        FileStatus launchType = iterLaunchType.next();
        if (!launchType.isDirectory()) {
          LOG.debug("Scanner skips for unknown file {}", launchType.getPath());
          continue;
        }
        if (launchType.getPath().getName().equals(SYNC)) {
          scanForUserServiceDefinition(launchType.getPath(), syncUserServices);
        } else if (launchType.getPath().getName().equals(ASYNC)) {
          scanForUserServiceDefinition(launchType.getPath(), asyncUserServices);
        } else {
          badDirSkipCounter++;
          LOG.debug("Scanner skips for unknown dir {}.", launchType.getPath());
        }
      }
    } catch (FileNotFoundException e) {
      // Fixed the double negative ("doesn't not exist") in the original text.
      LOG.warn("System service directory {} does not exist.",
          systemServiceDir);
    }
  }
  // Files are under systemServiceDir/<users>. Scan for 2 levels:
  // 1st level for users, 2nd level for service definitions under each user.
  private void scanForUserServiceDefinition(Path userDirPath,
      Map<String, Set<Service>> userServices) throws IOException {
    LOG.info("Scan for users on {}", userDirPath);
    RemoteIterator<FileStatus> iterUsers = list(userDirPath);
    while (iterUsers.hasNext()) {
      FileStatus userDir = iterUsers.next();
      // if 1st level is not a user directory then skip it.
      if (!userDir.isDirectory()) {
        LOG.info(
            "Service definition {} doesn't belong to any user. Ignoring.. ",
            userDir.getPath().getName());
        continue;
      }
      String userName = userDir.getPath().getName();
      LOG.info("Scanning service definitions for user {}.", userName);
      // 2nd level scan
      RemoteIterator<FileStatus> iterServices = list(userDir.getPath());
      while (iterServices.hasNext()) {
        FileStatus serviceCache = iterServices.next();
        String filename = serviceCache.getPath().getName();
        if (!serviceCache.isFile()) {
          LOG.info("Scanner skips for unknown dir {}", filename);
          continue;
        }
        if (!filename.endsWith(YARN_FILE_SUFFIX)) {
          LOG.info("Scanner skips for unknown file extension, filename = {}",
              filename);
          badFileNameExtensionSkipCounter++;
          continue;
        }
        Service service = getServiceDefinition(serviceCache.getPath());
        if (service != null) {
          // computeIfAbsent replaces the manual get/null-check/put sequence.
          Set<Service> services =
              userServices.computeIfAbsent(userName, u -> new HashSet<>());
          if (!services.add(service)) {
            // Duplicate definition for this user; count it via merge()
            // instead of the manual containsKey/get/put dance.
            ignoredUserServices.merge(userName, 1, Integer::sum);
            LOG.warn(
                "Ignoring service {} for the user {} as it is already present,"
                    + " filename = {}", service.getName(), userName, filename);
          } else {
            LOG.info("Added service {} for the user {}, filename = {}",
                service.getName(), userName, filename);
          }
        }
      }
    }
  }
  /**
   * Loads one service definition; returns null (and logs) on failure so a
   * single bad file does not abort the scan.
   */
  private Service getServiceDefinition(Path filePath) {
    Service service = null;
    try {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Loading service definition from FS: " + filePath);
      }
      service = jsonSerDeser.load(fs, filePath);
    } catch (IOException e) {
      // The original passed the exception as the {} argument, which filled the
      // placeholder with e.toString() and dropped the stack trace. Log the
      // failing path and keep the exception as the throwable argument.
      LOG.info("Error while loading service definition from FS: {}",
          filePath, e);
    }
    return service;
  }
  private RemoteIterator<FileStatus> list(Path path) throws IOException {
    return new StoppableRemoteIterator(fs.listStatusIterator(path));
  }
  @VisibleForTesting Map<String, Integer> getIgnoredUserServices() {
    return ignoredUserServices;
  }
  // RemoteIterator wrapper that reports exhaustion as soon as stopExecutors
  // is set, letting long directory scans terminate early on shutdown.
  private class StoppableRemoteIterator implements RemoteIterator<FileStatus> {
    private final RemoteIterator<FileStatus> remote;
    StoppableRemoteIterator(RemoteIterator<FileStatus> remote) {
      this.remote = remote;
    }
    @Override public boolean hasNext() throws IOException {
      return !stopExecutors.get() && remote.hasNext();
    }
    @Override public FileStatus next() throws IOException {
      return remote.next();
    }
  }
  @VisibleForTesting
  Map<String, Set<Service>> getSyncUserServices() {
    return syncUserServices;
  }
  @VisibleForTesting
  int getBadFileNameExtensionSkipCounter() {
    return badFileNameExtensionSkipCounter;
  }
  @VisibleForTesting
  int getBadDirSkipCounter() {
    return badDirSkipCounter;
  }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.federation.kerberos;
import static org.keycloak.testsuite.admin.AbstractAdminTest.loadJson;
import static org.keycloak.testsuite.admin.ApiUtil.findClientByClientId;
import java.net.URI;
import java.security.Principal;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import javax.naming.Context;
import javax.naming.NamingException;
import javax.naming.directory.Attributes;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.security.sasl.Sasl;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import org.apache.http.NameValuePair;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.client.params.AuthPolicy;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.impl.client.AbstractHttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.ietf.jgss.GSSCredential;
import org.jboss.arquillian.graphene.page.Page;
import org.jboss.resteasy.client.jaxrs.ResteasyClient;
import org.jboss.resteasy.client.jaxrs.ResteasyClientBuilder;
import org.jboss.resteasy.client.jaxrs.engines.ApacheHttpClient4Engine;
import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.keycloak.OAuth2Constants;
import org.keycloak.adapters.HttpClientBuilder;
import org.keycloak.admin.client.resource.ClientResource;
import org.keycloak.admin.client.resource.RealmResource;
import org.keycloak.common.constants.KerberosConstants;
import org.keycloak.common.util.KerberosSerializationUtils;
import org.keycloak.common.util.MultivaluedHashMap;
import org.keycloak.events.Details;
import org.keycloak.federation.kerberos.CommonKerberosConfig;
import org.keycloak.models.LDAPConstants;
import org.keycloak.models.ProtocolMapperModel;
import org.keycloak.models.UserModel;
import org.keycloak.models.utils.ModelToRepresentation;
import org.keycloak.protocol.oidc.mappers.UserSessionNoteMapper;
import org.keycloak.representations.AccessToken;
import org.keycloak.representations.idm.ComponentRepresentation;
import org.keycloak.representations.idm.ProtocolMapperRepresentation;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.representations.idm.UserRepresentation;
import org.keycloak.storage.UserStorageProvider;
import org.keycloak.testsuite.AbstractAuthTest;
import org.keycloak.testsuite.Assert;
import org.keycloak.testsuite.AssertEvents;
import org.keycloak.testsuite.admin.ApiUtil;
import org.keycloak.testsuite.auth.page.AuthRealm;
import org.keycloak.testsuite.pages.AccountPasswordPage;
import org.keycloak.testsuite.pages.LoginPage;
import org.keycloak.testsuite.util.OAuthClient;
/**
* @author <a href="mailto:mposolda@redhat.com">Marek Posolda</a>
*/
public abstract class AbstractKerberosTest extends AbstractAuthTest {
// Factory that installs the SPNEGO auth scheme with per-call credentials
// (see spnegoLogin()).
protected KeycloakSPNegoSchemeFactory spnegoSchemeFactory;
// Resteasy client backed by an Apache HttpClient; (re)built by initHttpClient().
protected ResteasyClient client;
@Page
protected LoginPage loginPage;
@Rule
public AssertEvents events = new AssertEvents(this);
@Page
protected AccountPasswordPage changePasswordPage;
// Hooks each concrete Kerberos test environment must provide.
protected abstract CommonKerberosConfig getKerberosConfig();
protected abstract ComponentRepresentation getUserStorageConfiguration();
protected abstract void setKrb5ConfPath();
protected abstract boolean isStartEmbeddedLdapServer();
/** Imports the shared Kerberos test realm definition into the test server. */
@Override
public void addTestRealms(List<RealmRepresentation> testRealms) {
    RealmRepresentation realmRep = loadJson(getClass().getResourceAsStream("/kerberos/kerberosrealm.json"), RealmRepresentation.class);
    testRealms.add(realmRep);
}
/**
 * Per-test setup: points the pages at the test realm, configures krb5, builds
 * the SPNEGO-capable HTTP client, clears leftover users, and registers the
 * user storage provider component (removed again via the cleanup registry).
 */
@Before
public void beforeAbstractKeycloakTest() throws Exception {
    super.beforeAbstractKeycloakTest();
    testRealmPage.setAuthRealm(AuthRealm.TEST);
    changePasswordPage.realm(AuthRealm.TEST);
    setKrb5ConfPath();
    spnegoSchemeFactory = new KeycloakSPNegoSchemeFactory(getKerberosConfig());
    initHttpClient(true);
    removeAllUsers();
    oauth.clientId("kerberos-app");
    ComponentRepresentation rep = getUserStorageConfiguration();
    Response resp = testRealmResource().components().add(rep);
    // Register the created component for automatic removal after the test.
    getCleanup().addComponentId(ApiUtil.getCreatedId(resp));
    resp.close();
}
@After
public void afterAbstractKeycloakTest() {
    // Release the HTTP client before the superclass tears the rest down.
    cleanupApacheHttpClient();
    super.afterAbstractKeycloakTest();
}
/** Closes and clears the Resteasy client; safe to call when none exists. */
private void cleanupApacheHttpClient() {
    // Null-guard: if @Before failed before initHttpClient() ran, @After still
    // invokes this method; the previous unconditional client.close() threw an
    // NPE that masked the original test failure.
    if (client != null) {
        client.close();
        client = null;
    }
}
// @Test
// public void sleepTest() throws Exception {
// String kcLoginPageLocation = oauth.getLoginFormUrl();
// Thread.sleep(10000000);
// }
/**
 * Without SPNEGO credentials the login page must answer 401 with a
 * WWW-Authenticate: Negotiate challenge.
 */
@Test
public void spnegoNotAvailableTest() throws Exception {
    initHttpClient(false);
    String kcLoginPageLocation = oauth.getLoginFormUrl();
    Response response = client.target(kcLoginPageLocation).request().get();
    Assert.assertEquals(401, response.getStatus());
    Assert.assertEquals(KerberosConstants.NEGOTIATE, response.getHeaderString(HttpHeaders.WWW_AUTHENTICATE));
    // Drain the entity so the connection can be reused; the previous code
    // stored it in an unused local variable.
    response.readEntity(String.class);
    response.close();
}
/**
 * Performs a successful SPNEGO login as "hnelson", asserts the recorded login
 * event, and exchanges the authorization code for tokens.
 */
protected OAuthClient.AccessTokenResponse spnegoLoginTestImpl() throws Exception {
    Response spnegoResponse = spnegoLogin("hnelson", "secret");
    // 302 means the SPNEGO handshake succeeded and we were redirected with a code.
    Assert.assertEquals(302, spnegoResponse.getStatus());
    List<UserRepresentation> users = testRealmResource().users().search("hnelson", 0, 1);
    String userId = users.get(0).getId();
    events.expectLogin()
            .client("kerberos-app")
            .user(userId)
            .detail(Details.USERNAME, "hnelson")
            .assertEvent();
    String codeUrl = spnegoResponse.getLocation().toString();
    return assertAuthenticationSuccess(codeUrl);
}
// Whether the backing Kerberos setup treats principal names case-sensitively;
// drives which username spelling spnegoCaseInsensitiveTest logs in with.
protected abstract boolean isCaseSensitiveLogin();
// KEYCLOAK-2102
/**
 * SPNEGO principals must map to Keycloak users case-insensitively: logging in
 * as "MyDuke"/"myduke" must resolve to the stored "myduke" user.
 */
@Test
public void spnegoCaseInsensitiveTest() throws Exception {
    Response spnegoResponse = spnegoLogin(isCaseSensitiveLogin() ? "MyDuke" : "myduke", "theduke");
    Assert.assertEquals(302, spnegoResponse.getStatus());
    List<UserRepresentation> users = testRealmResource().users().search("myduke", 0, 1);
    String userId = users.get(0).getId();
    events.expectLogin()
            .client("kerberos-app")
            .user(userId)
            .detail(Details.USERNAME, "myduke")
            .assertEvent();
    String codeUrl = spnegoResponse.getLocation().toString();
    assertAuthenticationSuccess(codeUrl);
}
/**
 * Form-based (non-SPNEGO) login against the Kerberos-backed storage provider:
 * verifies password updates are rejected in READ_ONLY edit mode, allowed in
 * UNSYNCED mode, and that SPNEGO still works with the old Kerberos password
 * after an unsynced local change.
 */
@Test
public void usernamePasswordLoginTest() throws Exception {
    // Change editMode to READ_ONLY
    updateProviderEditMode(UserStorageProvider.EditMode.READ_ONLY);
    // Login with username/password from kerberos
    changePasswordPage.open();
    loginPage.assertCurrent();
    loginPage.login("jduke", "theduke");
    changePasswordPage.assertCurrent();
    // Bad existing password
    changePasswordPage.changePassword("theduke-invalid", "newPass", "newPass");
    Assert.assertTrue(driver.getPageSource().contains("Invalid existing password."));
    // Change password is not possible as editMode is READ_ONLY
    changePasswordPage.changePassword("theduke", "newPass", "newPass");
    Assert.assertTrue(
            driver.getPageSource().contains("You can't update your password as your account is read only"));
    // Change editMode to UNSYNCED
    updateProviderEditMode(UserStorageProvider.EditMode.UNSYNCED);
    // Successfully change password now
    changePasswordPage.changePassword("theduke", "newPass", "newPass");
    Assert.assertTrue(driver.getPageSource().contains("Your password has been updated."));
    changePasswordPage.logout();
    // Login with old password doesn't work, but with new password works
    loginPage.login("jduke", "theduke");
    loginPage.assertCurrent();
    loginPage.login("jduke", "newPass");
    changePasswordPage.assertCurrent();
    changePasswordPage.logout();
    // Assert SPNEGO login still works with the old password as mode is unsynced
    events.clear();
    Response spnegoResponse = spnegoLogin("jduke", "theduke");
    Assert.assertEquals(302, spnegoResponse.getStatus());
    List<UserRepresentation> users = testRealmResource().users().search("jduke", 0, 1);
    String userId = users.get(0).getId();
    events.expectLogin()
            .client("kerberos-app")
            .user(userId)
            .detail(Details.USERNAME, "jduke")
            .assertEvent();
    String codeUrl = spnegoResponse.getLocation().toString();
    assertAuthenticationSuccess(codeUrl);
}
/**
 * Verifies Kerberos credential delegation: with the delegation protocol
 * mapper installed, the access token carries a serialized GSS credential that
 * can be replayed against a third-party Kerberos service (the embedded
 * ApacheDS LDAP); once the mapper is removed, the claim disappears.
 */
@Test
public void credentialDelegationTest() throws Exception {
    Assume.assumeTrue("Ignoring test as the embedded server is not started", isStartEmbeddedLdapServer());
    // Add kerberos delegation credential mapper
    ProtocolMapperModel protocolMapper = UserSessionNoteMapper.createClaimMapper(KerberosConstants.GSS_DELEGATION_CREDENTIAL_DISPLAY_NAME,
            KerberosConstants.GSS_DELEGATION_CREDENTIAL,
            KerberosConstants.GSS_DELEGATION_CREDENTIAL, "String",
            true, KerberosConstants.GSS_DELEGATION_CREDENTIAL_DISPLAY_NAME,
            true, false);
    ProtocolMapperRepresentation protocolMapperRep = ModelToRepresentation.toRepresentation(protocolMapper);
    ClientResource clientResource = findClientByClientId(testRealmResource(), "kerberos-app");
    Response response = clientResource.getProtocolMappers().createMapper(protocolMapperRep);
    String protocolMapperId = ApiUtil.getCreatedId(response);
    response.close();
    // SPNEGO login
    OAuthClient.AccessTokenResponse tokenResponse = spnegoLoginTestImpl();
    // Assert kerberos ticket in the accessToken can be re-used to authenticate against other 3rd party kerberos service (ApacheDS Server in this case)
    String accessToken = tokenResponse.getAccessToken();
    AccessToken token = oauth.verifyToken(accessToken);
    String serializedGssCredential = (String) token.getOtherClaims().get(KerberosConstants.GSS_DELEGATION_CREDENTIAL);
    Assert.assertNotNull(serializedGssCredential);
    GSSCredential gssCredential = KerberosSerializationUtils.deserializeCredential(serializedGssCredential);
    String ldapResponse = invokeLdap(gssCredential, token.getPreferredUsername());
    Assert.assertEquals("Horatio Nelson", ldapResponse);
    // Logout
    oauth.openLogout();
    // Remove protocolMapper
    clientResource.getProtocolMappers().delete(protocolMapperId);
    // Login again and assert the delegated credential claim is no longer present
    tokenResponse = spnegoLoginTestImpl();
    accessToken = tokenResponse.getAccessToken();
    token = oauth.verifyToken(accessToken);
    Assert.assertFalse(token.getOtherClaims().containsKey(KerberosConstants.GSS_DELEGATION_CREDENTIAL));
    events.clear();
}
/**
 * Looks up "cn sn" for {@code username} via JNDI/LDAP on the embedded server,
 * authenticating with the delegated GSS credential when one is supplied
 * (default bind otherwise).
 *
 * @throws NamingException if the directory lookup fails
 */
private String invokeLdap(GSSCredential gssCredential, String username) throws NamingException {
    // Parameterized environment table instead of the raw Hashtable type (raw
    // types defeat generic type checking); JNDI requires Hashtable here.
    Hashtable<String, Object> env = new Hashtable<>(11);
    env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
    env.put(Context.PROVIDER_URL, "ldap://localhost:10389");
    if (gssCredential != null) {
        env.put(Context.SECURITY_AUTHENTICATION, "GSSAPI");
        env.put(Sasl.CREDENTIALS, gssCredential);
    }
    DirContext ctx = new InitialDirContext(env);
    try {
        Attributes attrs = ctx.getAttributes("uid=" + username + ",ou=People,dc=keycloak,dc=org");
        String cn = (String) attrs.get("cn").get();
        String sn = (String) attrs.get("sn").get();
        return cn + " " + sn;
    } finally {
        // DirContext is not AutoCloseable, so close it manually.
        ctx.close();
    }
}
/**
 * Drives a SPNEGO login against the login page with the given Kerberos
 * credentials. If the server responds with a 302 to a required-action page,
 * that redirect is followed once; otherwise the response is returned as-is.
 */
protected Response spnegoLogin(String username, String password) {
    String kcLoginPageLocation = oauth.getLoginFormUrl();
    // Request for SPNEGO login sent with Resteasy client
    spnegoSchemeFactory.setCredentials(username, password);
    Response response = client.target(kcLoginPageLocation).request().get();
    if (response.getStatus() == 302) {
        if (response.getLocation() == null)
            return response;
        String uri = response.getLocation().toString();
        if (uri.contains("login-actions/required-action")) {
            response = client.target(uri).request().get();
        }
    }
    return response;
}
/**
 * (Re)builds the Resteasy client on top of an Apache HttpClient with the
 * SPNEGO auth scheme registered. When {@code useSpnego} is true, placeholder
 * credentials are installed for every auth scope so the client attempts the
 * SPNEGO handshake automatically; the actual Kerberos credentials are set on
 * the scheme factory by spnegoLogin().
 */
protected void initHttpClient(boolean useSpnego) {
    if (client != null) {
        cleanupApacheHttpClient();
    }
    DefaultHttpClient httpClient = (DefaultHttpClient) new HttpClientBuilder()
            .disableCookieCache(false)
            .build();
    httpClient.getAuthSchemes().register(AuthPolicy.SPNEGO, spnegoSchemeFactory);
    if (useSpnego) {
        // Dummy credentials: SPNEGO ignores them but HttpClient requires a
        // CredentialsProvider entry before it will negotiate.
        Credentials fake = new Credentials() {
            public String getPassword() {
                return null;
            }
            public Principal getUserPrincipal() {
                return null;
            }
        };
        httpClient.getCredentialsProvider().setCredentials(
                new AuthScope(null, -1, null),
                fake);
    }
    ApacheHttpClient4Engine engine = new ApacheHttpClient4Engine(httpClient);
    client = new ResteasyClientBuilder().httpEngine(engine).build();
}
/**
 * Deletes every user in the test realm except the default user required by
 * {@link AssertEvents}, then asserts exactly that one user remains.
 */
protected void removeAllUsers() {
    RealmResource realm = testRealmResource();
    // Fetch all users in one page (Integer.MAX_VALUE as the page size).
    for (UserRepresentation candidate : realm.users().search("", 0, Integer.MAX_VALUE)) {
        if (!candidate.getUsername().equals(AssertEvents.DEFAULT_USERNAME)) {
            realm.users().get(candidate.getId()).remove();
        }
    }
    Assert.assertEquals(1, realm.users().search("", 0, Integer.MAX_VALUE).size());
}
/**
 * Asserts that the user with the given username exists in the test realm with the
 * expected profile fields and, depending on {@code updateProfileActionExpected},
 * either has the UPDATE_PROFILE required action pending or no required actions.
 *
 * @param expectedUsername username to look up
 * @param expectedEmail expected email attribute
 * @param expectedFirstname expected first name
 * @param expectedLastname expected last name
 * @param updateProfileActionExpected whether the UPDATE_PROFILE required action is expected
 */
protected void assertUser(String expectedUsername, String expectedEmail, String expectedFirstname,
                          String expectedLastname, boolean updateProfileActionExpected) {
    // Fix: the original wrapped this body in a try block with an EMPTY finally
    // clause — a no-op construct that only obscured the assertions; removed.
    UserRepresentation user = ApiUtil.findUserByUsername(testRealmResource(), expectedUsername);
    Assert.assertNotNull(user);
    Assert.assertEquals(expectedEmail, user.getEmail());
    Assert.assertEquals(expectedFirstname, user.getFirstName());
    Assert.assertEquals(expectedLastname, user.getLastName());
    if (updateProfileActionExpected) {
        // The first (and only expected) pending action must be UPDATE_PROFILE.
        Assert.assertEquals(UserModel.RequiredAction.UPDATE_PROFILE.toString(),
                user.getRequiredActions().iterator().next());
    } else {
        Assert.assertTrue(user.getRequiredActions().isEmpty());
    }
}
/**
 * Parses the OAuth2 {@code code} and {@code state} parameters out of the redirect
 * URL, exchanges the code for tokens, and asserts that an access token was issued.
 *
 * @param codeUrl redirect URL carrying the authorization code and state
 * @return the access token response from the token endpoint
 * @throws Exception if the URL cannot be parsed
 */
protected OAuthClient.AccessTokenResponse assertAuthenticationSuccess(String codeUrl) throws Exception {
    String code = null;
    String state = null;
    // Pull "code" and "state" out of the query string of the redirect URL.
    for (NameValuePair param : URLEncodedUtils.parse(new URI(codeUrl), "UTF-8")) {
        String paramName = param.getName();
        if (paramName.equals(OAuth2Constants.CODE)) {
            code = param.getValue();
        } else if (paramName.equals(OAuth2Constants.STATE)) {
            state = param.getValue();
        }
    }
    Assert.assertNotNull(code);
    Assert.assertNotNull(state);
    OAuthClient.AccessTokenResponse response = oauth.doAccessTokenRequest(code, "password");
    Assert.assertNotNull(response.getAccessToken());
    events.clear();
    return response;
}
/**
 * Switches the edit mode of the single user storage provider named "test" in the
 * test realm and pushes the updated component configuration to the server.
 *
 * @param editMode the edit mode to configure on the provider
 */
protected void updateProviderEditMode(UserStorageProvider.EditMode editMode) {
    List<ComponentRepresentation> components =
            testRealmResource().components().query("test", UserStorageProvider.class.getName());
    // Exactly one matching storage provider component is expected.
    Assert.assertEquals(1, components.size());
    ComponentRepresentation provider = components.get(0);
    provider.getConfig().putSingle(LDAPConstants.EDIT_MODE, editMode.toString());
    testRealmResource().components().component(provider.getId()).update(provider);
}
/**
 * @return the admin-client resource for the "test" realm used throughout this test class
 */
public RealmResource testRealmResource() {
    return adminClient.realm("test");
}
// TODO: Use LDAPTestUtils.toComponentConfig once it's migrated to new testsuite
/**
 * Converts a flat LDAP configuration map into the multivalued map form required
 * by component representations (each key carries a single value).
 *
 * @param ldapConfig single-valued configuration entries
 * @return a multivalued map containing the same key/value pairs
 */
public static MultivaluedHashMap<String, String> toComponentConfig(Map<String, String> ldapConfig) {
    MultivaluedHashMap<String, String> componentConfig = new MultivaluedHashMap<>();
    // Copy each entry; MultivaluedHashMap.add appends the value under the key.
    ldapConfig.forEach(componentConfig::add);
    return componentConfig;
}
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) frentix GmbH<br>
* http://www.frentix.com<br>
* <p>
*/
package org.olat.presentation.framework.core.components.form.flexible.impl.elements;
/* TODO: ORID-1007 'File' */
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.log4j.Logger;
import org.olat.data.commons.fileutil.FileUtils;
import org.olat.data.commons.vfs.LocalFolderImpl;
import org.olat.data.commons.vfs.VFSContainer;
import org.olat.data.commons.vfs.VFSItem;
import org.olat.data.commons.vfs.VFSLeaf;
import org.olat.data.commons.vfs.VFSManager;
import org.olat.lms.commons.validation.ValidationStatus;
import org.olat.lms.commons.validation.ValidationStatusImpl;
import org.olat.presentation.framework.core.UserRequest;
import org.olat.presentation.framework.core.components.Component;
import org.olat.presentation.framework.core.components.form.flexible.elements.FileElement;
import org.olat.presentation.framework.core.components.form.flexible.impl.Form;
import org.olat.presentation.framework.core.components.form.flexible.impl.FormItemImpl;
import org.olat.presentation.framework.core.control.Disposable;
import org.olat.system.commons.CodeHelper;
import org.olat.system.commons.WebappHelper;
import org.olat.system.logging.log4j.LoggerHelper;
import com.oreilly.servlet.multipart.DefaultFileRenamePolicy;
import com.oreilly.servlet.multipart.FileRenamePolicy;
/**
* <h3>Description:</h3>
* <p>
* Implementation of the file element. See the interface for more documentation.
* <p>
* The class implements the disposable interface to cleanup temporary files on form disposal.
* <p>
* Initial Date: 08.12.2008 <br>
*
* @author Florian Gnaegi, frentix GmbH, http://www.frentix.com
*/
public class FileElementImpl extends FormItemImpl implements FileElement, Disposable {

    private static final Logger log = LoggerHelper.getLogger();

    protected FileElementComponent component;

    // The file set by the caller for display/replacement, and the uploaded file
    // held in a temporary location until moveUploadFileTo() is called.
    private File initialFile, tempUploadFile;
    // Allowed mime types; only enforced when checkForMimeTypes is true.
    private Set<String> mimeTypes;
    private int maxUploadSizeKB = UPLOAD_UNLIMITED;
    // Metadata of the current upload; reset together with tempUploadFile.
    private String uploadFilename;
    private String uploadMimeType;

    // Validation switches, enabled via setMaxUploadSizeKB() / limitToMimeType().
    private boolean checkForMaxFileSize = false;
    private boolean checkForMimeTypes = false;
    // error keys
    private String i18nErrMandatory;
    private String i18nErrMaxSize;
    private String i18nErrMimeType;
    private String[] i18nErrMaxSizeArgs;
    private String[] i18nErrMimeTypeArgs;

    /**
     * Constructor for a file element. Use the limitToMimeType and setter methods to configure the element.
     *
     * @param name the form item name
     */
    public FileElementImpl(String name) {
        super(name);
        this.component = new FileElementComponent(this);
    }

    /**
     * Picks up the multipart file posted for this element: moves it from the
     * request-scoped temp location into a form-item-scoped temp file and records
     * the client-side file name and mime type.
     */
    @Override
    public void evalFormRequest(UserRequest ureq) {
        Set<String> keys = getRootForm().getRequestMultipartFilesSet();
        if (keys.size() > 0 && keys.contains(component.getFormDispatchId())) {
            // Remove old files first
            if (tempUploadFile != null && tempUploadFile.exists()) {
                tempUploadFile.delete();
            }
            // Move file from a temporary request scope location to a location
            // with a
            // temporary form item scope. The file must be moved later using the
            // moveUploadFileTo() method to the final destination.
            tempUploadFile = new File(WebappHelper.getUserDataRoot() + "/tmp/" + CodeHelper.getGlobalForeverUniqueID());
            File tmpRequestFile = getRootForm().getRequestMultipartFile(component.getFormDispatchId());
            // Move file to internal temp location
            boolean success = tmpRequestFile.renameTo(tempUploadFile);
            if (!success) {
                // try to move file by copying it, command above might fail
                // when source and target are on different volumes
                FileUtils.copyFileToFile(tmpRequestFile, tempUploadFile, true);
            }
            uploadFilename = getRootForm().getRequestMultipartFileName(component.getFormDispatchId());
            uploadMimeType = getRootForm().getRequestMultipartFileMimeType(component.getFormDispatchId());
            if (uploadMimeType == null) {
                // use fallback: mime-type from file name
                uploadMimeType = WebappHelper.getMimeType(uploadFilename);
            }
            // Mark associated component dirty, that it gets rerendered
            component.setDirty(true);
        }
    }

    /**
     * @return the component that renders this file element
     */
    @Override
    protected Component getFormItemComponent() {
        return this.component;
    }

    /**
     * Deletes the temporary upload file (if any) and clears the upload metadata.
     */
    @Override
    public void reset() {
        if (tempUploadFile != null && tempUploadFile.exists()) {
            tempUploadFile.delete();
            tempUploadFile = null;
        }
        uploadFilename = null;
        uploadMimeType = null;
    }

    /**
     * Nothing to do when the root form becomes available.
     */
    @Override
    protected void rootFormAvailable() {
        //
    }

    /**
     * Marks the element mandatory and remembers the i18n key used when the
     * mandatory check fails during validation.
     */
    @Override
    public void setMandatory(boolean mandatory, String i18nErrKey) {
        super.setMandatory(mandatory);
        this.i18nErrMandatory = i18nErrKey;
    }

    /**
     * Validates the element: checks the form-wide upload limit error, general
     * request errors, the mandatory constraint, the per-file size limit and the
     * configured mime type restrictions, adding an ERROR status for any failure.
     */
    @Override
    public void validate(List validationResults) {
        int lastFormError = getRootForm().getLastRequestError();
        if (lastFormError == Form.REQUEST_ERROR_UPLOAD_LIMIT_EXCEEDED) {
            // check if total upload limit is exceeded (e.g. sum of files)
            setErrorKey(i18nErrMaxSize, i18nErrMaxSizeArgs);
            validationResults.add(new ValidationStatusImpl(ValidationStatus.ERROR));
            return;
            // check for a general error
        } else if (lastFormError == Form.REQUEST_ERROR_GENERAL) {
            setErrorKey("file.element.error.general", null);
            validationResults.add(new ValidationStatusImpl(ValidationStatus.ERROR));
            return;
            // check if uploaded at all
        } else if (isMandatory()
                && ((initialFile == null && (tempUploadFile == null || !tempUploadFile.exists())) || (initialFile != null && tempUploadFile != null && !tempUploadFile
                        .exists()))) {
            setErrorKey(i18nErrMandatory, null);
            validationResults.add(new ValidationStatusImpl(ValidationStatus.ERROR));
            return;
            // check for file size of current file
        } else if (checkForMaxFileSize && tempUploadFile != null && tempUploadFile.exists() && tempUploadFile.length() > maxUploadSizeKB * 1024l) {
            setErrorKey(i18nErrMaxSize, i18nErrMaxSizeArgs);
            validationResults.add(new ValidationStatusImpl(ValidationStatus.ERROR));
            return;
            // check for mime types
        } else if (checkForMimeTypes && tempUploadFile != null && tempUploadFile.exists()) {
            boolean found = false;
            // Fix problem with upload mimetype: if the mimetype differs from the
            // mimetype the webapp helper generates from the file name the match won't work
            String mimeFromWebappHelper = WebappHelper.getMimeType(uploadFilename);
            if (uploadMimeType != null || mimeFromWebappHelper != null) {
                for (String validType : mimeTypes) {
                    if (validType.equals(uploadMimeType) || validType.equals(mimeFromWebappHelper)) {
                        // exact match: image/jpg
                        found = true;
                        break;
                    } else if (validType.endsWith("/*")) {
                        // wildcard match: image/*
                        if (uploadMimeType != null && uploadMimeType.startsWith(validType.substring(0, validType.length() - 2))) {
                            found = true;
                            break;
                        } else if (mimeFromWebappHelper != null && mimeFromWebappHelper.startsWith(validType.substring(0, validType.length() - 2))) {
                            // fallback to mime type from filename
                            found = true;
                            break;
                        }
                    }
                }
            }
            if (!found) {
                setErrorKey(i18nErrMimeType, i18nErrMimeTypeArgs);
                validationResults.add(new ValidationStatusImpl(ValidationStatus.ERROR));
                return;
            }
        }
        // No error, clear errors from previous attempts
        clearError();
    }

    /**
     * Sets the file that is shown initially (before any upload).
     */
    @Override
    public void setInitialFile(File initialFile) {
        this.initialFile = initialFile;
    }

    /**
     * @return the initially configured file, or null when none was set
     */
    @Override
    public File getInitialFile() {
        return this.initialFile;
    }

    /**
     * Restricts uploads to the given mime types and enables the mime type check
     * during validation, using the given i18n key/args on failure.
     */
    @Override
    public void limitToMimeType(Set<String> mimeTypes, String i18nErrKey, String[] i18nArgs) {
        this.mimeTypes = mimeTypes;
        this.checkForMimeTypes = true;
        this.i18nErrMimeType = i18nErrKey;
        this.i18nErrMimeTypeArgs = i18nArgs;
    }

    /**
     * @return the configured mime type restrictions, never null (lazily created)
     */
    @Override
    public Set<String> getMimeTypeLimitations() {
        if (mimeTypes == null)
            mimeTypes = new HashSet<String>();
        return mimeTypes;
    }

    /**
     * Sets the maximum upload size in kilobytes; UPLOAD_UNLIMITED disables the
     * size check. The i18n key/args are used when the check fails.
     */
    @Override
    public void setMaxUploadSizeKB(int maxUploadSizeKB, String i18nErrKey, String[] i18nArgs) {
        this.maxUploadSizeKB = maxUploadSizeKB;
        this.checkForMaxFileSize = (maxUploadSizeKB == UPLOAD_UNLIMITED ? false : true);
        this.i18nErrMaxSize = i18nErrKey;
        this.i18nErrMaxSizeArgs = i18nArgs;
    }

    /**
     * @return the configured maximum upload size in KB, or UPLOAD_UNLIMITED
     */
    @Override
    public int getMaxUploadSizeKB() {
        return this.maxUploadSizeKB;
    }

    /**
     * @return true when a temporary upload file exists, i.e. a file was uploaded
     */
    @Override
    public boolean isUploadSuccess() {
        if (tempUploadFile != null && tempUploadFile.exists()) {
            return true;
        }
        return false;
    }

    /**
     * @return the client-side file name of the last upload, or null
     */
    @Override
    public String getUploadFileName() {
        return this.uploadFilename;
    }

    /**
     * @return the mime type of the last upload (possibly derived from the file name), or null
     */
    @Override
    public String getUploadMimeType() {
        return this.uploadMimeType;
    }

    /**
     * @return the temporary upload file, or null when nothing was uploaded
     */
    @Override
    public File getUploadFile() {
        return this.tempUploadFile;
    }

    /**
     * Opens a stream on the temporary upload file.
     *
     * @return an input stream on the uploaded file, or null when there is no
     *         upload or the file cannot be opened
     */
    @Override
    public InputStream getUploadInputStream() {
        if (this.tempUploadFile == null)
            return null;
        try {
            return new FileInputStream(this.tempUploadFile);
        } catch (FileNotFoundException e) {
            log.error("Could not open stream for file element::" + getName(), e);
        }
        return null;
    }

    /**
     * @return the size in bytes of the uploaded file, falling back to the
     *         initial file; 0 when neither exists
     */
    @Override
    public long getUploadSize() {
        if (tempUploadFile != null && tempUploadFile.exists()) {
            return tempUploadFile.length();
        } else if (initialFile != null && initialFile.exists()) {
            return initialFile.length();
        } else {
            return 0;
        }
    }

    /**
     * Moves the uploaded file into the given directory, renaming it via the
     * standard rename policy when a file with the same name already exists.
     *
     * @return the target file, or null when there was no upload or the copy failed
     */
    @Override
    /* TODO: ORID-1007 'File' this method should be supporting service */
    public File moveUploadFileTo(File destinationDir) {
        if (tempUploadFile != null && tempUploadFile.exists()) {
            destinationDir.mkdirs();
            // Check if such a file does already exist, if yes rename new file
            File existsFile = new File(destinationDir, uploadFilename);
            if (existsFile.exists()) {
                // Use standard rename policy
                FileRenamePolicy frp = new DefaultFileRenamePolicy();
                File tmpF = new File(uploadFilename);
                uploadFilename = frp.rename(tmpF).getName();
            }
            // Move file now
            File targetFile = new File(destinationDir, uploadFilename);
            if (FileUtils.copyFileToFile(tempUploadFile, targetFile, true)) {
                return targetFile;
            }
        }
        return null;
    }

    /**
     * Moves the uploaded file into the given VFS container, renaming it via the
     * standard rename policy on name clashes. Local folders are handled via a
     * plain file move; other containers are filled by copying the stream.
     *
     * @return the target leaf, or null when there was no upload or the copy failed
     */
    @Override
    public VFSLeaf moveUploadFileTo(VFSContainer destinationContainer) {
        VFSLeaf targetLeaf = null;
        if (tempUploadFile != null && tempUploadFile.exists()) {
            // Check if such a file does already exist, if yes rename new file
            VFSItem existsChild = destinationContainer.resolve(uploadFilename);
            if (existsChild != null) {
                // Use standard rename policy
                FileRenamePolicy frp = new DefaultFileRenamePolicy();
                File tmpF = new File(uploadFilename);
                uploadFilename = frp.rename(tmpF).getName();
                if (log.isDebugEnabled()) {
                    log.debug("FileElement rename policy::" + tmpF.getName() + " -> " + uploadFilename);
                }
            }
            // Create target leaf file now and delete original temp file
            if (destinationContainer instanceof LocalFolderImpl) {
                // Optimize for local files (don't copy, move instead)
                LocalFolderImpl folderContainer = (LocalFolderImpl) destinationContainer;
                File destinationDir = folderContainer.getBasefile();
                File targetFile = new File(destinationDir, uploadFilename);
                if (FileUtils.copyFileToFile(tempUploadFile, targetFile, true)) {
                    targetLeaf = (VFSLeaf) destinationContainer.resolve(targetFile.getName());
                    if (targetLeaf == null) {
                        log.error("Error after copying content from temp file, cannot resolve copied file::" + (tempUploadFile == null ? "NULL" : tempUploadFile) + " - "
                                + (targetFile == null ? "NULL" : targetFile), null);
                    }
                } else {
                    log.error("Error after copying content from temp file, cannot copy file::" + (tempUploadFile == null ? "NULL" : tempUploadFile) + " - "
                            + (targetFile == null ? "NULL" : targetFile), null);
                }
            } else {
                // Copy stream in case the destination is a non-local container
                VFSLeaf leaf = destinationContainer.createChildLeaf(uploadFilename);
                boolean success = false;
                try {
                    success = VFSManager.copyContent(new FileInputStream(tempUploadFile), leaf);
                } catch (FileNotFoundException e) {
                    log.error("Error while copying content from temp file::" + (tempUploadFile == null ? "NULL" : tempUploadFile.getAbsolutePath()), e);
                }
                if (success) {
                    // Delete original temp file after copy to simulate move behavior
                    tempUploadFile.delete();
                    targetLeaf = leaf;
                }
            }
        } else if (log.isDebugEnabled()) {
            log.debug("Error while copying content from temp file, no temp file::" + (tempUploadFile == null ? "NULL" : tempUploadFile.getAbsolutePath()));
        }
        return targetLeaf;
    }

    /**
     * Cleans up the temporary upload file when the form is disposed.
     */
    @Override
    public void dispose() {
        if (tempUploadFile != null && tempUploadFile.exists()) {
            tempUploadFile.delete();
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.update;
import com.google.common.collect.Maps;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.replication.ReplicationType;
import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.VersionType;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
 * Request to update a single document identified by index/type/id: either by
 * executing a script against the existing document or by merging a partial
 * {@code doc} into it. Supports optimistic concurrency (version/versionType),
 * retries on version conflict, an optional upsert document used when the target
 * does not exist, and "doc as upsert" semantics.
 */
public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest> {

    private String type;
    private String id;
    @Nullable
    private String routing;

    // Script, its language and parameters; package-private for internal access.
    @Nullable
    String script;
    @Nullable
    String scriptLang;
    @Nullable
    Map<String, Object> scriptParams;

    private String[] fields;

    private long version = Versions.MATCH_ANY;
    private VersionType versionType = VersionType.INTERNAL;
    private int retryOnConflict = 0;

    private boolean refresh = false;

    private ReplicationType replicationType = ReplicationType.DEFAULT;
    private WriteConsistencyLevel consistencyLevel = WriteConsistencyLevel.DEFAULT;

    private IndexRequest upsertRequest;

    private boolean docAsUpsert = false;

    @Nullable
    private IndexRequest doc;

    public UpdateRequest() {
    }

    /**
     * Creates an update request for the document at index/type/id.
     */
    public UpdateRequest(String index, String type, String id) {
        this.index = index;
        this.type = type;
        this.id = id;
    }

    /**
     * Validates the request: type and id are required, version and
     * retry_on_conflict are mutually exclusive, exactly one of script or doc
     * must be set, and doc_as_upsert requires a doc.
     */
    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException validationException = super.validate();
        if (type == null) {
            validationException = addValidationError("type is missing", validationException);
        }
        if (id == null) {
            validationException = addValidationError("id is missing", validationException);
        }
        if (version != Versions.MATCH_ANY && retryOnConflict > 0) {
            validationException = addValidationError("can't provide both retry_on_conflict and a specific version", validationException);
        }
        if (!versionType.validateVersion(version)) {
            validationException = addValidationError("illegal version value [" + version + "] for version type ["+ versionType.name() + "]", validationException);
        }
        if (script == null && doc == null) {
            validationException = addValidationError("script or doc is missing", validationException);
        }
        if (script != null && doc != null) {
            validationException = addValidationError("can't provide both script and doc", validationException);
        }
        if (doc == null && docAsUpsert) {
            validationException = addValidationError("doc must be specified if doc_as_upsert is enabled", validationException);
        }
        return validationException;
    }

    /**
     * The type of the indexed document.
     */
    public String type() {
        return type;
    }

    /**
     * Sets the type of the indexed document.
     */
    public UpdateRequest type(String type) {
        this.type = type;
        return this;
    }

    /**
     * The id of the indexed document.
     */
    public String id() {
        return id;
    }

    /**
     * Sets the id of the indexed document.
     */
    public UpdateRequest id(String id) {
        this.id = id;
        return this;
    }

    /**
     * Controls the shard routing of the request. Using this value to hash the shard
     * and not the id. An empty string is normalized to null (no custom routing).
     */
    public UpdateRequest routing(String routing) {
        if (routing != null && routing.length() == 0) {
            this.routing = null;
        } else {
            this.routing = routing;
        }
        return this;
    }

    /**
     * Sets the parent id of this document. Will simply set the routing to this value, as it is only
     * used for routing with delete requests. Does not override an explicitly set routing.
     */
    public UpdateRequest parent(String parent) {
        if (routing == null) {
            routing = parent;
        }
        return this;
    }

    /**
     * Controls the shard routing of the request. Using this value to hash the shard
     * and not the id.
     */
    public String routing() {
        return this.routing;
    }

    // Package-private: resolved shard id, set by the transport layer.
    int shardId() {
        return this.shardId;
    }

    public String script() {
        return this.script;
    }

    public Map<String, Object> scriptParams() {
        return this.scriptParams;
    }

    /**
     * The script to execute. Note, make sure not to send different script each times and instead
     * use script params if possible with the same (automatically compiled) script.
     */
    public UpdateRequest script(String script) {
        this.script = script;
        return this;
    }

    /**
     * The language of the script to execute.
     */
    public UpdateRequest scriptLang(String scriptLang) {
        this.scriptLang = scriptLang;
        return this;
    }

    public String scriptLang() {
        return scriptLang;
    }

    /**
     * Add a script parameter.
     */
    public UpdateRequest addScriptParam(String name, Object value) {
        if (scriptParams == null) {
            scriptParams = Maps.newHashMap();
        }
        scriptParams.put(name, value);
        return this;
    }

    /**
     * Sets the script parameters to use with the script. Merges into any
     * previously set parameters.
     */
    public UpdateRequest scriptParams(Map<String, Object> scriptParams) {
        if (this.scriptParams == null) {
            this.scriptParams = scriptParams;
        } else {
            this.scriptParams.putAll(scriptParams);
        }
        return this;
    }

    /**
     * The script to execute. Note, make sure not to send different script each times and instead
     * use script params if possible with the same (automatically compiled) script.
     */
    public UpdateRequest script(String script, @Nullable Map<String, Object> scriptParams) {
        this.script = script;
        // Fix: scriptParams is @Nullable, but the previous code called
        // this.scriptParams.putAll(null) when params had already been set,
        // throwing a NullPointerException. A null argument is now a no-op.
        if (scriptParams != null) {
            if (this.scriptParams != null) {
                this.scriptParams.putAll(scriptParams);
            } else {
                this.scriptParams = scriptParams;
            }
        }
        return this;
    }

    /**
     * The script to execute. Note, make sure not to send different script each times and instead
     * use script params if possible with the same (automatically compiled) script.
     *
     * @param script The script to execute
     * @param scriptLang The script language
     * @param scriptParams The script parameters
     */
    public UpdateRequest script(String script, @Nullable String scriptLang, @Nullable Map<String, Object> scriptParams) {
        this.script = script;
        this.scriptLang = scriptLang;
        // Fix: same null-params NPE guard as in script(String, Map) above.
        if (scriptParams != null) {
            if (this.scriptParams != null) {
                this.scriptParams.putAll(scriptParams);
            } else {
                this.scriptParams = scriptParams;
            }
        }
        return this;
    }

    /**
     * Explicitly specify the fields that will be returned. By default, nothing is returned.
     */
    public UpdateRequest fields(String... fields) {
        this.fields = fields;
        return this;
    }

    /**
     * Get the fields to be returned.
     */
    public String[] fields() {
        return this.fields;
    }

    /**
     * Sets the number of retries of a version conflict occurs because the document was updated between
     * getting it and updating it. Defaults to 0.
     */
    public UpdateRequest retryOnConflict(int retryOnConflict) {
        this.retryOnConflict = retryOnConflict;
        return this;
    }

    public int retryOnConflict() {
        return this.retryOnConflict;
    }

    /**
     * Sets the version, which will cause the index operation to only be performed if a matching
     * version exists and no changes happened on the doc since then.
     */
    public UpdateRequest version(long version) {
        this.version = version;
        return this;
    }

    public long version() {
        return this.version;
    }

    /**
     * Sets the versioning type. Defaults to {@link VersionType#INTERNAL}.
     */
    public UpdateRequest versionType(VersionType versionType) {
        this.versionType = versionType;
        return this;
    }

    public VersionType versionType() {
        return this.versionType;
    }

    /**
     * Should a refresh be executed post this update operation causing the operation to
     * be searchable. Note, heavy indexing should not set this to <tt>true</tt>. Defaults
     * to <tt>false</tt>.
     */
    public UpdateRequest refresh(boolean refresh) {
        this.refresh = refresh;
        return this;
    }

    public boolean refresh() {
        return this.refresh;
    }

    /**
     * The replication type.
     */
    public ReplicationType replicationType() {
        return this.replicationType;
    }

    /**
     * Sets the replication type.
     */
    public UpdateRequest replicationType(ReplicationType replicationType) {
        this.replicationType = replicationType;
        return this;
    }

    public WriteConsistencyLevel consistencyLevel() {
        return this.consistencyLevel;
    }

    /**
     * Sets the consistency level of write. Defaults to {@link org.elasticsearch.action.WriteConsistencyLevel#DEFAULT}
     */
    public UpdateRequest consistencyLevel(WriteConsistencyLevel consistencyLevel) {
        this.consistencyLevel = consistencyLevel;
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(IndexRequest doc) {
        this.doc = doc;
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(XContentBuilder source) {
        safeDoc().source(source);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(Map source) {
        safeDoc().source(source);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(Map source, XContentType contentType) {
        safeDoc().source(source, contentType);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(String source) {
        safeDoc().source(source);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(byte[] source) {
        safeDoc().source(source);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(byte[] source, int offset, int length) {
        safeDoc().source(source, offset, length);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified, the doc provided
     * is a field and value pairs.
     */
    public UpdateRequest doc(Object... source) {
        safeDoc().source(source);
        return this;
    }

    /**
     * Sets the doc to use for updates when a script is not specified.
     */
    public UpdateRequest doc(String field, Object value) {
        safeDoc().source(field, value);
        return this;
    }

    public IndexRequest doc() {
        return this.doc;
    }

    // Lazily creates the partial-doc index request.
    private IndexRequest safeDoc() {
        if (doc == null) {
            doc = new IndexRequest();
        }
        return doc;
    }

    /**
     * Sets the index request to be used if the document does not exists. Otherwise, a {@link org.elasticsearch.index.engine.DocumentMissingException}
     * is thrown.
     */
    public UpdateRequest upsert(IndexRequest upsertRequest) {
        this.upsertRequest = upsertRequest;
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(XContentBuilder source) {
        safeUpsertRequest().source(source);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(Map source) {
        safeUpsertRequest().source(source);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(Map source, XContentType contentType) {
        safeUpsertRequest().source(source, contentType);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(String source) {
        safeUpsertRequest().source(source);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(byte[] source) {
        safeUpsertRequest().source(source);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists.
     */
    public UpdateRequest upsert(byte[] source, int offset, int length) {
        safeUpsertRequest().source(source, offset, length);
        return this;
    }

    /**
     * Sets the doc source of the update request to be used when the document does not exists. The doc
     * includes field and value pairs.
     */
    public UpdateRequest upsert(Object... source) {
        safeUpsertRequest().source(source);
        return this;
    }

    public IndexRequest upsertRequest() {
        return this.upsertRequest;
    }

    // Lazily creates the upsert index request.
    private IndexRequest safeUpsertRequest() {
        if (upsertRequest == null) {
            upsertRequest = new IndexRequest();
        }
        return upsertRequest;
    }

    public UpdateRequest source(XContentBuilder source) throws Exception {
        return source(source.bytes());
    }

    public UpdateRequest source(byte[] source) throws Exception {
        return source(source, 0, source.length);
    }

    public UpdateRequest source(byte[] source, int offset, int length) throws Exception {
        return source(new BytesArray(source, offset, length));
    }

    /**
     * Parses the request body, recognizing the "script", "params", "lang",
     * "upsert", "doc" and "doc_as_upsert" top-level fields.
     */
    public UpdateRequest source(BytesReference source) throws Exception {
        XContentType xContentType = XContentFactory.xContentType(source);
        XContentParser parser = XContentFactory.xContent(xContentType).createParser(source);
        try {
            XContentParser.Token t = parser.nextToken();
            if (t == null) {
                // Empty body: nothing to parse.
                return this;
            }
            String currentFieldName = null;
            while ((t = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (t == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if ("script".equals(currentFieldName)) {
                    script = parser.textOrNull();
                } else if ("params".equals(currentFieldName)) {
                    scriptParams = parser.map();
                } else if ("lang".equals(currentFieldName)) {
                    scriptLang = parser.text();
                } else if ("upsert".equals(currentFieldName)) {
                    XContentBuilder builder = XContentFactory.contentBuilder(xContentType);
                    builder.copyCurrentStructure(parser);
                    safeUpsertRequest().source(builder);
                } else if ("doc".equals(currentFieldName)) {
                    XContentBuilder docBuilder = XContentFactory.contentBuilder(xContentType);
                    docBuilder.copyCurrentStructure(parser);
                    safeDoc().source(docBuilder);
                } else if ("doc_as_upsert".equals(currentFieldName)) {
                    docAsUpsert(parser.booleanValue());
                }
            }
        } finally {
            parser.close();
        }
        return this;
    }

    public boolean docAsUpsert() {
        return this.docAsUpsert;
    }

    public void docAsUpsert(boolean shouldUpsertDoc) {
        this.docAsUpsert = shouldUpsertDoc;
    }

    /**
     * Deserializes this request; the field order must mirror {@link #writeTo}.
     */
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        replicationType = ReplicationType.fromId(in.readByte());
        consistencyLevel = WriteConsistencyLevel.fromId(in.readByte());
        type = in.readSharedString();
        id = in.readString();
        routing = in.readOptionalString();
        script = in.readOptionalString();
        scriptLang = in.readOptionalString();
        scriptParams = in.readMap();
        retryOnConflict = in.readVInt();
        refresh = in.readBoolean();
        if (in.readBoolean()) {
            doc = new IndexRequest();
            doc.readFrom(in);
        }
        // -1 encodes "no fields requested".
        int size = in.readInt();
        if (size >= 0) {
            fields = new String[size];
            for (int i = 0; i < size; i++) {
                fields[i] = in.readString();
            }
        }
        if (in.readBoolean()) {
            upsertRequest = new IndexRequest();
            upsertRequest.readFrom(in);
        }
        docAsUpsert = in.readBoolean();
        version = in.readLong();
        versionType = VersionType.fromValue(in.readByte());
    }

    /**
     * Serializes this request; the field order must mirror {@link #readFrom}.
     */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeByte(replicationType.id());
        out.writeByte(consistencyLevel.id());
        out.writeSharedString(type);
        out.writeString(id);
        out.writeOptionalString(routing);
        out.writeOptionalString(script);
        out.writeOptionalString(scriptLang);
        out.writeMap(scriptParams);
        out.writeVInt(retryOnConflict);
        out.writeBoolean(refresh);
        if (doc == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            // make sure the basics are set
            doc.index(index);
            doc.type(type);
            doc.id(id);
            doc.writeTo(out);
        }
        if (fields == null) {
            out.writeInt(-1);
        } else {
            out.writeInt(fields.length);
            for (String field : fields) {
                out.writeString(field);
            }
        }
        if (upsertRequest == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            // make sure the basics are set
            upsertRequest.index(index);
            upsertRequest.type(type);
            upsertRequest.id(id);
            upsertRequest.writeTo(out);
        }
        out.writeBoolean(docAsUpsert);
        out.writeLong(version);
        out.writeByte(versionType.getValue());
    }
}
| |
/*
* Copyright 2016 Richard Cartwright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Log: WeakReference.java,v $
* Revision 1.10 2011/10/05 17:14:25 vizigoth
* Added support for application metadata plugins, package markers and dynamic metadictionary extraction from AAF files.
*
* Revision 1.9 2011/02/14 22:32:49 vizigoth
* First commit after major sourceforge outage.
*
* Revision 1.8 2011/01/13 17:44:26 vizigoth
* Major refactor of the industrial area and improved front-end documentation.
*
* Revision 1.7 2011/01/04 10:39:02 vizigoth
* Refactor all package names to simpler forms more consistent with typical Java usage.
*
* Revision 1.6 2010/04/13 10:11:03 vizigoth
* Added a TODO on improving exception handling.
*
* Revision 1.5 2010/04/13 07:22:08 vizigoth
* Using Long rather than long for persistent IDs and setting to null as a default.
*
* Revision 1.4 2010/03/19 10:01:52 vizigoth
* Added support for lazy evaluation of weak references when read from a stream.
*
* Revision 1.3 2010/03/01 15:17:22 vizigoth
* Added a generic table for weak reference resolution. Helps with auto generated weak reference targets.
*
* Revision 1.2 2009/12/18 17:56:00 vizigoth
* Interim check in to help with some training activities. Early support for reading Preface objects from MXF files.
*
*
*/
package tv.amwa.maj.industry;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import tv.amwa.maj.io.xml.XMLBuilder;
import tv.amwa.maj.meta.ClassDefinition;
import tv.amwa.maj.meta.TypeDefinitionWeakObjectReference;
import tv.amwa.maj.model.DefinitionObject;
import tv.amwa.maj.record.AUID;
import tv.amwa.maj.record.impl.AUIDImpl;
/**
* <p>Represents and provides a means to resolve a weak references that are stored
* as a field of an implementing class. Weak references can be safely persisted
* through object relational mapping and will resolve themselves at runtime as required.</p>
*
*
*
* @param <T> Type of the target of the weak reference.
*
* @see WeakReferenceTarget
* @see TypeDefinitionWeakObjectReference
* @see WeakReferenceSet
* @see WeakReferenceVector
*/
public class WeakReference<T extends WeakReferenceTarget>
        implements MediaEntity,
        Cloneable {

    // Fallback identifier-to-target table, used when a target type provides no
    // static forAUID(AUID) resolver. Synchronized because registration and
    // resolution may happen on different threads.
    private static Map<AUID, WeakReferenceTarget> genericTable =
            Collections.synchronizedMap(new HashMap<AUID, WeakReferenceTarget>());

    // Identifier of the referenced target. Remains null for an instance created
    // with the no-argument constructor until a target or identifier string is set.
    private AUID identifier;
    // Canonical class name of the target's implementation type; used by
    // getTarget() to look up a resolver class via the Warehouse.
    private String canonicalTypeName;
    // ORM surrogate key (mapped as the generated id in generateWeakReferenceORM()).
    // NOTE(review): getPersistentID() returns Long, but this field is a primitive
    // long defaulting to 0, so a null "not yet persisted" state is not
    // representable — confirm this is intended.
    private long persistentID = 0l;
    @SuppressWarnings("unused")
    private int persistentIndex = 0; // Ordering column; written via setPersistentIndex(), read reflectively by the ORM layer.
    // Resolved target, cached after the first successful lookup. Not persisted
    // (mapped as transient in generateWeakReferenceORM()).
    private T cachedValue = null;

    /** Creates an empty, unresolved weak reference, e.g. for persistence frameworks. */
    public WeakReference() { }

    /**
     * Creates a weak reference to the given target, caching the target value.
     *
     * @param target Target of the new weak reference.
     */
    public WeakReference(
            T target) {
        setTarget(target);
    }

    /**
     * Creates an unresolved weak reference from a target type and identifier.
     * Resolution is deferred until {@link #getTarget()} is first called.
     *
     * @param targetType Implementation type of the referenced target.
     * @param targetIdentifier Identifier of the referenced target (defensively cloned).
     */
    public WeakReference(
            Class<T> targetType,
            AUID targetIdentifier) {
        this.canonicalTypeName = targetType.getCanonicalName();
        this.identifier = targetIdentifier.clone();
    }

    /**
     * Resolves and returns the target of this weak reference, caching the result.
     * Resolution is attempted in order: the type warehouse (for type definitions
     * only), a static <code>forAUID(AUID)</code> method on the target's
     * implementation class, and finally the generic registration table.
     *
     * @return The resolved target, or <code>null</code> if it cannot be resolved.
     */
    @SuppressWarnings("unchecked")
    public T getTarget() { // TODO improve exception handling
        if (cachedValue != null)
            return cachedValue;
        // Type definitions are resolved directly from the warehouse rather than
        // through a reflective resolver method.
        if (canonicalTypeName.equals("tv.amwa.maj.meta.impl.TypeDefinitionImpl")) {
            cachedValue = (T) Warehouse.lookForType(identifier);
            if (cachedValue != null) return cachedValue;
        }
        ClassDefinition targetType = Warehouse.lookForClass(canonicalTypeName);
        try {
            // Convention: target implementation classes expose a static
            // forAUID(AUID) lookup method.
            Method staticResolver = targetType.getJavaImplementation().getMethod(
                    "forAUID", AUID.class);
            cachedValue = (T) staticResolver.invoke(null, identifier);
            if (cachedValue != null) return cachedValue;
        }
        catch (Exception e) {
            // Method is not found ... try the generic resolver
        }
        try {
            return (T) genericTable.get(identifier);
        }
        catch (ClassCastException cce) {
            // Registered object is of an unexpected type; treat as unresolved.
            return null;
        }
    }

    /**
     * Sets the target of this reference, recording its identifier and canonical
     * type name and caching the value for later lookups.
     *
     * @param target New target of this weak reference.
     */
    public void setTarget(
            T target) {
        this.identifier = target.getAUID();
        canonicalTypeName = target.getClass().getCanonicalName();
        cachedValue = target;
    }

    /**
     * Returns the persistence surrogate key, autoboxed from the primitive field
     * (0 when not yet persisted — see the field note above).
     */
    public Long getPersistentID() {
        return persistentID;
    }

    /** Sets the position of this reference within a persisted set or vector. */
    public void setPersistentIndex(
            int index) {
        this.persistentIndex = index;
    }

    /**
     * Compares by identifier against AUIDs and weak reference targets, and by
     * resolved target against other weak references.
     * NOTE(review): equality with an AUID is asymmetric (an AUID will not report
     * equality with this reference), and a null identifier or an unresolvable
     * target causes a NullPointerException — confirm callers guarantee both.
     */
    @SuppressWarnings("unchecked")
    public boolean equals(
            Object o) {
        if (o == null) return false;
        if (o instanceof AUID)
            return this.identifier.equals(o);
        if (o instanceof WeakReferenceTarget)
            return this.identifier.equals(((WeakReferenceTarget) o).getAUID());
        if (o instanceof WeakReference)
            return getTarget().equals(((WeakReference<T>) o).getTarget());
        return false;
    }

    // NOTE(review): throws NullPointerException when identifier is null (e.g. a
    // freshly constructed, unset reference) — confirm acceptable.
    public int hashCode() {
        return identifier.hashCode();
    }

    /** Returns the string form of the resolved target (NPE if it cannot be resolved). */
    public String toString() {
        return getTarget().toString();
    }

    /** Returns a shallow copy; identifier and cached target references are shared. */
    @SuppressWarnings("unchecked")
    public WeakReference<T> clone() {
        try {
            return (WeakReference<T>) super.clone();
        }
        catch (CloneNotSupportedException cnse) {
            // Should never get here
            throw new InternalError(cnse.getMessage());
        }
    }

    /**
     * Registers a target in the generic resolution table so weak references to it
     * can be resolved; definition objects are additionally registered with the
     * warehouse. Failures during early initialization are silently ignored.
     *
     * @param target Target to register.
     * @throws NullPointerException The given target is <code>null</code>.
     */
    public final static void registerTarget(
            WeakReferenceTarget target)
        throws NullPointerException {
        if (target == null)
            throw new NullPointerException("Cannot register a weak reference target using a null value.");
        try {
            genericTable.put(target.getAUID(), target);
            if (target instanceof DefinitionObject)
                Warehouse.register((DefinitionObject) target);
        }
        catch (NullPointerException npe) { /* Assume still initializing. */ }
    }

    /**
     * Removes a target from the generic resolution table. Null targets are ignored.
     *
     * @param target Target to forget.
     */
    public final static void forgetTarget(
            WeakReferenceTarget target) {
        if (target == null) return;
        genericTable.remove(target.getAUID());
    }

    /**
     * Appends the object-relational mapping entity description for this class to
     * the given parent node: the embedded identifier, the type name, the
     * generated persistent id, the persistent index, and the transient cache.
     * The "name" attributes below must match this class's field names exactly.
     *
     * @param parent Node to append the entity element to.
     * @param namespace XML namespace for the mapping elements.
     * @param prefix Namespace prefix for the mapping elements.
     */
    public final static void generateWeakReferenceORM(
            Node parent,
            String namespace,
            String prefix) {
        Element entity = XMLBuilder.createChild(parent, namespace, prefix, "entity");
        XMLBuilder.setAttribute(entity, namespace, prefix, "class", WeakReference.class.getCanonicalName());
        XMLBuilder.setAttribute(entity, namespace, prefix, "access", "FIELD");
        Element attributes = XMLBuilder.createChild(entity, namespace, prefix, "attributes");
        // identifier -> embedded AUID stored in the non-nullable Identifier column
        Element identifier = XMLBuilder.createChild(attributes, namespace, prefix, "embeddable");
        XMLBuilder.setAttribute(identifier, namespace, prefix, "name", "identifier");
        Element identifierOverride = XMLBuilder.createChild(identifier, namespace, prefix, "attribute-override");
        XMLBuilder.setAttribute(identifierOverride, namespace, prefix, "name", "auidValue");
        Element identifierColumn = XMLBuilder.createChild(identifierOverride, namespace, prefix, "column");
        XMLBuilder.setAttribute(identifierColumn, namespace, prefix, "name", "Identifier");
        XMLBuilder.setAttribute(identifierColumn, namespace, prefix, "nullable", "false");
        // canonicalTypeName -> non-nullable TypeName column
        Element canonicalTypeName = XMLBuilder.createChild(attributes, namespace, prefix, "basic");
        XMLBuilder.setAttribute(canonicalTypeName, namespace, prefix, "name", "canonicalTypeName");
        Element typeNameColumn = XMLBuilder.createChild(canonicalTypeName, namespace, prefix, "column");
        XMLBuilder.setAttribute(typeNameColumn, namespace, prefix, "name", "TypeName");
        XMLBuilder.setAttribute(typeNameColumn, namespace, prefix, "nullable", "false");
        // persistentID -> generated primary key in the PersistentID column
        Element persistentID = XMLBuilder.createChild(attributes, namespace, prefix, "id");
        XMLBuilder.setAttribute(persistentID, namespace, prefix, "name", "persistentID");
        XMLBuilder.createChild(persistentID, namespace, prefix, "generated-id");
        Element persistentIDColumn = XMLBuilder.createChild(persistentID, namespace, prefix, "column");
        XMLBuilder.setAttribute(persistentIDColumn, namespace, prefix, "name", "PersistentID");
        // persistentIndex -> non-nullable PersistentIndex column
        Element persitentIndex = XMLBuilder.createChild(attributes, namespace, prefix, "basic");
        XMLBuilder.setAttribute(persitentIndex, namespace, prefix, "name", "persistentIndex");
        Element indexColumn = XMLBuilder.createChild(persitentIndex, namespace, prefix, "column");
        XMLBuilder.setAttribute(indexColumn, namespace, prefix, "name", "PersistentIndex");
        XMLBuilder.setAttribute(indexColumn, namespace, prefix, "nullable", "false");
        // cachedValue is runtime-only state and must not be persisted
        Element transientCache = XMLBuilder.createChild(attributes, namespace, prefix, "transient");
        XMLBuilder.setAttribute(transientCache, namespace, prefix, "name", "cachedValue");
    }

    /** Returns the target identifier in its persistent (string) form. */
    public String getIdentifierString() {
        return AUIDImpl.toPersistentForm(identifier);
    }

    /** Sets the target identifier from its persistent (string) form. */
    public void setIdentifierString(
            String identifier) {
        this.identifier = AUIDImpl.fromPersistentForm(identifier);
    }

    /** Returns a copy of the target identifier (NPE if no identifier is set). */
    public AUID getIdentifier() {
        return identifier.clone();
    }
}
| |
/**
* The MIT License (MIT)
*
* Copyright (c) 2011-2016 Incapture Technologies LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package rapture.lock;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import org.testng.Assert;
import org.testng.Reporter;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import rapture.common.LockHandle;
import rapture.common.RaptureLockConfig;
import rapture.common.RaptureURI;
import rapture.common.Scheme;
import rapture.common.client.HttpAdminApi;
import rapture.common.client.HttpLockApi;
import rapture.common.impl.jackson.MD5Utils;
import rapture.helper.IntegrationTestHelper;
/**
 * Nightly integration tests for the Rapture lock API: mutual exclusion between
 * two user sessions, lock manager creation/recreation semantics, and
 * multi-threaded acquire/release behaviour against Zookeeper- and
 * MongoDB-backed lock managers.
 *
 * Fixes over the previous revision: the thread bookkeeping and winning-content
 * lists are now synchronized (they are mutated concurrently by worker
 * threads), shared scratch fields are volatile for cross-thread visibility,
 * and the deprecated boxing constructors have been replaced.
 */
public class LockApiTest {
    private IntegrationTestHelper helper;
    private HttpLockApi lockApi = null;
    private HttpAdminApi admin = null;
    private static final String user = "User";
    // Second session under a different user, used to verify that a lock held by
    // one client excludes another.
    private IntegrationTestHelper helper2;
    private HttpLockApi lockApi2 = null;
    private RaptureURI repoUri = null;

    /**
     * Connects both sessions, creating the secondary user on first run, and
     * provisions a scratch document repository.
     */
    @BeforeClass(groups = { "nightly", "lock" })
    @Parameters({ "RaptureURL", "RaptureUser", "RapturePassword" })
    public void setUp(@Optional("http://localhost:8665/rapture") String url, @Optional("rapture") String username, @Optional("rapture") String password) {
        helper = new IntegrationTestHelper(url, username, password);
        lockApi = helper.getLockApi();
        admin = helper.getAdminApi();
        if (!admin.doesUserExist(user)) {
            admin.addUser(user, "Another User", MD5Utils.hash16(user), "user@incapture.net");
        }
        helper2 = new IntegrationTestHelper(url, user, user);
        lockApi2 = helper2.getLockApi();
        repoUri = helper.getRandomAuthority(Scheme.DOCUMENT);
        helper.configureTestRepo(repoUri, "MONGODB"); // TODO Make this configurable
    }

    /** Removes all assets created by either session. */
    @AfterClass(groups = { "nightly", "lock" })
    public void tearDown() {
        helper.cleanAllAssets();
        helper2.cleanAllAssets();
    }

    // Scratch state shared between the test thread and its worker threads;
    // volatile so writes made by workers are visible to the asserting thread.
    volatile String winningContent;
    volatile String winningPath;
    volatile LockHandle lockHandle;
    volatile List<String> winningContentList;

    // TODO: This test will hang jenkins build, disable for now
    /**
     * Spins up many threads that each acquire the lock, write a document and
     * release it; verifies that the last visible winning document matches one
     * of the payloads written under the lock.
     */
    @Test(groups = { "nightly", "lock" }, enabled = false)
    public void testMultipleRequestsAcquireReleaseLockZookeeper() {
        int threadCount = 25;
        // Synchronized wrappers: these lists are mutated concurrently by the
        // worker threads below.
        winningContentList = Collections.synchronizedList(new ArrayList<>());
        winningContent = "";
        winningPath = "";
        lockHandle = null;
        RaptureURI lockUri = RaptureURI.builder(helper.getRandomAuthority(Scheme.DOCUMENT)).docPath("foo/bar" + System.currentTimeMillis()).build();
        RaptureLockConfig lockConfig = lockApi.createLockManager(lockUri.toString(), "LOCKING USING ZOOKEEPER {}", "");
        Random rand = new Random();
        RaptureURI docRepoUri = helper.getRandomAuthority(Scheme.DOCUMENT);
        helper.configureTestRepo(docRepoUri, "MONGODB", true);
        List<Long> threadList = Collections.synchronizedList(new ArrayList<>());
        class LockThread implements Runnable {
            @Override
            public void run() {
                long currThreadId = Thread.currentThread().getId();
                threadList.add(Long.valueOf(currThreadId));
                try {
                    long delayMs = (long) (rand.nextDouble() * 5000);
                    String content = "{\"key_" + currThreadId + "_" + rand.nextInt(5000) + "\" : \"" + currThreadId + "\" }";
                    Reporter.log("Thread id " + currThreadId + " waiting for " + delayMs + " ms.", true);
                    Thread.sleep(delayMs);
                    lockHandle = lockApi.acquireLock(lockUri.toString(), lockConfig.getName(), 2, 10);
                    if (lockHandle != null) {
                        winningPath = docRepoUri + "doc" + currThreadId;
                        helper.getDocApi().putDoc(winningPath, content);
                        winningContentList.add(content);
                        Reporter.log("Thread id " + currThreadId + " acquired lock and wrote doc: " + content + " to " + winningPath, true);
                        Thread.sleep(1000);
                        Assert.assertTrue(lockApi.releaseLock(lockUri.toString(), lockConfig.getName(), lockHandle));
                    } else {
                        Reporter.log("Thread id " + currThreadId + " did not acquire lock.", true);
                    }
                } catch (Exception e) {
                    Reporter.log("Exception with thread " + currThreadId + ", " + e.getMessage(), true);
                } finally {
                    // Deregister even on failure so the wait loop below terminates.
                    threadList.remove(Long.valueOf(currThreadId));
                }
            }
        }
        Reporter.log("Running test with " + threadCount + " threads", true);
        for (int i = 0; i < threadCount; i++)
            new Thread(new LockThread()).start();
        // NOTE(review): a worker that has not yet registered itself could leave
        // threadList momentarily empty here; a CountDownLatch would be more
        // robust — confirm before re-enabling this test.
        while (threadList.size() > 0) {
            try {
                Thread.sleep(500);
                if (helper.getDocApi().getDoc(winningPath) != null) {
                    Assert.assertTrue(winningContentList.contains(helper.getDocApi().getDoc(winningPath)));
                }
            } catch (Exception e) {
            }
        }
    }

    // TODO: This test will hang jenkins build, disable for now
    /**
     * Many threads race for a lock that is never released during the race; only
     * lock holders may write, so the last written document must match the last
     * recorded winning content.
     *
     * @param threadCount number of competing threads (see "threadScenarios")
     */
    @Test(groups = { "nightly", "lock" }, dataProvider = "threadScenarios", enabled = false)
    public void testOneThreadBlockingMultipleRequestsZookeeper(Integer threadCount) {
        winningContent = "";
        winningPath = "";
        lockHandle = null;
        RaptureURI lockUri = RaptureURI.builder(helper.getRandomAuthority(Scheme.DOCUMENT)).docPath("foo/bar" + System.currentTimeMillis()).build();
        RaptureLockConfig lockConfig = lockApi.createLockManager(lockUri.toString(), "LOCKING USING ZOOKEEPER {}", "");
        Random rand = new Random();
        RaptureURI docRepoUri = helper.getRandomAuthority(Scheme.DOCUMENT);
        helper.configureTestRepo(docRepoUri, "MONGODB", false);
        List<Long> threadList = Collections.synchronizedList(new ArrayList<>());
        class LockThread implements Runnable {
            @Override
            public void run() {
                long currThreadId = Thread.currentThread().getId();
                threadList.add(Long.valueOf(currThreadId));
                try {
                    long delayMs = (long) (rand.nextDouble() * 5000);
                    String content = "{\"key_" + currThreadId + "_" + rand.nextInt(5000) + "\" : \"" + currThreadId + "\" }";
                    Reporter.log("Thread id " + currThreadId + " waiting for " + delayMs + " ms.", true);
                    Thread.sleep(delayMs);
                    lockHandle = lockApi.acquireLock(lockUri.toString(), lockConfig.getName(), 2, 10);
                    if (lockHandle != null) {
                        winningPath = docRepoUri + "doc" + currThreadId;
                        helper.getDocApi().putDoc(winningPath, content);
                        Reporter.log("Thread id " + currThreadId + " acquired lock and wrote doc: " + content + " to " + winningPath, true);
                        winningContent = content;
                    } else {
                        Reporter.log("Thread id " + currThreadId + " did not acquire lock.", true);
                    }
                } catch (Exception e) {
                    Reporter.log("Exception with thread " + currThreadId + ", " + e.getMessage(), true);
                } finally {
                    threadList.remove(Long.valueOf(currThreadId));
                }
            }
        }
        Reporter.log("Running test with " + threadCount + " threads", true);
        for (int i = 0; i < threadCount; i++)
            new Thread(new LockThread()).start();
        while (threadList.size() > 0) {
            try {
                Thread.sleep(97);
            } catch (Exception e) {
            }
        }
        Assert.assertEquals(helper.getDocApi().getDoc(winningPath), winningContent);
        try {
            Assert.assertTrue(lockApi.releaseLock(lockUri.toString(), lockConfig.getName(), lockHandle));
        } catch (Exception e) {
            // Possible timing/race condition can cause the lock to have been unlocked already?
            Reporter.log("Exception releasing lock; possible timing issue", true);
        }
    }

    /**
     * A Zookeeper-backed lock held by one session must be unavailable to a
     * second session until released, after which the second session can take it.
     */
    @Test(groups = { "nightly", "lock" })
    public void testZookeeperLock() throws InterruptedException {
        // Player 1 acquires a lock
        RaptureURI lockUri = RaptureURI.builder(helper.getRandomAuthority(Scheme.DOCUMENT)).docPath("foo/bar" + System.currentTimeMillis()).build();
        RaptureLockConfig lockConfig = lockApi.createLockManager(lockUri.toString(), "LOCKING USING ZOOKEEPER {}", "");
        assertNotNull(lockConfig);
        LockHandle lockHandle = lockApi.acquireLock(lockUri.toString(), lockConfig.getName(), 1, 60);
        assertNotNull(lockHandle);
        Thread.sleep(100);
        // Meanwhile elsewhere Player 2 tries to acquire the lock
        RaptureLockConfig lockConfig2 = lockApi.getLockManagerConfig(lockUri.toString());
        assertNotNull(lockConfig2);
        LockHandle lockHandle2 = lockApi2.acquireLock(lockUri.toString(), lockConfig2.getName(), 1, 60);
        // but fails
        assertNull(lockHandle2);
        // Eventually player1 releases the lock
        Thread.sleep(100);
        assertTrue(lockApi.releaseLock(lockUri.toString(), lockConfig.getName(), lockHandle));
        assertFalse(lockApi2.releaseLock(lockUri.toString(), lockConfig2.getName(), lockHandle2));
        // and now Player 2 can acquire it
        lockHandle2 = lockApi2.acquireLock(lockUri.toString(), lockConfig2.getName(), 1, 60);
        assertNotNull(lockHandle2);
        assertTrue(lockApi2.releaseLock(lockUri.toString(), lockConfig2.getName(), lockHandle2));
        assertFalse(lockApi2.releaseLock(lockUri.toString(), lockConfig2.getName(), lockHandle2));
        assertNotNull(lockApi.getLockManagerConfig(lockUri.toString()));
        assertTrue(lockApi.lockManagerExists(lockUri.toString()));
    }

    /**
     * Same exclusion scenario as {@link #testZookeeperLock()} but with a
     * MongoDB-backed lock manager.
     */
    @Test(groups = { "nightly", "lock" })
    public void testMongoDBLock() throws InterruptedException {
        // Player 1 acquires a lock
        RaptureURI lockUri = RaptureURI.builder(helper.getRandomAuthority(Scheme.DOCUMENT)).docPath("foo/bar" + System.currentTimeMillis()).build();
        RaptureLockConfig lockConfig = lockApi.createLockManager(lockUri.toString(), "LOCKING USING MONGODB {}", "");
        assertNotNull(lockConfig);
        LockHandle lockHandle = lockApi.acquireLock(lockUri.toString(), lockConfig.getName(), 1, 60);
        assertNotNull(lockHandle);
        Thread.sleep(100);
        // Meanwhile elsewhere Player 2 tries to acquire the lock
        RaptureLockConfig lockConfig2 = lockApi.getLockManagerConfig(lockUri.toString());
        assertNotNull(lockConfig2);
        LockHandle lockHandle2 = lockApi2.acquireLock(lockUri.toString(), lockConfig2.getName(), 1, 60);
        // but fails
        assertNull(lockHandle2);
        // Eventually player1 releases the lock
        Thread.sleep(100);
        assertTrue(lockApi.releaseLock(lockUri.toString(), lockConfig.getName(), lockHandle));
        assertFalse(lockApi2.releaseLock(lockUri.toString(), lockConfig2.getName(), lockHandle2));
        // and now Player 2 can acquire it
        lockHandle2 = lockApi2.acquireLock(lockUri.toString(), lockConfig2.getName(), 1, 60);
        assertNotNull(lockHandle2);
        assertTrue(lockApi2.releaseLock(lockUri.toString(), lockConfig2.getName(), lockHandle2));
        // assertFalse(lockApi2.releaseLock(lockUri.toString(), lockConfig2.getName(), lockHandle2));
        assertNotNull(lockApi.getLockManagerConfig(lockUri.toString()));
        assertTrue(lockApi.lockManagerExists(lockUri.toString()));
    }

    /**
     * Re-creating a lock manager for an existing URI must be a no-op that
     * returns the original configuration, and locking semantics must be
     * unaffected.
     */
    @Test(groups = { "nightly", "lock" })
    public void testRecreateLockManager() throws InterruptedException {
        // Player 1 acquires a lock
        RaptureURI lockUri = RaptureURI.builder(helper.getRandomAuthority(Scheme.DOCUMENT)).docPath("foo/bar" + System.currentTimeMillis()).build();
        RaptureLockConfig lockConfig = lockApi.createLockManager(lockUri.toString(), "LOCKING USING MONGODB {}", "");
        assertNotNull(lockConfig);
        LockHandle lockHandle = lockApi.acquireLock(lockUri.toString(), lockConfig.getName(), 1, 60);
        assertNotNull(lockHandle);
        // Player 2 tries to create a different lock manager for the same URI - this will be ignored
        RaptureLockConfig lockConfig2 = lockApi2.createLockManager(lockUri.toString(), "LOCKING USING MEMORY {}", "");
        assertNotNull(lockConfig);
        assertEquals(lockConfig, lockConfig2);
        RaptureLockConfig lockConfig3 = lockApi2.getLockManagerConfig(lockUri.toString());
        assertEquals(lockConfig, lockConfig3);
        LockHandle lockHandle2 = lockApi.acquireLock(lockUri.toString(), lockConfig3.getName(), 1, 60);
        assertNull(lockHandle2);
        assertTrue(lockApi.releaseLock(lockUri.toString(), lockConfig.getName(), lockHandle));
        lockHandle2 = lockApi2.acquireLock(lockUri.toString(), lockConfig3.getName(), 1, 60);
        assertNotNull(lockHandle2);
        assertTrue(lockApi2.releaseLock(lockUri.toString(), lockConfig3.getName(), lockHandle2));
    }

    /** Thread counts for {@link #testOneThreadBlockingMultipleRequestsZookeeper(Integer)}. */
    @DataProvider(name = "threadScenarios")
    public Object[][] threadScenariosData() {
        return new Object[][] { { Integer.valueOf(10) },
                { Integer.valueOf(20) },
                { Integer.valueOf(30) }
        };
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kinesisfirehose.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/firehose-2015-08-04/ListTagsForDeliveryStream" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListTagsForDeliveryStreamResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* A list of tags associated with <code>DeliveryStreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* </p>
*/
private java.util.List<Tag> tags;
/**
* <p>
* If this is <code>true</code> in the response, more tags are available. To list the remaining tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned and call
* <code>ListTagsForDeliveryStream</code> again.
* </p>
*/
private Boolean hasMoreTags;
/**
* <p>
* A list of tags associated with <code>DeliveryStreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* </p>
*
* @return A list of tags associated with <code>DeliveryStreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
*/
public java.util.List<Tag> getTags() {
return tags;
}
/**
* <p>
* A list of tags associated with <code>DeliveryStreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* </p>
*
* @param tags
* A list of tags associated with <code>DeliveryStreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
*/
public void setTags(java.util.Collection<Tag> tags) {
if (tags == null) {
this.tags = null;
return;
}
this.tags = new java.util.ArrayList<Tag>(tags);
}
/**
* <p>
* A list of tags associated with <code>DeliveryStreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
* existing values.
* </p>
*
* @param tags
* A list of tags associated with <code>DeliveryStreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListTagsForDeliveryStreamResult withTags(Tag... tags) {
if (this.tags == null) {
setTags(new java.util.ArrayList<Tag>(tags.length));
}
for (Tag ele : tags) {
this.tags.add(ele);
}
return this;
}
/**
* <p>
* A list of tags associated with <code>DeliveryStreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* </p>
*
* @param tags
* A list of tags associated with <code>DeliveryStreamName</code>, starting with the first tag after
* <code>ExclusiveStartTagKey</code> and up to the specified <code>Limit</code>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListTagsForDeliveryStreamResult withTags(java.util.Collection<Tag> tags) {
setTags(tags);
return this;
}
/**
* <p>
* If this is <code>true</code> in the response, more tags are available. To list the remaining tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned and call
* <code>ListTagsForDeliveryStream</code> again.
* </p>
*
* @param hasMoreTags
* If this is <code>true</code> in the response, more tags are available. To list the remaining tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned and call
* <code>ListTagsForDeliveryStream</code> again.
*/
public void setHasMoreTags(Boolean hasMoreTags) {
this.hasMoreTags = hasMoreTags;
}
/**
* <p>
* If this is <code>true</code> in the response, more tags are available. To list the remaining tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned and call
* <code>ListTagsForDeliveryStream</code> again.
* </p>
*
* @return If this is <code>true</code> in the response, more tags are available. To list the remaining tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned and call
* <code>ListTagsForDeliveryStream</code> again.
*/
public Boolean getHasMoreTags() {
return this.hasMoreTags;
}
/**
* <p>
* If this is <code>true</code> in the response, more tags are available. To list the remaining tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned and call
* <code>ListTagsForDeliveryStream</code> again.
* </p>
*
* @param hasMoreTags
* If this is <code>true</code> in the response, more tags are available. To list the remaining tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned and call
* <code>ListTagsForDeliveryStream</code> again.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListTagsForDeliveryStreamResult withHasMoreTags(Boolean hasMoreTags) {
setHasMoreTags(hasMoreTags);
return this;
}
/**
* <p>
* If this is <code>true</code> in the response, more tags are available. To list the remaining tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned and call
* <code>ListTagsForDeliveryStream</code> again.
* </p>
*
* @return If this is <code>true</code> in the response, more tags are available. To list the remaining tags, set
* <code>ExclusiveStartTagKey</code> to the key of the last tag returned and call
* <code>ListTagsForDeliveryStream</code> again.
*/
public Boolean isHasMoreTags() {
return this.hasMoreTags;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getTags() != null)
sb.append("Tags: ").append(getTags()).append(",");
if (getHasMoreTags() != null)
sb.append("HasMoreTags: ").append(getHasMoreTags());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListTagsForDeliveryStreamResult == false)
return false;
ListTagsForDeliveryStreamResult other = (ListTagsForDeliveryStreamResult) obj;
if (other.getTags() == null ^ this.getTags() == null)
return false;
if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
return false;
if (other.getHasMoreTags() == null ^ this.getHasMoreTags() == null)
return false;
if (other.getHasMoreTags() != null && other.getHasMoreTags().equals(this.getHasMoreTags()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
hashCode = prime * hashCode + ((getHasMoreTags() == null) ? 0 : getHasMoreTags().hashCode());
return hashCode;
}
@Override
public ListTagsForDeliveryStreamResult clone() {
try {
return (ListTagsForDeliveryStreamResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| |
/*
Copyright 1996-2008 Ariba, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
$Id: //ariba/platform/util/core/ariba/util/formatter/SystemBooleanFormatter.java#4 $
*/
package ariba.util.formatter;
import java.text.ParseException;
import java.util.Locale;
import ariba.util.core.Assert;
import ariba.util.core.Constants;
/**
<code>SystemBooleanFormatter</code> is a subclass of
<code>Formatter</code> which is responsible for formatting,
parsing, and comparing <code>boolean</code> values and/or
<code>Boolean</code> objects. <p>
The values produced will always be 'true' or 'false' no matter the
locale being used.
@aribaapi documented
*/
public class SystemBooleanFormatter extends Formatter
{
/*-----------------------------------------------------------------------
Constants
-----------------------------------------------------------------------*/
/**
    Our Java class name.
    @aribaapi private
*/
public static final String ClassName = "ariba.util.formatter.SystemBooleanFormatter";
// Canonical, locale-independent strings for true and false
private static final String TrueString = "true";
private static final String FalseString = "false";
// Alternate tokens also recognized by the parsing methods (per their docs)
private static final String BooleanTrue = "BooleanTrue";
private static final String BooleanFalse = "BooleanFalse";
// Stateless shared instance backing the static parsing helpers
private static final SystemBooleanFormatter SharedSystemBooleanFormatter =
    new SystemBooleanFormatter();
/*-----------------------------------------------------------------------
Constructor
-----------------------------------------------------------------------*/
/**
Creates a new <code>SystemBooleanFormatter</code>.
@aribaapi private
*/
public SystemBooleanFormatter ()
{
    // Stateless; the shared instance is used internally by the static helpers.
}
/*-----------------------------------------------------------------------
Static Formatting
-----------------------------------------------------------------------*/
/**
Returns a formatted string for the given <code>Boolean</code> object
in the default locale.
@param object the <code>Boolean</code> object to format into a string
@return a formatted string for the given <code>Boolean</code>
@aribaapi documented
*/
public static String getStringValue (Boolean object)
{
return object.toString();
}
/**
Returns a formatted string for the given <code>Boolean</code> object
@param object the <code>Boolean</code> object to format into a string
@param locale the <code>Locale</code> to use for formatting
@return a formatted string for the given <code>Boolean</code>
object in the given locale
@aribaapi documented
@deprecated use getStringValue(Boolean) instead.
*/
public static String getStringValue (Boolean object, Locale locale)
{
// locale doesn't matter
return getStringValue(object);
}
/*-----------------------------------------------------------------------
Static Parsing
-----------------------------------------------------------------------*/
/**
Parses the given string as a <code>boolean</code> value.
Returns true if and only if the given string is
<code>"true"</code>, or if the value is
<code>"BooleanTrue"</code>.
@param string the <code>String</code> to parse
@return <code>true</code> if the given string is
<code>"true"</code>; <code>false</code> otherwise.
@aribaapi documented
*/
public static boolean parseBoolean (String string)
{
return parseStringAsBoolean(string).booleanValue();
}
/**
Parses the given string as a <code>Boolean</code> value.
Returns <code>Boolean.TRUE</code> if and only if the given
string is <code>"true"</code> or <code>"BooleanTrue"</code>.
@param string the <code>String</code> to parse
@return <code>Boolean.TRUE</code> if the given string is
<code>"true"</code>; <code>Boolean.FALSE</code>
otherwise.
@aribaapi documented
*/
public static Boolean parseStringAsBoolean (String string)
{
try {
return (Boolean)SharedSystemBooleanFormatter.parseString(
string, (Locale)null);
}
catch (ParseException parseException) {
return Boolean.FALSE;
}
}
/**
Returns a <code>boolean</code> value derived from the given
object. If the object is not a <code>Boolean</code>, it is
converted to a string and compared against the string
<code>"true"</code> or <code>"BooleanTrue"</code>.
@param object the object to covert to a <code>boolean</code>
@return a <code>boolean</code> derived from the given object
@aribaapi documented
*/
public static boolean getBooleanValue (Object object)
{
if (object == null) {
return false;
}
else if (object instanceof Boolean) {
return ((Boolean)object).booleanValue();
}
else {
Boolean booleanObject = parseStringAsBoolean(object.toString());
return booleanObject.booleanValue();
}
}
/*-----------------------------------------------------------------------
Static Comparison
-----------------------------------------------------------------------*/
/**
Compares two <code>Boolean</code> objects for sorting purposes.
Returns an <code>int</code> value which is less than, equal to, or
greater than zero depending on whether the first object sorts before,
the same, or after the second object. Sorts <code>Boolean.TRUE</code>
before <code>Boolean.FALSE</code>.
@param b1 the first <code>Boolean</code> to compare
@param b2 the second <code>Boolean</code> to compare
@return <code>int</code> value which determines how the two objects
should be ordered
@aribaapi documented
*/
public static int compareBooleans (Boolean b1, Boolean b2)
{
if (b1 == b2) {
return 0;
}
else if (b1 == null) {
return -1;
}
else if (b2 == null) {
return 1;
}
else {
return compareBooleans(b1.booleanValue(), b2.booleanValue());
}
}
/**
Compares two <code>boolean</code> values for sorting purposes.
Returns an <code>int</code> value which is less than, equal to, or
greater than zero depending on whether the first object sorts before,
the same, or after the second object. Sorts <code>true</code> before
<code>false</code>.
@param b1 the first <code>boolean</code> to compare
@param b2 the second <code>boolean</code> to compare
@return <code>int</code> value which determines how the two objects
should be ordered
@aribaapi documented
*/
public static int compareBooleans (boolean b1, boolean b2)
{
// arbitrarily sort true before false
return (b1 == b2) ? 0 : ((b1 && !b2) ? 1 : -1);
}
/*-----------------------------------------------------------------------
Formatting
-----------------------------------------------------------------------*/
/**
Returns a string representation of the given object in the given
locale. The object must be a non-null <code>Boolean</code>.
@param object the <code>Boolean</code> to format into a string
@param locale the <code>Locale</code> to use for formatting
@return a string representation of the <code>Boolean</code>
@aribaapi documented
*/
protected String formatObject (Object object, Locale locale)
{
Assert.that(object instanceof Boolean, "invalid type");
return getStringValue((Boolean)object, locale);
}
/*-----------------------------------------------------------------------
Parsing
-----------------------------------------------------------------------*/
/**
Parses the given string into a <code>Boolean</code> object. The
string is assumed to be non-null and trimmed of leading and trailing
whitespace. Returns <code>Boolean.TRUE</code> if and only if the
given string is <code>"true"</code>.
@param string the string to parse
@param locale the <code>Locale</code> to use for parsing
@return a <code>Boolean</code> object derived from the string
@aribaapi documented
*/
protected Object parseString (String string, Locale locale)
throws ParseException
{
if (TrueString.equals(string) || BooleanTrue.equals(string)) {
return Boolean.TRUE;
}
if (FalseString.equals(string) || BooleanFalse.equals(string)) {
return Boolean.FALSE;
}
throw makeParseException(BooleanFormatter.BooleanTrueOrFalseKey, 0);
}
/**
Returns a new <code>Boolean</code> derived from the given object. If
the object is not a <code>Boolean</code>, it is converted to a string
and compared against the string <code>"true"</code>.
@param object the object to convert to a <code>Boolean</code>
@param locale the <code>Locale</code> to use for conversion
@return a <code>Boolean</code> derived from the given object
@aribaapi documented
*/
public Object getValue (Object object, Locale locale)
{
return Constants.getBoolean(getBooleanValue(object));
}
/*-----------------------------------------------------------------------
Comparison
-----------------------------------------------------------------------*/
/**
Compares two objects for sorting purposes in the given locale. The
two objects must be non-null <code>Boolean</code> objects. Returns a
value which is less than, equal to, or greater than zero depending on
whether the first object sorts before, the same, or after the second
object. Sorts <code>Boolean.TRUE</code> before
<code>Boolean.FALSE</code>.
@param o1 the first <code>Boolean</code> to compare
@param o2 the second <code>Boolean</code> to compare
@param locale the <code>Locale</code> to use for comparison
@return <code>int</code> value which determines how the two
objects should be ordered
@aribaapi documented
*/
protected int compareObjects (Object o1, Object o2, Locale locale)
{
Assert.that(o1 instanceof Boolean, "invalid type");
Assert.that(o2 instanceof Boolean, "invalid type");
return compareBooleans((Boolean)o1, (Boolean)o2);
}
}
| |
package com.heaven7.java.data.mediator.compiler;
import com.heaven7.java.base.anno.Nullable;
import com.heaven7.java.data.mediator.bind.*;
import com.heaven7.java.data.mediator.compiler.util.TypeUtils;
import com.heaven7.java.data.mediator.internal.BindMethod;
import javax.lang.model.element.*;
import javax.lang.model.type.MirroredTypesException;
import javax.lang.model.type.TypeMirror;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static com.heaven7.java.data.mediator.compiler.DataMediatorConstants.PKG_DATA_BINDING_ANNO;
import static com.heaven7.java.data.mediator.compiler.util.CheckUtils.checkEmptyString;
/**
* Created by heaven7 on 2017/11/5.
* @since 1.4.0
*/
public class DataBindingParser {

    private static final String TAG = "DataBindingParser";

    private final ProcessorContext mContext;

    public DataBindingParser(ProcessorContext mContext) {
        this.mContext = mContext;
    }

    /**
     * Parses the class-level data-binding annotations ({@code @BinderClass},
     * {@code @BinderFactoryClass}, {@code @BindMethodSupplierClass}) of the
     * given type element into the binding info.
     *
     * @param te   the annotated type element
     * @param info the binding info to populate
     * @return always {@code true}
     */
    public boolean parseClassAnnotations(TypeElement te, DataBindingInfo info) {
        for (AnnotationMirror am : te.getAnnotationMirrors()) {
            TypeElement e1 = (TypeElement) am.getAnnotationType().asElement();
            String annoFullname = e1.getQualifiedName().toString();
            if (annoFullname.equals(BinderClass.class.getName())) {
                info.setBinderClass(getSimpleTypeMirror(am));
            } else if (annoFullname.equals(BinderFactoryClass.class.getName())) {
                info.setBinderFactoryClass(getSimpleTypeMirror(am));
            } else if (annoFullname.equals(BindMethodSupplierClass.class.getName())) {
                info.setBindMethodSupplier(getSimpleTypeMirror(am));
            }
        }
        return true;
    }

    /**
     * Parses array properties declared via {@code @BindsView} or
     * {@code @BindsTextView} on the given field element.
     *
     * @param element the annotated field element
     * @param info    the binding info to populate
     * @return {@code false} if any annotation is malformed, else {@code true}
     */
    public boolean parseBindsProperty(Element element, DataBindingInfo info) {
        final ProcessorPrinter pp = mContext.getProcessorPrinter();
        final VariableElement ve = (VariableElement) element;
        final String varName = ve.getSimpleName().toString();
        BindsView bindsView = element.getAnnotation(BindsView.class);
        pp.note(TAG, "parseBindsProperty", "bindsView = " + bindsView);
        if (bindsView != null) {
            if (!parseBindsInternal(info, varName, bindsView.index(), bindsView.value(), bindsView.methods())) {
                return false;
            }
        }
        BindsTextView bindsTextViewRes = element.getAnnotation(BindsTextView.class);
        if (bindsTextViewRes != null) {
            if (!parseBindsInternal(info, varName, bindsTextViewRes.index(), bindsTextViewRes.value(), bindsTextViewRes.methods())) {
                return false;
            }
        }
        return true;
    }

    /**
     * Parses a single {@code @BindAny} annotation on the given field element.
     *
     * @param element the annotated field element
     * @param info    the binding info to populate
     * @return {@code false} if the annotation is malformed, else {@code true}
     */
    public boolean parseBindAny(Element element, DataBindingInfo info) {
        final String methodName = "parseBindAny";
        final ProcessorPrinter pp = mContext.getProcessorPrinter();
        final VariableElement ve = (VariableElement) element;
        final String varName = ve.getSimpleName().toString();
        BindAny bindAny = element.getAnnotation(BindAny.class);
        pp.note(TAG, methodName, "bindsView = " + bindAny);
        if (bindAny != null) {
            if (!addBindInfo(info, varName, bindAny.value(), bindAny.method(), bindAny.index())) {
                return false;
            }
        }
        return true;
    }

    /**
     * Parses a {@code @BindsAny} annotation on the given field element.
     * Property names and method names must have equal counts.
     *
     * @param element the annotated field element
     * @param info    the binding info to populate
     * @return {@code false} if the annotation is malformed, else {@code true}
     */
    public boolean parseBindsAny(Element element, DataBindingInfo info) {
        final String methodName = "parseBindsAny";
        final ProcessorPrinter pp = mContext.getProcessorPrinter();
        final VariableElement ve = (VariableElement) element;
        final String varName = ve.getSimpleName().toString();
        BindsAny bindsAny = element.getAnnotation(BindsAny.class);
        pp.note(TAG, methodName, "bindsView = " + bindsAny);
        if (bindsAny != null) {
            int index = bindsAny.index();
            String[] props = bindsAny.value();
            String[] methods = bindsAny.methods();
            if (props.length != methods.length) {
                // fixed format string: '$d' was not a valid conversion, so the
                // methods count argument was never printed
                pp.error(TAG, methodName, String.format(
                        "@BindsAny for field '%s' property names count(%d) != methods count(%d)",
                        varName, props.length, methods.length));
                return false;
            }
            for (int len = props.length, i = 0; i < len; i++) {
                if (!addBindInfo(info, varName, props[i], methods[i], index)) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Parses the single-property binding annotations (all annotations in the
     * data-binding annotation package, e.g. {@code @BindText},
     * {@code @BindImageRes}, ...) on the given field element.  The bind
     * method name and parameter types come from the {@code @BindMethod}
     * meta-annotation on each annotation type.
     *
     * @param element the annotated field element
     * @param info    the binding info to populate
     * @return always {@code true}
     */
    public boolean parseBindProperty(Element element, DataBindingInfo info) {
        final VariableElement ve = (VariableElement) element;
        final String varName = ve.getSimpleName().toString();
        for (AnnotationMirror am : element.getAnnotationMirrors()) {
            TypeElement e1 = (TypeElement) am.getAnnotationType().asElement();
            String annoFullname = e1.getQualifiedName().toString();
            if (!annoFullname.startsWith(PKG_DATA_BINDING_ANNO)) {
                continue;
            }
            //get @BindMethod
            DataBindingInfo.BindMethodInfo bmi = getBindMethodInfo(e1);
            DataBindingInfo.BindInfo bindInfo = new DataBindingInfo.BindInfo(varName, bmi.name, bmi.types);
            if (annoFullname.equals(BindCheckable.class.getName())) {
                BindCheckable checkable = element.getAnnotation(BindCheckable.class);
                bindInfo.setIndex(checkable.index());
                bindInfo.setPropName(checkable.value());
            } else if (annoFullname.equals(BindVisibility.class.getName())) {
                BindVisibility annotation = element.getAnnotation(BindVisibility.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
                // BindVisibility is the only annotation carrying an extra flag
                bindInfo.setExtras(new Object[]{annotation.forceAsBoolean()});
            } else if (annoFullname.equals(BindEnable.class.getName())) {
                BindEnable annotation = element.getAnnotation(BindEnable.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindBackground.class.getName())) {
                BindBackground annotation = element.getAnnotation(BindBackground.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindBackgroundRes.class.getName())) {
                BindBackgroundRes annotation = element.getAnnotation(BindBackgroundRes.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindBackgroundColor.class.getName())) {
                BindBackgroundColor annotation = element.getAnnotation(BindBackgroundColor.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            }
            // text view
            else if (annoFullname.equals(BindText.class.getName())) {
                BindText annotation = element.getAnnotation(BindText.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindTextRes.class.getName())) {
                BindTextRes annotation = element.getAnnotation(BindTextRes.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindTextSize.class.getName())) {
                BindTextSize annotation = element.getAnnotation(BindTextSize.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindTextSizeRes.class.getName())) {
                BindTextSizeRes annotation = element.getAnnotation(BindTextSizeRes.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindTextSizePx.class.getName())) {
                BindTextSizePx annotation = element.getAnnotation(BindTextSizePx.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindTextColor.class.getName())) {
                BindTextColor annotation = element.getAnnotation(BindTextColor.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindTextColorRes.class.getName())) {
                BindTextColorRes annotation = element.getAnnotation(BindTextColorRes.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            }
            // image view
            else if (annoFullname.equals(BindImageBitmap.class.getName())) {
                BindImageBitmap annotation = element.getAnnotation(BindImageBitmap.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindImageDrawable.class.getName())) {
                BindImageDrawable annotation = element.getAnnotation(BindImageDrawable.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindImageRes.class.getName())) {
                BindImageRes annotation = element.getAnnotation(BindImageRes.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindImageUri.class.getName())) {
                BindImageUri annotation = element.getAnnotation(BindImageUri.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindImageUrl.class.getName())) {
                BindImageUrl annotation = element.getAnnotation(BindImageUrl.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            }
            // high light and hints.
            else if (annoFullname.equals(BindHighlightColor.class.getName())) {
                BindHighlightColor annotation = element.getAnnotation(BindHighlightColor.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindHintText.class.getName())) {
                BindHintText annotation = element.getAnnotation(BindHintText.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindHintTextRes.class.getName())) {
                BindHintTextRes annotation = element.getAnnotation(BindHintTextRes.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindHintTextColor.class.getName())) {
                BindHintTextColor annotation = element.getAnnotation(BindHintTextColor.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindHintTextColorRes.class.getName())) {
                BindHintTextColorRes annotation = element.getAnnotation(BindHintTextColorRes.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            } else if (annoFullname.equals(BindTextGravity.class.getName())) {
                BindTextGravity annotation = element.getAnnotation(BindTextGravity.class);
                bindInfo.setIndex(annotation.index());
                bindInfo.setPropName(annotation.value());
            }
            //add
            if (bindInfo.isValid()) {
                info.addBindInfo(bindInfo);
            }
        }
        return true;
    }

    /**
     * Adds a single bind entry; only used for {@linkplain BindAny} and
     * {@linkplain BindsAny}.  The bind-method parameter types are resolved
     * dynamically at runtime, hence {@code null} types here.
     *
     * @return {@code false} if the property or method name is empty
     */
    private boolean addBindInfo(DataBindingInfo info, String varName, String prop, String method, int index) {
        final String methodName = "addBindInfo";
        final ProcessorPrinter pp = mContext.getProcessorPrinter();
        if (!checkEmptyString(prop)) {
            pp.error(TAG, methodName, "property name = '" + prop + "' for field '" + varName + " 'is invalid.");
            return false;
        }
        if (!checkEmptyString(method)) {
            pp.error(TAG, methodName, "bind method name = '" + method + "' for field '" + varName + " 'is invalid.");
            return false;
        }
        //type is from dynamic invoke
        DataBindingInfo.BindInfo bindInfo = new DataBindingInfo.BindInfo(varName, method, null);
        bindInfo.setIndex(index);
        bindInfo.setPropName(prop);
        info.addBindInfo(bindInfo);
        return true;
    }

    /**
     * Reads the {@code @BindMethod} meta-annotation of a binding annotation
     * type: its {@code value} becomes the bind-method name and its
     * {@code paramTypes} the parameter class names.
     */
    private DataBindingInfo.BindMethodInfo getBindMethodInfo(TypeElement e1) {
        DataBindingInfo.BindMethodInfo bmi = new DataBindingInfo.BindMethodInfo();
        for (AnnotationMirror am : e1.getAnnotationMirrors()) {
            Map<? extends ExecutableElement, ? extends AnnotationValue> map = am.getElementValues();
            for (Map.Entry<? extends ExecutableElement, ? extends AnnotationValue> en : map.entrySet()) {
                ExecutableElement key = en.getKey();//the method of annotation
                final AnnotationValue enVal = en.getValue();
                String fullName = key.getSimpleName().toString();
                if (fullName.equals("value")) {
                    // toString() already yields a String; the former
                    // String.valueOf(...) wrapper was redundant
                    bmi.name = enVal.getValue().toString();
                } else if (fullName.equals("paramTypes")) {
                    List mirrorList = (List) enVal.getValue();
                    TypeUtils.convertToClassname(mirrorList, bmi.types);
                }
            }
        }
        return bmi;
    }

    /**
     * Shared handler for {@code @BindsView}/{@code @BindsTextView}: pairs
     * each property name with its bind method and records a BindInfo per
     * non-empty property.  A surplus of methods is truncated to the property
     * count; fewer methods than properties is an error.
     */
    private boolean parseBindsInternal(DataBindingInfo info, String varName, int index, String[] props, BindMethod[] methods) {
        final ProcessorPrinter pp = mContext.getProcessorPrinter();
        if (props.length == 0) {
            pp.error(TAG, "parseBindsProperty", "props.length must > 0");
            return false;
        }
        if (props.length > methods.length) {
            pp.error(TAG, "parseBindsProperty", "props.length can't > methods.length");
            return false;
        }
        //truncate if props.length < methods.length
        if (props.length < methods.length) {
            methods = truncate(methods, props.length);
        }
        for (int i = 0; i < props.length; i++) {
            final String prop = props[i];
            if (prop == null || prop.isEmpty()) {
                continue;
            }
            List<String> types = null;
            //read Class<?> in compile time is wrong. see https://area-51.blog/2009/02/13/getting-class-values-from-annotations-in-an-annotationprocessor/.
            try {
                methods[i].paramTypes();
            } catch (MirroredTypesException mte) {
                List<? extends TypeMirror> mirrors = mte.getTypeMirrors();
                types = TypeUtils.convertToClassname(mirrors, null);
            }
            info.addBindInfo(new DataBindingInfo.BindInfo(varName, prop, index, methods[i].value(), types));
        }
        return true;
    }

    /** Returns a copy of the first {@code length} elements of {@code methods}. */
    private BindMethod[] truncate(BindMethod[] methods, int length) {
        BindMethod[] arr = new BindMethod[length];
        System.arraycopy(methods, 0, arr, 0, length);
        return arr;
    }

    /**
     * Extracts the single {@code value} member of an annotation mirror as a
     * TypeMirror, e.g. {@code @BinderClass(XXX.class)}.
     *
     * @return the mirrored class value, or {@code null} if absent
     */
    private TypeMirror getSimpleTypeMirror(AnnotationMirror am) {
        Map<? extends ExecutableElement, ? extends AnnotationValue> map = am.getElementValues();
        for (Map.Entry<? extends ExecutableElement, ? extends AnnotationValue> en : map.entrySet()) {
            ExecutableElement key = en.getKey();//the method of annotation
            if (key.getSimpleName().toString().equals("value")) {
                return (TypeMirror) en.getValue().getValue();
            }
        }
        return null;
    }
}
| |
/**
* Copyright 2009-2010 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* $Id$
*/
package org.wso2.developerstudio.eclipse.esb.mediators.impl;
import java.util.Locale;
import java.util.Map;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.w3c.dom.Element;
import org.wso2.developerstudio.eclipse.esb.NamespacedProperty;
import org.wso2.developerstudio.eclipse.esb.impl.ModelObjectImpl;
import org.wso2.developerstudio.eclipse.esb.mediators.MediatorsPackage;
import org.wso2.developerstudio.eclipse.esb.mediators.SmooksIODataType;
import org.wso2.developerstudio.eclipse.esb.mediators.SmooksInConfiguration;
import org.wso2.developerstudio.eclipse.esb.util.ObjectValidator;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Smooks In Configuration</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.SmooksInConfigurationImpl#getType <em>Type</em>}</li>
* <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.SmooksInConfigurationImpl#getExpression <em>Expression</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class SmooksInConfigurationImpl extends ModelObjectImpl implements SmooksInConfiguration {
    /**
     * The default value of the '{@link #getType() <em>Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getType()
     * @generated
     * @ordered
     */
    protected static final SmooksIODataType TYPE_EDEFAULT = SmooksIODataType.XML;

    /**
     * The cached value of the '{@link #getType() <em>Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getType()
     * @generated
     * @ordered
     */
    protected SmooksIODataType type = TYPE_EDEFAULT;

    /**
     * The cached value of the '{@link #getExpression() <em>Expression</em>}' reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getExpression()
     * @generated
     * @ordered
     */
    protected NamespacedProperty expression;

    /**
     * Hand-written constructor (note the {@code @!generated} marker): seeds
     * the mediator with an empty namespaced "expression" property so that
     * {@link #getExpression()} never returns {@code null}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @!generated
     */
    protected SmooksInConfigurationImpl() {
        super();
        // Property expression.
        NamespacedProperty expression = getEsbFactory().createNamespacedProperty();
        expression.setPrettyName("Expression");
        expression.setPropertyName("expression");
        expression.setPropertyValue("");
        setExpression(expression);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return MediatorsPackage.Literals.SMOOKS_IN_CONFIGURATION;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public SmooksIODataType getType() {
        return type;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setType(SmooksIODataType newType) {
        SmooksIODataType oldType = type;
        type = newType == null ? TYPE_EDEFAULT : newType;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.SMOOKS_IN_CONFIGURATION__TYPE, oldType, type));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NamespacedProperty getExpression() {
        if (expression != null && expression.eIsProxy()) {
            InternalEObject oldExpression = (InternalEObject)expression;
            expression = (NamespacedProperty)eResolveProxy(oldExpression);
            if (expression != oldExpression) {
                if (eNotificationRequired())
                    eNotify(new ENotificationImpl(this, Notification.RESOLVE, MediatorsPackage.SMOOKS_IN_CONFIGURATION__EXPRESSION, oldExpression, expression));
            }
        }
        return expression;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NamespacedProperty basicGetExpression() {
        return expression;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setExpression(NamespacedProperty newExpression) {
        NamespacedProperty oldExpression = expression;
        expression = newExpression;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, MediatorsPackage.SMOOKS_IN_CONFIGURATION__EXPRESSION, oldExpression, expression));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case MediatorsPackage.SMOOKS_IN_CONFIGURATION__TYPE:
                return getType();
            case MediatorsPackage.SMOOKS_IN_CONFIGURATION__EXPRESSION:
                if (resolve) return getExpression();
                return basicGetExpression();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case MediatorsPackage.SMOOKS_IN_CONFIGURATION__TYPE:
                setType((SmooksIODataType)newValue);
                return;
            case MediatorsPackage.SMOOKS_IN_CONFIGURATION__EXPRESSION:
                setExpression((NamespacedProperty)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case MediatorsPackage.SMOOKS_IN_CONFIGURATION__TYPE:
                setType(TYPE_EDEFAULT);
                return;
            case MediatorsPackage.SMOOKS_IN_CONFIGURATION__EXPRESSION:
                setExpression((NamespacedProperty)null);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case MediatorsPackage.SMOOKS_IN_CONFIGURATION__TYPE:
                return type != TYPE_EDEFAULT;
            case MediatorsPackage.SMOOKS_IN_CONFIGURATION__EXPRESSION:
                return expression != null;
        }
        return super.eIsSet(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (type: ");
        result.append(type);
        result.append(')');
        return result.toString();
    }

    /**
     * Hand-written: restores this configuration from its XML element.
     * A "type" attribute of "text" (case-insensitive) maps to TEXT; any
     * other value falls back to XML.
     */
    protected void doLoad(Element self) throws Exception {
        if (self.hasAttribute("type")) {
            String typeAttribute = self.getAttribute("type");
            setType(typeAttribute.equalsIgnoreCase("text") ? SmooksIODataType.TEXT
                    : SmooksIODataType.XML);
        }
        if (self.hasAttribute("expression")) {
            String expressionAttribute = self.getAttribute("expression");
            getExpression().setPropertyValue(expressionAttribute);
        }
        super.doLoad(self);
    }

    /**
     * Hand-written: serializes this configuration as an {@code <input>}
     * element.  The expression attribute is only written when non-blank.
     */
    protected Element doSave(Element parent) throws Exception {
        Element self = createChildElement(parent, "input");
        // Locale.ROOT keeps the serialized enum name stable regardless of the
        // JVM default locale (e.g. Turkish dotless-i lower-casing).
        self.setAttribute("type", getType().toString().toLowerCase(Locale.ROOT));
        if (!getExpression().getPropertyValue().trim().equals("")) {
            self.setAttribute("expression", getExpression().getPropertyValue());
        }
        addComments(self);
        return self;
    }

    /**
     * Not yet implemented; callers currently receive {@code null}.
     */
    public Map<String, ObjectValidator> validate() {
        // TODO Auto-generated method stub
        return null;
    }
} //SmooksInConfigurationImpl
| |
/**
* JReversePro - Java Decompiler / Disassembler.
* Copyright (C) 2008 Karthik Kumar.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. *
*/
package org.jreversepro.output;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import org.jreversepro.CustomLoggerFactory;
import org.jreversepro.jls.JLSConstants;
import org.jreversepro.jvm.JVMConstants;
import org.jreversepro.jvm.TypeInferrer;
import org.jreversepro.reflect.ClassInfo;
import org.jreversepro.reflect.Field;
import org.jreversepro.reflect.Import;
import org.jreversepro.reflect.Method;
import org.jreversepro.reflect.variabletable.VariableTable;
abstract class AbstractClassOutputterImpl implements JVMConstants {
/**
 * Creates an outputter bound to the class being decompiled and the code
 * styler used to format the emitted source.
 */
protected AbstractClassOutputterImpl(ClassInfo _clazz, CodeStyler _styler) {
  this.clazz = _clazz;
  this.styler = _styler;
  this.sb = new StringBuilder();
}
/**
 * Discards everything generated so far by starting a fresh buffer.
 */
public void clearContents() {
  // A single reassignment suffices; the previous intermediate "sb = null"
  // write had no effect.
  sb = new StringBuilder();
}
/**
 * Returns the source text accumulated so far.
 *
 * @return current contents of the output buffer
 */
public String getContents() {
  return this.sb.toString();
}
/**
 * Generates the output for the class; implementations append their result
 * to the internal buffer, retrievable via {@link #getContents()}.
 */
public abstract void process();
/**
 * Emits a header comment naming the original source file of the class.
 */
protected void outputHeaderComments() {
  sb.append("\n// Source: ").append(clazz.getSourceFile());
}
/**
 * Emits the package declaration (when the class is not in the default
 * package) followed by the list of imported classes.
 */
protected void outputPackageImports() {
  final String pkg = Import.getPackageName(clazz.getThisClass());
  if (pkg.length() != 0) {
    sb.append("\n")
      .append(JLSConstants.PACKAGE)
      .append(" ")
      .append(pkg)
      .append(JLSConstants.END_OF_STATEMENT);
  }
  sb.append("\n\n");
  outputImports(clazz.getConstantPool().getImportedClasses(), pkg);
}
/**
 * Emits the type declaration header: access qualifiers, the class or
 * interface keyword, the type name and — unless the superclass is
 * java.lang.Object — an extends clause.
 */
protected void outputThisSuperClasses() {
  sb.append("\n\n").append(getTypeAsString()).append(" ");
  sb.append(Import.getClassName(clazz.getThisClass()));
  final String superName = clazz.getSuperClass();
  if (!JVMConstants.CLASS_LANG_OBJECT.equals(superName)) {
    sb.append(" ").append(JLSConstants.EXTENDS).append(" ");
    sb.append(Import.getClassName(superName)).append(" ");
  }
}
/**
 * Emits the implements clause listing every interface of this class,
 * separated by the JLS delimiter; emits nothing when there are none.
 */
protected void outputInterfaces() {
  final List<String> ifaces = clazz.getInterfaces();
  if (ifaces.isEmpty()) {
    return;
  }
  sb.append("\n\t\t ").append(JLSConstants.IMPLEMENTS).append(" ");
  boolean first = true;
  for (String iface : ifaces) {
    if (!first) {
      sb.append(JLSConstants.INTERFACE_DELIMITER);
    }
    first = false;
    sb.append(Import.getClassName(iface));
  }
}
/**
 * Emits one declaration per field of the class: access qualifiers, the
 * JLS type name, the field name and — for final fields with a recorded
 * constant — an initializer.
 */
protected void outputFields() {
  sb.append("\n");
  for (Field field : clazz.getFields()) {
    final String jlsType =
        Import.getClassName(TypeInferrer.getJLSType(field.getDatatype(), false));
    sb.append("\n\t").append(getAccessQualifier(field.getQualifier(), true));
    sb.append(jlsType);
    sb.append(" ").append(field.getName());
    final String constantValue = field.getValue();
    if (field.isFinal() && constantValue.length() != 0) {
      sb.append(JLSConstants.EQUALTO).append(constantValue);
    }
    sb.append(JLSConstants.END_OF_STATEMENT);
  }
}
/**
 * Returns the declaration prefix of this type: its access qualifiers
 * followed by either the class or the interface keyword.
 *
 * @return qualifier string plus type keyword
 */
private String getTypeAsString() {
  final String keyword = clazz.isClass() ? JLSConstants.CLASS : JLSConstants.INTERFACE;
  return getAccessQualifier(clazz.getAccess(), false) + keyword;
}
/**
 * Emits an import statement for every referenced class that lives in a
 * named package other than the current package or the default package.
 * Class names using the JVM delimiter are rewritten with the JLS package
 * delimiter before being written out.
 *
 * @param imports     imported classes collected from the constant pool
 * @param packageName current package, whose members need no import
 */
private void outputImports(Import imports, String packageName) {
  final List<String> excluded = new ArrayList<String>(2);
  excluded.add(packageName);
  excluded.add(JLSConstants.DEFAULT_PACKAGE);
  logger.fine("Number of imports" + imports.getClasses().size());
  for (String current : imports.getClasses()) {
    if (current.indexOf(JVMConstants.JVM_PACKAGE_DELIMITER) == -1) {
      continue; // no package delimiter => default package, nothing to import
    }
    if (excluded.contains(Import.getPackageName(current))) {
      continue; // current or default package: skip
    }
    final String jlsName = current.replace(
        JVMConstants.JVM_PACKAGE_DELIMITER,
        JLSConstants.JLS_PACKAGE_DELIMITER);
    sb.append(JLSConstants.IMPORT).append(" ");
    sb.append(jlsName).append(JLSConstants.END_OF_STATEMENT).append("\n");
  }
}
/**
* Returns the String Representation of the qualifier. Certain qualifiers like
* volatile, transient, sync. are applicable only for methods and fields. and
* not classes. To identify them separately, we also pass another parameter
* called memberOnly. Only if this is set then those bits are checked for.
* Else they are ignored, since for a class/interface they may not be
* applicable.
*
* @param rhsQualifier
* Qualifier byte with the bits set.
* @param memberOnly
* Only if this is set then the bits relevant to fields and methods
* only are checked for. Else ignored.
* @return String Containing the representation.
*/
private String getAccessQualifier(int rhsQualifier, boolean memberOnly) {
StringBuilder access = new StringBuilder("");
if ((rhsQualifier & ACC_PUBLIC) != 0) {
access.append(JLSConstants.ACCESS_PUBLIC);
access.append(" ");
} else if ((rhsQualifier & ACC_PRIVATE) != 0) {
access.append(JLSConstants.ACCESS_PRIVATE);
access.append(" ");
} else if ((rhsQualifier & ACC_PROTECTED) != 0) {
access.append(JLSConstants.ACCESS_PROTECTED);
access.append(" ");
}
if ((rhsQualifier & ACC_STATIC) != 0) {
access.append(JLSConstants.ACCESS_STATIC);
access.append(" ");
}
if ((rhsQualifier & ACC_FINAL) != 0) {
access.append(JLSConstants.ACCESS_FINAL);
access.append(" ");
}
if ((rhsQualifier & ACC_ABSTRACT) != 0) {
access.append(JLSConstants.ACCESS_ABSTRACT);
access.append(" ");
}
if (memberOnly) {
// Fields only
if ((rhsQualifier & ACC_VOLATILE) != 0) {
access.append(JLSConstants.ACCESS_VOLATILE);
access.append(" ");
}
if ((rhsQualifier & ACC_TRANSIENT) != 0) {
access.append(JLSConstants.ACCESS_TRANSIENT);
access.append(" ");
}
// Methods only
if ((rhsQualifier & ACC_SYNCHRONIZED) != 0) {
access.append(JLSConstants.ACCESS_SYNCHRONIZED);
access.append(" ");
}
if ((rhsQualifier & ACC_NATIVE) != 0) {
access.append(JLSConstants.ACCESS_NATIVE);
access.append(" ");
}
if ((rhsQualifier & ACC_STRICT) != 0) {
access.append(JLSConstants.ACCESS_STRICTFP);
access.append(" ");
}
}
return access.toString();
}
  /**
   * Emits the header (signature) of the given method into the output buffer:
   * qualifiers, return type, name and parameter list, followed by a throws
   * clause. The bytecode names {@code <clinit>} and {@code <init>} are
   * rendered as a static initializer block and a constructor respectively.
   *
   * @param method
   *          method information for which the header is emitted.
   */
  protected void outputMethodHeader(Method method) {
    String returnType = Import.getClassName(TypeInferrer.getJLSType(method
        .getReturnType(), false));
    String name = method.getName();
    sb.append("\n\n  ");
    if (name.compareTo(CLINIT) == 0) {
      // <clinit> becomes a static initializer - no name, return type or args.
      sb.append(JLSConstants.STATIC);
    } else if (name.compareTo(INIT) == 0) {
      // <init> becomes a constructor: qualifiers plus the simple class name.
      sb.append(getAccessQualifier(method.getQualifier(), true));
      sb.append(extractClassOnly(clazz.getThisClass()));
    } else {
      sb.append(getAccessQualifier(method.getQualifier(), true));
      sb.append(returnType);
      sb.append(" " + method.getName());
    }
    List<String> args = method.getArgList();
    if (method.getName().compareTo(CLINIT) != 0) {
      sb.append("(");
      // Instance methods reserve local-variable slot 0 for 'this'.
      int baseVariableIndex = method.isStatic() ? 0 : 1;
      for (int i = 0; i < args.size(); i++) {
        if (i != 0) {
          sb.append(" ,");
        }
        String jvmArgType = args.get(i);
        String argType = Import.getClassName(TypeInferrer.getJLSType(
            jvmArgType, false));
        sb.append(argType);
        // TODO Later move this code to MethodEmitter
        sb.append(" ");
        // 0 is ok here - since the method arguments are
        // going to be in the full scope of the method.
        sb.append(method.getVariableTable().getName(baseVariableIndex++,
            VariableTable.FULL_SCOPE_INSTRUCTION_INDEX));
        // Types occupying two variable-table slots advance the index twice.
        if (TypeInferrer.doesTypeOccupy2EntriesInVariableTable(jvmArgType)) {
          baseVariableIndex++; // Ignore this.
        }
      }
      sb.append(")");
    }
    outputThrowsClause(method.getThrowsClasses(), clazz.getConstantPool()
        .getImportedClasses());
  }
  /**
   * Emits a throws clause listing the given exception classes into the
   * output buffer. Writes nothing when the list is empty.
   *
   * @param throwsClasses
   *          names of the exception classes declared by the method.
   * @param importInfo
   *          import information. NOTE(review): currently unused by this
   *          method; kept because the caller passes it - confirm before
   *          removing.
   */
  private void outputThrowsClause(List<String> throwsClasses, Import importInfo) {
    int size = throwsClasses.size();
    if (size != 0) {
      sb.append("\n\t\t" + JLSConstants.THROWS + " ");
      for (int i = 0; i < size; i++) {
        String thrownClass = throwsClasses.get(i);
        if (i > 0) {
          sb.append(" ,");
        }
        sb.append(Import.getClassName(thrownClass));
      }
    }
  }
private static String extractClassOnly(String jvmType) {
int lastIndex = jvmType.lastIndexOf(JVMConstants.JVM_PACKAGE_DELIMITER);
if (lastIndex != -1) {
return jvmType.substring(lastIndex + 1);
} else {
return jvmType;
}
}
  /** Appends the styler's block-opening token to the output buffer. */
  protected void openBlock() {
    sb.append(styler.openBlock());
  }

  /** Appends the styler's block-closing token to the output buffer. */
  protected void closeBlock() {
    sb.append(styler.closeBlock());
  }

  /** Appends the given string verbatim to the output buffer. */
  protected void outputString(String str) {
    sb.append(str);
  }

  // Metadata of the class currently being emitted.
  protected ClassInfo clazz;
  private final Logger logger = CustomLoggerFactory.createLogger();
  // Accumulates the generated source text.
  private StringBuilder sb;
  // Supplies formatting tokens (block open/close) for the generated code.
  protected CodeStyler styler;
}
| |
/*
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.hal.client.configuration.subsystem.ee;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import elemental2.dom.HTMLElement;
import org.jboss.hal.ballroom.VerticalNavigation;
import org.jboss.hal.ballroom.form.Form;
import org.jboss.hal.ballroom.table.Table;
import org.jboss.hal.core.mbui.form.ModelNodeForm;
import org.jboss.hal.core.mbui.table.ModelNodeTable;
import org.jboss.hal.core.mbui.table.TableButtonFactory;
import org.jboss.hal.core.mvp.HalViewImpl;
import org.jboss.hal.dmr.ModelDescriptionConstants;
import org.jboss.hal.dmr.ModelNode;
import org.jboss.hal.dmr.NamedNode;
import org.jboss.hal.meta.AddressTemplate;
import org.jboss.hal.meta.Metadata;
import org.jboss.hal.meta.MetadataRegistry;
import org.jboss.hal.resources.Ids;
import org.jboss.hal.resources.Names;
import org.jboss.hal.resources.Resources;
import static org.jboss.gwt.elemento.core.Elements.div;
import static org.jboss.gwt.elemento.core.Elements.h;
import static org.jboss.gwt.elemento.core.Elements.p;
import static org.jboss.gwt.elemento.core.Elements.section;
import static org.jboss.hal.ballroom.LayoutBuilder.column;
import static org.jboss.hal.ballroom.LayoutBuilder.row;
import static org.jboss.hal.client.configuration.subsystem.ee.AddressTemplates.*;
import static org.jboss.hal.dmr.ModelDescriptionConstants.*;
import static org.jboss.hal.dmr.ModelNodeHelper.asNamedNodes;
import static org.jboss.hal.resources.CSS.fontAwesome;
import static org.jboss.hal.resources.CSS.pfIcon;
import static org.jboss.hal.resources.Ids.*;
/**
 * View for the EE subsystem configuration. Builds a vertical navigation with
 * panels for the subsystem attributes, the global modules table, the
 * default-bindings form, and one table/form panel per managed service
 * resource (context service, executor, scheduled executor, thread factory).
 */
public class EEView extends HalViewImpl implements EEPresenter.MyView {

    // local names
    private static final String DEFAULT_BINDINGS_NAME = "Default Bindings";
    private static final String CONTEXT_SERVICE_NAME = "Context Service";
    private static final String MANAGED_EXECUTOR_NAME = "Executor";
    private static final String MANAGED_EXECUTOR_SCHEDULED_NAME = "Scheduled Executor";
    private static final String SERVICES_NAME = "Services";
    private static final String MANAGED_THREAD_FACTORY_NAME = "Thread Factories";

    private final MetadataRegistry metadataRegistry;
    private final TableButtonFactory tableButtonFactory;
    private final Resources resources;
    private final VerticalNavigation navigation;
    // Forms keyed either by form id (attributes, default bindings) or, for
    // service panels, by the template's last resource name - see
    // buildServicePanel() and update(ModelNode, String, String).
    private final Map<String, ModelNodeForm> forms;
    private final Table<ModelNode> globalModulesTable;
    // Service tables keyed by the template's last resource name.
    private final Map<String, Table<NamedNode>> tables;
    private EEPresenter presenter;

    /** Builds all panels and registers them with the vertical navigation. */
    @Inject
    public EEView(MetadataRegistry metadataRegistry,
            TableButtonFactory tableButtonFactory,
            Resources resources) {
        this.metadataRegistry = metadataRegistry;
        this.tableButtonFactory = tableButtonFactory;
        this.resources = resources;
        this.forms = new HashMap<>();
        this.tables = new HashMap<>(4);
        this.navigation = new VerticalNavigation();
        registerAttachable(navigation);

        // ============================================
        // attributes - deployments
        Metadata eeMetadata = metadataRegistry.lookup(AddressTemplates.EE_SUBSYSTEM_TEMPLATE);
        ModelNodeForm<ModelNode> eeAttributesForm = new ModelNodeForm.Builder<>(EE_ATTRIBUTES_FORM, eeMetadata)
                .onSave((f, changedValues) -> presenter.save(AddressTemplates.EE_SUBSYSTEM_TEMPLATE, changedValues,
                        eeMetadata, resources.messages()
                                .modifyResourceSuccess(Names.EE, resources.constants().deploymentAttributes())))
                .prepareReset(f -> presenter.resetSingleton(resources.constants().deploymentAttributes(),
                        AddressTemplates.EE_SUBSYSTEM_TEMPLATE, f, eeMetadata))
                .build();
        forms.put(EE_ATTRIBUTES_FORM, eeAttributesForm);
        registerAttachable(eeAttributesForm);
        HTMLElement navigationElement = div()
                .add(h(1).textContent(Names.DEPLOYMENTS))
                .add(p().textContent(eeMetadata.getDescription().getDescription()))
                .add(eeAttributesForm).element();
        navigation.addPrimary(EE_ATTRIBUTES_ITEM, Names.CONFIGURATION, pfIcon("settings"), navigationElement);

        // ============================================
        // global modules
        // global-modules is a complex attribute of the subsystem resource.
        Metadata globalModulesMetadata = eeMetadata.forComplexAttribute(GLOBAL_MODULES);
        globalModulesTable = new ModelNodeTable.Builder<>(Ids.EE_GLOBAL_MODULES_TABLE, globalModulesMetadata)
                .columns(NAME, "slot", "annotations", "services", "meta-inf")
                .button(tableButtonFactory.add(EE_SUBSYSTEM_TEMPLATE,
                        table -> presenter.launchAddDialogGlobalModule()))
                .button(tableButtonFactory.remove(EE_SUBSYSTEM_TEMPLATE,
                        table -> presenter.removeGlobalModule(table.selectedRow())))
                .build();
        registerAttachable(globalModulesTable);
        navigationElement = div()
                .add(h(1).textContent(Names.GLOBAL_MODULES))
                .add(p().textContent(globalModulesMetadata.getDescription().getDescription()))
                .add(globalModulesTable).element();
        navigation.addPrimary(EE_GLOBAL_MODULES_ITEM, Names.GLOBAL_MODULES, fontAwesome("cubes"), navigationElement);

        // ============================================
        // service=default-bindings
        Metadata defaultBindingsMetadata = metadataRegistry.lookup(AddressTemplates.SERVICE_DEFAULT_BINDINGS_TEMPLATE);
        ModelNodeForm<ModelNode> defaultBindingsForm = new ModelNodeForm.Builder<>(EE_DEFAULT_BINDINGS_FORM,
                defaultBindingsMetadata)
                .onSave((form, changedValues) -> presenter.save(AddressTemplates.SERVICE_DEFAULT_BINDINGS_TEMPLATE,
                        changedValues, defaultBindingsMetadata,
                        resources.messages().modifyResourceSuccess(Names.EE, DEFAULT_BINDINGS_NAME)))
                .prepareReset(f -> presenter.resetSingleton(DEFAULT_BINDINGS_NAME,
                        AddressTemplates.SERVICE_DEFAULT_BINDINGS_TEMPLATE, f, defaultBindingsMetadata))
                .build();
        forms.put(EE_DEFAULT_BINDINGS_FORM, defaultBindingsForm);
        registerAttachable(defaultBindingsForm);
        navigationElement = div()
                .add(h(1).textContent(DEFAULT_BINDINGS_NAME))
                .add(p().textContent(defaultBindingsMetadata.getDescription().getDescription()))
                .add(defaultBindingsForm).element();
        navigation.addPrimary(EE_DEFAULT_BINDINGS_ITEM, DEFAULT_BINDINGS_NAME, fontAwesome("link"),
                navigationElement);

        // ============================================
        // services
        // One secondary navigation entry per managed service resource type.
        navigation.addPrimary(EE_SERVICES_ITEM, SERVICES_NAME, pfIcon("service"));
        navigation.addSecondary(EE_SERVICES_ITEM, EE_CONTEXT_SERVICE, CONTEXT_SERVICE_NAME,
                buildServicePanel(EE_CONTEXT_SERVICE, CONTEXT_SERVICE_TEMPLATE, CONTEXT_SERVICE_NAME));
        navigation.addSecondary(EE_SERVICES_ITEM, EE_MANAGED_EXECUTOR, MANAGED_EXECUTOR_NAME,
                buildServicePanel(EE_MANAGED_EXECUTOR, MANAGED_EXECUTOR_TEMPLATE, MANAGED_EXECUTOR_NAME));
        navigation.addSecondary(EE_SERVICES_ITEM, EE_MANAGED_EXECUTOR_SCHEDULED, MANAGED_EXECUTOR_SCHEDULED_NAME,
                buildServicePanel(EE_MANAGED_EXECUTOR_SCHEDULED, MANAGED_EXECUTOR_SCHEDULED_TEMPLATE,
                        MANAGED_EXECUTOR_SCHEDULED_NAME));
        navigation.addSecondary(EE_SERVICES_ITEM, EE_MANAGED_THREAD_FACTORY, MANAGED_THREAD_FACTORY_NAME,
                buildServicePanel(EE_MANAGED_THREAD_FACTORY, MANAGED_THREAD_FACTORY_TEMPLATE,
                        MANAGED_THREAD_FACTORY_NAME));

        // ============================================
        // main layout
        initElement(row()
                .add(column()
                        .addAll(navigation.panes())));
    }

    /**
     * After attaching, binds each service table to the form registered under
     * the same key so that selecting a row shows it in the form.
     */
    @Override
    @SuppressWarnings("unchecked")
    public void attach() {
        super.attach();
        tables.forEach((id, table) -> {
            if (forms.containsKey(id)) {
                table.bindForm(forms.get(id));
            }
        });
    }

    @Override
    public void setPresenter(EEPresenter presenter) {
        this.presenter = presenter;
    }

    /**
     * Refreshes every panel from the given EE subsystem model node.
     *
     * @param eeData the EE subsystem resource read from the server.
     */
    @Override
    @SuppressWarnings("unchecked")
    public void update(ModelNode eeData) {
        // update the attributes - deployments tab
        Form<ModelNode> formDeployments = forms.get(EE_ATTRIBUTES_FORM);
        formDeployments.view(eeData);

        // update the global modules tab
        globalModulesTable.clear();
        if (eeData.hasDefined(GLOBAL_MODULES)) {
            List<ModelNode> globalModulesList = eeData.get(GLOBAL_MODULES).asList();
            globalModulesTable.update(globalModulesList);
        }

        // update the default-bindings tab
        if (eeData.hasDefined(SERVICE)) {
            ModelNode defaultBindings = eeData.get(SERVICE).get(ModelDescriptionConstants.DEFAULT_BINDINGS);
            Form<ModelNode> formDefaultBindings = forms.get(EE_DEFAULT_BINDINGS_FORM);
            formDefaultBindings.view(defaultBindings);
        }

        // update the context-service table
        update(eeData, CONTEXT_SERVICE, EE_CONTEXT_SERVICE);
        // update the managed-executor-service table
        update(eeData, MANAGED_EXECUTOR_SERVICE, EE_MANAGED_EXECUTOR);
        // update the managed-scheduled-executor-service table
        update(eeData, MANAGED_SCHEDULED_EXECUTOR_SERVICE, EE_MANAGED_EXECUTOR_SCHEDULED);
        // update the managed-thread-factory table
        update(eeData, MANAGED_THREAD_FACTORY, EE_MANAGED_THREAD_FACTORY);
    }

    /**
     * Refreshes the table, form and navigation badge for one service
     * resource type.
     * NOTE(review): {@code resourceType} is used as the lookup key into
     * {@code forms}/{@code tables}, which are populated in
     * {@link #buildServicePanel} under {@code template.lastName()} - confirm
     * both always agree for each resource type.
     *
     * @param eeData the EE subsystem resource.
     * @param resourceType child resource type, e.g. "context-service".
     * @param navigationId id of the navigation entry whose badge is updated.
     */
    @SuppressWarnings("unchecked")
    private void update(ModelNode eeData, String resourceType, String navigationId) {
        if (eeData.hasDefined(resourceType)) {
            List<NamedNode> models = asNamedNodes(eeData.get(resourceType).asPropertyList());
            Form form = forms.get(resourceType);
            form.clear();
            Table<NamedNode> table = tables.get(resourceType);
            table.update(models);
            navigation.updateBadge(navigationId, models.size());
        }
    }

    /**
     * Builds one service panel (heading, description, table and form) and
     * registers the table and form under {@code template.lastName()}.
     *
     * @param baseId id prefix used for the table and form element ids.
     * @param template address template of the service resource.
     * @param type human-readable resource type used for headings and dialogs.
     * @return the assembled section element.
     */
    @SuppressWarnings("ConstantConditions")
    private HTMLElement buildServicePanel(String baseId, AddressTemplate template, String type) {
        Metadata metadata = metadataRegistry.lookup(template);

        Table<NamedNode> table = new ModelNodeTable.Builder<NamedNode>(Ids.build(baseId, Ids.TABLE),
                metadata)
                .column(NAME, (cell, t, row, meta) -> row.getName())
                .button(tableButtonFactory.add(Ids.build(baseId, Ids.ADD), type, template,
                        (name, address) -> presenter.reload()))
                .button(tableButtonFactory.remove(type, template, (api) -> api.selectedRow().getName(),
                        () -> presenter.reload()))
                .build();
        registerAttachable(table);
        tables.put(template.lastName(), table);

        ModelNodeForm<NamedNode> form = new ModelNodeForm.Builder<NamedNode>(Ids.build(baseId, Ids.FORM),
                metadata)
                .onSave((f, changedValues) -> {
                    // Resolve the wildcard template against the selected row.
                    AddressTemplate fullyQualified = template.replaceWildcards(table.selectedRow().getName());
                    presenter.save(fullyQualified, changedValues, metadata,
                            resources.messages().modifyResourceSuccess(Names.EE, template.lastName()));
                })
                .prepareReset(f -> {
                    String name = table.selectedRow().getName();
                    AddressTemplate fullyQualified = template.replaceWildcards(name);
                    presenter.reset(type, name, fullyQualified, f, metadata,
                            resources.messages().modifyResourceSuccess(Names.EE, template.lastName()));
                })
                .build();
        forms.put(template.lastName(), form);
        registerAttachable(form);

        return section()
                .add(h(1).textContent(type))
                .add(p().textContent(metadata.getDescription().getDescription()))
                .add(table)
                .add(form).element();
    }
}
| |
package crazypants.enderio.base;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.enderio.core.common.BlockEnder;
import crazypants.enderio.api.IModObject;
import crazypants.enderio.api.tool.ITool;
import crazypants.enderio.base.gui.handler.IEioGuiHandler;
import crazypants.enderio.base.init.ModObjectRegistry;
import crazypants.enderio.base.lang.Lang;
import crazypants.enderio.base.machine.base.te.AbstractMachineEntity;
import crazypants.enderio.base.tool.ToolUtil;
import net.minecraft.block.material.MapColor;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.BlockFaceShape;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.event.FMLInitializationEvent;
import net.minecraftforge.server.permission.DefaultPermissionLevel;
import net.minecraftforge.server.permission.PermissionAPI;
import net.minecraftforge.server.permission.context.BlockPosContext;
/**
 * Base class for Ender IO blocks backed by a {@link TileEntityEio}. Adds
 * wrench handling (break / IO-mode toggle) guarded by permission nodes,
 * pick-block delegation to the tile entity, GUI opening, and block-face-shape
 * variants that defer to a painted block's shape when present.
 */
public abstract class BlockEio<T extends TileEntityEio> extends BlockEnder<T> implements IModObject.LifecycleInit, IModObject.WithBlockItem {

  // Permission node ids; replaced with real node names in init() when the
  // block can be wrenched at all.
  protected @Nonnull String permissionNodeWrenching = "(block not initialized)";
  protected @Nonnull String permissionNodeIOWrenching = permissionNodeWrenching;

  @SuppressWarnings("unchecked")
  protected BlockEio(@Nonnull IModObject modObject) {
    super((Class<? extends T>) modObject.getTEClass());
    modObject.apply(this);
    setCreativeTab(EnderIOTab.tabEnderIOMachines);
    if (translucent || lightOpacity == 0) {
      useNeighborBrightness = true;
    }
  }

  @SuppressWarnings("unchecked")
  protected BlockEio(@Nonnull IModObject modObject, @Nonnull Material mat) {
    super((Class<? extends T>) modObject.getTEClass(), mat);
    modObject.apply(this);
    setCreativeTab(EnderIOTab.tabEnderIOMachines);
    if (translucent || lightOpacity == 0) {
      useNeighborBrightness = true;
    }
  }

  @SuppressWarnings("unchecked")
  protected BlockEio(@Nonnull IModObject modObject, @Nonnull Material mat, MapColor mapColor) {
    super((Class<? extends T>) modObject.getTEClass(), mat, mapColor);
    modObject.apply(this);
    setCreativeTab(EnderIOTab.tabEnderIOMachines);
    if (translucent || lightOpacity == 0) {
      useNeighborBrightness = true;
    }
  }

  /**
   * Stuff that needs to be done directly after constructing the object
   */
  protected void init() {
  }

  /**
   * Stuff that has to be done in the init phase (as opposed to preInit/postInit)
   */
  @Override
  public void init(@Nonnull IModObject modObject, @Nonnull FMLInitializationEvent event) {
    // Register the wrench permission nodes only for wrenchable blocks.
    if (canBeWrenched()) {
      permissionNodeWrenching = PermissionAPI.registerNode(EnderIO.DOMAIN + ".wrench.break." + modObject.getUnlocalisedName(), DefaultPermissionLevel.ALL,
          "Permission to wrench-break the block " + modObject.getUnlocalisedName() + " of Ender IO");
      permissionNodeIOWrenching = PermissionAPI.registerNode(EnderIO.DOMAIN + ".wrench.iomode." + modObject.getUnlocalisedName(), DefaultPermissionLevel.ALL,
          "Permission to set IO mode by wrench-clicking the block " + modObject.getUnlocalisedName() + " of Ender IO");
    }
  }

  /** Creates the item form of this block. */
  @Override
  public @Nullable ItemEIO createBlockItem(@Nonnull IModObject modObject) {
    return modObject.apply(new ItemEIO(this));
  };

  /**
   * Handles wrench interactions before falling through to default
   * activation: first wrench-breaking, then (on machines) toggling the IO
   * mode of the clicked face, guarded by the IO-wrenching permission node.
   */
  @Override
  public boolean onBlockActivated(@Nonnull World world, @Nonnull BlockPos pos, @Nonnull IBlockState state, @Nonnull EntityPlayer entityPlayer,
      @Nonnull EnumHand hand, @Nonnull EnumFacing side, float hitX, float hitY, float hitZ) {
    if (shouldWrench(world, pos, entityPlayer, side) && ToolUtil.breakBlockWithTool(this, world, pos, side, entityPlayer, hand, permissionNodeWrenching)) {
      return true;
    }

    TileEntity te = getTileEntity(world, pos);
    if (te instanceof AbstractMachineEntity) {
      ITool tool = ToolUtil.getEquippedTool(entityPlayer, hand);
      if (tool != null && !entityPlayer.isSneaking() && tool.canUse(hand, entityPlayer, pos)) {
        // Only mutate state on the server; the client just reports success.
        if (!world.isRemote) {
          if (!PermissionAPI.hasPermission(entityPlayer.getGameProfile(), permissionNodeIOWrenching, new BlockPosContext(entityPlayer, pos, state, side))) {
            entityPlayer.sendMessage(Lang.WRENCH_DENIED.toChatServer());
          } else {
            ((AbstractMachineEntity) te).toggleIoModeForFace(side);
          }
        }
        return true;
      }
    }

    return super.onBlockActivated(world, pos, state, entityPlayer, hand, side, hitX, hitY, hitZ);
  }

  /** Lets the tile entity customize the pick-block stack when present. */
  @Override
  protected @Nonnull ItemStack processPickBlock(@Nonnull IBlockState state, @Nonnull RayTraceResult target, @Nonnull World world, @Nonnull BlockPos pos,
      @Nonnull EntityPlayer player, @Nonnull ItemStack pickBlock) {
    T te = getTileEntity(world, pos);
    if (te != null) {
      return te.processPickBlock(target, player, pickBlock);
    }
    return pickBlock;
  }

  /** Whether a wrench click at this position should break the block. */
  public boolean shouldWrench(@Nonnull World world, @Nonnull BlockPos pos, @Nonnull EntityPlayer entityPlayer, @Nonnull EnumFacing side) {
    return canBeWrenched();
  }

  /**
   *
   * @return <code>true</code> if this block can be wrenched at all. If this returns <code>false</code>,
   *         {@link #shouldWrench(World, BlockPos, EntityPlayer, EnumFacing)} <strong>must never</strong> return <code>true</code>.
   */
  protected boolean canBeWrenched() {
    return true;
  }

  // GUI

  @Override
  protected boolean openGui(@Nonnull World world, @Nonnull BlockPos pos, @Nonnull EntityPlayer entityPlayer, @Nonnull EnumFacing side) {
    return openGui(world, pos, entityPlayer, side, 0);
  }

  /**
   * To be called from mod code, e.g. a GUI button's network packet to switch GUIs.
   */
  public boolean openGui(@Nonnull World world, @Nonnull BlockPos pos, @Nonnull EntityPlayer entityPlayer, @Nullable EnumFacing side, int param) {
    if (this instanceof IEioGuiHandler) {
      return ModObjectRegistry.getModObjectNN(this).openGui(world, pos, entityPlayer, side, param);
    }
    return false;
  }

  // BlockFaceShape
  // NOTE(review): the four mkShape variants below duplicate the
  // paint-source-aware shape lookup. The lookup cannot be extracted into a
  // shared method because each anonymous class needs the IShape.super call,
  // which is only legal inside an implementor of IShape.

  /** Shape provider returning {@code allFaces} for every face, unless painted. */
  @Override
  protected @Nonnull IShape<T> mkShape(@Nonnull BlockFaceShape allFaces) {
    return new IShape<T>() {

      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face,
          @Nonnull T te) {
        IBlockState paintSource = te.getPaintSource();
        if (paintSource != null) {
          try {
            return paintSource.getBlockFaceShape(worldIn, pos, face);
          } catch (Exception e) {
            // best effort: fall through to the unpainted shape on any error
          }
        }
        return IShape.super.getBlockFaceShape(worldIn, state, pos, face, te);
      }

      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face) {
        return allFaces;
      }
    };
  }

  /** Shape provider distinguishing up/down faces from the sides, unless painted. */
  @Override
  protected @Nonnull IShape<T> mkShape(@Nonnull BlockFaceShape upDown, @Nonnull BlockFaceShape allSides) {
    return new IShape<T>() {

      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face,
          @Nonnull T te) {
        IBlockState paintSource = te.getPaintSource();
        if (paintSource != null) {
          try {
            return paintSource.getBlockFaceShape(worldIn, pos, face);
          } catch (Exception e) {
            // best effort: fall through to the unpainted shape on any error
          }
        }
        return IShape.super.getBlockFaceShape(worldIn, state, pos, face, te);
      }

      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face) {
        return face == EnumFacing.UP || face == EnumFacing.DOWN ? upDown : allSides;
      }
    };
  }

  /** Shape provider with distinct down, up and side shapes, unless painted. */
  @Override
  protected @Nonnull IShape<T> mkShape(@Nonnull BlockFaceShape down, @Nonnull BlockFaceShape up, @Nonnull BlockFaceShape allSides) {
    return new IShape<T>() {

      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face,
          @Nonnull T te) {
        IBlockState paintSource = te.getPaintSource();
        if (paintSource != null) {
          try {
            return paintSource.getBlockFaceShape(worldIn, pos, face);
          } catch (Exception e) {
            // best effort: fall through to the unpainted shape on any error
          }
        }
        return IShape.super.getBlockFaceShape(worldIn, state, pos, face, te);
      }

      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face) {
        return face == EnumFacing.UP ? up : face == EnumFacing.DOWN ? down : allSides;
      }
    };
  }

  /** Shape provider with one shape per face (indexed by facing ordinal), unless painted. */
  @Override
  protected @Nonnull IShape<T> mkShape(@Nonnull BlockFaceShape... faces) {
    return new IShape<T>() {

      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face,
          @Nonnull T te) {
        IBlockState paintSource = te.getPaintSource();
        if (paintSource != null) {
          try {
            return paintSource.getBlockFaceShape(worldIn, pos, face);
          } catch (Exception e) {
            // best effort: fall through to the unpainted shape on any error
          }
        }
        return IShape.super.getBlockFaceShape(worldIn, state, pos, face, te);
      }

      @SuppressWarnings("null")
      @Override
      @Nonnull
      public BlockFaceShape getBlockFaceShape(@Nonnull IBlockAccess worldIn, @Nonnull IBlockState state, @Nonnull BlockPos pos, @Nonnull EnumFacing face) {
        return faces[face.ordinal()];
      }
    };
  }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.entries;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import org.apache.geode.cache.EntryEvent;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.cache.DiskId;
import org.apache.geode.internal.cache.DiskStoreImpl;
import org.apache.geode.internal.cache.InternalRegion;
import org.apache.geode.internal.cache.RegionEntry;
import org.apache.geode.internal.cache.RegionEntryContext;
import org.apache.geode.internal.cache.eviction.EvictionController;
import org.apache.geode.internal.cache.persistence.DiskRecoveryStore;
import org.apache.geode.internal.cache.versions.VersionSource;
import org.apache.geode.internal.cache.versions.VersionStamp;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
/*
* macros whose definition changes this class:
*
* disk: DISK lru: LRU stats: STATS versioned: VERSIONED offheap: OFFHEAP
*
* One of the following key macros must be defined:
*
* key object: KEY_OBJECT key int: KEY_INT key long: KEY_LONG key uuid: KEY_UUID key string1:
* KEY_STRING1 key string2: KEY_STRING2
*/
/**
* Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run
* ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).
*/
public class VersionedThinDiskRegionEntryHeapStringKey1 extends VersionedThinDiskRegionEntryHeap {
  // --------------------------------------- common fields ----------------------------------------
  // Atomic updater used instead of AtomicLong to save one object per entry.
  private static final AtomicLongFieldUpdater<VersionedThinDiskRegionEntryHeapStringKey1> LAST_MODIFIED_UPDATER =
      AtomicLongFieldUpdater.newUpdater(VersionedThinDiskRegionEntryHeapStringKey1.class,
          "lastModified");
  protected int hash;
  private HashEntry<Object, Object> nextEntry;
  // Read/written only through LAST_MODIFIED_UPDATER.
  @SuppressWarnings("unused")
  private volatile long lastModified;
  private volatile Object value;
  // ---------------------------------------- disk fields -----------------------------------------
  /**
   * @since GemFire 5.1
   */
  protected DiskId id;
  // ------------------------------------- versioned fields ---------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  private VersionSource memberId;
  private short entryVersionLowBytes;
  private short regionVersionHighBytes;
  private int regionVersionLowBytes;
  private byte entryVersionHighByte;
  private byte distributedSystemId;
  // --------------------------------------- key fields -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Packed inline representation of the (short) string key - see constructor.
  private final long bits1;
  /**
   * Creates the entry and packs the short string key inline into {@code bits1}:
   * characters are packed from the highest position down (8 bits each when
   * {@code byteEncode} is set, 16 bits otherwise), a flag bit (1 &lt;&lt; 6) marks
   * the byte encoding, and the key length is OR'ed into the low bits.
   */
  public VersionedThinDiskRegionEntryHeapStringKey1(final RegionEntryContext context,
      final String key, final Object value, final boolean byteEncode) {
    super(context, (value instanceof RecoveredEntry ? null : value));
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    initialize(context, value);
    // caller has already confirmed that key.length <= MAX_INLINE_STRING_KEY
    long tempBits1 = 0L;
    if (byteEncode) {
      for (int i = key.length() - 1; i >= 0; i--) {
        // Note: we know each byte is <= 0x7f so the "& 0xff" is not needed. But I added it in to
        // keep findbugs happy.
        tempBits1 |= (byte) key.charAt(i) & 0xff;
        tempBits1 <<= 8;
      }
      tempBits1 |= 1 << 6;
    } else {
      for (int i = key.length() - 1; i >= 0; i--) {
        tempBits1 |= key.charAt(i);
        tempBits1 <<= 16;
      }
    }
    tempBits1 |= key.length();
    this.bits1 = tempBits1;
  }
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // Simple field accessors; lastModified goes through the atomic updater.
  @Override
  protected Object getValueField() {
    return this.value;
  }

  @Override
  protected void setValueField(final Object value) {
    this.value = value;
  }

  @Override
  protected long getLastModifiedField() {
    return LAST_MODIFIED_UPDATER.get(this);
  }

  @Override
  protected boolean compareAndSetLastModifiedField(final long expectedValue, final long newValue) {
    return LAST_MODIFIED_UPDATER.compareAndSet(this, expectedValue, newValue);
  }

  @Override
  public int getEntryHash() {
    return this.hash;
  }

  @Override
  protected void setEntryHash(final int hash) {
    this.hash = hash;
  }

  @Override
  public HashEntry<Object, Object> getNextEntry() {
    return this.nextEntry;
  }

  @Override
  public void setNextEntry(final HashEntry<Object, Object> nextEntry) {
    this.nextEntry = nextEntry;
  }
  // ----------------------------------------- disk code ------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  protected void initialize(final RegionEntryContext context, final Object value) {
    diskInitialize(context, value);
  }

  @Override
  public int updateAsyncEntrySize(final EvictionController evictionController) {
    // This entry variant has no eviction ("lru") support.
    throw new IllegalStateException("should never be called");
  }

  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  @Override
  public DiskId getDiskId() {
    return this.id;
  }

  @Override
  public void setDiskId(final RegionEntry oldEntry) {
    // Takes over the DiskId of the entry being replaced.
    this.id = ((DiskEntry) oldEntry).getDiskId();
  }

  private void diskInitialize(final RegionEntryContext context, final Object value) {
    DiskRecoveryStore diskRecoveryStore = (DiskRecoveryStore) context;
    DiskStoreImpl diskStore = diskRecoveryStore.getDiskStore();
    long maxOplogSize = diskStore.getMaxOplogSize();
    // get appropriate instance of DiskId implementation based on maxOplogSize
    this.id = DiskId.createDiskId(maxOplogSize, true, diskStore.needsLinkedList());
    Helper.initialize(this, diskRecoveryStore, value);
  }
// -------------------------------------- versioned code ----------------------------------------
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
public int getEntryVersion() {
return ((entryVersionHighByte << 16) & 0xFF0000) | (entryVersionLowBytes & 0xFFFF);
}
@Override
public long getRegionVersion() {
// High short occupies bits 32+, low int the bottom 32 bits of the region version.
return (((long) regionVersionHighBytes) << 32) | (regionVersionLowBytes & 0x00000000FFFFFFFFL);
}
@Override
public long getVersionTimeStamp() {
// The version timestamp is backed by the lastModified field.
return getLastModified();
}
@Override
public void setVersionTimeStamp(final long timeStamp) {
// Writes the version timestamp into the lastModified field.
setLastModified(timeStamp);
}
@Override
public VersionSource getMemberID() {
// Member that originated the current version (set via setVersions/setMemberID).
return this.memberId;
}
@Override
public int getDistributedSystemId() {
// Distributed system id recorded from the last applied version tag.
return this.distributedSystemId;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// Copies the version information from the tag into this entry's packed fields.
@Override
public void setVersions(final VersionTag versionTag) {
this.memberId = versionTag.getMemberID();
int eVersion = versionTag.getEntryVersion();
// Entry version is split into a low short and a high byte (24 bits total).
this.entryVersionLowBytes = (short) (eVersion & 0xffff);
this.entryVersionHighByte = (byte) ((eVersion & 0xff0000) >> 16);
this.regionVersionHighBytes = versionTag.getRegionVersionHighBytes();
this.regionVersionLowBytes = versionTag.getRegionVersionLowBytes();
if (!versionTag.isGatewayTag()
&& this.distributedSystemId == versionTag.getDistributedSystemId()) {
// Same distributed system, non-gateway tag: keep the larger of the two
// timestamps; if ours is newer, write it back into the tag.
if (getVersionTimeStamp() <= versionTag.getVersionTimeStamp()) {
setVersionTimeStamp(versionTag.getVersionTimeStamp());
} else {
versionTag.setVersionTimeStamp(getVersionTimeStamp());
}
} else {
// Gateway tag or different distributed system: take the tag's timestamp as-is.
setVersionTimeStamp(versionTag.getVersionTimeStamp());
}
this.distributedSystemId = (byte) (versionTag.getDistributedSystemId() & 0xff);
}
@Override
public void setMemberID(final VersionSource memberId) {
// Records the member that originated the current version.
this.memberId = memberId;
}
@Override
public VersionStamp getVersionStamp() {
// This entry implements VersionStamp itself, so it is its own stamp.
return this;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// Snapshots this entry's version state into a freshly created VersionTag.
@Override
public VersionTag asVersionTag() {
VersionTag tag = VersionTag.create(memberId);
tag.setEntryVersion(getEntryVersion());
tag.setRegionVersion(this.regionVersionHighBytes, this.regionVersionLowBytes);
tag.setVersionTimeStamp(getVersionTimeStamp());
tag.setDistributedSystemId(this.distributedSystemId);
return tag;
}
@Override
public void processVersionTag(final InternalRegion region, final VersionTag versionTag,
final boolean isTombstoneFromGII, final boolean hasDelta, final VersionSource versionSource,
final InternalDistributedMember sender, final boolean checkForConflicts) {
// Straight delegation to the shared version-tag processing logic.
basicProcessVersionTag(region, versionTag, isTombstoneFromGII, hasDelta, versionSource, sender,
checkForConflicts);
}
@Override
public void processVersionTag(final EntryEvent cacheEvent) {
// this keeps IDE happy. without it the sender chain becomes confused while browsing this code
super.processVersionTag(cacheEvent);
}
/** get rvv internal high byte (top 16 bits of the region version). Used by region entries for transferring to storage */
@Override
public short getRegionVersionHighBytes() {
return this.regionVersionHighBytes;
}
/** get rvv internal low bytes (bottom 32 bits of the region version). Used by region entries for transferring to storage */
@Override
public int getRegionVersionLowBytes() {
return this.regionVersionLowBytes;
}
// ----------------------------------------- key code -------------------------------------------
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
private int getKeyLength() {
// Key length (in characters) is stored in the low 6 bits of bits1.
return (int) (this.bits1 & 0x003fL);
}
// Returns the 2-bit key encoding stored in bits 6-7 of bits1.
private int getEncoding() {
// 0 means encoded as char
// 1 means encoded as bytes that are all <= 0x7f;
return (int) (this.bits1 >> 6) & 0x03;
}
@Override
public Object getKey() {
// Decodes the key that is packed inline into bits1: 8 bits per character for
// encoding 1 (ASCII-range bytes), 16 bits per character otherwise.
int keyLength = getKeyLength();
char[] chars = new char[keyLength];
long tempBits1 = this.bits1;
if (getEncoding() == 1) {
for (int i = 0; i < keyLength; i++) {
// Shift past the header/previous character and mask off one byte.
tempBits1 >>= 8;
chars[i] = (char) (tempBits1 & 0x00ff);
}
} else {
for (int i = 0; i < keyLength; i++) {
// Shift past the header/previous character and mask off one 16-bit char.
tempBits1 >>= 16;
chars[i] = (char) (tempBits1 & 0x00FFff);
}
}
return new String(chars);
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// Compares the given key against the inline-packed key without materializing a
// String: characters are decoded from bits1 one at a time (see getKey()).
@Override
public boolean isKeyEqual(final Object key) {
if (key instanceof String) {
String stringKey = (String) key;
int keyLength = getKeyLength();
if (stringKey.length() == keyLength) {
long tempBits1 = this.bits1;
if (getEncoding() == 1) {
// Encoding 1: one byte per character.
for (int i = 0; i < keyLength; i++) {
tempBits1 >>= 8;
char character = (char) (tempBits1 & 0x00ff);
if (stringKey.charAt(i) != character) {
return false;
}
}
} else {
// Default encoding: one 16-bit char per character.
for (int i = 0; i < keyLength; i++) {
tempBits1 >>= 16;
char character = (char) (tempBits1 & 0x00FFff);
if (stringKey.charAt(i) != character) {
return false;
}
}
}
return true;
}
}
// Non-String keys never match the inline-encoded key.
return false;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
| |
package com.eas.widgets;
import com.eas.core.HasPublished;
import com.eas.core.Utils;
import com.eas.core.XElement;
import com.eas.menu.HasComponentPopupMenu;
import com.eas.menu.PlatypusPopupMenu;
import com.eas.ui.HasEmptyText;
import com.eas.ui.HasEventsExecutor;
import com.eas.ui.HasJsFacade;
import com.eas.ui.events.ActionEvent;
import com.eas.ui.events.ActionHandler;
import com.eas.ui.events.EventsExecutor;
import com.eas.ui.events.HasActionHandlers;
import com.eas.ui.events.HasHideHandlers;
import com.eas.ui.events.HasShowHandlers;
import com.eas.ui.events.HideEvent;
import com.eas.ui.events.HideHandler;
import com.eas.ui.events.ShowEvent;
import com.eas.ui.events.ShowHandler;
import com.eas.widgets.boxes.FormattedObjectBox;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.event.dom.client.ContextMenuEvent;
import com.google.gwt.event.dom.client.ContextMenuHandler;
import com.google.gwt.event.logical.shared.HasResizeHandlers;
import com.google.gwt.event.logical.shared.ResizeEvent;
import com.google.gwt.event.logical.shared.ResizeHandler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.ui.RequiresResize;
// GWT widget: a formatted text field that exposes a JavaScript facade ("published"
// object) to Platypus scripts, supports an optional popup menu, empty-text hint,
// and fires show/hide/resize/action events. The native publish(...) method uses
// JSNI; its string method references are resolved by the GWT compiler, so the
// JSNI body must not be edited casually.
public class PlatypusFormattedTextField extends FormattedObjectBox implements HasJsFacade, HasEmptyText, HasComponentPopupMenu, HasActionHandlers, HasEventsExecutor, HasShowHandlers, HasHideHandlers,
HasResizeHandlers, RequiresResize {
protected EventsExecutor eventsExecutor;
protected PlatypusPopupMenu menu;
protected String emptyText;
protected String name;
protected JavaScriptObject published;
//
// True while setJsValue is applying a value; suppresses firing ActionEvents
// for programmatic (non-user) value changes — see addActionHandler.
protected boolean settingValue;
public PlatypusFormattedTextField() {
super();
getElement().<XElement> cast().addResizingTransitionEnd(this);
}
@Override
public HandlerRegistration addResizeHandler(ResizeHandler handler) {
return addHandler(handler, ResizeEvent.getType());
}
@Override
public void onResize() {
// Only report a size while attached; fires with the current offset size.
if (isAttached()) {
ResizeEvent.fire(this, getElement().getOffsetWidth(), getElement().getOffsetHeight());
}
}
@Override
public HandlerRegistration addHideHandler(HideHandler handler) {
return addHandler(handler, HideEvent.getType());
}
@Override
public HandlerRegistration addShowHandler(ShowHandler handler) {
return addHandler(handler, ShowEvent.getType());
}
@Override
public void setVisible(boolean visible) {
// Fire ShowEvent/HideEvent only on an actual visibility transition.
boolean oldValue = isVisible();
super.setVisible(visible);
if (oldValue != visible) {
if (visible) {
ShowEvent.fire(this, this);
} else {
HideEvent.fire(this, this);
}
}
}
// Count of registered action handlers; the value-change bridge below is kept
// alive only while at least one action handler is registered.
protected int actionHandlers;
protected HandlerRegistration valueChangeReg;
@Override
public HandlerRegistration addActionHandler(ActionHandler handler) {
final HandlerRegistration superReg = super.addHandler(handler, ActionEvent.getType());
if (actionHandlers == 0) {
// Lazily install a single value-change listener that translates user-driven
// value changes into ActionEvents (programmatic changes are filtered out
// via the settingValue flag).
valueChangeReg = addValueChangeHandler(new ValueChangeHandler<Object>() {
@Override
public void onValueChange(ValueChangeEvent<Object> event) {
if (!settingValue) {
ActionEvent.fire(PlatypusFormattedTextField.this, PlatypusFormattedTextField.this);
}
}
});
}
actionHandlers++;
return new HandlerRegistration() {
@Override
public void removeHandler() {
superReg.removeHandler();
actionHandlers--;
// Tear down the bridge when the last action handler is removed.
if (actionHandlers == 0) {
assert valueChangeReg != null : "Erroneous use of addActionHandler/removeHandler detected in PlatypusFormattedTextField";
valueChangeReg.removeHandler();
valueChangeReg = null;
}
}
};
}
@Override
public EventsExecutor getEventsExecutor() {
return eventsExecutor;
}
@Override
public void setEventsExecutor(EventsExecutor aExecutor) {
eventsExecutor = aExecutor;
}
@Override
public PlatypusPopupMenu getPlatypusPopupMenu() {
return menu;
}
// Registration of the context-menu DOM handler while a popup menu is set.
protected HandlerRegistration menuTriggerReg;
@Override
public void setPlatypusPopupMenu(PlatypusPopupMenu aMenu) {
if (menu != aMenu) {
// Detach the previous context-menu trigger before swapping menus.
if (menuTriggerReg != null)
menuTriggerReg.removeHandler();
menu = aMenu;
if (menu != null) {
// Show the popup at the mouse position instead of the browser's menu.
menuTriggerReg = super.addDomHandler(new ContextMenuHandler() {
@Override
public void onContextMenu(ContextMenuEvent event) {
event.preventDefault();
event.stopPropagation();
menu.setPopupPosition(event.getNativeEvent().getClientX(), event.getNativeEvent().getClientY());
menu.show();
}
}, ContextMenuEvent.getType());
}
}
}
@Override
public String getJsName() {
return name;
}
@Override
public void setJsName(String aValue) {
// Keeps the widget's DOM name attribute in sync with the script-visible name.
name = aValue;
setName(name);
}
// Script-facing value accessor: converts the Java value to its JS counterpart.
public Object getJsValue() {
return Utils.toJs(getValue());
}
// Script-facing value mutator; settingValue suppresses the ActionEvent bridge
// while the value is applied programmatically.
public void setJsValue(Object aValue) throws Exception {
settingValue = true;
try {
setValue(Utils.toJava(aValue), true);
} finally {
settingValue = false;
}
}
@Override
public String getEmptyText() {
return emptyText;
}
@Override
public void setEmptyText(String aValue) {
// Placeholder text shown while the field is empty.
emptyText = aValue;
WidgetsUtils.applyEmptyText(getElement(), emptyText);
}
public JavaScriptObject getPublished() {
return published;
}
@Override
public void setPublished(JavaScriptObject aValue) {
// Binds this widget to its JavaScript facade and defines the facade's
// properties exactly once per facade object.
if (published != aValue) {
published = aValue;
setEventThis(published);
if (published != null) {
publish(this, aValue);
}
}
}
// JSNI: defines the script-visible properties (text, emptyText, value,
// valueType, format, onFormat, onParse) on the published facade object.
// DO NOT rename the referenced Java methods without updating these strings.
private native static void publish(HasPublished aWidget, JavaScriptObject published)/*-{
var B = @com.eas.core.Predefine::boxing;
Object.defineProperty(published, "text", {
get : function() {
return aWidget.@com.eas.widgets.PlatypusFormattedTextField::getText()();
},
set : function(aValue) {
aWidget.@com.eas.widgets.PlatypusFormattedTextField::setText(Ljava/lang/String;)(aValue != null ? '' + aValue : null);
}
});
Object.defineProperty(published, "emptyText", {
get : function() {
return aWidget.@com.eas.ui.HasEmptyText::getEmptyText()();
},
set : function(aValue) {
aWidget.@com.eas.ui.HasEmptyText::setEmptyText(Ljava/lang/String;)(aValue!=null?''+aValue:null);
}
});
Object.defineProperty(published, "value", {
get : function() {
return B.boxAsJs(aWidget.@com.eas.widgets.PlatypusFormattedTextField::getJsValue()());
},
set : function(aValue) {
aWidget.@com.eas.widgets.PlatypusFormattedTextField::setJsValue(Ljava/lang/Object;)(B.boxAsJava(aValue));
}
});
Object.defineProperty(published, "valueType", {
get : function() {
var typeNum = aWidget.@com.eas.widgets.PlatypusFormattedTextField::getValueType()()
var type;
if (typeNum === @com.eas.widgets.boxes.ObjectFormat::NUMBER ){
type = $wnd.Number;
} else if (typeNum === @com.eas.widgets.boxes.ObjectFormat::DATE ){
type = $wnd.Date;
} else if (typeNum === @com.eas.widgets.boxes.ObjectFormat::REGEXP ){
type = $wnd.RegExp;
} else {
type = $wnd.String;
}
return type;
},
set : function(aValue) {
var typeNum;
if (aValue === $wnd.Number ){
typeNum = @com.eas.widgets.boxes.ObjectFormat::NUMBER;
} else if (aValue === $wnd.Date ){
typeNum = @com.eas.widgets.boxes.ObjectFormat::DATE;
} else if (aValue === $wnd.RegExp ){
typeNum = @com.eas.widgets.boxes.ObjectFormat::REGEXP;
} else {
typeNum = @com.eas.widgets.boxes.ObjectFormat::TEXT;
}
aWidget.@com.eas.widgets.PlatypusFormattedTextField::setValueType(I)(typeNum);
}
});
Object.defineProperty(published, "format", {
get : function() {
return aWidget.@com.eas.widgets.PlatypusFormattedTextField::getFormat()();
},
set : function(aValue) {
aWidget.@com.eas.widgets.PlatypusFormattedTextField::setFormat(Ljava/lang/String;)(aValue != null ? '' + aValue : null);
}
});
Object.defineProperty(published, "onFormat", {
get : function() {
return aWidget.@com.eas.widgets.PlatypusFormattedTextField::getOnFormat()();
},
set : function(aValue) {
aWidget.@com.eas.widgets.PlatypusFormattedTextField::setOnFormat(Lcom/google/gwt/core/client/JavaScriptObject;)(aValue);
}
});
Object.defineProperty(published, "onParse", {
get : function() {
return aWidget.@com.eas.widgets.PlatypusFormattedTextField::getOnParse()();
},
set : function(aValue) {
aWidget.@com.eas.widgets.PlatypusFormattedTextField::setOnParse(Lcom/google/gwt/core/client/JavaScriptObject;)(aValue);
}
});
}-*/;
}
| |
/*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.configurationprocessor;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.PrimitiveType;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.TypeKindVisitor8;
import javax.tools.Diagnostic.Kind;
/**
* A {@link PropertyDescriptor} for a constructor parameter.
*
* @author Stephane Nicoll
*/
class ConstructorParameterPropertyDescriptor extends PropertyDescriptor<VariableElement> {

    ConstructorParameterPropertyDescriptor(TypeElement ownerElement, ExecutableElement factoryMethod,
            VariableElement source, String name, TypeMirror type, VariableElement field, ExecutableElement getter,
            ExecutableElement setter) {
        super(ownerElement, factoryMethod, source, name, type, field, getter, setter);
    }

    @Override
    protected boolean isProperty(MetadataGenerationEnvironment env) {
        // If it's a constructor parameter, it doesn't matter as we must be able to bind
        // it to build the object.
        return !isNested(env);
    }

    /**
     * Resolves the default value: an explicit {@code @DefaultValue} annotation wins;
     * otherwise primitives fall back to their zero-value (non-primitives to null).
     */
    @Override
    protected Object resolveDefaultValue(MetadataGenerationEnvironment environment) {
        Object defaultValue = getDefaultValueFromAnnotation(environment, getSource());
        if (defaultValue != null) {
            return defaultValue;
        }
        return getSource().asType().accept(DefaultPrimitiveTypeVisitor.INSTANCE, null);
    }

    /**
     * Reads the {@code @DefaultValue} annotation on {@code element} and coerces its
     * value(s) to the parameter's (element) type. A single value is returned as a
     * scalar; multiple values as a {@code List}. Coercion failures are reported as
     * compiler errors rather than thrown.
     */
    private Object getDefaultValueFromAnnotation(MetadataGenerationEnvironment environment, Element element) {
        AnnotationMirror annotation = environment.getDefaultValueAnnotation(element);
        List<String> defaultValue = getDefaultValue(environment, annotation);
        if (defaultValue != null) {
            try {
                TypeMirror specificType = determineSpecificType(environment);
                if (defaultValue.size() == 1) {
                    return coerceValue(specificType, defaultValue.get(0));
                }
                return defaultValue.stream().map((value) -> coerceValue(specificType, value))
                        .collect(Collectors.toList());
            }
            catch (IllegalArgumentException ex) {
                environment.getMessager().printMessage(Kind.ERROR, ex.getMessage(), element, annotation);
            }
        }
        return null;
    }

    @SuppressWarnings("unchecked")
    private List<String> getDefaultValue(MetadataGenerationEnvironment environment, AnnotationMirror annotation) {
        if (annotation == null) {
            return null;
        }
        Map<String, Object> values = environment.getAnnotationElementValues(annotation);
        return (List<String>) values.get("value");
    }

    /**
     * Determines the type to coerce against: the collection/array element type when
     * present, unboxed to its primitive counterpart when applicable.
     */
    private TypeMirror determineSpecificType(MetadataGenerationEnvironment environment) {
        TypeMirror candidate = getSource().asType();
        TypeMirror elementCandidate = environment.getTypeUtils().extractElementType(candidate);
        if (elementCandidate != null) {
            candidate = elementCandidate;
        }
        PrimitiveType primitiveType = environment.getTypeUtils().getPrimitiveType(candidate);
        return (primitiveType != null) ? primitiveType : candidate;
    }

    /** Coerces {@code value} to {@code type}; non-primitive types keep the raw string. */
    private Object coerceValue(TypeMirror type, String value) {
        Object coercedValue = type.accept(DefaultValueCoercionTypeVisitor.INSTANCE, value);
        return (coercedValue != null) ? coercedValue : value;
    }

    /**
     * Coerces a string default value to the primitive type of the parameter,
     * throwing {@link IllegalArgumentException} for unparseable representations.
     */
    private static class DefaultValueCoercionTypeVisitor extends TypeKindVisitor8<Object, String> {

        private static final DefaultValueCoercionTypeVisitor INSTANCE = new DefaultValueCoercionTypeVisitor();

        private Integer parseInteger(String value) {
            try {
                return Integer.valueOf(value);
            }
            catch (NumberFormatException ex) {
                throw new IllegalArgumentException(String.format("Invalid number representation '%s'", value));
            }
        }

        // Bug fix: long defaults were previously parsed with Integer.valueOf, which
        // rejected perfectly valid long values outside the int range (for example
        // "10000000000"). Parse them with Long.valueOf instead.
        private Long parseLong(String value) {
            try {
                return Long.valueOf(value);
            }
            catch (NumberFormatException ex) {
                throw new IllegalArgumentException(String.format("Invalid number representation '%s'", value));
            }
        }

        private Double parseFloatingPoint(String value) {
            try {
                return Double.valueOf(value);
            }
            catch (NumberFormatException ex) {
                throw new IllegalArgumentException(String.format("Invalid floating point representation '%s'", value));
            }
        }

        @Override
        public Object visitPrimitiveAsBoolean(PrimitiveType t, String value) {
            return Boolean.parseBoolean(value);
        }

        @Override
        public Object visitPrimitiveAsByte(PrimitiveType t, String value) {
            return parseInteger(value);
        }

        @Override
        public Object visitPrimitiveAsShort(PrimitiveType t, String value) {
            return parseInteger(value);
        }

        @Override
        public Object visitPrimitiveAsInt(PrimitiveType t, String value) {
            return parseInteger(value);
        }

        @Override
        public Object visitPrimitiveAsLong(PrimitiveType t, String value) {
            return parseLong(value);
        }

        @Override
        public Object visitPrimitiveAsChar(PrimitiveType t, String value) {
            // Single-character (or empty) values pass through as the raw string.
            if (value.length() > 1) {
                throw new IllegalArgumentException(String.format("Invalid character representation '%s'", value));
            }
            return value;
        }

        @Override
        public Object visitPrimitiveAsFloat(PrimitiveType t, String value) {
            return parseFloatingPoint(value);
        }

        @Override
        public Object visitPrimitiveAsDouble(PrimitiveType t, String value) {
            return parseFloatingPoint(value);
        }

    }

    /**
     * Supplies the implicit default (zero-value) for a primitive parameter when no
     * {@code @DefaultValue} is present. Non-primitive kinds yield null.
     */
    private static class DefaultPrimitiveTypeVisitor extends TypeKindVisitor8<Object, Void> {

        private static final DefaultPrimitiveTypeVisitor INSTANCE = new DefaultPrimitiveTypeVisitor();

        @Override
        public Object visitPrimitiveAsBoolean(PrimitiveType t, Void ignore) {
            return false;
        }

        @Override
        public Object visitPrimitiveAsByte(PrimitiveType t, Void ignore) {
            return 0;
        }

        @Override
        public Object visitPrimitiveAsShort(PrimitiveType t, Void ignore) {
            return 0;
        }

        @Override
        public Object visitPrimitiveAsInt(PrimitiveType t, Void ignore) {
            return 0;
        }

        @Override
        public Object visitPrimitiveAsLong(PrimitiveType t, Void ignore) {
            return 0L;
        }

        @Override
        public Object visitPrimitiveAsChar(PrimitiveType t, Void ignore) {
            // No sensible zero-value for char in metadata; leave it absent.
            return null;
        }

        @Override
        public Object visitPrimitiveAsFloat(PrimitiveType t, Void ignore) {
            return 0;
        }

        @Override
        public Object visitPrimitiveAsDouble(PrimitiveType t, Void ignore) {
            return 0D;
        }

    }

}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.cp.internal.datastructures.semaphore;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.cp.ISemaphore;
import com.hazelcast.cp.internal.HazelcastRaftTestSupport;
import com.hazelcast.cp.internal.RaftGroupId;
import com.hazelcast.cp.internal.RaftInvocationManager;
import com.hazelcast.cp.internal.datastructures.exception.WaitKeyCancelledException;
import com.hazelcast.cp.internal.datastructures.semaphore.operation.AcquirePermitsOp;
import com.hazelcast.cp.internal.datastructures.semaphore.operation.DrainPermitsOp;
import com.hazelcast.cp.internal.session.ProxySessionManagerService;
import com.hazelcast.cp.internal.session.SessionAwareProxy;
import com.hazelcast.spi.impl.InternalCompletableFuture;
import org.junit.Before;
import org.junit.Test;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import static com.hazelcast.internal.util.UuidUtil.newUnsecureUUID;
import static com.hazelcast.test.Accessors.getNodeEngineImpl;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeFalse;
public abstract class AbstractSemaphoreFailureTest extends HazelcastRaftTestSupport {
protected HazelcastInstance[] instances;
protected HazelcastInstance primaryInstance;
protected HazelcastInstance proxyInstance;
protected ProxySessionManagerService sessionManagerService;
protected ISemaphore semaphore;
protected String objectName = "semaphore";
@Before
public void setup() {
// Create the cluster, resolve the member used for server-side assertions
// (primary) and the member whose semaphore proxy is exercised, then obtain
// the semaphore proxy and the proxy-side session manager service.
instances = createInstances();
primaryInstance = getPrimaryInstance();
proxyInstance = getProxyInstance();
semaphore = proxyInstance.getCPSubsystem().getSemaphore(getProxyName());
sessionManagerService = getNodeEngineImpl(proxyInstance).getService(ProxySessionManagerService.SERVICE_NAME);
}
// Creates the Hazelcast members forming the test cluster.
protected abstract HazelcastInstance[] createInstances();
// Name of the semaphore proxy under test.
protected abstract String getProxyName();
// Member against which server-side registry assertions are made.
protected abstract HazelcastInstance getPrimaryInstance();
// Member whose semaphore proxy is exercised; defaults to the primary member.
protected HazelcastInstance getProxyInstance() {
return getPrimaryInstance();
}
// NOTE(review): presumably distinguishes JDK-compatible from session-aware
// semaphore subclasses — confirm against concrete subclasses.
abstract boolean isJDKCompatible();
// Resolves the Raft group id that backs the given semaphore proxy.
private RaftGroupId getGroupId(ISemaphore semaphore) {
    SessionAwareProxy sessionAwareProxy = (SessionAwareProxy) semaphore;
    return sessionAwareProxy.getGroupId();
}
// Session id used by the given member's proxy for the given group (implementation-specific).
abstract long getSessionId(HazelcastInstance semaphoreInstance, RaftGroupId groupId);
// Unique thread id allocated by the proxy session manager for the given group.
long getThreadId(RaftGroupId groupId) {
return sessionManagerService.getOrCreateUniqueThreadId(groupId);
}
@Test(timeout = 300_000)
public void testRetriedAcquireDoesNotCancelPendingAcquireRequestWhenAlreadyAcquired() throws InterruptedException {
// Take the only permit so any further acquire must wait.
semaphore.init(1);
semaphore.acquire();
RaftGroupId groupId = getGroupId(semaphore);
long sessionId = getSessionId(proxyInstance, groupId);
long threadId = getThreadId(groupId);
UUID invUid = newUnsecureUUID();
RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
// Pending acquire with a 5-minute wait: registers a wait key on the server.
invocationManager.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid, 1, MINUTES.toMillis(5)));
assertTrueEventually(() -> {
SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
assertNotNull(registry);
assertEquals(1, registry.getWaitTimeouts().size());
});
// Retry with the SAME invocation uid must NOT cancel the pending wait key.
invocationManager.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid, 1, -1));
assertTrueAllTheTime(() -> {
SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
assertEquals(1, registry.getWaitTimeouts().size());
}, 10);
}
@Test(timeout = 300_000)
public void testNewAcquireCancelsPendingAcquireRequestWhenAlreadyAcquired() throws InterruptedException {
// Take the only permit so the first acquire invocation must wait.
semaphore.init(1);
semaphore.acquire();
RaftGroupId groupId = getGroupId(semaphore);
long sessionId = getSessionId(proxyInstance, groupId);
long threadId = getThreadId(groupId);
UUID invUid1 = newUnsecureUUID();
UUID invUid2 = newUnsecureUUID();
RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
InternalCompletableFuture<Object> f = invocationManager
.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid1, 1, MINUTES.toMillis(5)));
// Wait until the first invocation has registered its wait key on the server.
assertTrueEventually(() -> {
SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
assertNotNull(registry);
assertEquals(1, registry.getWaitTimeouts().size());
});
// A NEW invocation uid from the same session/thread cancels the pending acquire.
invocationManager.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid2, 1, -1));
try {
f.joinInternal();
fail();
} catch (WaitKeyCancelledException ignored) {
}
}
@Test(timeout = 300_000)
public void testNewAcquireCancelsPendingAcquireRequestWhenNotAcquired() throws InterruptedException {
semaphore.init(1);
semaphore.acquire();
semaphore.release();
// if the session-aware semaphore is used, we guarantee that there is a session id now...
RaftGroupId groupId = getGroupId(semaphore);
long sessionId = getSessionId(proxyInstance, groupId);
long threadId = getThreadId(groupId);
UUID invUid1 = newUnsecureUUID();
UUID invUid2 = newUnsecureUUID();
RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
// Ask for 2 permits while only 1 exists, so the invocation must wait.
InternalCompletableFuture<Object> f = invocationManager
.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid1, 2, MINUTES.toMillis(5)));
assertTrueEventually(() -> {
SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
assertNotNull(registry);
assertEquals(1, registry.getWaitTimeouts().size());
});
// A NEW invocation uid cancels the pending acquire; its future fails.
invocationManager.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid2, 1, -1));
try {
f.joinInternal();
fail();
} catch (WaitKeyCancelledException ignored) {
}
}
@Test(timeout = 300_000)
public void testTryAcquireWithTimeoutCancelsPendingAcquireRequestWhenAlreadyAcquired() throws InterruptedException {
// Take the only permit so the first acquire invocation must wait.
semaphore.init(1);
semaphore.acquire();
RaftGroupId groupId = getGroupId(semaphore);
long sessionId = getSessionId(proxyInstance, groupId);
long threadId = getThreadId(groupId);
UUID invUid1 = newUnsecureUUID();
UUID invUid2 = newUnsecureUUID();
RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
InternalCompletableFuture<Object> f = invocationManager
.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid1, 1, MINUTES.toMillis(5)));
// Wait until the first invocation has registered its wait key on the server.
assertTrueEventually(() -> {
SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
assertNotNull(registry);
assertEquals(1, registry.getWaitTimeouts().size());
});
// A new tryAcquire with a 100 ms timeout still cancels the pending acquire.
invocationManager.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid2, 1, 100));
try {
f.joinInternal();
fail();
} catch (WaitKeyCancelledException ignored) {
}
}
@Test(timeout = 300_000)
public void testNewTryAcquireWithTimeoutCancelsPendingAcquireRequestWhenNotAcquired() throws InterruptedException {
semaphore.init(1);
semaphore.acquire();
semaphore.release();
// if the session-aware semaphore is used, we guarantee that there is a session id now...
RaftGroupId groupId = getGroupId(semaphore);
long sessionId = getSessionId(proxyInstance, groupId);
long threadId = getThreadId(groupId);
UUID invUid1 = newUnsecureUUID();
UUID invUid2 = newUnsecureUUID();
RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
// Ask for 2 permits while only 1 exists, so the invocation must wait.
InternalCompletableFuture<Object> f = invocationManager
.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid1, 2, MINUTES.toMillis(5)));
assertTrueEventually(() -> {
SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
assertNotNull(registry);
assertEquals(1, registry.getWaitTimeouts().size());
});
// A new tryAcquire with a 100 ms timeout cancels the pending acquire.
invocationManager.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid2, 1, 100));
try {
f.joinInternal();
fail();
} catch (WaitKeyCancelledException ignored) {
}
}
@Test(timeout = 300_000)
public void testNewTryAcquireWithoutTimeoutCancelsPendingAcquireRequestWhenAlreadyAcquired() throws InterruptedException {
// Take the only permit so the first acquire invocation must wait.
semaphore.init(1);
semaphore.acquire();
RaftGroupId groupId = getGroupId(semaphore);
long sessionId = getSessionId(proxyInstance, groupId);
long threadId = getThreadId(groupId);
UUID invUid1 = newUnsecureUUID();
UUID invUid2 = newUnsecureUUID();
RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
InternalCompletableFuture<Object> f = invocationManager
.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid1, 1, MINUTES.toMillis(5)));
// Wait until the first invocation has registered its wait key on the server.
assertTrueEventually(() -> {
SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
assertNotNull(registry);
assertEquals(1, registry.getWaitTimeouts().size());
});
// A new tryAcquire with zero timeout (no wait) still cancels the pending acquire.
invocationManager.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid2, 1, 0));
try {
f.joinInternal();
fail();
} catch (WaitKeyCancelledException ignored) {
}
}
@Test(timeout = 300_000)
public void testNewTryAcquireWithoutTimeoutCancelsPendingAcquireRequestsWhenNotAcquired() throws InterruptedException {
semaphore.init(1);
semaphore.acquire();
semaphore.release();
// if the session-aware semaphore is used, we guarantee that there is a session id now...
RaftGroupId groupId = getGroupId(semaphore);
long sessionId = getSessionId(proxyInstance, groupId);
long threadId = getThreadId(groupId);
UUID invUid1 = newUnsecureUUID();
UUID invUid2 = newUnsecureUUID();
RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
// Ask for 2 permits while only 1 exists, so the invocation must wait.
InternalCompletableFuture<Object> f = invocationManager
.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid1, 2, MINUTES.toMillis(5)));
assertTrueEventually(() -> {
SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
assertNotNull(registry);
assertEquals(1, registry.getWaitTimeouts().size());
});
// A new tryAcquire with zero timeout (no wait) cancels the pending acquire.
invocationManager.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid2, 1, 0));
try {
f.joinInternal();
fail();
} catch (WaitKeyCancelledException ignored) {
}
}
@Test(timeout = 300_000)
public void testReleaseCancelsPendingAcquireRequestWhenPermitsAcquired() throws InterruptedException {
// Take the only permit so the acquire invocation must wait.
semaphore.init(1);
semaphore.acquire();
RaftGroupId groupId = getGroupId(semaphore);
long sessionId = getSessionId(proxyInstance, groupId);
long threadId = getThreadId(groupId);
UUID invUid = newUnsecureUUID();
RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
InternalCompletableFuture<Object> f = invocationManager
.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid, 1, MINUTES.toMillis(5)));
// Wait until the invocation has registered its wait key on the server.
assertTrueEventually(() -> {
SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
assertNotNull(registry);
assertEquals(1, registry.getWaitTimeouts().size());
});
// A release from the same caller cancels the pending acquire request.
try {
semaphore.release();
} catch (IllegalArgumentException ignored) {
}
try {
f.joinInternal();
fail();
} catch (WaitKeyCancelledException ignored) {
}
}
@Test(timeout = 300_000)
public void testReleaseCancelsPendingAcquireRequestWhenNoPermitsAcquired() throws InterruptedException {
semaphore.init(1);
semaphore.acquire();
semaphore.release();
// if the session-aware semaphore is used, we guarantee that there is a session id now...
RaftGroupId groupId = getGroupId(semaphore);
long sessionId = getSessionId(proxyInstance, groupId);
long threadId = getThreadId(groupId);
UUID invUid = newUnsecureUUID();
RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
// Ask for 2 permits while only 1 exists, so the invocation must wait.
InternalCompletableFuture<Object> f = invocationManager
.invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid, 2, MINUTES.toMillis(5)));
assertTrueEventually(() -> {
SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
assertNotNull(registry);
assertEquals(1, registry.getWaitTimeouts().size());
});
// A release (even without held permits) cancels the pending acquire request.
try {
semaphore.release();
} catch (IllegalStateException ignored) {
}
try {
f.joinInternal();
fail();
} catch (WaitKeyCancelledException ignored) {
}
}
@Test(timeout = 300_000)
public void testDrainCancelsPendingAcquireRequestWhenNotAcquired() throws InterruptedException {
    semaphore.init(1);
    semaphore.acquire();
    semaphore.release();
    // If the session-aware semaphore is used, a session id is guaranteed to exist at this point...
    RaftGroupId groupId = getGroupId(semaphore);
    long sessionId = getSessionId(proxyInstance, groupId);
    long threadId = getThreadId(groupId);
    UUID invocationUid = newUnsecureUUID();
    RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
    // Request more permits than are available so the acquire parks as a wait key.
    InternalCompletableFuture<Object> pendingAcquire = invocationManager
            .invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invocationUid, 2, MINUTES.toMillis(5)));
    // Wait until the parked acquire shows up in the registry's wait timeouts.
    assertTrueEventually(() -> {
        SemaphoreService semaphoreService = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
        SemaphoreRegistry semaphoreRegistry = semaphoreService.getRegistryOrNull(groupId);
        assertNotNull(semaphoreRegistry);
        assertEquals(1, semaphoreRegistry.getWaitTimeouts().size());
    });
    // Draining all available permits must cancel the parked acquire request.
    semaphore.drainPermits();
    try {
        pendingAcquire.joinInternal();
        fail();
    } catch (WaitKeyCancelledException ignored) {
        // Expected: the drain cancelled the pending acquire request.
    }
}
@Test(timeout = 300_000)
public void testRetriedAcquireReceivesPermitsOnlyOnce() throws InterruptedException, ExecutionException {
    semaphore.init(1);
    semaphore.acquire();
    semaphore.release();
    // if the session-aware semaphore is used, we guarantee that there is a session id now...
    RaftGroupId groupId = getGroupId(semaphore);
    long sessionId = getSessionId(proxyInstance, groupId);
    long threadId = getThreadId(groupId);
    UUID invUid1 = newUnsecureUUID();
    RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
    // First acquire parks: only 1 permit is available but 2 are requested.
    InternalCompletableFuture<Object> f1 = invocationManager
            .invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid1, 2, MINUTES.toMillis(5)));
    assertTrueEventually(() -> {
        SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
        SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
        assertNotNull(registry);
        assertEquals(1, registry.getWaitTimeouts().size());
    });
    // A second caller parks as well, bringing the wait-timeout count to 2.
    spawn(() -> {
        try {
            semaphore.tryAcquire(20, 5, TimeUnit.MINUTES);
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of swallowing the exception.
            Thread.currentThread().interrupt();
        }
    });
    assertTrueEventually(() -> {
        SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
        SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
        // FIX: guard against a null registry before dereferencing it. assertTrueEventually
        // retries on AssertionError, but a raw NPE would fail the test immediately.
        assertNotNull(registry);
        assertEquals(2, registry.getWaitTimeouts().size());
    });
    // Retry the first acquire with the SAME invocation uid. It must attach to the
    // existing wait key instead of registering a second acquisition.
    InternalCompletableFuture<Object> f2 = invocationManager
            .invoke(groupId, new AcquirePermitsOp(objectName, sessionId, threadId, invUid1, 2, MINUTES.toMillis(5)));
    assertTrueEventually(() -> {
        SemaphoreService service = getNodeEngineImpl(primaryInstance).getService(SemaphoreService.SERVICE_NAME);
        SemaphoreRegistry registry = service.getRegistryOrNull(groupId);
        // FIX: null-guard both the registry and the resource before dereferencing
        // (consistent with the other assertTrueEventually blocks in this class).
        assertNotNull(registry);
        Semaphore raftSemaphore = registry.getResourceOrNull(objectName);
        assertNotNull(raftSemaphore);
        assertEquals(2, raftSemaphore.getInternalWaitKeysMap().size());
    });
    // Adding 3 permits satisfies f1 (takes 2); the retry f2 observes the same
    // single grant, so exactly 2 permits must remain available.
    spawn(() -> semaphore.increasePermits(3)).get();
    f1.joinInternal();
    f2.joinInternal();
    assertEquals(2, semaphore.availablePermits());
}
@Test(timeout = 300_000)
public void testExpiredAndRetriedTryAcquireRequestReceivesFailureResponse() throws InterruptedException, ExecutionException {
    assumeFalse(isJDKCompatible());
    semaphore.init(1);
    semaphore.acquire();
    final RaftGroupId groupId = getGroupId(semaphore);
    long sessionId = getSessionId(proxyInstance, groupId);
    long threadId = getThreadId(groupId);
    UUID invocationUid = newUnsecureUUID();
    RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
    // No permit is available, so the first attempt expires after 5 seconds.
    InternalCompletableFuture<Boolean> firstAttempt = invocationManager.invoke(groupId,
            new AcquirePermitsOp(objectName, sessionId, threadId, invocationUid, 1, SECONDS.toMillis(5)));
    assertFalse(firstAttempt.joinInternal());
    // Release a permit from another thread, then retry with the SAME invocation uid.
    spawn(() -> semaphore.release()).get();
    InternalCompletableFuture<Boolean> retriedAttempt = invocationManager.invoke(groupId,
            new AcquirePermitsOp(objectName, sessionId, threadId, invocationUid, 1, SECONDS.toMillis(5)));
    // The retry must receive the original expired outcome, not grab the new permit.
    assertFalse(retriedAttempt.joinInternal());
}
@Test(timeout = 300_000)
public void testRetriedDrainRequestIsNotProcessedAgain() throws InterruptedException, ExecutionException {
    assumeFalse(isJDKCompatible());
    semaphore.init(1);
    semaphore.acquire();
    final RaftGroupId groupId = getGroupId(semaphore);
    long sessionId = getSessionId(proxyInstance, groupId);
    long threadId = getThreadId(groupId);
    UUID invocationUid = newUnsecureUUID();
    RaftInvocationManager invocationManager = getRaftInvocationManager(proxyInstance);
    // The single permit is held, so the first drain yields nothing.
    InternalCompletableFuture<Integer> firstDrain = invocationManager
            .invoke(groupId, new DrainPermitsOp(objectName, sessionId, threadId, invocationUid));
    assertEquals(0, (int) firstDrain.joinInternal());
    // Release a permit from another thread, then retry the drain with the SAME uid.
    spawn(() -> semaphore.release()).get();
    InternalCompletableFuture<Integer> retriedDrain = invocationManager
            .invoke(groupId, new DrainPermitsOp(objectName, sessionId, threadId, invocationUid));
    // The retry must return the original result (0) rather than draining again.
    assertEquals(0, (int) retriedDrain.joinInternal());
}
@Test
public void testAcquireOnMultipleProxies() {
    HazelcastInstance otherInstance = instances[0] == proxyInstance ? instances[1] : instances[0];
    ISemaphore semaphore2 = otherInstance.getCPSubsystem().getSemaphore(semaphore.getName());
    semaphore.init(1);
    // FIX: assert that the acquisition actually succeeded instead of discarding the
    // boolean result. If it silently failed, the assertFalse below would pass for
    // the wrong reason (permit still free but proxies unrelated).
    assertTrue(semaphore.tryAcquire(1));
    // The permit taken through one proxy must be visible through the other proxy.
    assertFalse(semaphore2.tryAcquire());
}
}
| |
/*
* Copyright (C) 2012-2021, TomTom (http://tomtom.com).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tomtom.speedtools.rest.security;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import javax.ws.rs.core.SecurityContext;
import java.io.Serializable;
import java.security.Principal;
/**
* The session manager is the central place to start and terminate sessions, and create a {@link SecurityContext} for
* the ongoing session. It encapsulates how information is stored in a session, and how sessions are marked as
* authenticated.
*
* <em>None</em> of the methods provided by this class are thread-safe in the sense that no other threads can interfere
* with the session during method calls of another thread. This is due to the asynchronous nature of HTTP, where, even
* if the methods here were thread-safe, a session that was created could e.g. be torn down immediately again by a
* different thread while the HTTP request of the first thread is still being processed by the application.
* <em>However</em>, the session manager guarantees that any data set in the session is set atomically, so that the
* session can never contain mixed data from invocations by multiple concurrent calls. For more information, see the
* JavaDoc of each method in question.
*/
public class SessionManager {
    private static final Logger LOG = LoggerFactory.getLogger(SessionManager.class);

    /**
     * Authentication schemes used by this class in {@code startWebSession} and {@code startAppSession}.
     * Any other strings may be used as well, for example, by derived classes of {@code SessionManager}.
     */
    public static final String AUTHENTICATION_SCHEME_USERNAME = "USERNAME";
    public static final String AUTHENTICATION_SCHEME_APPTOKEN = "APPTOKEN";

    /**
     * The name of the key that is used to store our session data in the {@link HttpSession}. The name must be
     * name-spaced to prevent collisions with e.g. container attributes or attributes set by other frameworks.
     */
    @Nonnull
    private static final String SESSION_DATA_KEY = "com.tomtom.speedtools.rest.security.SESSIONDATA";

    /**
     * The current servlet request.
     */
    @Nonnull
    private final HttpServletRequest httpServletRequest;

    /**
     * Constructs a new {@code SessionManager} that is initialised with the current request context. <em>Note</em> that
     * the constructor must be called on the same thread that called the current JAX-RS resource, otherwise
     * request-contextual data cannot be retrieved. This constructor will throw an error in case it is not called on the
     * correct thread.
     *
     * @throws RuntimeException In case this constructor is not accessed on the thread that is used to call the JAX-RS
     *                          resource method.
     */
    public SessionManager() {
        this(ContextHelper.getContextData(HttpServletRequest.class));
    }

    /**
     * Package-private constructor for use by unit tests.
     *
     * @param httpServletRequest The current request.
     */
    SessionManager(@Nonnull final HttpServletRequest httpServletRequest) {
        assert httpServletRequest != null;
        this.httpServletRequest = httpServletRequest;
    }

    /**
     * Starts a new web session for the given {@code principal} in the current {@code httpServletRequest}. A potentially
     * existing session is terminated first to prevent session fixation attacks.
     *
     * This method is <em>not</em> thread-safe in the sense that a session actually exists when this method returns,
     * because a racing request may have torn down the session that is being created here while this method is
     * executing, or the data set in this call is actually the data stored in the session at the point this method
     * returns. This cannot be prevented even with locking, because HTTP is inherently asynchronous, and even if this
     * method would lock, as soon as the method returns a racing request could then tear down the session or modify the
     * session data. <em>However</em>, this method guarantees that all data that is being set as part of the
     * authenticated session is set atomically, i.e. multiple racing calls will not cause the session to contain mixed
     * data of those different calls, one call will always win. Also, data set during this call is stored in the session
     * before this call returns, but as described above, may have already been overwritten by a racing thread at the
     * point this call has returned.
     *
     * @param principal The {@link Principal} for which to create a session.
     * @return The session ID of the created session.
     */
    @Nonnull
    public String startWebSession(@Nonnull final Principal principal) {
        assert principal != null;
        return startSession(principal, AUTHENTICATION_SCHEME_USERNAME);
    }

    /**
     * Starts a new app session for the given {@code Principal} in the current {@code httpServletRequest}. A potentially
     * existing session is terminated first to prevent session fixation attacks.
     *
     * This method is <em>not</em> thread-safe in the sense that a session actually exists when this method returns,
     * because a racing request may have torn down the session that is being created here while this method is
     * executing, or the data set in this call is actually the data stored in the session at the point this method
     * returns. This cannot be prevented even with locking, because HTTP is inherently asynchronous, and even if this
     * method would lock, as soon as the method returns a racing request could then tear down the session or modify the
     * session data. <em>However</em>, this method guarantees that all data that is being set as part of the
     * authenticated session is set atomically, i.e. multiple racing calls will not cause the session to contain mixed
     * data of those different calls, one call will always win. Also, data set during this call is stored in the session
     * before this call returns, but as described above, may have already been overwritten by a racing thread at the
     * point this call has returned.
     *
     * @param principal The {@link Principal} for which to create a session.
     * @return The session ID of the created session.
     */
    @Nonnull
    public String startAppSession(@Nonnull final Principal principal) {
        assert principal != null;
        return startSession(principal, AUTHENTICATION_SCHEME_APPTOKEN);
    }

    /**
     * Terminates the current session. If there is no current session, the method does nothing.
     *
     * This method is <em>not</em> thread-safe, as a racing request may be setting up a new session for the current
     * request while this method is terminating the current session.
     */
    public void terminateSession() {
        // Check whether there actually is an ongoing session.
        @Nullable final HttpSession httpSession = getCurrentSession();
        if (httpSession == null) {
            return;
        }
        // Remove our session data first, then invalidate; both steps tolerate a racing invalidation.
        clearSessionData(httpSession);
        try {
            httpSession.invalidate();
        } catch (final IllegalStateException e) {
            // To be expected, since multiple racing HTTP requests may terminate the same session.
            LOG.info("terminateSession: session has already been terminated", e);
        }
    }

    /**
     * Returns the session ID of the current session, even if there is <em>no user authenticated</em> in the current
     * session. Returns {@code null} if there is no current session. <em>Note</em> however that the session may have
     * already been terminated by the time this method returns.
     *
     * @return The session ID of the current session, or {@code null} if there is no ongoing session.
     */
    @Nullable
    public String getCurrentSessionId() {
        // Get the current session. Return {@code null} if there is no ongoing session.
        @Nullable final HttpSession httpSession = getCurrentSession();
        if (httpSession == null) {
            return null;
        }
        return getSessionId(httpSession);
    }

    /**
     * Creates the {@link SecurityContext} for the current session. It does so by inspecting the {@link HttpSession} for
     * the presence of a username. If and only if a user is authenticated in the session, a {@link SecurityContext}
     * representing that user is created and returned. If no user is authenticated in the session, {@code null} will be
     * returned.
     *
     * <strong>Note:</strong> must not be used to access a {@link SecurityContext}, only to <em>create</em> the initial
     * {@code SecurityContext}! The way to access a {@code SecurityContext} is by either getting it injected via the
     * {@link javax.ws.rs.core.Context @Context} annotation, or by retrieving it from the RestEasy context stack.
     *
     * This method is <em>not</em> thread-safe in the sense that an authenticated session will actually exist even if
     * this method returns a {@link SecurityContext}, because a racing request may terminate the session while this
     * method here is executing. <em>However</em>, this method guarantees that the data in the returned {@link
     * SecurityContext} does not contain data from racing threads, but will always contain the atomic set of data that
     * was set on the authenticated session.
     *
     * @param httpServletRequest The current request.
     * @return If a user is authenticated in this session, then a {@link SecurityContext} is returned. Returns {@code
     * null} otherwise.
     */
    @Nullable
    public static SecurityContext createSecurityContextForSession(@Nonnull final HttpServletRequest httpServletRequest) {
        assert httpServletRequest != null;
        // Get the current session. {@link HttpServletRequest#getSession(boolean) getSession(false)} returns
        // {@code null} if there is no ongoing session.
        @Nullable final HttpSession httpSession = httpServletRequest.getSession(false);
        if (httpSession == null) {
            return null;
        }
        // Get the session data. The simple fact that the session contains our session data is sufficient proof that
        // the session is authenticated. Return no {@link SecurityContext} if no session data (i.e. current session is
        // not authenticated).
        @Nullable final SessionData sessionData = getSessionData(httpSession);
        if (sessionData == null) {
            return null;
        }
        // The SessionData snapshot is immutable, so the resulting SecurityContext is internally consistent.
        final SecurityContext securityContext =
                new SecurityContextImpl(new PrincipalImpl(sessionData.getUserId()), sessionData.getAuthenticationScheme());
        return securityContext;
    }

    /**
     * Starts a new session of the given {@code authenticationScheme} for the given {@code principal} in the current
     * servlet request. A potentially existing session is terminated first to prevent session fixation attacks.
     *
     * This method is <em>not</em> thread-safe in the sense that a session actually exists when this method returns,
     * because a racing request may have torn down the session that is being created here while this method is
     * executing, or the data set in this call is actually the data stored in the session at the point this method
     * returns. This cannot be prevented even with locking, because HTTP is inherently asynchronous, and even if this
     * method would lock, as soon as the method returns a racing request could then tear down the session or modify the
     * session data. <em>However</em>, this method guarantees that all data that is being set as part of the
     * authenticated session is set atomically, i.e. multiple racing calls will not cause the session to contain mixed
     * data of those different calls, one call will always win. Also, data set during this call is stored in the session
     * before this call returns, but as described above, may have already been overwritten by a racing thread at the
     * point this call has returned.
     *
     * @param principal            The {@link Principal} for which to create a session.
     * @param authenticationScheme The authentication scheme of the session to create.
     * @return The session ID of the created session.
     */
    @Nonnull
    private String startSession(
            @Nonnull final Principal principal,
            @Nonnull final String authenticationScheme) {
        assert principal != null;
        assert authenticationScheme != null;
        @Nonnull final HttpSession httpSession = createNewSession();
        try {
            // Set authenticated user ID and authentication scheme on session. Throws an {@link IllegalStateException}
            // in case the session has been concurrently invalidated before the session data could be set.
            @Nonnull final String userId = setSessionData(httpSession, principal, authenticationScheme);
            @Nonnull final String sessionId = getSessionId(httpSession);
            LOG.debug("startSession: created new session for user with ID {}. Session ID is {}", userId, sessionId);
            return sessionId;
        } catch (final IllegalStateException e) {
            // To be expected, since a racing HTTP request may have terminated this session before we were able to set
            // the session data.
            LOG.info("startSession: session has already been terminated", e);
            // We carry on as if we were able to set the session data, because the method does not guarantee that there
            // actually is a valid authenticated session at the end of this method.
            return getSessionId(httpSession);
        }
    }

    /**
     * Returns the current session, even if there is <em>no user authenticated</em> in the current session. Returns
     * {@code null} if there is no current session.
     *
     * This method is <em>not</em> thread-safe in the sense that a concurrent request may have invalidated the
     * returned session before this method could complete, but after it retrieved the session from the underlying
     * request. Also, a concurrent request may have created a new session after this method checked for session
     * existence but before it returned (i.e. this method would return {@code null} even though at the point it does so,
     * a session could exist).
     *
     * @return The currently ongoing session (authenticated or unauthenticated) or {@code null} if there is no current
     * session.
     */
    @Nullable
    private HttpSession getCurrentSession() {
        // Get the current session. {@link HttpServletRequest#getSession(boolean) getSession(false)} returns
        // {@code null} if there is no ongoing session.
        return httpServletRequest.getSession(false);
    }

    /**
     * Creates a new session, and terminates any existing session to prevent session fixation attacks (see <a
     * href="https://www.owasp.org/index.php/Session_fixation">https://www.owasp.org/index.php/Session_fixation</a>).
     *
     * This method is <em>not</em> thread-safe in the sense that a concurrent request may have invalidated the
     * returned session before this method could complete, but after it created the session.
     *
     * @return The newly created session.
     */
    @Nonnull
    private HttpSession createNewSession() {
        // Terminate any existing session first to prevent session fixation attacks.
        terminateSession();
        /**
         * {@link HttpServletRequest#getSession()} gets the current session or creates a new session if one does not
         * exist already. Since we've just terminated a potentially existing session, there shouldn't be a session, but
         * a concurrent thread may have created a new one already in between. That does not create a session fixation
         * attack vector, because the session ID will be different from the session ID that came in with current
         * request.
         */
        @Nonnull final HttpSession httpSession = httpServletRequest.getSession();
        assert httpSession != null; // Check explicitly since {@link HttpServletRequest#getSession()} has no codified post-conditions.
        return httpSession;
    }

    /**
     * Returns the session ID of the given {@code httpSession}.
     *
     * @param httpSession The session for which to retrieve the session ID.
     * @return The session ID of the given {@code httpSession}.
     */
    @Nonnull
    private static String getSessionId(@Nonnull final HttpSession httpSession) {
        @Nonnull final String sessionId = httpSession.getId();
        assert sessionId != null; // Check explicitly since {@link HttpSession#getId()} has no codified post-conditions.
        return sessionId;
    }

    /**
     * Sets all data necessary to establish an authenticated session in the given {@code httpSession}. This method is
     * <em>not</em> thread-safe in the sense that one thread may set the data here, but another thread may have already
     * removed the data again before this method returns. <em>However</em>, this method ensures that all data to be set
     * in this call is either set or not (in case a racing thread terminates the session). The session will never
     * contain mixed data from different racing threads, one thread will always win.
     *
     * @param httpSession          The session to set the session data on.
     * @param principal            The {@link Principal} from which to take the user ID.
     * @param authenticationScheme The authentication scheme to set on the session.
     * @return The user ID that was set in the session.
     * @throws IllegalStateException Throws an {@link IllegalStateException} in case the session has been invalidated
     *                               before this method could set the session data.
     */
    @Nonnull
    private static String setSessionData(
            @Nonnull final HttpSession httpSession,
            @Nonnull final Principal principal,
            @Nonnull final String authenticationScheme) throws IllegalStateException {
        assert httpSession != null;
        assert principal != null;
        assert authenticationScheme != null;
        @Nonnull final String userId = principal.getName();
        // A single immutable SessionData object is stored under one key, which is what makes the update atomic.
        final SessionData sessionData = new SessionData(userId, authenticationScheme);
        /**
         * Throws an {@link IllegalStateException} in case session has already been invalidated. Don't catch it to allow
         * the caller to deal with this situation, because unlike in the other methods, we can't decide locally what
         * the correct course of action is, since the goal of this method was to set the session data, and we may not be
         * able to.
         */
        httpSession.setAttribute(SESSION_DATA_KEY, sessionData);
        return userId;
    }

    /**
     * Gets all session data from the given {@code httpSession}. This method is <em>not</em> thread-safe in the sense
     * that one thread may get the data here, but another thread may have already removed the data from the session
     * before this method returns, or the current session may have been concurrently invalidated before this method can
     * read the session data. <em>However</em>, this method ensures that either all data is retrieved, or nothing (in
     * case a racing thread terminates the session). The returned session data will never contain mixed data from
     * different racing threads.
     *
     * @param httpSession The session to retrieve the session data from.
     * @return The session data, or {@code null} if not an authenticated session.
     */
    @Nullable
    private static SessionData getSessionData(@Nonnull final HttpSession httpSession) {
        assert httpSession != null;
        @Nullable SessionData sessionData;
        try {
            sessionData = (SessionData) httpSession.getAttribute(SESSION_DATA_KEY);
        } catch (final IllegalStateException e) {
            // To be expected, since a racing HTTP request may have terminated this session before we got here.
            LOG.info("getSessionData: session has already been terminated", e);
            sessionData = null;
        }
        return sessionData;
    }

    /**
     * Clears all session data that was previously set by the SessionManager on the given {@code httpSession} (but not
     * data set by other parties than the SessionManager). Does nothing if no session data is currently set or the
     * session has been concurrently terminated before this method could remove the session data.
     *
     * @param httpSession The {@link HttpSession} to clear.
     */
    private static void clearSessionData(@Nonnull final HttpSession httpSession) {
        assert httpSession != null;
        try {
            httpSession.removeAttribute(SESSION_DATA_KEY);
        } catch (final IllegalStateException e) {
            // To be expected, since a racing HTTP request may have terminated this session before we got here. We
            // carry on as if nothing had happened, because the goal was to remove the session data, and an invalidated
            // session will not hold the session data, hence we have achieved that goal.
            LOG.info("clearSessionData: session has already been terminated", e);
        }
    }

    /**
     * Immutable container to hold session data. The immutability ensures that either all or nothing is set in the
     * session, and racing requests cannot cause the session to contain a mixed set of data from different requests.
     *
     * This class is serializable, to enable containers to replicate session state between multiple nodes.
     *
     * Made package private for unit test.
     */
    static class SessionData implements Serializable {
        /**
         * Version of this class to support serialization. Must be increased iff a change is made to this class that
         * breaks serialisation compatibility with previous versions of this class (see <a
         * href="http://docs.oracle.com/javase/6/docs/platform/serialization/spec/version.html#6678">http://docs.oracle.com/javase/6/docs/platform/serialization/spec/version.html#6678</a>
         * for when exactly the version must be changed).
         */
        private static final long serialVersionUID = 1L;

        /**
         * The ID of the authenticated user in the {@link HttpSession}.
         *
         * @serial
         */
        @Nonnull
        private final String userId;

        /**
         * The authentication scheme of the authentication session.
         *
         * @serial
         */
        @Nonnull
        private final String authenticationScheme;

        /**
         * Constructs a session data object.
         *
         * Made package private for unit test.
         *
         * @param userId               The user ID of the user authenticated in this session.
         * @param authenticationScheme The authentication scheme of this session.
         */
        SessionData(
                @Nonnull final String userId,
                @Nonnull final String authenticationScheme) {
            assert userId != null;
            assert authenticationScheme != null;
            this.userId = userId;
            this.authenticationScheme = authenticationScheme;
        }

        /**
         * Returns the ID of the user authenticated in this session.
         *
         * Made package private for unit test.
         *
         * @return The ID of the authenticated user.
         */
        @Nonnull
        String getUserId() {
            return userId;
        }

        /**
         * Returns the authentication scheme of this session.
         *
         * Made package private for unit test.
         *
         * @return The authentication scheme.
         */
        @Nonnull
        String getAuthenticationScheme() {
            return authenticationScheme;
        }
    }
}
| |
/*
* Copyright (C) 2008, Google Inc.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.patch;
import static org.eclipse.jgit.util.RawParseUtils.nextLF;
import static org.eclipse.jgit.util.RawParseUtils.parseBase10;
import java.io.IOException;
import java.io.OutputStream;
import org.eclipse.jgit.lib.AbbreviatedObjectId;
import org.eclipse.jgit.util.MutableInteger;
/** Hunk header for a hunk appearing in a "diff --cc" style patch. */
public class CombinedHunkHeader extends HunkHeader {
/**
 * Per-ancestor variant of {@link OldImage} that additionally tracks how many
 * context lines were attributed to that ancestor while parsing the hunk body.
 */
private static abstract class CombinedOldImage extends OldImage {
    // Number of context (' ') lines counted for this ancestor in parseBody().
    int nContext;
}

// One old-image record per parent ("ancestor") of the combined diff; sized
// from CombinedFileHeader.getParentCount() in the constructor.
private CombinedOldImage[] old;
/**
 * Builds a combined hunk header, allocating one {@link CombinedOldImage}
 * per parent of the combined diff, each resolving its object id lazily
 * from the owning file header.
 */
CombinedHunkHeader(final CombinedFileHeader fh, final int offset) {
    super(fh, offset, null);
    final int parents = fh.getParentCount();
    old = new CombinedOldImage[parents];
    for (int p = 0; p < parents; p++) {
        // Capture the index in an effectively-final local for the anonymous class.
        final int ancestor = p;
        old[p] = new CombinedOldImage() {
            @Override
            public AbbreviatedObjectId getId() {
                return fh.getOldId(ancestor);
            }
        };
    }
}
/** @return the owning file header, narrowed to its combined-diff subtype. */
@Override
public CombinedFileHeader getFileHeader() {
    // The superclass stores the header we were constructed with, which is
    // always a CombinedFileHeader for this hunk type, so the cast is safe.
    final FileHeader header = super.getFileHeader();
    return (CombinedFileHeader) header;
}
/** @return the old image data of the first ancestor (index 0). */
@Override
public OldImage getOldImage() {
    // Equivalent to getOldImage(0): the zeroth ancestor is the default.
    return old[0];
}
/**
 * Get the OldImage data related to the nth ancestor.
 *
 * @param nthParent
 *            the ancestor to get the old image data of; must be in the
 *            range {@code [0, parentCount)} of the owning file header.
 * @return image data of the requested ancestor.
 */
public OldImage getOldImage(final int nthParent) {
    return old[nthParent];
}
/**
 * Parses the combined hunk header line, filling in the start line and line
 * count for every ancestor range as well as for the new image.
 */
@Override
void parseHeader() {
    // Parse "@@@ -55,12 -163,13 +163,15 @@@ protected boolean"
    //
    final byte[] buf = file.buf;
    final MutableInteger ptr = new MutableInteger();
    // Skip past the leading "@@@" token to the first ancestor range.
    ptr.value = nextLF(buf, startOffset, ' ');
    // Each ancestor contributes one "-start[,count]" range.
    for (int n = 0; n < old.length; n++) {
        // The range begins with '-', so the parsed value is negative;
        // negate it to recover the actual start line.
        old[n].startLine = -parseBase10(buf, ptr.value, ptr);
        if (buf[ptr.value] == ',')
            old[n].lineCount = parseBase10(buf, ptr.value + 1, ptr);
        else
            old[n].lineCount = 1; // a missing ",count" suffix means one line
    }
    // The trailing "+start[,count]" range describes the merged (new) image.
    newStartLine = parseBase10(buf, ptr.value + 1, ptr);
    if (buf[ptr.value] == ',')
        newLineCount = parseBase10(buf, ptr.value + 1, ptr);
    else
        newLineCount = 1;
}
/**
 * Scans the hunk body, counting context/deleted/added lines per ancestor
 * and for the hunk overall, then reports truncation errors on {@code script}
 * if any counted total falls short of what the header promised.
 */
@Override
int parseBody(final Patch script, final int end) {
    final byte[] buf = file.buf;
    int c = nextLF(buf, startOffset);

    // Reset all per-ancestor counters before scanning.
    for (final CombinedOldImage o : old) {
        o.nDeleted = 0;
        o.nAdded = 0;
        o.nContext = 0;
    }
    nContext = 0;
    int nAdded = 0;

    SCAN: for (int eol; c < end; c = eol) {
        eol = nextLF(buf, c);
        if (eol - c < old.length + 1) {
            // Line isn't long enough to mention the state of each
            // ancestor. It must be the end of the hunk.
            break SCAN;
        }
        switch (buf[c]) {
        case ' ':
        case '-':
        case '+':
            break;
        default:
            // Line can't possibly be part of this hunk; the first
            // ancestor information isn't recognizable.
            //
            break SCAN;
        }
        // The first old.length columns each encode this line's state relative
        // to one ancestor: ' ' = context, '-' = deleted, '+' = added.
        int localcontext = 0;
        for (int ancestor = 0; ancestor < old.length; ancestor++) {
            switch (buf[c + ancestor]) {
            case ' ':
                localcontext++;
                old[ancestor].nContext++;
                continue;
            case '-':
                old[ancestor].nDeleted++;
                continue;
            case '+':
                old[ancestor].nAdded++;
                nAdded++;
                continue;
            default:
                break SCAN;
            }
        }
        // A line counts as hunk-level context only if it is context for
        // every ancestor.
        if (localcontext == old.length)
            nContext++;
    }

    // Verify each ancestor supplied at least as many lines as its header range promised.
    for (int ancestor = 0; ancestor < old.length; ancestor++) {
        final CombinedOldImage o = old[ancestor];
        final int cmp = o.nContext + o.nDeleted;
        if (cmp < o.lineCount) {
            final int missingCnt = o.lineCount - cmp;
            script.error(buf, startOffset, "Truncated hunk, at least "
                    + missingCnt + " lines is missing for ancestor "
                    + (ancestor + 1));
        }
    }

    // Likewise verify the new-image line count.
    if (nContext + nAdded < newLineCount) {
        final int missingCount = newLineCount - (nContext + nAdded);
        script.error(buf, startOffset, "Truncated hunk, at least "
                + missingCount + " new lines is missing");
    }

    // Offset of the first byte after the scanned hunk body.
    return c;
}
/**
 * Copies each body line of this hunk into the streams of the file versions
 * it belongs to: out[i] receives lines present in ancestor i, and
 * out[old.length] receives lines present in the new file.
 *
 * @param out
 *            one stream per ancestor, plus a final stream for the new file.
 * @throws IOException
 *             a stream rejected a write.
 */
@Override
void extractFileLines(final OutputStream[] out) throws IOException {
    final byte[] buf = file.buf;
    int ptr = startOffset;
    int eol = nextLF(buf, ptr);
    if (endOffset <= eol)
        return;

    // Treat the hunk header as though it were from the ancestor,
    // as it may have a function header appearing after it which
    // was copied out of the ancestor file.
    //
    out[0].write(buf, ptr, eol - ptr);

    SCAN: for (ptr = eol; ptr < endOffset; ptr = eol) {
        eol = nextLF(buf, ptr);
        if (eol - ptr < old.length + 1) {
            // Line isn't long enough to mention the state of each
            // ancestor. It must be the end of the hunk.
            break SCAN;
        }
        switch (buf[ptr]) {
        case ' ':
        case '-':
        case '+':
            break;
        default:
            // Line can't possibly be part of this hunk; the first
            // ancestor information isn't recognizable.
            //
            break SCAN;
        }
        // Route the line to every ancestor stream in which it appears:
        // '-' (deleted from that ancestor) and ' ' (context) both mean the
        // line exists in that ancestor's version; '+' means it does not.
        int delcnt = 0;
        for (int ancestor = 0; ancestor < old.length; ancestor++) {
            switch (buf[ptr + ancestor]) {
            case '-':
                delcnt++;
                out[ancestor].write(buf, ptr, eol - ptr);
                continue;
            case ' ':
                out[ancestor].write(buf, ptr, eol - ptr);
                continue;
            case '+':
                continue;
            default:
                break SCAN;
            }
        }
        if (delcnt < old.length) {
            // This line appears in the new file if it wasn't deleted
            // relative to all ancestors.
            //
            out[old.length].write(buf, ptr, eol - ptr);
        }
    }
}
// Replays the hunk into sb, copying each line exactly once from whichever
// file (ancestor or new) it belongs to. text/offsets are maintained by
// copyLine/skipLine to keep the per-file cursors in sync; index old.length
// refers to the new file.
void extractFileLines(final StringBuilder sb, final String[] text,
        final int[] offsets) {
    final byte[] buf = file.buf;
    int ptr = startOffset;
    int eol = nextLF(buf, ptr);
    if (endOffset <= eol)
        return;
    // The hunk header itself is attributed to the first ancestor (index 0).
    copyLine(sb, text, offsets, 0);
    SCAN: for (ptr = eol; ptr < endOffset; ptr = eol) {
        eol = nextLF(buf, ptr);

        if (eol - ptr < old.length + 1) {
            // Line isn't long enough to mention the state of each
            // ancestor. It must be the end of the hunk.
            break SCAN;
        }

        switch (buf[ptr]) {
        case ' ':
        case '-':
        case '+':
            break;
        default:
            // Line can't possibly be part of this hunk; the first
            // ancestor information isn't recognizable.
            //
            break SCAN;
        }

        // Copy the line from the first ancestor that has it; every other
        // ancestor that also has it merely advances its cursor (skipLine),
        // so the line lands in the output exactly once.
        boolean copied = false;
        for (int ancestor = 0; ancestor < old.length; ancestor++) {
            switch (buf[ptr + ancestor]) {
            case ' ':
            case '-':
                if (copied)
                    skipLine(text, offsets, ancestor);
                else {
                    copyLine(sb, text, offsets, ancestor);
                    copied = true;
                }
                continue;
            case '+':
                continue;
            default:
                break SCAN;
            }
        }
        if (!copied) {
            // If none of the ancestors caused the copy then this line
            // must be new across the board, so it only appears in the
            // text of the new file.
            //
            copyLine(sb, text, offsets, old.length);
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.kafka.source.testutils;
import org.apache.flink.api.connector.source.Boundedness;
import org.apache.flink.api.connector.source.Source;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.KafkaSourceBuilder;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.connectors.test.common.external.ExternalContext;
import org.apache.flink.connectors.test.common.external.SourceSplitDataWriter;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewPartitions;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.KafkaContainer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
/**
* A Kafka external context that will create only one topic and use partitions in that topic as
* source splits.
*/
public class KafkaSingleTopicExternalContext implements ExternalContext<String> {

    private static final Logger LOG =
            LoggerFactory.getLogger(KafkaSingleTopicExternalContext.class);

    private static final String TOPIC_NAME_PREFIX = "kafka-single-topic";
    // Timeout (seconds) for admin operations against the Kafka cluster.
    private static final int DEFAULT_TIMEOUT = 30;
    private static final int NUM_RECORDS_UPPER_BOUND = 500;
    private static final int NUM_RECORDS_LOWER_BOUND = 100;

    protected String bootstrapServers;
    private final String topicName;

    // One data writer per topic partition; key is the partition id.
    private final Map<Integer, SourceSplitDataWriter<String>> partitionToSplitWriter =
            new HashMap<>();

    private int numSplits = 0;

    protected final AdminClient kafkaAdminClient;

    /**
     * Creates a context bound to the given Kafka cluster. A random topic name is
     * generated so concurrent test runs do not collide; the topic itself is created
     * lazily by {@link #createSourceSplitDataWriter()}.
     *
     * @param bootstrapServers comma-separated Kafka bootstrap servers
     */
    public KafkaSingleTopicExternalContext(String bootstrapServers) {
        this.bootstrapServers = bootstrapServers;
        this.topicName =
                TOPIC_NAME_PREFIX + "-" + ThreadLocalRandom.current().nextLong(Long.MAX_VALUE);
        kafkaAdminClient = createAdminClient();
    }

    /**
     * Creates a new topic, blocking until the cluster acknowledges it.
     *
     * @throws RuntimeException if the topic cannot be created within the timeout
     */
    protected void createTopic(String topicName, int numPartitions, short replicationFactor) {
        LOG.debug(
                "Creating new Kafka topic {} with {} partitions and {} replicas",
                topicName,
                numPartitions,
                replicationFactor);
        NewTopic newTopic = new NewTopic(topicName, numPartitions, replicationFactor);
        try {
            kafkaAdminClient
                    .createTopics(Collections.singletonList(newTopic))
                    .all()
                    .get(DEFAULT_TIMEOUT, TimeUnit.SECONDS);
        } catch (Exception e) {
            throw new RuntimeException(String.format("Cannot create topic '%s'", topicName), e);
        }
    }

    /**
     * Deletes a topic. A missing topic ({@link UnknownTopicOrPartitionException})
     * is tolerated so deletion is idempotent; any other failure is propagated.
     */
    protected void deleteTopic(String topicName) {
        LOG.debug("Deleting Kafka topic {}", topicName);
        try {
            kafkaAdminClient
                    .deleteTopics(Collections.singletonList(topicName))
                    .all()
                    .get(DEFAULT_TIMEOUT, TimeUnit.SECONDS);
        } catch (Exception e) {
            // BUGFIX: the check was inverted — it threw only when the topic was
            // already gone (the benign case) and silently swallowed real failures.
            if (!(ExceptionUtils.getRootCause(e) instanceof UnknownTopicOrPartitionException)) {
                throw new RuntimeException(String.format("Cannot delete topic '%s'", topicName), e);
            }
        }
    }

    private AdminClient createAdminClient() {
        Properties config = new Properties();
        config.setProperty(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        return AdminClient.create(config);
    }

    /**
     * Builds a {@link KafkaSource} reading string values from this context's topic.
     * Bounded mode stops at the latest offsets observed at startup.
     */
    @Override
    public Source<String, ?, ?> createSource(Boundedness boundedness) {
        KafkaSourceBuilder<String> builder = KafkaSource.builder();

        if (boundedness == Boundedness.BOUNDED) {
            builder = builder.setBounded(OffsetsInitializer.latest());
        }

        return builder.setGroupId("flink-kafka-test")
                .setDeserializer(
                        KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
                .setTopics(topicName)
                .setBootstrapServers(bootstrapServers)
                .build();
    }

    /**
     * Creates a writer for a new source split. The first call creates the topic with
     * one partition; each later call adds one more partition. Each partition is one
     * split.
     */
    @Override
    public SourceSplitDataWriter<String> createSourceSplitDataWriter() {
        if (numSplits == 0) {
            createTopic(topicName, 1, (short) 1);
            numSplits++;
        } else {
            LOG.debug("Creating new partition for topic {}", topicName);
            kafkaAdminClient.createPartitions(
                    Collections.singletonMap(topicName, NewPartitions.increaseTo(++numSplits)));
        }
        KafkaPartitionDataWriter splitWriter =
                new KafkaPartitionDataWriter(
                        getKafkaProducerProperties(numSplits - 1),
                        new TopicPartition(topicName, numSplits - 1));
        partitionToSplitWriter.put(numSplits - 1, splitWriter);
        return splitWriter;
    }

    /**
     * Generates a random number (between {@code NUM_RECORDS_LOWER_BOUND} and
     * {@code NUM_RECORDS_UPPER_BOUND}) of random string records, each prefixed with
     * the split index. Deterministic for a given seed.
     */
    @Override
    public List<String> generateTestData(int splitIndex, long seed) {
        Random random = new Random(seed);
        List<String> randomStringRecords = new ArrayList<>();
        int recordNum =
                random.nextInt(NUM_RECORDS_UPPER_BOUND - NUM_RECORDS_LOWER_BOUND)
                        + NUM_RECORDS_LOWER_BOUND;
        for (int i = 0; i < recordNum; i++) {
            int stringLength = random.nextInt(50) + 1;
            randomStringRecords.add(generateRandomString(splitIndex, stringLength, random));
        }
        return randomStringRecords;
    }

    // Builds "<splitIndex>-<random alphanumerics of the given length>".
    private String generateRandomString(int splitIndex, int length, Random random) {
        String alphaNumericString =
                "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "abcdefghijklmnopqrstuvwxyz" + "0123456789";
        StringBuilder sb = new StringBuilder().append(splitIndex).append("-");
        for (int i = 0; i < length; ++i) {
            sb.append(alphaNumericString.charAt(random.nextInt(alphaNumericString.length())));
        }
        return sb.toString();
    }

    /**
     * Producer properties for the writer of the given partition; the client id
     * embeds the producer id plus a random suffix for uniqueness.
     */
    protected Properties getKafkaProducerProperties(int producerId) {
        Properties kafkaProducerProperties = new Properties();
        kafkaProducerProperties.setProperty(
                ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        kafkaProducerProperties.setProperty(
                ProducerConfig.CLIENT_ID_CONFIG,
                String.join(
                        "-",
                        "flink-kafka-split-writer",
                        Integer.toString(producerId),
                        Long.toString(ThreadLocalRandom.current().nextLong(Long.MAX_VALUE))));
        kafkaProducerProperties.setProperty(
                ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
        kafkaProducerProperties.setProperty(
                ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
        return kafkaProducerProperties;
    }

    /**
     * Deletes the topic and closes all split writers and the admin client.
     *
     * <p>Every writer is attempted even if one fails (failures are collected and the
     * first is rethrown with the rest suppressed), and the admin client is always
     * closed — the previous version aborted on the first writer failure, leaking the
     * remaining producers.
     */
    @Override
    public void close() {
        try {
            deleteTopic(topicName);
            RuntimeException failure = null;
            for (SourceSplitDataWriter<String> splitWriter : partitionToSplitWriter.values()) {
                try {
                    splitWriter.close();
                } catch (Exception e) {
                    if (failure == null) {
                        failure = new RuntimeException("Cannot close split writer", e);
                    } else {
                        failure.addSuppressed(e);
                    }
                }
            }
            partitionToSplitWriter.clear();
            if (failure != null) {
                throw failure;
            }
        } finally {
            kafkaAdminClient.close();
        }
    }

    @Override
    public String toString() {
        return "Single-topic Kafka";
    }

    /** Factory of {@link KafkaSingleTopicExternalContext}. */
    public static class Factory implements ExternalContext.Factory<String> {

        private final KafkaContainer kafkaContainer;

        public Factory(KafkaContainer kafkaContainer) {
            this.kafkaContainer = kafkaContainer;
        }

        // Joins the container's external bootstrap servers with the internal
        // (network-alias) endpoints on port 9092.
        protected String getBootstrapServer() {
            final String internalEndpoints =
                    kafkaContainer.getNetworkAliases().stream()
                            .map(host -> String.join(":", host, Integer.toString(9092)))
                            .collect(Collectors.joining(","));
            return String.join(",", kafkaContainer.getBootstrapServers(), internalEndpoints);
        }

        @Override
        public ExternalContext<String> createExternalContext() {
            return new KafkaSingleTopicExternalContext(getBootstrapServer());
        }
    }
}
| |
package net.glowstone.chunk;
import lombok.Getter;
import net.glowstone.constants.GlowBiome;
import org.bukkit.ChunkSnapshot;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Biome;
import org.bukkit.block.data.BlockData;
import org.jetbrains.annotations.NotNull;
/**
* Class representing a snapshot of a chunk.
*/
public class GlowChunkSnapshot implements ChunkSnapshot {

    @Getter
    private final int x;
    @Getter
    private final int z;
    @Getter
    private final String worldName;
    @Getter
    private final long captureFullTime;

    /**
     * The ChunkSection array backing this snapshot. In general, it should not be modified
     * externally.
     *
     * @return The array of ChunkSections.
     */
    @Getter
    private final ChunkSection[] rawSections;

    // Heightmap, one unsigned byte per column (see coordToIndex). Values above
    // 127 are stored as negative bytes and must be masked with 0xFF when read.
    private final byte[] height;
    private final double[] temp;
    private final double[] humid;
    @Getter
    private final byte[] rawBiomes;
    @Getter
    private final boolean isSlimeChunk;

    /**
     * Creates a snapshot of a chunk.
     *
     * @param x the chunk x coordinate
     * @param z the chunk z coordinate
     * @param world the world the chunk is in
     * @param sections the chunk contents
     * @param height the heightmap
     * @param biomes the biome map
     * @param svTemp if true, copy temperature and humidity from the world
     * @param isSlimeChunk whether this chunk is a slime chunk
     */
    public GlowChunkSnapshot(int x, int z, World world, ChunkSection[] sections, byte[] height,
            byte[] biomes, boolean svTemp, boolean isSlimeChunk) {
        this.x = x;
        this.z = z;
        this.worldName = world.getName();
        captureFullTime = world.getFullTime();
        this.isSlimeChunk = isSlimeChunk;

        // Deep-copy each non-null section so later edits to the live chunk
        // do not leak into this snapshot.
        int numSections = sections != null ? sections.length : 0;
        this.rawSections = new ChunkSection[numSections];
        for (int i = 0; i < numSections; ++i) {
            if (sections[i] != null) {
                this.rawSections[i] = sections[i].snapshot();
            }
        }

        this.height = height;
        this.rawBiomes = biomes;

        if (svTemp) {
            int baseX = x << 4;
            int baseZ = z << 4;
            temp = new double[(16 << 4)];
            humid = new double[(16 << 4)];
            for (int xx = 0; xx < 16; ++xx) {
                for (int zz = 0; zz < 16; ++zz) {
                    temp[coordToIndex(xx, zz)] = world.getTemperature(baseX + xx, baseZ + zz);
                    humid[coordToIndex(xx, zz)] = world.getHumidity(baseX + xx, baseZ + zz);
                }
            }
        } else {
            temp = humid = null;
        }
    }

    // Returns the section containing block height y, or null if out of range
    // or the section is empty.
    private ChunkSection getSection(int y) {
        int idx = y >> 4;
        if (idx < 0 || idx >= rawSections.length) {
            return null;
        }
        return rawSections[idx];
    }

    /**
     * Returns the heightmap, converted to an {@code int[]}.
     *
     * @return the heightmap as an {@code int[]}
     */
    public int[] getRawHeightmap() {
        int[] result = new int[height.length];
        for (int i = 0; i < result.length; ++i) {
            // BUGFIX: mask to treat the stored byte as unsigned; plain widening
            // sign-extended heights above 127 into negative values.
            result[i] = height[i] & 0xFF;
        }
        return result;
    }

    @Override
    public boolean isSectionEmpty(int sy) {
        return sy < 0 || sy >= rawSections.length || rawSections[sy] == null;
    }

    @Override
    public boolean contains(@NotNull BlockData blockData) {
        // TODO: 1.16
        throw new UnsupportedOperationException("Not supported yet.");
    }

    /** Returns the legacy block type id at the given chunk-local coordinates. */
    public int getBlockTypeId(int x, int y, int z) {
        ChunkSection section = getSection(y);
        return section == null ? 0 : section.getType(x, y, z) >> 4;
    }

    @Override
    public Material getBlockType(int x, int y, int z) {
        BlockData data = getBlockData(x, y, z);
        return data == null ? Material.AIR : data.getMaterial();
    }

    @Override
    public int getData(int x, int y, int z) {
        ChunkSection section = getSection(y);
        return section == null ? 0 : section.getType(x, y, z) & 0xF;
    }

    @Override
    public BlockData getBlockData(int x, int y, int z) {
        ChunkSection section = getSection(y);
        return section == null ? null : section.getBlockData(x, y, z);
    }

    @Override
    public int getBlockSkyLight(int x, int y, int z) {
        ChunkSection section = getSection(y);
        return section == null ? ChunkSection.EMPTY_SKYLIGHT : section.getSkyLight(x, y, z);
    }

    @Override
    public int getBlockEmittedLight(int x, int y, int z) {
        ChunkSection section = getSection(y);
        return section == null ? ChunkSection.EMPTY_BLOCK_LIGHT : section.getBlockLight(x, y, z);
    }

    @Override
    public int getHighestBlockYAt(int x, int z) {
        // BUGFIX: mask to read the heightmap byte as unsigned (0-255).
        return height[coordToIndex(x, z)] & 0xFF;
    }

    @Override
    public Biome getBiome(int x, int z) {
        // Mask so biome ids above 127 are not sign-extended to negative ids.
        return GlowBiome.getBiome(rawBiomes[coordToIndex(x, z)] & 0xFF).getType();
    }

    @Override
    public @NotNull Biome getBiome(int x, int y, int z) {
        // TODO: Support 3D biomes
        return getBiome(x, z);
    }

    @Override
    public double getRawBiomeTemperature(int x, int z) {
        return temp[coordToIndex(x, z)];
    }

    @Override
    public double getRawBiomeTemperature(int x, int y, int z) {
        // TODO: Support 3D biomes
        return getRawBiomeTemperature(x, z);
    }

    public double getRawBiomeRainfall(int x, int z) {
        return humid[coordToIndex(x, z)];
    }

    // Maps chunk-local (x, z) to a flat index; throws IndexOutOfBoundsException
    // for coordinates outside the chunk footprint.
    private int coordToIndex(int x, int z) {
        if (x < 0 || z < 0 || x >= GlowChunk.WIDTH || z >= GlowChunk.HEIGHT) {
            throw new IndexOutOfBoundsException();
        }
        return z * GlowChunk.WIDTH + x;
    }

    /** A snapshot of a chunk that contains no blocks. */
    public static class EmptySnapshot extends GlowChunkSnapshot {

        public EmptySnapshot(int x, int z, World world, boolean svBiome, boolean svTemp) {
            super(x, z, world, null, null, svBiome ? new byte[256] : null, svTemp, false);
        }

        @Override
        public int getBlockTypeId(int x, int y, int z) {
            return 0;
        }

        @Override
        public Material getBlockType(int x, int y, int z) {
            return Material.AIR;
        }

        @Override
        public BlockData getBlockData(int x, int y, int z) {
            return null;
        }

        @Override
        public int getBlockSkyLight(int x, int y, int z) {
            return 15;
        }

        @Override
        public int getBlockEmittedLight(int x, int y, int z) {
            return 0;
        }

        @Override
        public int getHighestBlockYAt(int x, int z) {
            return 0;
        }
    }
}
| |
/*
* Copyright 2015 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence;
import java.util.List;
import org.apache.axis2.Constants;
import org.apache.commons.lang.StringUtils;
import org.apache.synapse.config.xml.endpoints.EndpointSerializer;
import org.apache.synapse.endpoints.Endpoint;
import org.apache.synapse.mediators.MediatorProperty;
import org.apache.synapse.mediators.base.SequenceMediator;
import org.apache.synapse.rest.RESTConstants;
import org.apache.synapse.util.xpath.SynapseXPath;
import org.eclipse.core.runtime.Assert;
import org.eclipse.emf.ecore.EObject;
import org.jaxen.JaxenException;
import org.wso2.developerstudio.eclipse.gmf.esb.EndPoint;
import org.wso2.developerstudio.eclipse.gmf.esb.EndPointProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbNode;
import org.wso2.developerstudio.eclipse.gmf.esb.HTTPEndpoint;
import org.wso2.developerstudio.eclipse.gmf.esb.InputConnector;
import org.wso2.developerstudio.eclipse.gmf.esb.Sequence;
import org.wso2.developerstudio.eclipse.gmf.esb.SequenceInputConnector;
import org.wso2.developerstudio.eclipse.gmf.esb.TemplateParameter;
import org.wso2.developerstudio.eclipse.gmf.esb.persistence.TransformationInfo;
import org.wso2.developerstudio.eclipse.gmf.esb.persistence.TransformerException;
import org.wso2.developerstudio.esb.form.editors.article.rcp.endpoints.HttpEndpointFormPage;
import com.damnhandy.uri.template.UriTemplate;
/**
 * Transforms a visual {@link HTTPEndpoint} model element into its Synapse runtime
 * counterpart ({@link org.apache.synapse.endpoints.HTTPEndpoint}).
 */
public class HTTPEndPointTransformer extends AbstractEndpointTransformer {

    /**
     * Transforms the visual endpoint, wires it into the surrounding mediation flow,
     * and continues transformation along the endpoint's output connector.
     *
     * @throws TransformerException if the Synapse endpoint cannot be built
     */
    public void transform(TransformationInfo information, EsbNode subject) throws TransformerException {
        // Check subject.
        Assert.isTrue(subject instanceof HTTPEndpoint, "Invalid subject");
        HTTPEndpoint visualEndPoint = (HTTPEndpoint) subject;
        Endpoint synapseEP = create(visualEndPoint, visualEndPoint.getEndPointName());
        setEndpointToSendCallOrProxy(information, visualEndPoint, synapseEP);

        // Remember the first endpoint encountered in this flow.
        if (!information.isEndPointFound) {
            information.isEndPointFound = true;
            information.firstEndPoint = visualEndPoint;
        }

        // Decide which sequence subsequent mediators belong to, based on what the
        // endpoint's output connector links to.
        if (visualEndPoint.getOutputConnector() != null) {
            if (visualEndPoint.getOutputConnector().getOutgoingLink() != null) {
                InputConnector nextInputConnector = visualEndPoint.getOutputConnector().getOutgoingLink().getTarget();
                if ((!(nextInputConnector instanceof SequenceInputConnector))
                        || ((((Sequence) nextInputConnector.eContainer()).getOutputConnector().get(0)
                                .getOutgoingLink() != null)
                                && (!(((Sequence) nextInputConnector.eContainer()).getOutputConnector().get(0)
                                        .getOutgoingLink().getTarget().eContainer() instanceof EndPoint)))) {
                    // Next element is not a sequence feeding an endpoint: switch to
                    // the out-sequence and reverse traversal direction.
                    information.setParentSequence(information.getOriginOutSequence());
                    information.setTraversalDirection(TransformationInfo.TRAVERSAL_DIRECTION_OUT);
                } else if ((visualEndPoint.getInputConnector().getIncomingLinks().get(0).getSource()
                        .eContainer() instanceof Sequence)) {
                    information.setParentSequence(information.getCurrentReferredSequence());
                }
            }
        }

        // Guard against revisiting an already-transformed mediator (cycles).
        List<EsbNode> transformedMediators = information.getTransformedMediators();
        if (visualEndPoint.getOutputConnector() != null
                && visualEndPoint.getOutputConnector().getOutgoingLink() != null) {
            EsbNode nextElement = (EsbNode) visualEndPoint.getOutputConnector().getOutgoingLink().getTarget()
                    .eContainer();
            if (transformedMediators.contains(nextElement)) {
                return;
            }
            transformedMediators.add(nextElement);
        }
        // Transform endpoint output data flow.
        doTransform(information, visualEndPoint.getOutputConnector());
    }

    /**
     * Builds a Synapse HTTP endpoint from the visual model: name, advanced options,
     * URI template and HTTP method.
     *
     * @param visualEndPoint the visual endpoint model
     * @param name endpoint name; ignored when blank
     * @throws TransformerException if advanced options cannot be applied
     */
    public org.apache.synapse.endpoints.HTTPEndpoint create(HTTPEndpoint visualEndPoint, String name)
            throws TransformerException {
        org.apache.synapse.endpoints.HTTPEndpoint synapseHttpEP = new org.apache.synapse.endpoints.HTTPEndpoint();
        if (StringUtils.isNotBlank(name)) {
            synapseHttpEP.setName(name);
        }
        try {
            createAdvanceOptions(visualEndPoint, synapseHttpEP);
        } catch (JaxenException e) {
            throw new TransformerException(e);
        }
        if (visualEndPoint.getURITemplate() != null) {
            UriTemplate template = UriTemplate.fromTemplate(visualEndPoint.getURITemplate());
            synapseHttpEP.setUriTemplate(template);
        }
        switch (visualEndPoint.getHttpMethod()) {
        case GET:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_GET.toLowerCase());
            break;
        case POST:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_POST.toLowerCase());
            break;
        case PUT:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_PUT.toLowerCase());
            break;
        case DELETE:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_DELETE.toLowerCase());
            break;
        case HEAD:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_HEAD.toLowerCase());
            break;
        case OPTIONS:
            synapseHttpEP.setHttpMethod(RESTConstants.METHOD_OPTIONS.toLowerCase());
            break;
        case PATCH:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_PATCH.toLowerCase());
            break;
        case LEAVE_AS_IS:
            // Do not set a method; the runtime keeps the incoming one.
            break;
        }
        return synapseHttpEP;
    }

    /**
     * Builds a Synapse HTTP endpoint (or endpoint template) from the form-editor page.
     * Selection indices 1..7 map to GET, POST, PUT, DELETE, HEAD, OPTIONS, PATCH;
     * index 0 leaves the method unset.
     *
     * @throws JaxenException if an advanced-option XPath is invalid
     */
    public org.apache.synapse.SynapseArtifact create(HttpEndpointFormPage httpFormPage)
            throws NumberFormatException, JaxenException {
        org.apache.synapse.endpoints.HTTPEndpoint synapseHttpEP = new org.apache.synapse.endpoints.HTTPEndpoint();
        if (StringUtils.isNotBlank(httpFormPage.getEndpointName().getText())) {
            synapseHttpEP.setName(httpFormPage.getEndpointName().getText());
        }
        if (StringUtils.isNotEmpty(httpFormPage.httpEP_UriTemplate.getText())) {
            UriTemplate template = UriTemplate.fromTemplate(httpFormPage.httpEP_UriTemplate.getText());
            synapseHttpEP.setUriTemplate(template);
        }
        if (httpFormPage.getEndpointCommentList() != null) {
            synapseHttpEP.getCommentsList().addAll(httpFormPage.getEndpointCommentList());
        }
        createAdvanceOptions(httpFormPage, synapseHttpEP);
        // Replaces the previous 7-branch if/else-if chain (which ended in an empty
        // else block) with an equivalent switch; default covers index 0 and any
        // unexpected value, leaving the method unset.
        switch (httpFormPage.httpEP_Method.getSelectionIndex()) {
        case 1:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_GET.toLowerCase());
            break;
        case 2:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_POST.toLowerCase());
            break;
        case 3:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_PUT.toLowerCase());
            break;
        case 4:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_DELETE.toLowerCase());
            break;
        case 5:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_HEAD.toLowerCase());
            break;
        case 6:
            synapseHttpEP.setHttpMethod(RESTConstants.METHOD_OPTIONS.toLowerCase());
            break;
        case 7:
            synapseHttpEP.setHttpMethod(Constants.Configuration.HTTP_METHOD_PATCH.toLowerCase());
            break;
        default:
            break;
        }
        if (httpFormPage.endpointPropertyList != null && httpFormPage.endpointPropertyList.size() > 0) {
            saveProperties(httpFormPage, synapseHttpEP);
        }
        if (httpFormPage.isTemplate()) {
            return createTemplate(httpFormPage, synapseHttpEP);
        } else {
            return synapseHttpEP;
        }
    }

    /**
     * Save endpoint properties
     *
     * @param endpointFormPage the form page holding the property list
     * @param synapseHttpEP the endpoint receiving the properties
     */
    protected void saveProperties(HttpEndpointFormPage endpointFormPage,
            org.apache.synapse.endpoints.HTTPEndpoint synapseHttpEP) {
        for (EndPointProperty property : endpointFormPage.endpointPropertyList) {
            MediatorProperty mediatorProperty = new MediatorProperty();
            mediatorProperty.setName(property.getName());
            if (property.getValueType().toString().equals("EXPRESSION")) {
                // Build the XPath with its namespace bindings. (Renamed from
                // 'XPath' to follow lowerCamelCase local-variable convention.)
                SynapseXPath xpath = null;
                try {
                    xpath = new SynapseXPath(property.getValueExpression().getPropertyValue());
                    for (int i = 0; i < property.getValueExpression().getNamespaces().keySet().size(); ++i) {
                        String prefix = (String) property.getValueExpression().getNamespaces().keySet().toArray()[i];
                        String namespaceUri = property.getValueExpression().getNamespaces().get(prefix);
                        xpath.addNamespace(prefix, namespaceUri);
                    }
                    mediatorProperty.setExpression(xpath);
                } catch (JaxenException e) {
                    log.error("Error while persisting Endpoint properties", e);
                }
            } else if (property.getValueType().toString().equals("LITERAL")) {
                mediatorProperty.setValue(property.getValue());
            }
            mediatorProperty.setScope(property.getScope().toString().toLowerCase());
            synapseHttpEP.addProperty(mediatorProperty);
        }
    }

    /**
     * Creates the Synapse endpoint object and appends it to the given endpoint list,
     * then continues transforming the endpoint's outflow.
     *
     * @throws TransformerException if advanced options cannot be applied
     */
    public void createSynapseObject(TransformationInfo info, EObject subject, List<Endpoint> endPoints)
            throws TransformerException {
        Assert.isTrue(subject instanceof HTTPEndpoint, "Invalid subject");
        HTTPEndpoint httpEndPoint = (HTTPEndpoint) subject;
        org.apache.synapse.endpoints.HTTPEndpoint synapseHttpEP = new org.apache.synapse.endpoints.HTTPEndpoint();
        try {
            createAdvanceOptions(httpEndPoint, synapseHttpEP);
        } catch (JaxenException e) {
            throw new TransformerException(e);
        }
        UriTemplate template = UriTemplate.fromTemplate(httpEndPoint.getURITemplate());
        synapseHttpEP.setUriTemplate(template);
        Endpoint endPoint = (Endpoint) synapseHttpEP;
        endPoints.add(endPoint);
        transformEndpointOutflow(info);
    }

    /**
     * Transforms the endpoint when it appears inside a sequence, attaching it to the
     * enclosing send/call mediator.
     *
     * @throws TransformerException if the Synapse endpoint cannot be built
     */
    public void transformWithinSequence(TransformationInfo information, EsbNode subject, SequenceMediator sequence)
            throws TransformerException {
        Assert.isTrue(subject instanceof HTTPEndpoint, "Invalid subject");
        HTTPEndpoint visualEndPoint = (HTTPEndpoint) subject;
        Endpoint synapseEP = create(visualEndPoint, visualEndPoint.getEndPointName());
        setEndpointToSendOrCallMediator(sequence, synapseEP);
    }
}
| |
package org.springframework.cloud.contract.stubrunner.spring.cloud.eureka;
import java.lang.invoke.MethodHandles;
import java.util.Collection;
import java.util.Collections;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.cloud.commons.util.InetUtils;
import org.springframework.cloud.netflix.eureka.CloudEurekaClient;
import org.springframework.cloud.netflix.eureka.EurekaClientConfigBean;
import org.springframework.cloud.netflix.eureka.EurekaInstanceConfigBean;
import org.springframework.cloud.netflix.eureka.InstanceInfoFactory;
import org.springframework.http.HttpStatus;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.discovery.EurekaClientConfig;
import com.netflix.discovery.shared.Applications;
import com.netflix.discovery.shared.resolver.ClosableResolver;
import com.netflix.discovery.shared.resolver.EurekaEndpoint;
import com.netflix.discovery.shared.resolver.aws.ApplicationsResolver;
import com.netflix.discovery.shared.resolver.aws.AwsEndpoint;
import com.netflix.discovery.shared.transport.EurekaHttpClient;
import com.netflix.discovery.shared.transport.EurekaHttpClientFactory;
import com.netflix.discovery.shared.transport.EurekaHttpClients;
import com.netflix.discovery.shared.transport.EurekaHttpResponse;
import com.netflix.discovery.shared.transport.EurekaTransportConfig;
import com.netflix.discovery.shared.transport.TransportClientFactory;
import com.netflix.discovery.shared.transport.decorator.MetricsCollectingEurekaHttpClient;
import com.netflix.discovery.shared.transport.jersey.JerseyEurekaHttpClientFactory;
import com.sun.jersey.api.client.filter.ClientFilter;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
* Taken from https://github.com/spencergibb/spring-cloud-netflix-eureka-lite
*
* @author Spencer Gibb
*
* @since 1.0.0
*/
public class Eureka {
private static final Log log = LogFactory.getLog(MethodHandles.lookup().lookupClass());
private final InetUtils inetUtils;
final CloudEurekaClient eurekaClient;
final EurekaClientConfigBean clientConfig;
final EurekaTransport transport;
/**
 * Creates a lightweight Eureka facade around an existing client.
 *
 * <p>A fresh {@link EurekaClientConfigBean} is built (rather than reusing the
 * injected one) so this instance can disable self-registration while keeping
 * the original service URLs; the transport is created from that config.
 */
public Eureka(InetUtils inetUtils, EurekaClientConfigBean eurekaClientConfigBean, CloudEurekaClient eurekaClient) {
    this.inetUtils = inetUtils;
    this.eurekaClient = eurekaClient;
    this.clientConfig = new EurekaClientConfigBean();
    this.clientConfig.setRegisterWithEureka(false); // turn off registering with eureka, let apps send heartbeats.
    this.clientConfig.setServiceUrl(eurekaClientConfigBean.getServiceUrl());
    // Must run last: createTransport() reads this.clientConfig.
    this.transport = createTransport();
}
/**
 * Builds a {@link Registration} for the given application and registers it with
 * Eureka, logging how long building the registration took.
 *
 * @param application the application to register
 * @return the registration that was sent to Eureka
 */
public Registration register(Application application) {
    long startedAt = System.currentTimeMillis();
    if (log.isDebugEnabled()) {
        log.debug(String.format("Starting registration of %s", application));
    }
    Registration registration = new Registration(getInstanceInfo(application), application);
    if (log.isDebugEnabled()) {
        long duration = System.currentTimeMillis() - startedAt;
        log.debug(String.format("Created registration for %s in %s ms", application, duration));
    }
    register(registration);
    return registration;
}
/**
 * Builds an {@link InstanceInfo} for the application, stamped with the supplied
 * last-updated and last-dirty timestamps.
 */
public InstanceInfo getInstanceInfo(Application application, long lastUpdatedTimestamp, long lastDirtyTimestamp) {
    InstanceInfo.Builder builder = new InstanceInfo.Builder(getInstanceInfo(application));
    builder.setLastDirtyTimestamp(lastDirtyTimestamp);
    builder.setLastUpdatedTimestamp(lastUpdatedTimestamp);
    return builder.build();
}
/**
 * Derives an {@link InstanceInfo} from the application's name, instance id,
 * hostname and port via a minimal {@link EurekaInstanceConfigBean}.
 */
public InstanceInfo getInstanceInfo(Application application) {
    EurekaInstanceConfigBean config = new EurekaInstanceConfigBean(this.inetUtils);
    config.setInstanceEnabledOnit(true);
    // App name doubles as the virtual host name.
    config.setAppname(application.getName());
    config.setVirtualHostName(application.getName());
    config.setInstanceId(application.getInstance_id());
    config.setHostname(application.getHostname());
    config.setNonSecurePort(application.getPort());
    return new InstanceInfoFactory().create(config);
}
/**
 * Wires up the Eureka HTTP transport: a metrics-collecting Jersey client
 * factory, a bootstrap endpoint resolver, and a registration client factory.
 *
 * <p>The bootstrap resolver is given the wrapped client's local application
 * registry as a fallback applications source, but only when that registry is
 * fresher than the requested staleness threshold.
 *
 * @throws RuntimeException if the registration client factory cannot be built
 */
public EurekaTransport createTransport() {
    TransportClientFactory transportClientFactory = newTransportClientFactory(this.clientConfig, Collections.<ClientFilter>emptyList());
    EurekaTransportConfig transportConfig = this.clientConfig.getTransportConfig();
    ClosableResolver<AwsEndpoint> bootstrapResolver = EurekaHttpClients.newBootstrapResolver(
            this.clientConfig,
            transportConfig,
            transportClientFactory,
            null, // NOTE(review): applicationInfoManager/instanceInfo slot left null — presumably unused for lookup-only bootstrap; confirm against EurekaHttpClients API.
            new ApplicationsResolver.ApplicationsSource() {
                @Override
                public Applications getApplications(int stalenessThreshold, TimeUnit timeUnit) {
                    long thresholdInMs = TimeUnit.MILLISECONDS.convert(stalenessThreshold, timeUnit);
                    long delay = Eureka.this.eurekaClient.getLastSuccessfulRegistryFetchTimePeriod();
                    if (delay > thresholdInMs) {
                        // Local registry too old: returning null makes the
                        // resolver fall back to other sources.
                        log.info(String.format("Local registry is too stale for local lookup. Threshold:%s, actual:%s",
                                thresholdInMs, delay));
                        return null;
                    } else {
                        return Eureka.this.eurekaClient.getApplications();
                    }
                }
            }
    );
    EurekaHttpClientFactory httpClientFactory;
    try {
        httpClientFactory = EurekaHttpClients.registrationClientFactory(
                bootstrapResolver,
                transportClientFactory,
                transportConfig
        );
    } catch (Exception e) {
        log.warn("Experimental transport initialization failure", e);
        throw new RuntimeException(e);
    }
    return new EurekaTransport(httpClientFactory, httpClientFactory.newClient(), transportClientFactory, bootstrapResolver);
}
/**
 * Builds a {@link TransportClientFactory} that decorates the standard Jersey
 * client with metrics collection.
 *
 * <p>New clients are produced by the metrics-collecting decorator; shutdown
 * closes both the decorator and the underlying Jersey factory.
 *
 * @param clientConfig the Eureka client configuration
 * @param additionalFilters extra Jersey client filters to install
 */
public static TransportClientFactory newTransportClientFactory(
        final EurekaClientConfig clientConfig,
        final Collection<ClientFilter> additionalFilters) {
    final TransportClientFactory jerseyFactory = JerseyEurekaHttpClientFactory.create(
            clientConfig, additionalFilters, null, null);
    final TransportClientFactory metricsFactory = MetricsCollectingEurekaHttpClient.createFactory(jerseyFactory);

    return new TransportClientFactory() {
        @Override
        public EurekaHttpClient newClient(EurekaEndpoint serviceUrl) {
            return metricsFactory.newClient(serviceUrl);
        }

        @Override
        public void shutdown() {
            metricsFactory.shutdown();
            jerseyFactory.shutdown();
        }
    };
}
/**
* Renew with the eureka service by making the appropriate REST call
*/
/**
 * Renew with the eureka service by making the appropriate REST call.
 * <p>
 * A {@code 404} from the server means the instance is unknown there, in
 * which case a fresh registration is attempted instead.
 *
 * @param registration the registration to renew
 * @return {@code true} when the heartbeat (or the re-registration) succeeded
 */
public boolean renew(Registration registration) {
    InstanceInfo instanceInfo = registration.getInstanceInfo();
    try {
        EurekaHttpResponse<InstanceInfo> response = this.transport.getEurekaHttpClient()
                .sendHeartBeat(instanceInfo.getAppName(), instanceInfo.getId(), instanceInfo, null);
        if (log.isDebugEnabled()) {
            log.debug(String.format("EurekaLite_%s/%s - Heartbeat status: %s", instanceInfo.getAppName(), instanceInfo.getId(), response.getStatusCode()));
        }
        if (response.getStatusCode() == HttpStatus.NOT_FOUND.value()) {
            // The server no longer knows this instance; register it again.
            log.info(String.format("EurekaLite_%s/%s - Re-registering apps/%s", instanceInfo.getAppName(), instanceInfo.getId(), instanceInfo.getAppName()));
            return register(registration);
        }
        return response.getStatusCode() == HttpStatus.OK.value();
    } catch (Exception e) {
        log.error("EurekaLite_" + instanceInfo.getAppName() + "/" + instanceInfo.getId() + " - was unable to send heartbeat!", e);
        return false;
    }
}
/**
* Register with the eureka service by making the appropriate REST call.
*/
/**
 * Register with the eureka service by making the appropriate REST call.
 *
 * @param registration the registration whose {@link InstanceInfo} is published
 * @return {@code true} when the server answered {@code 204 No Content}
 */
protected boolean register(Registration registration) {
    InstanceInfo info = registration.getInstanceInfo();
    log.info(String.format("EurekaLite_%s/%s: registering service...", info.getAppName(), info.getId()));
    EurekaHttpResponse<Void> response;
    try {
        response = this.transport.getEurekaHttpClient().register(info);
    } catch (Exception e) {
        // Log for diagnostics but let the caller decide how to react.
        log.warn("EurekaLite_" + info.getAppName() + "/" + info.getId() + " - registration failed " + e.getMessage(), e);
        throw e;
    }
    if (log.isInfoEnabled()) {
        log.info(String.format("EurekaLite_%s/%s - registration status: %s", info.getAppName(), info.getId(), response.getStatusCode()));
    }
    return response.getStatusCode() == HttpStatus.NO_CONTENT.value();
}
/**
 * Cancels the registration with the Eureka service and then shuts the
 * transport down.
 * <p>
 * De-registration failures are logged and swallowed so that the transport is
 * always released afterwards.
 *
 * @param registration the registration to cancel
 */
public void shutdown(Registration registration) {
    InstanceInfo instanceInfo = registration.getInstanceInfo();
    try {
        // NOTE(review): cancel() is keyed on getInstanceId() while every other
        // call in this class uses getId() - confirm both resolve to the same
        // value for these registrations.
        EurekaHttpResponse<Void> httpResponse = this.transport.getEurekaHttpClient().cancel(instanceInfo.getAppName(), instanceInfo.getInstanceId());
        log.info(String.format("EurekaLite_%s/%s - deregister  status: %s", instanceInfo.getAppName(), instanceInfo.getId(), httpResponse.getStatusCode()));
    } catch (Exception e) {
        log.error("EurekaLite_"+instanceInfo.getAppName()+"/"+ instanceInfo.getId() + " - de-registration failed " + e.getMessage(), e);
    }
    // Release HTTP clients, factories and resolvers regardless of the outcome.
    this.transport.shutdown();
}
}
/**
 * Taken from https://github.com/spencergibb/spring-cloud-netflix-eureka-lite
 * <p>
 * Value holder bundling everything needed to talk to Eureka and to tear the
 * connection down again.
 *
 * @author Spencer Gibb
 *
 * @since 1.0.0
 */
class EurekaTransport {

    private final EurekaHttpClientFactory httpClientFactory;

    private final EurekaHttpClient httpClient;

    private final TransportClientFactory clientFactory;

    private final ClosableResolver resolver;

    public EurekaTransport(EurekaHttpClientFactory eurekaHttpClientFactory,
                           EurekaHttpClient eurekaHttpClient,
                           TransportClientFactory transportClientFactory,
                           ClosableResolver closableResolver) {
        this.httpClientFactory = eurekaHttpClientFactory;
        this.httpClient = eurekaHttpClient;
        this.clientFactory = transportClientFactory;
        this.resolver = closableResolver;
    }

    /** Shuts down every held component, in creation order. */
    public void shutdown() {
        this.httpClientFactory.shutdown();
        this.httpClient.shutdown();
        this.clientFactory.shutdown();
        this.resolver.shutdown();
    }

    public EurekaHttpClientFactory getEurekaHttpClientFactory() {
        return this.httpClientFactory;
    }

    public EurekaHttpClient getEurekaHttpClient() {
        return this.httpClient;
    }

    public TransportClientFactory getTransportClientFactory() {
        return this.clientFactory;
    }

    public ClosableResolver getClosableResolver() {
        return this.resolver;
    }
}
/**
 * Taken from https://github.com/spencergibb/spring-cloud-netflix-eureka-lite
 * <p>
 * Plain data holder describing a service instance to be registered.
 *
 * @author Spencer Gibb
 *
 * @since 1.0.0
 */
class Application {

    private String name;

    private String instance_id;

    private String hostname;

    private int port;

    public Application() {
    }

    public Application(String name, String instance_id, String hostname, int port) {
        this.name = name;
        this.instance_id = instance_id;
        this.hostname = hostname;
        this.port = port;
    }

    /**
     * Key under which this application is tracked; excluded from JSON output.
     */
    @JsonIgnore
    public String getRegistrationKey() {
        return computeRegistrationKey(this.name, this.instance_id);
    }

    /** Builds the canonical {@code name:instanceId} registration key. */
    static String computeRegistrationKey(String name, String instanceId) {
        return name + ":" + instanceId;
    }

    public String getName() {
        return this.name;
    }

    public String getInstance_id() {
        return this.instance_id;
    }

    public String getHostname() {
        return this.hostname;
    }

    public int getPort() {
        return this.port;
    }
}
/**
 * Scheduled service that automatically will renew registrations in Eureka.
 */
class Renewer implements Runnable {

    private static final Log log = LogFactory.getLog(MethodHandles.lookup().lookupClass());

    // Client used to perform the actual renewal call.
    final Eureka eureka;

    // Registration kept alive by this renewer.
    final Registration registration;

    // Single-threaded scheduler driving the periodic renewals.
    final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);

    /**
     * Creates the renewer and immediately starts renewing every
     * {@code flushInterval} seconds (the first run fires right away).
     * <p>
     * NOTE(review): {@code this} is handed to the scheduler from within the
     * constructor, so {@link #run()} may execute before construction has
     * finished; also nothing ever shuts {@link #scheduler} down - confirm the
     * owner's lifecycle handles both.
     */
    Renewer(int flushInterval, Eureka eureka,
            Registration registration) {
        this.eureka = eureka;
        this.registration = registration;
        this.scheduler.scheduleWithFixedDelay(this, 0, flushInterval, SECONDS);
    }

    /** Performs one renewal of the tracked registration. */
    @Override
    public void run() {
        if (log.isTraceEnabled()) {
            log.trace("Renewing registration [" + this.registration + "]");
        }
        this.eureka.renew(this.registration);
    }
}
| |
package stastnarodina.workflowVisualiser;
import jargs.gnu.CmdLineParser;
import jargs.gnu.CmdLineParser.Option;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.text.Normalizer;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
public class Main {

    /** Platform line separator used when emitting DOT source. */
    public static String NL = System.getProperty("line.separator");

    /** Program version reported by {@code --version}. */
    public static final String VERSION = "2010-04-09.2";

    /** Input XML file, taken from the first remaining command line argument. */
    private static String filename;

    /**
     * Entry point: parses the command line, reads the workflow XML and renders
     * every contained workflow through the Graphviz {@code dot} binary.
     *
     * @param args Command line arguments
     */
    public static void main(String[] args) {
        parseConfiguration(args);
        File outputDir = new File(Configuration.getOutputDir());
        try {
            InputStream is = new BufferedInputStream(new FileInputStream(filename));
            Parser parser = new Parser();
            for (Workflow workflow : parser.parse(is)) {
                String dotSource = getDotSource(workflow);
                if (Configuration.isSaveDotSource()) {
                    saveSource(dotSource, workflow);
                }
                File outputFile = new File(outputDir, workflow.getId() + "." + Configuration.getOutputFormat());
                renderWithDot(dotSource, outputFile);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Pipes the given DOT source through the configured {@code dot} binary.
     * <p>
     * Both stdout and stderr are drained BEFORE {@link Process#waitFor()} is
     * called: waiting first can deadlock when the child fills its output
     * pipe buffers and blocks.
     *
     * @param dotSource  DOT source to render
     * @param outputFile target file for the rendered output
     * @throws IOException          if talking to the dot process fails
     * @throws InterruptedException if the wait for the process is interrupted
     */
    private static void renderWithDot(String dotSource, File outputFile)
            throws IOException, InterruptedException {
        String[] command = {
            Configuration.getDotLocation(),
            "-T" + Configuration.getOutputFormat(),
            "-o" + outputFile.getAbsolutePath()
        };
        Process process = Runtime.getRuntime().exec(command);
        // Feed the source on stdin; explicit UTF-8 instead of the platform default.
        process.getOutputStream().write(dotSource.getBytes(StandardCharsets.UTF_8));
        process.getOutputStream().close();
        String line;
        BufferedReader outReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
        while ((line = outReader.readLine()) != null) {
            System.out.println(line);
        }
        outReader.close();
        BufferedReader errReader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
        while ((line = errReader.readLine()) != null) {
            System.err.println(line);
        }
        errReader.close();
        int exitCode = process.waitFor();
        if (exitCode == 0) {
            System.out.println("Successfully created " + outputFile.getAbsolutePath());
        }
    }

    /**
     * Parses the command line options into the global {@link Configuration} and
     * stores the input file name. Exits the VM on {@code --help}/{@code --version}
     * or on a malformed command line.
     *
     * @param args Command line arguments
     */
    private static void parseConfiguration(String[] args) {
        CmdLineParser parser = new CmdLineParser();
        Option help = parser.addBooleanOption('h', "help");
        Option version = parser.addBooleanOption("version");
        Option dot = parser.addStringOption('d', "dot");
        Option out = parser.addStringOption('o', "out");
        Option format = parser.addStringOption('f', "format");
        Option source = parser.addBooleanOption('s', "source");
        Option hideResolution = parser.addBooleanOption("hide-resolutions");
        try {
            parser.parse(args);
            Boolean helpVal = (Boolean) parser.getOptionValue(help, Boolean.FALSE);
            if (helpVal) {
                System.out.println(getUsage());
                System.exit(3);
            }
            Boolean versionVal = (Boolean) parser.getOptionValue(version, Boolean.FALSE);
            if (versionVal) {
                System.out.println("Workflow Visualiser, Version " + VERSION);
                System.exit(4);
            }
            String dotVal = (String) parser.getOptionValue(dot);
            if (dotVal != null) {
                Configuration.setDotLocation(dotVal);
            }
            String outVal = (String) parser.getOptionValue(out);
            if (outVal != null) {
                Configuration.setOutputDir(outVal);
            }
            String formatVal = (String) parser.getOptionValue(format);
            if (formatVal != null) {
                Configuration.setOutputFormat(formatVal);
            }
            Boolean sourceVal = (Boolean) parser.getOptionValue(source, Boolean.FALSE);
            if (sourceVal) {
                Configuration.setSaveDotSource(true);
            }
            // A FALSE default is supplied, so the value is never null.
            Boolean hideResolVal = (Boolean) parser.getOptionValue(hideResolution, Boolean.FALSE);
            Configuration.setShowResolution(!hideResolVal);
            String[] remaining = parser.getRemainingArgs();
            if (remaining.length == 0) {
                throw new IllegalArgumentException("The input file has to be set");
            }
            filename = remaining[0];
        }
        catch ( Exception e ) {
            System.err.println(e.getMessage());
            System.out.println(getUsage());
            System.exit(2);
        }
    }

    /**
     * Builds the command line usage text.
     *
     * @return Usage text for the help output
     */
    private static String getUsage() {
        return "Usage: Main [options] source-xml-file\n" +
            "  Options: \n" +
            "  -h|--help             Print help and exit\n" +
            "  --version             Print program version and exit\n" +
            "  -d|--dot              Location of dot executable\n" +
            "  -o|--out              Output directory\n" +
            "  -f|--format           Output format (see the output formats of dot)\n" +
            "  -s|--source           Save the DOT source file in the output directory\n" +
            "  --hide-resolutions    Set it to avoid displaying resolutions";
    }

    /**
     * Saves the DOT source of the given workflow into the output directory as
     * {@code <workflow-id>.dot}, encoded as UTF-8.
     *
     * @param source   DOT source text
     * @param workflow Workflow the source belongs to (provides the file name)
     */
    private static void saveSource(String source, Workflow workflow) {
        File outputFile = new File(new File(Configuration.getOutputDir()), workflow.getId() + ".dot");
        try (OutputStreamWriter writer = new OutputStreamWriter(
                new BufferedOutputStream(new FileOutputStream(outputFile)), StandardCharsets.UTF_8)) {
            writer.write(source);
            System.out.println("Source successfully saved to " + outputFile);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Strips diacritics (via NFD decomposition) and any remaining non-ASCII
     * characters from the input, so dot never sees non-ASCII labels.
     *
     * @param input Text to asciify
     * @return ASCII-only version of the input
     */
    private static String utf2ascii(String input) {
        String temp = Normalizer.normalize(input, Normalizer.Form.NFD);
        return temp.replaceAll("[^\\p{ASCII}]", "");
    }

    /**
     * Get the dot source for a single workflow: one node per state (colored by
     * group, double circle for the start state), one edge per action and,
     * optionally, dotted edges to resolution boxes.
     *
     * @param workflow Workflow to visualise
     * @return Dot source
     */
    public static String getDotSource(Workflow workflow) {
        StringBuilder out = new StringBuilder();
        out.append("digraph \"").append(workflow.getId()).append("\" {").append(NL);
        out.append("label=\"").append(utf2ascii(workflow.getName())).append("\" ");
        out.append(NL);
        out.append("fontsize=20");
        out.append(NL);
        out.append("labelloc=t");
        out.append(NL);
        // So that there are no duplicate edges for resolutions
        Map<Resolution, HashSet<State>> drawnResolutions = new HashMap<>();
        // All the states
        for (State state : workflow.getStates()) {
            out.append("\"").append(state.getId()).append("\" ");
            out.append("[");
            switch (state.getGroup()) {
            case OPEN:
                out.append("color=green,");
                break;
            case IN_PROGRESS:
                out.append("color=blue,");
                break;
            case CLOSED:
                out.append("color=red,");
                break;
            }
            // Null-guard added: the start action is also checked for null below.
            if (workflow.getStartAction() != null
                    && workflow.getStartAction().getTargetState().equals(state)) {
                out.append("shape=doublecircle,");
            }
            // Not solving the reopen and resolve states here
            out.append("label=\"").append(utf2ascii(state.getName())).append("\"");
            out.append("]");
            out.append(NL);
            // Iterate over the actions
            for (Action action : state.getActions()) {
                out.append("\"").append(state.getId()).append("\" -> ").append("\"").append(action.getTargetState().getId()).append("\" ");
                out.append("[");
                out.append("label=\"").append(utf2ascii(action.getName())).append("\", fontsize=11");
                out.append("]");
                out.append(NL);
                if ((action.getTargetState().isShowResolution() && (Configuration.showResolution()))) {
                    for (Resolution resolution : action.getResolutions()) {
                        if (!drawnResolutions.containsKey(resolution)) {
                            drawnResolutions.put(resolution, new HashSet<State>());
                            out.append("\"").append(resolution.getId()).append("\" [label=\"").append(utf2ascii(resolution.getName())).append("\", shape=box, fontsize=10]");
                            out.append(NL);
                        }
                        if (!drawnResolutions.get(resolution).contains(action.getTargetState())) {
                            drawnResolutions.get(resolution).add(action.getTargetState());
                            out.append("\"").append(action.getTargetState().getId()).append("\" -> ").append("\"").append(resolution.getId()).append("\" ");
                            out.append("[");
                            out.append("style=dotted");
                            out.append("]");
                            out.append(NL);
                        }
                    }
                }
            }
        }
        // Initial state on the top of the graph.
        if (workflow.getStartAction() != null) {
            out.append("{rank=min; \"").append(workflow.getStartAction().getTargetState().getId()).append("\" }");
        }
        out.append("}").append(NL);
        return out.toString();
    }
}
| |
/*
* =================================================================================================
* Copyright (C) 2015 Martin Albedinsky
* =================================================================================================
* Licensed under the Apache License, Version 2.0 or later (further "License" only).
* -------------------------------------------------------------------------------------------------
* You may use this file only in compliance with the License. More details and copy of this License
* you may obtain at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* You can redistribute, modify or publish any part of the code written within this file but as it
* is described in the License, the software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES or CONDITIONS OF ANY KIND.
*
* See the License for the specific language governing permissions and limitations under the License.
* =================================================================================================
*/
package com.albedinsky.android.ui.navigation;
import android.content.res.ColorStateList;
import android.content.res.Resources;
import android.support.annotation.ColorRes;
import android.support.annotation.DrawableRes;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.StringRes;
import com.albedinsky.android.ui.util.ResourceUtils;
/**
* A NavigationItem represents a model that can be used to populate data set of
* {@link BaseNavigationAdapter BaseNavigationAdapter}.
 * A new instance of NavigationItem can be built via {@link NavigationItem.Builder}.
*
* @author Martin Albedinsky
*/
public class NavigationItem {

    /**
     * Interface ===================================================================================
     */

    /**
     * Constants ===================================================================================
     */

    /**
     * Log TAG.
     */
    // private static final String TAG = "NavigationItem";

    /**
     * Flag indicating whether a specific navigation item instance is enabled or not.
     */
    private static final int PFLAG_ENABLED = 0x00000001;

    /**
     * Flag indicating whether a specific navigation item instance is selectable or not.
     */
    private static final int PFLAG_SELECTABLE = 0x00000002;

    /**
     * Flag indicating whether a specific navigation item instance has its color for title text specified
     * or not.
     */
    private static final int PFLAG_HAS_TITLE_TEXT_COLOR = 0x00000004;

    /**
     * Flag indicating whether a specific navigation item instance has its color tint for icon drawable
     * specified or not.
     */
    private static final int PFLAG_HAS_ICON_TINT_LIST = 0x00000008;

    /**
     * Static members ==============================================================================
     */

    /**
     * Members =====================================================================================
     */

    /**
     * Id of this navigation item.
     */
    private final int mId;

    /**
     * Title text.
     */
    private final CharSequence mTitle;

    /**
     * Colors state list for the title text.
     */
    private ColorStateList mTitleTextColor;

    /**
     * Resource id of icon drawable.
     */
    private final int mIconRes;

    /**
     * Resource id of vector icon drawable.
     */
    private final int mVectorIconRes;

    /**
     * Type of the view that should present data of this item.
     */
    private final int mViewType;

    /**
     * Colors state list used to tint a graphics of the icon.
     */
    private ColorStateList mIconTintList;

    /**
     * Data object specified for this navigation item.
     */
    private Object mData;

    /**
     * Set of private flags determining current state of this item and its configuration.
     */
    private int mPrivateFlags = PFLAG_ENABLED | PFLAG_SELECTABLE;

    /**
     * Constructors ================================================================================
     */

    /**
     * Creates a new instance of NavigationItem from the current data specified within the given
     * <var>builder</var>.
     *
     * @param builder The builder of which data to use to create new navigation item.
     */
    protected NavigationItem(@NonNull Builder builder) {
        this.mId = builder.id;
        this.mIconRes = builder.iconRes;
        this.mVectorIconRes = builder.vectorIconRes;
        this.mTitle = builder.title;
        this.mViewType = builder.viewType;
        // Only apply tint/color when the builder explicitly specified one, so the
        // corresponding PFLAG_HAS_* flags stay unset otherwise.
        if (builder.iconTintSpecified) {
            setIconTintList(builder.iconTintList);
        }
        if (builder.titleColorSpecified) {
            setTitleTextColor(builder.titleColorList);
        }
        setEnabled(builder.enabled);
        setSelectable(builder.selectable);
        this.mData = builder.data;
    }

    /**
     * Methods =====================================================================================
     */

    /**
     * Returns id of this navigation item.
     *
     * @return This item's id.
     */
    public final int getId() {
        return mId;
    }

    /**
     * Returns the view type specific for this navigation item. This view type can be used to determine
     * which view should be created for this navigation item.
     *
     * @return This item's view type or {@code 0} if no view type has been specified.
     */
    public final int getViewType() {
        return mViewType;
    }

    /**
     * Returns the resource id of the icon drawable of this navigation item.
     *
     * @return Icon resource or {@code 0} if no resource id has been specified.
     */
    @DrawableRes
    public int getIconRes() {
        return mIconRes;
    }

    /**
     * Returns the resource id of the vector icon drawable of this navigation item.
     *
     * @return Vector icon resource or {@code 0} if no resource id has been specified.
     */
    @DrawableRes
    public int getVectorIconRes() {
        return mVectorIconRes;
    }

    /**
     * Sets a color tint list for the icon drawable of this navigation item.
     *
     * @param tint The desired color tint list that should be used to tint graphics of the icon.
     * @see #hasIconTintList()
     * @see #getIconTintList()
     */
    public void setIconTintList(@Nullable ColorStateList tint) {
        this.updatePrivateFlags(PFLAG_HAS_ICON_TINT_LIST, true);
        this.mIconTintList = tint;
    }

    /**
     * Checks whether this navigation item has its color tint list for icon drawable specified or not.
     *
     * @return {@code True} if icon color tint list has been specified, {@code false} otherwise.
     * @see #getIconTintList()
     * @see #setIconTintList(ColorStateList)
     */
    public boolean hasIconTintList() {
        return (mPrivateFlags & PFLAG_HAS_ICON_TINT_LIST) != 0;
    }

    /**
     * Returns the color tint list that should be used to tint graphics of icon drawable of this
     * navigation item.
     *
     * @return Color tint list or {@code null} if there was no icon tint list specified.
     * @see #hasIconTintList()
     * @see #setIconTintList(ColorStateList)
     */
    @Nullable
    public ColorStateList getIconTintList() {
        return mIconTintList;
    }

    /**
     * Returns the title text specified for this navigation item.
     *
     * @return This navigation item's title text.
     */
    @NonNull
    public CharSequence getTitle() {
        return mTitle;
    }

    /**
     * Sets a color list for the title text of this navigation item.
     *
     * @param textColor The desired text color list that should be used for title text.
     * @see #hasTitleTextColor()
     * @see #getTitleTextColor()
     */
    public void setTitleTextColor(@NonNull ColorStateList textColor) {
        this.updatePrivateFlags(PFLAG_HAS_TITLE_TEXT_COLOR, true);
        this.mTitleTextColor = textColor;
    }

    /**
     * Checks whether this navigation item has its color list for title text specified or not.
     *
     * @return {@code True} if title color list has been specified, {@code false} otherwise.
     * @see #getTitleTextColor()
     * @see #setTitleTextColor(ColorStateList)
     */
    public boolean hasTitleTextColor() {
        return (mPrivateFlags & PFLAG_HAS_TITLE_TEXT_COLOR) != 0;
    }

    /**
     * Returns the color state list for title text of this navigation item.
     *
     * @return Color state list or {@code null} if there was no color list specified.
     */
    @Nullable
    public ColorStateList getTitleTextColor() {
        return mTitleTextColor;
    }

    /**
     * Specifies a data object for this navigation item.
     *
     * @param data The desired data that should be associated with this item.
     * @see #getData()
     */
    public void setData(@Nullable Object data) {
        this.mData = data;
    }

    /**
     * Returns the data object specified for this navigation item (if any).
     *
     * @return This item's current data or {@code null} if there were no data specified yet.
     * @see #setData(Object)
     */
    @Nullable
    public Object getData() {
        return mData;
    }

    /**
     * Specifies whether this navigation item is enabled or not.
     *
     * @param enabled {@code True} if this navigation item should be enabled, {@code false} otherwise.
     * @see #isEnabled()
     */
    public void setEnabled(boolean enabled) {
        this.updatePrivateFlags(PFLAG_ENABLED, enabled);
    }

    /**
     * Returns a boolean flag indicating whether this navigation item is enabled or not.
     * <p>
     * <b>Note</b>, that disabled items are most likely not clickable.
     *
     * @return {@code True} if this navigation item is enabled (clickable), {@code false} otherwise.
     * @see #setEnabled(boolean)
     */
    public boolean isEnabled() {
        return (mPrivateFlags & PFLAG_ENABLED) != 0;
    }

    /**
     * Specifies whether this navigation item can be selected or not.
     *
     * @param selectable {@code True} if can be selected, {@code false} otherwise.
     * @see #isSelectable()
     */
    public void setSelectable(boolean selectable) {
        this.updatePrivateFlags(PFLAG_SELECTABLE, selectable);
    }

    /**
     * Returns a boolean flag indicating whether this navigation item can be selected or not.
     *
     * @return {@code True} if this navigation item can be selected, {@code false} otherwise.
     * @see #setSelectable(boolean)
     */
    public boolean isSelectable() {
        return (mPrivateFlags & PFLAG_SELECTABLE) != 0;
    }

    /**
     * Updates the current private flags.
     *
     * @param flag Value of the desired flag to add/remove to/from the current private flags.
     * @param add  Boolean flag indicating whether to add or remove the specified <var>flag</var>.
     */
    private void updatePrivateFlags(int flag, boolean add) {
        if (add) this.mPrivateFlags |= flag;
        else this.mPrivateFlags &= ~flag;
    }

    /**
     * Inner classes ===============================================================================
     */

    /**
     * Builder that can be used to build a new instances of {@link NavigationItem}.
     * <p>
     * <b>Note, that none of values that can be supplied to this builder to build a new instance
     * of NavigationItem are required.</b>
     *
     * @author Martin Albedinsky
     */
    public static class Builder {

        /**
         * See {@link NavigationItem#getId()}.
         */
        private int id;

        /**
         * See {@link NavigationItem#getIconRes()}.
         */
        private int iconRes;

        /**
         * See {@link NavigationItem#getVectorIconRes()}.
         */
        private int vectorIconRes;

        /**
         * See {@link NavigationItem#getViewType()}.
         */
        private int viewType;

        /**
         * See {@link NavigationItem#getTitle()}.
         */
        private CharSequence title = "";

        /**
         * See {@link NavigationItem#isSelectable()}.
         */
        private boolean selectable;

        /**
         * See {@link NavigationItem#isEnabled()}.
         */
        private boolean enabled;

        /**
         * See {@link NavigationItem#getTitleTextColor()}.
         */
        private ColorStateList titleColorList;

        /**
         * See {@link NavigationItem#getIconTintList()}.
         */
        private ColorStateList iconTintList;

        /**
         * Boolean flag indicating whether the associated color has been specified or not.
         */
        private boolean titleColorSpecified, iconTintSpecified;

        /**
         * See {@link NavigationItem#mData}.
         */
        private Object data;

        /**
         * Application resources used to obtain values for the new item via theirs resource ids
         * (if specified).
         */
        protected final Resources resources;

        /**
         * Theme that is used to resolve theme attributes when obtaining resources (drawables, color
         * state lists, ...).
         */
        protected final Resources.Theme theme;

        /**
         * Creates a new empty Builder.
         */
        public Builder() {
            this.resources = null;
            this.theme = null;
            reset();
        }

        /**
         * Creates a new empty Builder with the specified <var>resources</var>.
         *
         * @param resources The resources used to obtain data via theirs specified resource ids.
         * @param theme     Theme that will be used to resolve theme attributes for requested resources
         *                  (drawables, color state lists, ...).
         */
        public Builder(@NonNull Resources resources, @Nullable Resources.Theme theme) {
            this.resources = resources;
            this.theme = theme;
            reset();
        }

        /**
         * Resets this builder to its initial state.
         *
         * @return This builder to allow methods chaining.
         */
        public Builder reset() {
            this.id = -1;
            this.iconRes = 0;
            this.vectorIconRes = 0;
            this.viewType = 0;
            this.iconTintSpecified = false;
            this.iconTintList = null;
            this.title = "";
            this.titleColorSpecified = false;
            this.titleColorList = null;
            this.data = null;
            this.enabled = true;
            this.selectable = true;
            return this;
        }

        /**
         * Specifies an id for the new NavigationItem.
         *
         * @param id The desired navigation item's id.
         * @return This builder to allow methods chaining.
         */
        public Builder id(int id) {
            this.id = id;
            return this;
        }

        /**
         * Specifies a view type for the new NavigationItem.
         *
         * @param viewType The desired view type that will be used to determine which view should be
         *                 created for the new NavigationItem. See view types specified by
         *                 {@link BaseNavigationAdapter}.
         * @return This builder to allow methods chaining.
         */
        public Builder viewType(int viewType) {
            this.viewType = viewType;
            return this;
        }

        /**
         * Specifies a resource id of icon for the new NavigationItem.
         *
         * @param resId Resource id of the desired icon drawable.
         * @return This builder to allow methods chaining.
         */
        public Builder icon(@DrawableRes int resId) {
            this.iconRes = resId;
            return this;
        }

        /**
         * Specifies a resource id of vector icon for the new NavigationItem.
         *
         * @param resId Resource id of the desired vector icon drawable.
         * @return This builder to allow methods chaining.
         */
        public Builder vectorIcon(@DrawableRes int resId) {
            this.vectorIconRes = resId;
            return this;
        }

        /**
         * Specifies a resource id of tint list for icon for the new NavigationItem.
         * <p>
         * <b>Use this only if this builder has been initialized with valid Resources.</b>
         *
         * @param resId Resource id of the desired tint list that will be used to tint the icon's
         *              graphics.
         * @return This builder to allow methods chaining.
         */
        public Builder iconTintList(@ColorRes int resId) {
            return iconTintList(obtainColorStateList(resId));
        }

        /**
         * Specifies a tint list for icon for the new NavigationItem.
         *
         * @param tint The desired tint list that will be used to tint the icon's graphics.
         * @return This builder to allow methods chaining.
         */
        public Builder iconTintList(@Nullable ColorStateList tint) {
            this.iconTintList = tint;
            this.iconTintSpecified = true;
            return this;
        }

        /**
         * Specifies a resource id of title for the new NavigationItem.
         * <p>
         * <b>Use this only if this builder has been initialized with valid Resources.</b>
         *
         * @param resId Resource id of the desired title text.
         * @return This builder to allow methods chaining.
         */
        public Builder title(@StringRes int resId) {
            return title(obtainText(resId));
        }

        /**
         * Specifies a title text for the new NavigationItem.
         *
         * @param title The desired title text.
         * @return This builder to allow methods chaining.
         */
        public Builder title(@NonNull CharSequence title) {
            this.title = title;
            return this;
        }

        /**
         * Specifies a resource id of color list for title text for the new NavigationItem.
         * <p>
         * <b>Use this only if this builder has been initialized with valid Resources.</b>
         *
         * @param resId Resource id of the desired color list for title text.
         * @return This builder to allow methods chaining.
         */
        public Builder titleColorList(@ColorRes int resId) {
            final ColorStateList colorList = obtainColorStateList(resId);
            return colorList != null ? titleColorList(colorList) : this;
        }

        /**
         * Specifies a color list for title text for the new NavigationItem.
         *
         * @param color The desired color list for title text.
         * @return This builder to allow methods chaining.
         */
        public Builder titleColorList(@NonNull ColorStateList color) {
            this.titleColorList = color;
            this.titleColorSpecified = true;
            return this;
        }

        /**
         * Specifies a data object for the new NavigationItem.
         *
         * @param data The desired initial data to be associated with the new item.
         * @return This builder to allow methods chaining.
         */
        public Builder data(@Nullable Object data) {
            this.data = data;
            return this;
        }

        /**
         * Specifies a boolean flag indicating whether the new NavigationItem should be enabled or
         * not.
         * <p>
         * <b>Note</b>, that if navigation item is disabled it is not clickable at all.
         *
         * @param enabled {@code True} to be the new NavigationItem enabled, {@code false} otherwise.
         * @return This builder to allow methods chaining.
         */
        public Builder enabled(boolean enabled) {
            this.enabled = enabled;
            return this;
        }

        /**
         * Specifies a boolean flag indicating whether the new NavigationItem should be selectable
         * or not.
         * <p>
         * <b>Note</b>, that if navigation item is not selectable, it can be clicked but cannot be
         * selected (highlighted).
         *
         * @param selectable {@code True} to be the new NavigationItem selectable, {@code false}
         *                   otherwise.
         * @return This builder to allow methods chaining.
         */
        public Builder selectable(boolean selectable) {
            this.selectable = selectable;
            return this;
        }

        /**
         * Obtains a text from the attached resources for the specified <var>resId</var>.
         *
         * @param resId Resource id of the desired text to obtain.
         * @return Obtained text or empty text if this builder was not initialized with valid resources.
         */
        @NonNull
        protected final CharSequence obtainText(@StringRes int resId) {
            return resId != 0 && resources != null ? resources.getText(resId) : "";
        }

        /**
         * Obtains a color state list from the attached resources for the specified <var>resId</var>.
         *
         * @param resId Resource id of the desired color list.
         * @return Obtained color list or {@code null} if this builder was not initialized with
         * valid resources.
         */
        @Nullable
        protected final ColorStateList obtainColorStateList(@ColorRes int resId) {
            return resources != null ? ResourceUtils.getColorStateList(resources, resId, null) : null;
        }

        /**
         * Builds a new instance of NavigationItem from the current data of this builder.
         *
         * @return New NavigationItem instance.
         */
        @NonNull
        public NavigationItem build() {
            return new NavigationItem(this);
        }
    }
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.http;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.socket.ChannelInputShutdownEvent;
import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.handler.codec.DecoderResult;
import io.netty.handler.codec.TooLongFrameException;
import io.netty.util.ByteProcessor;
import io.netty.util.internal.AppendableCharSequence;
import java.util.List;
/**
* Decodes {@link ByteBuf}s into {@link HttpMessage}s and
* {@link HttpContent}s.
*
* <h3>Parameters that prevents excessive memory consumption</h3>
* <table border="1">
* <tr>
* <th>Name</th><th>Meaning</th>
* </tr>
* <tr>
* <td>{@code maxInitialLineLength}</td>
* <td>The maximum length of the initial line
* (e.g. {@code "GET / HTTP/1.0"} or {@code "HTTP/1.0 200 OK"})
* If the length of the initial line exceeds this value, a
* {@link TooLongFrameException} will be raised.</td>
* </tr>
* <tr>
* <td>{@code maxHeaderSize}</td>
* <td>The maximum length of all headers. If the sum of the length of each
* header exceeds this value, a {@link TooLongFrameException} will be raised.</td>
* </tr>
* <tr>
* <td>{@code maxChunkSize}</td>
* <td>The maximum length of the content or each chunk. If the content length
* (or the length of each chunk) exceeds this value, the content or chunk
* will be split into multiple {@link HttpContent}s whose length is
* {@code maxChunkSize} at maximum.</td>
* </tr>
* </table>
*
* <h3>Chunked Content</h3>
*
* If the content of an HTTP message is greater than {@code maxChunkSize} or
* the transfer encoding of the HTTP message is 'chunked', this decoder
* generates one {@link HttpMessage} instance and its following
* {@link HttpContent}s per single HTTP message to avoid excessive memory
* consumption. For example, the following HTTP message:
* <pre>
* GET / HTTP/1.1
* Transfer-Encoding: chunked
*
* 1a
* abcdefghijklmnopqrstuvwxyz
* 10
* 1234567890abcdef
* 0
* Content-MD5: ...
* <i>[blank line]</i>
* </pre>
* triggers {@link HttpRequestDecoder} to generate 3 objects:
* <ol>
* <li>An {@link HttpRequest},</li>
* <li>The first {@link HttpContent} whose content is {@code 'abcdefghijklmnopqrstuvwxyz'},</li>
* <li>The second {@link LastHttpContent} whose content is {@code '1234567890abcdef'}, which marks
* the end of the content.</li>
* </ol>
*
* If you prefer not to handle {@link HttpContent}s by yourself for your
* convenience, insert {@link HttpObjectAggregator} after this decoder in the
* {@link ChannelPipeline}. However, please note that your server might not
* be as memory efficient as without the aggregator.
*
* <h3>Extensibility</h3>
*
* Please note that this decoder is designed to be extended to implement
* a protocol derived from HTTP, such as
* <a href="http://en.wikipedia.org/wiki/Real_Time_Streaming_Protocol">RTSP</a> and
* <a href="http://en.wikipedia.org/wiki/Internet_Content_Adaptation_Protocol">ICAP</a>.
* To implement the decoder of such a derived protocol, extend this class and
* implement all abstract methods properly.
*/
public abstract class HttpObjectDecoder extends ByteToMessageDecoder {

    // Placeholder used by splitHeader(...) when a header line has no value part.
    private static final String EMPTY_VALUE = "";

    // Configured limits / capabilities (see class javadoc for their meaning).
    private final int maxChunkSize;
    private final boolean chunkedSupported;
    protected final boolean validateHeaders;
    // Parser for header lines; enforces maxHeaderSize cumulatively per message.
    private final HeaderParser headerParser;
    // Parser for the initial line and chunk-size lines; limit applies per line.
    private final LineParser lineParser;

    // The message currently being decoded; null between messages.
    private HttpMessage message;
    // Remaining bytes of the current fixed-length body or of the current chunk.
    private long chunkSize;
    // Cached Content-Length value; Long.MIN_VALUE means "not looked up yet".
    private long contentLength = Long.MIN_VALUE;
    // Set by reset() (volatile because reset() may be called from outside the
    // event loop); acted upon at the start of the next decode() invocation.
    private volatile boolean resetRequested;

    // These will be updated by splitHeader(...)
    private CharSequence name;
    private CharSequence value;

    // Trailing headers accumulated while in the READ_CHUNK_FOOTER state.
    private LastHttpContent trailer;

    /**
     * The internal state of {@link HttpObjectDecoder}.
     * <em>Internal use only</em>.
     */
    private enum State {
        SKIP_CONTROL_CHARS,
        READ_INITIAL,
        READ_HEADER,
        READ_VARIABLE_LENGTH_CONTENT,
        READ_FIXED_LENGTH_CONTENT,
        READ_CHUNK_SIZE,
        READ_CHUNKED_CONTENT,
        READ_CHUNK_DELIMITER,
        READ_CHUNK_FOOTER,
        BAD_MESSAGE,
        UPGRADED
    }

    private State currentState = State.SKIP_CONTROL_CHARS;

    /**
     * Creates a new instance with the default
     * {@code maxInitialLineLength (4096)}, {@code maxHeaderSize (8192)}, and
     * {@code maxChunkSize (8192)}.
     */
    protected HttpObjectDecoder() {
        this(4096, 8192, 8192, true);
    }

    /**
     * Creates a new instance with the specified parameters and header
     * validation enabled.
     */
    protected HttpObjectDecoder(
            int maxInitialLineLength, int maxHeaderSize, int maxChunkSize, boolean chunkedSupported) {
        this(maxInitialLineLength, maxHeaderSize, maxChunkSize, chunkedSupported, true);
    }

    /**
     * Creates a new instance with the specified parameters.
     *
     * @throws IllegalArgumentException if any of the three size limits is not
     *         a positive integer
     */
    protected HttpObjectDecoder(
            int maxInitialLineLength, int maxHeaderSize, int maxChunkSize,
            boolean chunkedSupported, boolean validateHeaders) {
        if (maxInitialLineLength <= 0) {
            throw new IllegalArgumentException(
                    "maxInitialLineLength must be a positive integer: " +
                    maxInitialLineLength);
        }
        if (maxHeaderSize <= 0) {
            throw new IllegalArgumentException(
                    "maxHeaderSize must be a positive integer: " +
                    maxHeaderSize);
        }
        if (maxChunkSize <= 0) {
            throw new IllegalArgumentException(
                    "maxChunkSize must be a positive integer: " +
                    maxChunkSize);
        }
        this.maxChunkSize = maxChunkSize;
        this.chunkedSupported = chunkedSupported;
        this.validateHeaders = validateHeaders;
        // The two parsers share one AppendableCharSequence; only one line is
        // ever parsed at a time.
        AppendableCharSequence seq = new AppendableCharSequence(128);
        lineParser = new LineParser(seq, maxInitialLineLength);
        headerParser = new HeaderParser(seq, maxHeaderSize);
    }

    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf buffer, List<Object> out) throws Exception {
        if (resetRequested) {
            resetNow();
        }

        // NOTE: several cases below intentionally omit 'break' (marked with
        // "fall-through") so that decoding continues into the next state within
        // the same decode() call when enough data is available.
        switch (currentState) {
        case SKIP_CONTROL_CHARS: {
            if (!skipControlCharacters(buffer)) {
                // Start of the initial line not found yet; wait for more data.
                return;
            }
            currentState = State.READ_INITIAL;
        }
        case READ_INITIAL: try {
            AppendableCharSequence line = lineParser.parse(buffer);
            if (line == null) {
                // The initial line is not complete yet.
                return;
            }
            String[] initialLine = splitInitialLine(line);
            if (initialLine.length < 3) {
                // Invalid initial line - ignore.
                currentState = State.SKIP_CONTROL_CHARS;
                return;
            }

            message = createMessage(initialLine);
            currentState = State.READ_HEADER;
            // fall-through
        } catch (Exception e) {
            out.add(invalidMessage(buffer, e));
            return;
        }
        case READ_HEADER: try {
            State nextState = readHeaders(buffer);
            if (nextState == null) {
                // Headers are not complete yet.
                return;
            }
            currentState = nextState;
            switch (nextState) {
            case SKIP_CONTROL_CHARS:
                // fast-path
                // No content is expected.
                out.add(message);
                out.add(LastHttpContent.EMPTY_LAST_CONTENT);
                resetNow();
                return;
            case READ_CHUNK_SIZE:
                if (!chunkedSupported) {
                    throw new IllegalArgumentException("Chunked messages not supported");
                }
                // Chunked encoding - generate HttpMessage first.  HttpChunks will follow.
                out.add(message);
                return;
            default:
                /**
                 * <a href="https://tools.ietf.org/html/rfc7230#section-3.3.3">RFC 7230, 3.3.3</a> states that if a
                 * request does not have either a transfer-encoding or a content-length header then the message body
                 * length is 0. However for a response the body length is the number of octets received prior to the
                 * server closing the connection. So we treat this as variable length chunked encoding.
                 */
                long contentLength = contentLength();
                if (contentLength == 0 || contentLength == -1 && isDecodingRequest()) {
                    out.add(message);
                    out.add(LastHttpContent.EMPTY_LAST_CONTENT);
                    resetNow();
                    return;
                }

                assert nextState == State.READ_FIXED_LENGTH_CONTENT ||
                        nextState == State.READ_VARIABLE_LENGTH_CONTENT;

                out.add(message);

                if (nextState == State.READ_FIXED_LENGTH_CONTENT) {
                    // chunkSize will be decreased as the READ_FIXED_LENGTH_CONTENT state reads data chunk by chunk.
                    chunkSize = contentLength;
                }

                // We return here, this forces decode to be called again where we will decode the content
                return;
            }
        } catch (Exception e) {
            out.add(invalidMessage(buffer, e));
            return;
        }
        case READ_VARIABLE_LENGTH_CONTENT: {
            // Keep reading data as a chunk until the end of connection is reached.
            int toRead = Math.min(buffer.readableBytes(), maxChunkSize);
            if (toRead > 0) {
                ByteBuf content = buffer.readSlice(toRead).retain();
                out.add(new DefaultHttpContent(content));
            }
            return;
        }
        case READ_FIXED_LENGTH_CONTENT: {
            int readLimit = buffer.readableBytes();

            // Check if the buffer is readable first as we use the readable byte count
            // to create the HttpChunk. This is needed as otherwise we may end up with
            // create a HttpChunk instance that contains an empty buffer and so is
            // handled like it is the last HttpChunk.
            //
            // See https://github.com/netty/netty/issues/433
            if (readLimit == 0) {
                return;
            }

            int toRead = Math.min(readLimit, maxChunkSize);
            if (toRead > chunkSize) {
                toRead = (int) chunkSize;
            }
            ByteBuf content = buffer.readSlice(toRead).retain();
            chunkSize -= toRead;

            if (chunkSize == 0) {
                // Read all content.
                out.add(new DefaultLastHttpContent(content, validateHeaders));
                resetNow();
            } else {
                out.add(new DefaultHttpContent(content));
            }
            return;
        }
        /**
         * everything else after this point takes care of reading chunked content. basically, read chunk size,
         * read chunk, read and ignore the CRLF and repeat until 0
         */
        case READ_CHUNK_SIZE: try {
            AppendableCharSequence line = lineParser.parse(buffer);
            if (line == null) {
                return;
            }
            int chunkSize = getChunkSize(line.toString());
            this.chunkSize = chunkSize;
            if (chunkSize == 0) {
                // A zero-sized chunk marks the end of the body; trailing headers may follow.
                currentState = State.READ_CHUNK_FOOTER;
                return;
            }
            currentState = State.READ_CHUNKED_CONTENT;
            // fall-through
        } catch (Exception e) {
            out.add(invalidChunk(buffer, e));
            return;
        }
        case READ_CHUNKED_CONTENT: {
            assert chunkSize <= Integer.MAX_VALUE;
            int toRead = Math.min((int) chunkSize, maxChunkSize);
            toRead = Math.min(toRead, buffer.readableBytes());
            if (toRead == 0) {
                return;
            }
            HttpContent chunk = new DefaultHttpContent(buffer.readSlice(toRead).retain());
            chunkSize -= toRead;

            out.add(chunk);

            if (chunkSize != 0) {
                return;
            }
            currentState = State.READ_CHUNK_DELIMITER;
            // fall-through
        }
        case READ_CHUNK_DELIMITER: {
            // Consume everything up to and including the LF that terminates the chunk data.
            final int wIdx = buffer.writerIndex();
            int rIdx = buffer.readerIndex();
            while (wIdx > rIdx) {
                byte next = buffer.getByte(rIdx++);
                if (next == HttpConstants.LF) {
                    currentState = State.READ_CHUNK_SIZE;
                    break;
                }
            }
            buffer.readerIndex(rIdx);
            return;
        }
        case READ_CHUNK_FOOTER: try {
            LastHttpContent trailer = readTrailingHeaders(buffer);
            if (trailer == null) {
                // Trailing headers are not complete yet.
                return;
            }
            out.add(trailer);
            resetNow();
            return;
        } catch (Exception e) {
            out.add(invalidChunk(buffer, e));
            return;
        }
        case BAD_MESSAGE: {
            // Keep discarding until disconnection.
            buffer.skipBytes(buffer.readableBytes());
            break;
        }
        case UPGRADED: {
            int readableBytes = buffer.readableBytes();
            if (readableBytes > 0) {
                // Keep on consuming as otherwise we may trigger an DecoderException,
                // other handler will replace this codec with the upgraded protocol codec to
                // take the traffic over at some point then.
                // See https://github.com/netty/netty/issues/2173
                out.add(buffer.readBytes(readableBytes));
            }
            break;
        }
        }
    }

    @Override
    protected void decodeLast(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
        decode(ctx, in, out);

        // Handle the last unfinished message.
        if (message != null) {
            boolean chunked = HttpHeaderUtil.isTransferEncodingChunked(message);
            if (currentState == State.READ_VARIABLE_LENGTH_CONTENT && !in.isReadable() && !chunked) {
                // End of connection.
                out.add(LastHttpContent.EMPTY_LAST_CONTENT);
                reset();
                return;
            }
            // Check if the closure of the connection signifies the end of the content.
            boolean prematureClosure;
            if (isDecodingRequest() || chunked) {
                // The last request did not wait for a response.
                prematureClosure = true;
            } else {
                // Compare the length of the received content and the 'Content-Length' header.
                // If the 'Content-Length' header is absent, the length of the content is determined by the end of the
                // connection, so it is perfectly fine.
                prematureClosure = contentLength() > 0;
            }
            resetNow();

            if (!prematureClosure) {
                out.add(LastHttpContent.EMPTY_LAST_CONTENT);
            }
        }
    }

    @Override
    public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
        if (evt instanceof ChannelInputShutdownEvent) {
            // The decodeLast method is invoked when a channelInactive event is encountered.
            // This method is responsible for ending requests in some situations and must be called
            // when the input has been shutdown.
            super.channelInactive(ctx);
        }
        super.userEventTriggered(ctx, evt);
    }

    /**
     * Returns {@code true} when the given message can never carry a body:
     * 1xx informational responses (except a 101 without a Sec-WebSocket-Accept
     * header, which is treated as a Hixie 76 WebSocket handshake response),
     * and the 204, 205 and 304 status codes.
     */
    protected boolean isContentAlwaysEmpty(HttpMessage msg) {
        if (msg instanceof HttpResponse) {
            HttpResponse res = (HttpResponse) msg;
            int code = res.status().code();

            // Correctly handle return codes of 1xx.
            //
            // See:
            //     - http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html Section 4.4
            //     - https://github.com/netty/netty/issues/222
            if (code >= 100 && code < 200) {
                // One exception: Hixie 76 websocket handshake response
                return !(code == 101 && !res.headers().contains(HttpHeaderNames.SEC_WEBSOCKET_ACCEPT));
            }

            switch (code) {
            case 204: case 205: case 304:
                return true;
            }
        }
        return false;
    }

    /**
     * Resets the state of the decoder so that it is ready to decode a new message.
     * This method is useful for handling a rejected request with {@code Expect: 100-continue} header.
     */
    public void reset() {
        // Only sets a flag; the actual reset happens on the next decode() call.
        resetRequested = true;
    }

    /**
     * Immediately clears all per-message state. The next decode run starts in
     * {@link State#SKIP_CONTROL_CHARS}, or in {@link State#UPGRADED} when the
     * just-finished (response) message had status code 101.
     */
    private void resetNow() {
        HttpMessage message = this.message;
        this.message = null;
        name = null;
        value = null;
        contentLength = Long.MIN_VALUE;
        lineParser.reset();
        headerParser.reset();
        trailer = null;
        if (!isDecodingRequest()) {
            HttpResponse res = (HttpResponse) message;
            if (res != null && res.status().code() == 101) {
                currentState = State.UPGRADED;
                return;
            }
        }

        currentState = State.SKIP_CONTROL_CHARS;
    }

    /**
     * Switches to {@link State#BAD_MESSAGE}, discards the remaining input and
     * returns the current (or a newly created invalid) message with its decoder
     * result marked as a failure.
     */
    private HttpMessage invalidMessage(ByteBuf in, Exception cause) {
        currentState = State.BAD_MESSAGE;

        // Advance the readerIndex so that ByteToMessageDecoder does not complain
        // when we produced an invalid message without consuming anything.
        in.skipBytes(in.readableBytes());

        if (message != null) {
            message.setDecoderResult(DecoderResult.failure(cause));
        } else {
            message = createInvalidMessage();
            message.setDecoderResult(DecoderResult.failure(cause));
        }

        HttpMessage ret = message;
        message = null;
        return ret;
    }

    /**
     * Switches to {@link State#BAD_MESSAGE}, discards the remaining input and
     * returns an empty last-content chunk with its decoder result marked as a
     * failure.
     */
    private HttpContent invalidChunk(ByteBuf in, Exception cause) {
        currentState = State.BAD_MESSAGE;

        // Advance the readerIndex so that ByteToMessageDecoder does not complain
        // when we produced an invalid message without consuming anything.
        in.skipBytes(in.readableBytes());

        HttpContent chunk = new DefaultLastHttpContent(Unpooled.EMPTY_BUFFER);
        chunk.setDecoderResult(DecoderResult.failure(cause));
        message = null;
        trailer = null;
        return chunk;
    }

    /**
     * Skips leading control and whitespace characters.
     *
     * @return {@code true} when a non-control, non-whitespace byte was found
     *         (the reader index is left pointing at it); {@code false} when the
     *         whole readable region was consumed without finding one.
     */
    private static boolean skipControlCharacters(ByteBuf buffer) {
        boolean skiped = false;
        final int wIdx = buffer.writerIndex();
        int rIdx = buffer.readerIndex();
        while (wIdx > rIdx) {
            int c = buffer.getUnsignedByte(rIdx++);
            if (!Character.isISOControl(c) && !Character.isWhitespace(c)) {
                rIdx--;
                skiped = true;
                break;
            }
        }
        buffer.readerIndex(rIdx);
        return skiped;
    }

    /**
     * Parses header lines into {@link #message}'s headers, merging folded
     * continuation lines (leading space or tab) into the previous value.
     *
     * @return the next decoder state derived from the headers, or {@code null}
     *         when the header section is not complete yet.
     */
    private State readHeaders(ByteBuf buffer) {
        final HttpMessage message = this.message;
        final HttpHeaders headers = message.headers();

        AppendableCharSequence line = headerParser.parse(buffer);
        if (line == null) {
            return null;
        }
        if (line.length() > 0) {
            do {
                char firstChar = line.charAt(0);
                if (name != null && (firstChar == ' ' || firstChar == '\t')) {
                    // Folded continuation line: append to the pending value,
                    // separated by a single space.
                    StringBuilder buf = new StringBuilder(value.length() + line.length() + 1);
                    buf.append(value)
                       .append(' ')
                       .append(line.toString().trim());
                    value = buf.toString();
                } else {
                    if (name != null) {
                        headers.add(name, value);
                    }
                    splitHeader(line);
                }

                line = headerParser.parse(buffer);
                if (line == null) {
                    return null;
                }
            } while (line.length() > 0);
        }

        // Add the last header.
        if (name != null) {
            headers.add(name, value);
        }
        // reset name and value fields
        name = null;
        value = null;

        State nextState;

        if (isContentAlwaysEmpty(message)) {
            HttpHeaderUtil.setTransferEncodingChunked(message, false);
            nextState = State.SKIP_CONTROL_CHARS;
        } else if (HttpHeaderUtil.isTransferEncodingChunked(message)) {
            nextState = State.READ_CHUNK_SIZE;
        } else if (contentLength() >= 0) {
            nextState = State.READ_FIXED_LENGTH_CONTENT;
        } else {
            nextState = State.READ_VARIABLE_LENGTH_CONTENT;
        }
        return nextState;
    }

    /**
     * Lazily resolves and caches the Content-Length of the current message;
     * -1 when the header is absent.
     */
    private long contentLength() {
        if (contentLength == Long.MIN_VALUE) {
            contentLength = HttpHeaderUtil.getContentLength(message, -1);
        }
        return contentLength;
    }

    /**
     * Parses the trailing headers that may follow the last (zero-sized) chunk.
     * Content-Length, Transfer-Encoding and Trailer are ignored as trailing
     * headers. NOTE(review): unlike readHeaders(), folded continuation lines
     * here are appended without a separating space - confirm this is intended.
     *
     * @return the completed trailer (or {@link LastHttpContent#EMPTY_LAST_CONTENT}
     *         when there were none), or {@code null} when more data is needed.
     */
    private LastHttpContent readTrailingHeaders(ByteBuf buffer) {
        AppendableCharSequence line = headerParser.parse(buffer);
        if (line == null) {
            return null;
        }
        CharSequence lastHeader = null;
        if (line.length() > 0) {
            LastHttpContent trailer = this.trailer;
            if (trailer == null) {
                trailer = this.trailer = new DefaultLastHttpContent(Unpooled.EMPTY_BUFFER, validateHeaders);
            }
            do {
                char firstChar = line.charAt(0);
                if (lastHeader != null && (firstChar == ' ' || firstChar == '\t')) {
                    List<CharSequence> current = trailer.trailingHeaders().getAll(lastHeader);
                    if (!current.isEmpty()) {
                        int lastPos = current.size() - 1;
                        String lineTrimmed = line.toString().trim();
                        CharSequence currentLastPos = current.get(lastPos);
                        StringBuilder b = new StringBuilder(currentLastPos.length() + lineTrimmed.length());
                        b.append(currentLastPos)
                         .append(lineTrimmed);
                        current.set(lastPos, b.toString());
                    } else {
                        // Continuation of a header that was dropped below
                        // (Content-Length, Transfer-Encoding, or Trailer) -
                        // nothing stored, so nothing to append to.
                    }
                } else {
                    splitHeader(line);
                    CharSequence headerName = name;
                    if (!HttpHeaderNames.CONTENT_LENGTH.equalsIgnoreCase(headerName) &&
                            !HttpHeaderNames.TRANSFER_ENCODING.equalsIgnoreCase(headerName) &&
                            !HttpHeaderNames.TRAILER.equalsIgnoreCase(headerName)) {
                        trailer.trailingHeaders().add(headerName, value);
                    }
                    lastHeader = name;
                    // reset name and value fields
                    name = null;
                    value = null;
                }

                line = headerParser.parse(buffer);
                if (line == null) {
                    return null;
                }
            } while (line.length() > 0);

            this.trailer = null;
            return trailer;
        }

        return LastHttpContent.EMPTY_LAST_CONTENT;
    }

    /** Returns {@code true} when this decoder decodes requests (vs. responses). */
    protected abstract boolean isDecodingRequest();

    /** Creates the message object from the three parts of the initial line. */
    protected abstract HttpMessage createMessage(String[] initialLine) throws Exception;

    /** Creates a placeholder message used to report a decoding failure. */
    protected abstract HttpMessage createInvalidMessage();

    /**
     * Parses the hexadecimal chunk size from a chunk-size line, cutting off any
     * chunk extension (";...") or trailing whitespace/control characters.
     */
    private static int getChunkSize(String hex) {
        hex = hex.trim();
        for (int i = 0; i < hex.length(); i ++) {
            char c = hex.charAt(i);
            if (c == ';' || Character.isWhitespace(c) || Character.isISOControl(c)) {
                hex = hex.substring(0, i);
                break;
            }
        }

        return Integer.parseInt(hex, 16);
    }

    /**
     * Splits an initial line (request line or status line) into its three
     * whitespace-separated parts; the third part may be the empty string.
     */
    private static String[] splitInitialLine(AppendableCharSequence sb) {
        int aStart;
        int aEnd;
        int bStart;
        int bEnd;
        int cStart;
        int cEnd;

        aStart = findNonWhitespace(sb, 0);
        aEnd = findWhitespace(sb, aStart);

        bStart = findNonWhitespace(sb, aEnd);
        bEnd = findWhitespace(sb, bStart);

        cStart = findNonWhitespace(sb, bEnd);
        cEnd = findEndOfString(sb);

        return new String[] {
                sb.subStringUnsafe(aStart, aEnd),
                sb.subStringUnsafe(bStart, bEnd),
                cStart < cEnd? sb.subStringUnsafe(cStart, cEnd) : "" };
    }

    /**
     * Splits a header line into the {@link #name} and {@link #value} fields.
     * The name ends at the first ':' or whitespace character; the value is the
     * remainder after the colon with surrounding whitespace removed, or
     * {@link #EMPTY_VALUE} when absent.
     */
    private void splitHeader(AppendableCharSequence sb) {
        final int length = sb.length();
        int nameStart;
        int nameEnd;
        int colonEnd;
        int valueStart;
        int valueEnd;

        nameStart = findNonWhitespace(sb, 0);
        for (nameEnd = nameStart; nameEnd < length; nameEnd ++) {
            char ch = sb.charAt(nameEnd);
            if (ch == ':' || Character.isWhitespace(ch)) {
                break;
            }
        }

        for (colonEnd = nameEnd; colonEnd < length; colonEnd ++) {
            if (sb.charAt(colonEnd) == ':') {
                colonEnd ++;
                break;
            }
        }

        name = sb.subStringUnsafe(nameStart, nameEnd);
        valueStart = findNonWhitespace(sb, colonEnd);
        if (valueStart == length) {
            value = EMPTY_VALUE;
        } else {
            valueEnd = findEndOfString(sb);
            value = sb.subStringUnsafe(valueStart, valueEnd);
        }
    }

    /**
     * Returns the index of the first non-whitespace character at or after
     * {@code offset}, or {@code sb.length()} when there is none.
     */
    private static int findNonWhitespace(AppendableCharSequence sb, int offset) {
        for (int result = offset; result < sb.length(); ++result) {
            if (!Character.isWhitespace(sb.charAtUnsafe(result))) {
                return result;
            }
        }
        return sb.length();
    }

    /**
     * Returns the index of the first whitespace character at or after
     * {@code offset}, or {@code sb.length()} when there is none.
     */
    private static int findWhitespace(AppendableCharSequence sb, int offset) {
        for (int result = offset; result < sb.length(); ++result) {
            if (Character.isWhitespace(sb.charAtUnsafe(result))) {
                return result;
            }
        }
        return sb.length();
    }

    /**
     * Returns the index just past the last non-whitespace character.
     * NOTE(review): index 0 is never inspected (loop condition is result > 0),
     * so a line whose only non-whitespace character sits at index 0 yields 0;
     * this appears harmless for well-formed HTTP lines but is worth confirming.
     */
    private static int findEndOfString(AppendableCharSequence sb) {
        for (int result = sb.length() - 1; result > 0; --result) {
            if (!Character.isWhitespace(sb.charAtUnsafe(result))) {
                return result + 1;
            }
        }
        return 0;
    }

    /**
     * {@link ByteProcessor} that accumulates one LF-terminated line (a trailing
     * CR is dropped) into the shared {@link AppendableCharSequence}. The running
     * {@code size} counter is kept across parse() calls until {@link #reset()},
     * so {@code maxLength} bounds the combined size of all header lines of a
     * message.
     */
    private static class HeaderParser implements ByteProcessor {
        private final AppendableCharSequence seq;
        private final int maxLength;
        private int size;

        HeaderParser(AppendableCharSequence seq, int maxLength) {
            this.seq = seq;
            this.maxLength = maxLength;
        }

        /**
         * Reads one line from the buffer, or returns {@code null} (leaving the
         * reader index untouched) when no complete line is available yet.
         */
        public AppendableCharSequence parse(ByteBuf buffer) {
            final int oldSize = size;
            seq.reset();
            int i = buffer.forEachByte(this);
            if (i == -1) {
                // No line terminator found: roll back the size counter and
                // wait for more data.
                size = oldSize;
                return null;
            }
            buffer.readerIndex(i + 1);
            return seq;
        }

        public void reset() {
            size = 0;
        }

        @Override
        public boolean process(byte value) throws Exception {
            char nextByte = (char) value;
            if (nextByte == HttpConstants.CR) {
                return true;
            }
            if (nextByte == HttpConstants.LF) {
                return false;
            }

            if (++ size > maxLength) {
                // TODO: Respond with Bad Request and discard the traffic
                //    or close the connection.
                //       No need to notify the upstream handlers - just log.
                //       If decoding a response, just throw an exception.
                throw newException(maxLength);
            }

            seq.append(nextByte);
            return true;
        }

        protected TooLongFrameException newException(int maxLength) {
            return new TooLongFrameException("HTTP header is larger than " + maxLength + " bytes.");
        }
    }

    /**
     * Variant of {@link HeaderParser} used for the initial line and for
     * chunk-size lines: the size counter is reset before every parse, so
     * {@code maxLength} applies to each individual line.
     */
    private static final class LineParser extends HeaderParser {

        LineParser(AppendableCharSequence seq, int maxLength) {
            super(seq, maxLength);
        }

        @Override
        public AppendableCharSequence parse(ByteBuf buffer) {
            reset();
            return super.parse(buffer);
        }

        @Override
        protected TooLongFrameException newException(int maxLength) {
            return new TooLongFrameException("An HTTP line is larger than " + maxLength + " bytes.");
        }
    }
}
| |
package org.asteriskjava.util;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
/**
* Some static utility methods to imitate Asterisk specific logic. <br>
* See Asterisk's <code>util.c</code>. <br>
* Client code is not supposed to use this class.
*
* @author srt
* @version $Id$
*/
public class AstUtil
{
    // Lower-case literals that Asterisk's ast_true() accepts as boolean true,
    // plus "enabled" (used by the dnd property of ZapShowChannelsEvent).
    private static final Set<String> TRUE_LITERALS = literalSet(
            "yes", "true", "y", "t", "1", "on", "enabled");

    // Lower-case placeholder strings Asterisk emits instead of null values.
    // To find such replacements search for S_OR in Asterisk's source code.
    private static final Set<String> NULL_LITERALS = literalSet(
            "<unknown>",
            "unknown",
            "none", // VarSet event in pbx.c
            "<none>",
            "-none-", // IPaddress in PeerEntryEvent
            "(none)",
            "<not set>",
            "(not set)",
            "<no name>",
            "n/a", // channel in AgentsEvent
            "<null>",
            "(null)"); // appData in ListDialplanEvent

    /**
     * Collects the given literals into a fresh set.
     */
    private static Set<String> literalSet(String... literals)
    {
        final Set<String> set = new HashSet<>(20);
        for (String literal : literals)
        {
            set.add(literal);
        }
        return set;
    }

    private AstUtil()
    {
        // utility class - hide constructor
    }

    /**
     * Checks if a String represents <code>true</code> or <code>false</code>
     * according to Asterisk's logic (see <code>ast_true</code> in Asterisk's
     * <code>util.c</code>): the case-insensitive values "yes", "true", "y",
     * "t", "1" and "on" are considered true, everything else false. <br>
     * To support the dnd property of
     * {@link org.asteriskjava.manager.event.ZapShowChannelsEvent} this method
     * also considers the string "Enabled" as true. <br>
     * A {@link Boolean} argument is returned as-is; any other non-null object
     * is converted via its <code>toString()</code> representation.
     *
     * @param o the Object (usually a String) to check for <code>true</code>.
     * @return <code>true</code> if s represents <code>true</code>,
     *         <code>false</code> otherwise.
     */
    public static boolean isTrue(Object o)
    {
        if (o == null)
        {
            return false;
        }
        if (o instanceof Boolean)
        {
            return (Boolean) o;
        }

        final String s = o instanceof String ? (String) o : o.toString();
        return TRUE_LITERALS.contains(s.toLowerCase(Locale.US));
    }

    /**
     * Null-safe equality check.
     *
     * @param a an object
     * @param b an object to be compared with {@code a} for equality
     * @return {@code true} if the arguments are equal to each other and
     *         {@code false} otherwise
     */
    public static boolean isEqual(Object a, Object b)
    {
        if (a == b)
        {
            return true;
        }
        return a != null && a.equals(b);
    }

    /**
     * Parses a string for caller id information. <br>
     * The caller id string should be in the form
     * <code>"Some Name" &lt;1234&gt;</code>. <br>
     * This resembles <code>ast_callerid_parse</code> in <code>callerid.c</code>
     * but strips any whitespace.
     *
     * @param s the string to parse
     * @return a String[] with name (index 0) and number (index 1); an entry is
     *         <code>null</code> when the corresponding part is missing or empty
     */
    public static String[] parseCallerId(String s)
    {
        final String[] result = new String[2];
        if (s == null)
        {
            return result;
        }

        final int open = s.lastIndexOf('<');
        final int close = s.lastIndexOf('>');

        // no opening and closing brace? use value as CallerId name
        if (open < 0 || close < 0)
        {
            final String bareName = s.trim();
            result[0] = bareName.length() == 0 ? null : bareName;
            return result;
        }

        String number = s.substring(open + 1, close).trim();
        if (number.length() == 0)
        {
            number = null;
        }

        String name = s.substring(0, open).trim();
        // strip one pair of surrounding double quotes, if present
        if (name.startsWith("\"") && name.endsWith("\"") && name.length() > 1)
        {
            name = name.substring(1, name.length() - 1).trim();
        }
        if (name.length() == 0)
        {
            name = null;
        }

        result[0] = name;
        result[1] = number;
        return result;
    }

    /**
     * Checks if the value of s was <code>null</code> in Asterisk. <br>
     * Asterisk likes to replace <code>null</code> values with placeholder
     * strings like "unknown", "&lt;unknown&gt;" or "&lt;null&gt;"; this method
     * recognizes those placeholders (case-insensitively). To find such
     * replacements search for <code>S_OR</code> in Asterisk's source code.
     *
     * @param s the string to test, may be <code>null</code>. If s is not a
     *            string the only test that is performed is a check for
     *            <code>null</code>.
     * @return <code>true</code> if the s was <code>null</code> in Asterisk;
     *         <code>false</code> otherwise.
     */
    public static boolean isNull(Object s)
    {
        return s == null
                || (s instanceof String && NULL_LITERALS.contains(((String) s).toLowerCase(Locale.US)));
    }
}
| |
package com.blazebit.storage.rest.client;
import java.util.List;
import java.util.Locale;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.client.AsyncInvoker;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.Invocation;
import javax.ws.rs.client.Invocation.Builder;
import javax.ws.rs.core.CacheControl;
import javax.ws.rs.core.Cookie;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.MessageBodyReader;
/**
 * {@link Builder} decorator that wraps every built {@link Invocation} in a
 * {@link ResponseObjectInvocation} so that the configured
 * {@link MessageBodyReader}s are applied to responses.
 */
public class ResponseObjectBuilder implements Builder {

    private final List<MessageBodyReader<?>> responseObjectMessageReader;
    private final Builder delegate;

    public ResponseObjectBuilder(Builder delegate, List<MessageBodyReader<?>> responseObjectMessageReader) {
        this.delegate = delegate;
        this.responseObjectMessageReader = responseObjectMessageReader;
    }

    /**
     * Re-wraps the given builder, or returns this instance unchanged when the
     * delegate handed itself back.
     */
    private Builder rewrap(Builder next) {
        return next == delegate ? this : new ResponseObjectBuilder(next, responseObjectMessageReader);
    }

    /*
     * Wrap for response object support
     */

    public Invocation build(String method) {
        final Invocation inner = delegate.build(method);
        return new ResponseObjectInvocation(inner, responseObjectMessageReader);
    }

    public Invocation build(String method, Entity<?> entity) {
        final Invocation inner = delegate.build(method, entity);
        return new ResponseObjectInvocation(inner, responseObjectMessageReader);
    }

    public Invocation buildGet() {
        final Invocation inner = delegate.buildGet();
        return new ResponseObjectInvocation(inner, responseObjectMessageReader);
    }

    public Invocation buildDelete() {
        final Invocation inner = delegate.buildDelete();
        return new ResponseObjectInvocation(inner, responseObjectMessageReader);
    }

    public Invocation buildPost(Entity<?> entity) {
        final Invocation inner = delegate.buildPost(entity);
        return new ResponseObjectInvocation(inner, responseObjectMessageReader);
    }

    public Invocation buildPut(Entity<?> entity) {
        final Invocation inner = delegate.buildPut(entity);
        return new ResponseObjectInvocation(inner, responseObjectMessageReader);
    }

    public AsyncInvoker async() {
        // TODO: implement wrapping
        return delegate.async();
    }

    /*
     * Delegate and wrap under the hood: every synchronous HTTP convenience
     * method goes through the wrapping build* methods above.
     */

    public Response get() {
        return buildGet().invoke();
    }

    public <T> T get(Class<T> responseType) {
        return buildGet().invoke(responseType);
    }

    public <T> T get(GenericType<T> responseType) {
        return buildGet().invoke(responseType);
    }

    public Response put(Entity<?> entity) {
        return buildPut(entity).invoke();
    }

    public <T> T put(Entity<?> entity, Class<T> responseType) {
        return buildPut(entity).invoke(responseType);
    }

    public <T> T put(Entity<?> entity, GenericType<T> responseType) {
        return buildPut(entity).invoke(responseType);
    }

    public Response post(Entity<?> entity) {
        return buildPost(entity).invoke();
    }

    public <T> T post(Entity<?> entity, Class<T> responseType) {
        return buildPost(entity).invoke(responseType);
    }

    public <T> T post(Entity<?> entity, GenericType<T> responseType) {
        return buildPost(entity).invoke(responseType);
    }

    public Response delete() {
        return buildDelete().invoke();
    }

    public <T> T delete(Class<T> responseType) {
        return buildDelete().invoke(responseType);
    }

    public <T> T delete(GenericType<T> responseType) {
        return buildDelete().invoke(responseType);
    }

    public Response head() {
        return build(HttpMethod.HEAD).invoke();
    }

    public Response options() {
        return build(HttpMethod.OPTIONS).invoke();
    }

    public <T> T options(Class<T> responseType) {
        return build(HttpMethod.OPTIONS).invoke(responseType);
    }

    public <T> T options(GenericType<T> responseType) {
        return build(HttpMethod.OPTIONS).invoke(responseType);
    }

    public Response trace() {
        return build("TRACE").invoke();
    }

    public <T> T trace(Class<T> responseType) {
        return build("TRACE").invoke(responseType);
    }

    public <T> T trace(GenericType<T> responseType) {
        return build("TRACE").invoke(responseType);
    }

    public Response method(String name) {
        return build(name).invoke();
    }

    public <T> T method(String name, Class<T> responseType) {
        return build(name).invoke(responseType);
    }

    public <T> T method(String name, GenericType<T> responseType) {
        return build(name).invoke(responseType);
    }

    public Response method(String name, Entity<?> entity) {
        return build(name, entity).invoke();
    }

    public <T> T method(String name, Entity<?> entity, Class<T> responseType) {
        return build(name, entity).invoke(responseType);
    }

    public <T> T method(String name, Entity<?> entity, GenericType<T> responseType) {
        return build(name, entity).invoke(responseType);
    }

    /*
     * Wrapping delegates: configuration calls are forwarded and the resulting
     * builder is re-wrapped so the decoration is preserved.
     */

    public Builder accept(String... mediaTypes) {
        return rewrap(delegate.accept(mediaTypes));
    }

    public Builder accept(MediaType... mediaTypes) {
        return rewrap(delegate.accept(mediaTypes));
    }

    public Builder acceptLanguage(Locale... locales) {
        return rewrap(delegate.acceptLanguage(locales));
    }

    public Builder acceptLanguage(String... locales) {
        return rewrap(delegate.acceptLanguage(locales));
    }

    public Builder acceptEncoding(String... encodings) {
        return rewrap(delegate.acceptEncoding(encodings));
    }

    public Builder cookie(Cookie cookie) {
        return rewrap(delegate.cookie(cookie));
    }

    public Builder cookie(String name, String value) {
        return rewrap(delegate.cookie(name, value));
    }

    public Builder cacheControl(CacheControl cacheControl) {
        return rewrap(delegate.cacheControl(cacheControl));
    }

    public Builder header(String name, Object value) {
        return rewrap(delegate.header(name, value));
    }

    public Builder headers(MultivaluedMap<String, Object> headers) {
        return rewrap(delegate.headers(headers));
    }

    public Builder property(String name, Object value) {
        return rewrap(delegate.property(name, value));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.topology.addservice;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.actionmanager.ActionManager;
import org.apache.ambari.server.actionmanager.RequestFactory;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.KerberosHelper;
import org.apache.ambari.server.controller.RequestStatusResponse;
import org.apache.ambari.server.serveraction.kerberos.KerberosAdminAuthenticationException;
import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurationException;
import org.apache.ambari.server.serveraction.kerberos.KerberosMissingAdminCredentialsException;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.SecurityType;
import org.apache.ambari.server.state.Service;
import org.apache.ambari.server.state.State;
import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
import org.apache.ambari.server.topology.Configuration;
import org.apache.ambari.server.topology.ProvisionStep;
import org.apache.ambari.server.utils.LoggingPreconditions;
import org.apache.ambari.server.utils.StageUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
@Singleton
public class AddServiceOrchestrator {

    private static final Logger LOG = LoggerFactory.getLogger(AddServiceOrchestrator.class);
    private static final LoggingPreconditions CHECK = new LoggingPreconditions(LOG);

    @Inject
    private ResourceProviderAdapter resourceProviders;

    @Inject
    private AmbariManagementController controller;

    @Inject
    private ActionManager actionManager;

    @Inject
    private RequestFactory requestFactory;

    @Inject
    private RequestValidatorFactory requestValidatorFactory;

    @Inject
    private StackAdvisorAdapter stackAdvisorAdapter;

    /**
     * Orchestrates an "add service" request end to end: validation, credential checks,
     * layout and configuration recommendation, resource creation and host task creation.
     *
     * @param cluster the cluster the services are added to
     * @param request the add-service request to process
     * @return the status of the underlying provisioning request
     */
    public RequestStatusResponse processAddServiceRequest(Cluster cluster, AddServiceRequest request) {
        LOG.info("Received {} request for {}: {}", request.getOperationType(), cluster.getClusterName(), request);

        AddServiceInfo validatedRequest = validate(cluster, request);
        ensureCredentials(cluster, validatedRequest);
        AddServiceInfo requestWithLayout = recommendLayout(validatedRequest);
        AddServiceInfo requestWithConfig = recommendConfiguration(requestWithLayout);
        createResources(cluster, requestWithConfig);
        createHostTasks(requestWithConfig);

        return requestWithConfig.getStages().getRequestStatusResponse();
    }

    /**
     * Performs basic validation of the request and
     * fills in details about the requested services and components.
     *
     * @return validated information about the requested services
     */
    private AddServiceInfo validate(Cluster cluster, AddServiceRequest request) {
        LOG.info("Validating {}", request);
        RequestValidator validator = requestValidatorFactory.create(request, cluster);
        validator.validate();
        return validator.createValidServiceInfo(actionManager, requestFactory);
    }

    /**
     * Stores any credentials provided in the request, and
     * validates KDC credentials if the cluster has Kerberos enabled.
     * The goal is to make sure that no resources (services, components, etc.) get created
     * (except the credentials) if the request as a whole would fail due to missing credentials.
     */
    private void ensureCredentials(Cluster cluster, AddServiceInfo validatedRequest) {
        resourceProviders.createCredentials(validatedRequest);

        if (cluster.getSecurityType() == SecurityType.KERBEROS) {
            try {
                controller.getKerberosHelper().validateKDCCredentials(cluster);
            } catch (KerberosMissingAdminCredentialsException | KerberosAdminAuthenticationException | KerberosInvalidConfigurationException e) {
                // missing/invalid credentials are a client error -> IllegalArgumentException
                CHECK.wrapInUnchecked(e, IllegalArgumentException::new, "KDC credentials validation failed: %s", e);
            } catch (AmbariException e) {
                CHECK.wrapInUnchecked(e, IllegalStateException::new, "Error occurred while validating KDC credentials: %s", e);
            }
        }
    }

    /**
     * Requests layout recommendation from the stack advisor, unless the request
     * already specifies a complete layout.
     *
     * @return new request, updated based on the recommended layout
     * @throws IllegalArgumentException if the request cannot be satisfied
     */
    private AddServiceInfo recommendLayout(AddServiceInfo request) {
        if (!request.requiresLayoutRecommendation()) {
            LOG.info("Using layout specified in request for {}", request);
            return request;
        }

        LOG.info("Recommending layout for {}", request);
        return stackAdvisorAdapter.recommendLayout(request);
    }

    /**
     * Requests config recommendation from the stack advisor.
     *
     * @return new request, updated with the recommended config
     * @throws IllegalArgumentException if the request cannot be satisfied
     */
    private AddServiceInfo recommendConfiguration(AddServiceInfo request) {
        LOG.info("Recommending configuration for {}", request);
        return stackAdvisorAdapter.recommendConfigurations(request);
    }

    /**
     * Creates the service, component and host component resources for the request,
     * applies Kerberos configuration if needed, and creates/updates configs.
     */
    private void createResources(Cluster cluster, AddServiceInfo request) {
        LOG.info("Creating resources for {}", request);

        // snapshot of services present before this request adds new ones
        Set<String> existingServices = cluster.getServices().keySet();

        updateKerberosDescriptor(request);
        resourceProviders.createServices(request);
        resourceProviders.createComponents(request);

        resourceProviders.updateServiceDesiredState(request, State.INSTALLED);
        resourceProviders.updateServiceDesiredState(request, State.STARTED);

        resourceProviders.createHostComponents(request);

        configureKerberos(request, cluster, existingServices);
        resourceProviders.updateExistingConfigs(request, existingServices);
        resourceProviders.createConfigs(request);
    }

    /**
     * If the cluster is kerberized, ensures headless identities exist for all
     * (new and existing) services and applies the resulting config updates
     * on top of the stack defaults.
     */
    private void configureKerberos(AddServiceInfo request, Cluster cluster, Set<String> existingServices) {
        if (cluster.getSecurityType() == SecurityType.KERBEROS) {
            LOG.info("Configuring Kerberos for {}", request);

            Configuration stackDefaultConfig = request.getStack().getValidDefaultConfig();
            Set<String> newServices = request.newServices().keySet();
            Set<String> services = ImmutableSet.copyOf(Sets.union(newServices, existingServices));

            Map<String, Map<String, String>> existingConfigurations = request.getConfig().getFullProperties();
            existingConfigurations.put(KerberosHelper.CLUSTER_HOST_INFO, createComponentHostMap(cluster));

            try {
                KerberosHelper kerberosHelper = controller.getKerberosHelper();
                kerberosHelper.ensureHeadlessIdentities(cluster, existingConfigurations, services);
                request.getConfig().applyUpdatesToStackDefaultProperties(stackDefaultConfig, existingConfigurations,
                    kerberosHelper.getServiceConfigurationUpdates(
                        cluster, existingConfigurations, createServiceComponentMap(cluster), null, existingServices, true, true
                    )
                );
            } catch (AmbariException | KerberosInvalidConfigurationException e) {
                CHECK.wrapInUnchecked(e, RuntimeException::new, "Error configuring Kerberos for %s: %s", request, e);
            }
        }
    }

    /**
     * Creates and persists the host-level tasks (install/start) for the request.
     */
    private void createHostTasks(AddServiceInfo request) {
        LOG.info("Creating host tasks for {}", request);

        ProvisionActionPredicateBuilder predicates = new ProvisionActionPredicateBuilder(request);
        for (ProvisionStep step : ProvisionStep.values()) {
            predicates.getPredicate(step).ifPresent(predicate ->
                resourceProviders.updateHostComponentDesiredState(request, predicate, step)
            );
        }

        try {
            request.getStages().persist();
        } catch (AmbariException e) {
            CHECK.wrapInUnchecked(e, IllegalStateException::new, "Error creating host tasks for %s", request);
        }
    }

    /**
     * Creates or updates the cluster's Kerberos descriptor artifact if the
     * request contains one: merge into an existing descriptor, or create anew.
     */
    private void updateKerberosDescriptor(AddServiceInfo request) {
        request.getKerberosDescriptor().ifPresent(descriptorInRequest -> {
            Optional<KerberosDescriptor> existingDescriptor = resourceProviders.getKerberosDescriptor(request);
            if (existingDescriptor.isPresent()) {
                KerberosDescriptor newDescriptor = existingDescriptor.get().update(descriptorInRequest);
                resourceProviders.updateKerberosDescriptor(request, newDescriptor);
            } else {
                resourceProviders.createKerberosDescriptor(request, descriptorInRequest);
            }
        });
    }

    /** Builds the component -> hosts mapping used as CLUSTER_HOST_INFO. */
    private static Map<String, String> createComponentHostMap(Cluster cluster) {
        return StageUtils.createComponentHostMap(
            cluster.getServices().keySet(),
            service -> getComponentsForService(cluster, service),
            (service, component) -> getHostsForServiceComponent(cluster, service, component)
        );
    }

    private static Set<String> getHostsForServiceComponent(Cluster cluster, String service, String component) {
        try {
            return cluster.getService(service).getServiceComponent(component).getServiceComponentsHosts();
        } catch (AmbariException e) {
            // BUGFIX: the original format string contained a bare '%' ("component %: %s"),
            // which makes String.format throw UnknownFormatConversionException while
            // reporting the error, masking the real failure.
            return CHECK.wrapInUnchecked(e, IllegalStateException::new, "Error getting hosts for service %s component %s: %s", service, component, e);
        }
    }

    private static Set<String> getComponentsForService(Cluster cluster, String service) {
        try {
            return cluster.getService(service).getServiceComponents().keySet();
        } catch (AmbariException e) {
            // the original passed 'e' twice; the format string has only two placeholders
            return CHECK.wrapInUnchecked(e, IllegalStateException::new, "Error getting components of service %s: %s", service, e);
        }
    }

    /** Builds the service -> components mapping for the whole cluster. */
    private static Map<String, Set<String>> createServiceComponentMap(Cluster cluster) {
        Map<String, Set<String>> serviceComponentMap = new HashMap<>();
        for (Map.Entry<String, Service> e : cluster.getServices().entrySet()) {
            serviceComponentMap.put(e.getKey(), ImmutableSet.copyOf(e.getValue().getServiceComponents().keySet()));
        }
        return serviceComponentMap;
    }
}
| |
/*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.cellbase.app.transform;
import htsjdk.tribble.readers.TabixReader;
import org.apache.commons.lang.math.NumberUtils;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.cellbase.core.common.clinical.gwas.Gwas;
import org.opencb.cellbase.core.common.clinical.gwas.GwasStudy;
import org.opencb.cellbase.core.common.clinical.gwas.GwasTest;
import org.opencb.cellbase.core.common.clinical.gwas.GwasTrait;
import org.opencb.cellbase.core.serializer.CellBaseSerializer;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.NumberFormat;
import java.util.HashMap;
import java.util.Map;
/**
* @author Luis Miguel Cruz
* @version 1.2.3
* @since October 08, 2014
*/
/**
 * Parses a GWAS catalog file, resolving reference/alternate alleles through a
 * dbSNP tabix index, merging records that map to the same variant, and
 * serializing one {@link Gwas} object per variant.
 *
 * @author Luis Miguel Cruz
 * @version 1.2.3
 * @since October 08, 2014
 */
public class GwasParser extends CellBaseParser {

    // indexes into the two-element ref/alt array returned by getRefAndAltFromDbsnp
    private static final int REF = 0;
    private static final int ALT = 1;

    private final Path gwasFile;
    private final Path dbSnpTabixFilePath;

    // per-run counters, reported by printSummary
    private int invalidStartRecords;
    private int invalidChromosome;
    private int gwasLinesNotFoundInDbsnp;

    /**
     * @param gwasFile           path to the tab-separated GWAS catalog file
     * @param dbSnpTabixFilePath path to the tabix-indexed dbSNP file
     * @param serializer         sink for the parsed Gwas records
     */
    public GwasParser(Path gwasFile, Path dbSnpTabixFilePath, CellBaseSerializer serializer) {
        super(serializer);
        this.gwasFile = gwasFile;
        this.dbSnpTabixFilePath = dbSnpTabixFilePath;
        this.invalidStartRecords = 0;
        this.invalidChromosome = 0;
        this.gwasLinesNotFoundInDbsnp = 0;
    }

    /**
     * Parses the GWAS file and serializes the merged variants.
     * No-op unless both input files exist.
     */
    public void parse() {
        if (Files.exists(gwasFile) && Files.exists(dbSnpTabixFilePath)) {
            logger.info("Opening gwas file " + gwasFile + " ...");
            // BUGFIX: the original never closed this reader on any path (resource leak);
            // try-with-resources guarantees closure even when parsing throws.
            try (BufferedReader inputReader = new BufferedReader(new FileReader(gwasFile.toFile()))) {
                logger.info("Ignoring gwas file header line ...");
                inputReader.readLine();

                Map<Variant, Gwas> variantMap = new HashMap<>();
                logger.info("Opening dbSNP tabix file " + dbSnpTabixFilePath + " ...");
                TabixReader dbsnpTabixReader = new TabixReader(dbSnpTabixFilePath.toString());

                long processedGwasLines = 0;
                try {
                    logger.info("Parsing gwas file ...");
                    String line;
                    while ((line = inputReader.readLine()) != null) {
                        if (!line.isEmpty()) {
                            processedGwasLines++;
                            Gwas gwasRecord = buildGwasObject(line.split("\t"), dbsnpTabixReader);
                            if (gwasRecord != null) {
                                addGwasRecordToVariantMap(variantMap, gwasRecord);
                            }
                        }
                    }
                } finally {
                    // BUGFIX: close the tabix reader even if parsing fails mid-stream
                    dbsnpTabixReader.close();
                }

                logger.info("Serializing parsed variants ...");
                for (Gwas gwasOutputRecord : variantMap.values()) {
                    serializer.serialize(gwasOutputRecord);
                }
                logger.info("Done");
                this.disconnect();
                this.printSummary(processedGwasLines, variantMap);
            } catch (IOException e) {
                logger.error("Unable to parse " + gwasFile + " using dbSNP file " + dbSnpTabixFilePath + ": " + e.getMessage());
            }
        }
    }

    /**
     * Builds a Gwas record from one tab-split line, or returns {@code null} when the
     * line has no valid start, an unknown chromosome, or no dbSNP match
     * (incrementing the corresponding counter).
     */
    private Gwas buildGwasObject(String[] values, TabixReader dbsnpTabixReader) {
        Gwas gwas = null;
        Integer start = parseStart(values);
        if (start != null) {
            Integer end = start; // GWAS records are single-position
            String chromosome = parseChromosome(values[11]);
            if (chromosome != null) {
                String snpId = values[21].trim();
                String[] refAndAlt = getRefAndAltFromDbsnp(chromosome, start, snpId, dbsnpTabixReader);
                if (refAndAlt != null) {
                    gwas = new Gwas(chromosome, start, end, refAndAlt[REF], refAndAlt[ALT], values[10], values[13], values[14],
                            values[15], values[16], values[17], values[18], values[19], values[20], snpId, values[22], values[23],
                            values[24], values[25], parseFloat(values[26]), values[33]);
                    addGwasStudy(values, gwas);
                } else {
                    gwasLinesNotFoundInDbsnp++;
                }
            } else {
                invalidChromosome++;
            }
        } else {
            invalidStartRecords++;
        }
        return gwas;
    }

    /** @return the start position from column 12, or {@code null} if it is not all digits. */
    private Integer parseStart(String[] values) {
        Integer start = null;
        if (NumberUtils.isDigits(values[12])) {
            start = Integer.parseInt(values[12]);
        }
        return start;
    }

    /**
     * Maps the GWAS numeric chromosome codes 23/24/25 to X/Y/MT;
     * other non-empty values pass through unchanged.
     *
     * @return the normalized chromosome name, or {@code null} for an empty input
     */
    private String parseChromosome(String chromosome) {
        String transformedChromosome = null;
        if (!chromosome.isEmpty()) {
            switch (chromosome) {
                case "23":
                    transformedChromosome = "X";
                    break;
                case "24":
                    transformedChromosome = "Y";
                    break;
                case "25":
                    transformedChromosome = "MT";
                    break;
                default:
                    transformedChromosome = chromosome;
            }
        }
        return transformedChromosome;
    }

    /** @return the parsed float, or {@code null} if the value is not numeric. */
    private Float parseFloat(String value) {
        Float riskAlleleFrequency = null;
        if (NumberUtils.isNumber(value)) {
            riskAlleleFrequency = Float.parseFloat(value);
        }
        return riskAlleleFrequency;
    }

    /**
     * Looks up the reference and alternate alleles for the given snpId at
     * chromosome:start in the dbSNP tabix file.
     *
     * @return a two-element {REF, ALT} array, or {@code null} when the position/id is not found
     */
    private String[] getRefAndAltFromDbsnp(String chromosome, Integer start, String snpId, TabixReader dbsnpTabixReader) {
        String[] refAndAlt = null;

        TabixReader.Iterator dbsnpIterator = dbsnpTabixReader.query(chromosome + ":" + start + "-" + start);
        try {
            String dbSnpRecord = dbsnpIterator.next();
            boolean found = false;
            while (dbSnpRecord != null && !found) {
                String[] dbsnpFields = dbSnpRecord.split("\t");
                // VCF layout: column 2 = id, 3 = ref, 4 = alt
                if (snpId.equalsIgnoreCase(dbsnpFields[2])) {
                    refAndAlt = new String[2];
                    refAndAlt[REF] = dbsnpFields[3];
                    refAndAlt[ALT] = dbsnpFields[4];
                    found = true;
                }
                dbSnpRecord = dbsnpIterator.next();
            }
        } catch (IOException e) {
            logger.warn("Error reading position '" + chromosome + ":" + start + "' in dbSNP: " + e.getMessage());
        }

        return refAndAlt;
    }

    /** Attaches the study (and, transitively, trait and test) values to the Gwas record. */
    private void addGwasStudy(String[] values, Gwas gwas) {
        // Add the study values
        GwasStudy study = new GwasStudy(values[1], values[2], values[3], values[4], values[5], values[6], values[8], values[9], values[32]);
        addGwasTraitToStudy(values, study);
        gwas.addStudy(study);
    }

    private void addGwasTraitToStudy(String[] values, GwasStudy study) {
        // Add the trait values
        GwasTrait trait = new GwasTrait(values[7], values[0]);
        addGwasTestToTrait(values, trait);
        study.addTrait(trait);
    }

    private void addGwasTestToTrait(String[] values, GwasTrait trait) {
        // Add the test values
        Float pValue = parseFloat(values[27]);
        Float pValueMlog = parseFloat(values[28]);
        GwasTest test = new GwasTest(pValue, pValueMlog, values[29], values[30], values[31]);
        trait.addTest(test);
    }

    /** Logs a human-readable summary of processed, serialized and skipped lines. */
    private void printSummary(long processedGwasLines, Map<Variant, Gwas> variantMap) {
        NumberFormat formatter = NumberFormat.getInstance();
        logger.info("");
        logger.info("Summary");
        logger.info("=======");
        logger.info("Processed " + formatter.format(processedGwasLines) + " gwas lines");
        logger.info("Serialized " + formatter.format(variantMap.size()) + " variants");
        logger.info(formatter.format(gwasLinesNotFoundInDbsnp) + " gwas lines ignored because variant not found in dbsnp");
        if (invalidStartRecords != 0) {
            logger.info(formatter.format(invalidStartRecords) + " gwas lines ignored because have no valid 'start' value");
        }
        if (invalidChromosome != 0) {
            logger.info(formatter.format(invalidChromosome) + " gwas lines ignored because have no valid chromosome");
        }
    }

    /**
     * Registers the record under one variant key per alternate allele, merging
     * studies into an already-present record for the same variant.
     */
    private void addGwasRecordToVariantMap(Map<Variant, Gwas> variantMap, Gwas gwasRecord) {
        String[] alternates = gwasRecord.getAlternate().split(",");
        for (int i = 0; i < alternates.length; i++) {
            String alternate = alternates[i];
            Variant variantKey = new Variant(
                    gwasRecord.getChromosome(), gwasRecord.getStart(), gwasRecord.getEnd(), gwasRecord.getReference(), alternate);
            if (variantMap.containsKey(variantKey)) {
                updateGwasEntry(variantMap, gwasRecord, variantKey);
            } else {
                // if a gwas record has several alternatives, it has to be cloned to avoid side effects (set gwasRecord
                // alternative would update the previous instance of gwas record saved in the 'variantMap')
                gwasRecord = cloneGwasRecordIfNecessary(gwasRecord, i);
                gwasRecord.setAlternate(alternate);
                variantMap.put(variantKey, gwasRecord);
            }
        }
    }

    /** Copy-constructs the record for every alternate after the first (see caller). */
    private Gwas cloneGwasRecordIfNecessary(Gwas gwasRecord, int i) {
        if (i > 0) {
            gwasRecord = new Gwas(gwasRecord);
        }
        return gwasRecord;
    }

    /** Merges the studies of {@code gwasVO} into the record already stored under {@code gwasKey}. */
    private void updateGwasEntry(Map<Variant, Gwas> variantMap, Gwas gwasVO, Variant gwasKey) {
        Gwas gwas = variantMap.get(gwasKey);
        gwas.addStudies(gwasVO.getStudies());
        variantMap.put(gwasKey, gwas);
    }
}
| |
package com.apleben.animation.AnimatedCurves;
import org.jdesktop.animation.timing.Animator;
import org.jdesktop.animation.timing.TimingTargetAdapter;
import org.jdesktop.animation.timing.interpolation.PropertySetter;
import org.jdesktop.swingx.JXPanel;
import org.jdesktop.swingx.StackLayout;
import org.jdesktop.swingx.graphics.GraphicsUtilities;
import org.jdesktop.swingx.image.ColorTintFilter;
import org.jdesktop.swingx.image.GaussianBlurFilter;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.awt.image.BufferedImageOp;
import java.io.IOException;
/**
* The Introduction panel with the custom blurred & pulsed logo.
*
* @author apupeikis
*/
/**
 * The Introduction panel with the custom blurred &amp; pulsed logo.
 * On construction the panel starts nearly transparent and fades in;
 * when the fade-in finishes, the logo's blur/pulse animation sequence starts.
 *
 * @author apupeikis
 */
public class IntroPanel extends JXPanel {
    private BlurPulseLogo logo; // the blurred pulsing logo instance

    public IntroPanel() {
        buildContentPane();
        setAlpha(0.01f); // start almost invisible; startFadeInAnimation animates alpha up to 1.0
        startFadeInAnimation();
    }

    /**
     * getting the final {@code Animator} object, those performing a pulse animation.
     * @return the Animator object performing the last pulse animation
     */
    public Animator getFinalAnimator() {
        return logo.getGlowAnimator();
    }

    /*
     * building the content panel with {@code GradientPanel} and our custom logo
     */
    private void buildContentPane() {
        setLayout(new StackLayout());
        logo = new BlurPulseLogo("/com/apleben/animation/AnimatedCurves/resources/logo.png");
        logo.setOpaque(false);

        GradientPanel panel = new GradientPanel();
        add(panel, StackLayout.TOP);
        add(logo, StackLayout.TOP);
    }

    /*
     * starting the fast fade in animation to appearing the panel with alpha changes from 0.01f to 1.0f
     */
    private void startFadeInAnimation() {
        Animator animator = PropertySetter.createAnimator(300, this, "alpha", 0.01f, 1.0f);
        animator.addTarget(new TimingTargetAdapter() {
            @Override
            public void end() {
                logo.startAnimator(); // starting logo's Animator after the end of the current animation
            }
        });
        animator.start();
    }

    /**
     * Custom blurred and finally pulsed logo. After instantiation performed the two sequence of animation steps.
     */
    public static class BlurPulseLogo extends JComponent {
        private Animator glowAnimator;              // the final pulse Animator
        private BufferedImage image, glow, blurred; // just an image cache
        private int radius = 30;                    // Blur Filter radius
        private GaussianBlurFilter blurFilter;      // obviously, the Gaussian Blur Filter
        private boolean blurPaint, glowPaint;       // flags to paint the blur and pulse effects accordingly
        private float alpha = 0.0f;                 // alpha for the pulse effect painting

        /**
         * Instantiating the {@code BlurPulseLogo} instance
         * @param imageName the string of the logo's image name
         * @throws IllegalStateException if the image resource cannot be loaded
         */
        public BlurPulseLogo(final String imageName) {
            try {
                image = GraphicsUtilities.loadCompatibleImage(getClass().getResource(imageName));
            } catch (IOException e) {
                // BUGFIX: the original only called e.printStackTrace(), leaving 'image' null
                // and causing a later NPE in getPreferredSize(). Fail fast, preserving the cause.
                throw new IllegalStateException("Unable to load logo image: " + imageName, e);
            }
            blurFilter = new GaussianBlurFilter(radius);
            PropertySetter setter = new PropertySetter(this, "alpha", 0.0f, 1.0f);
            glowAnimator = new Animator(800, 6.0, Animator.RepeatBehavior.REVERSE, setter);
        }

        @Override
        public Dimension getMinimumSize() {
            return getPreferredSize();
        }

        @Override
        public Dimension getMaximumSize() {
            return getPreferredSize();
        }

        @Override
        public Dimension getPreferredSize() {
            // the component is exactly as large as the logo image
            return new Dimension(image.getWidth(), image.getHeight());
        }

        /**
         * returning the last pulse Animator object
         * @return the Animator object performing the last pulse animation
         */
        public Animator getGlowAnimator() {
            return glowAnimator;
        }

        /**
         * get the alpha number
         * @return the alpha for the pulse animation state
         */
        public float getAlpha() {
            return alpha;
        }

        /**
         * set the alpha number
         * @param alpha the alpha number for the pulse animation state
         */
        public void setAlpha(float alpha) {
            this.alpha = alpha;
            repaint();
        }

        /**
         * get the blur filter radius number
         * @return the blur filter radius number
         */
        public int getRadius() {
            return radius;
        }

        /**
         * set the blur filter radius number
         * @param radius the blur filter radius number
         */
        public void setRadius(int radius) {
            this.radius = radius;
            blurFilter = new GaussianBlurFilter(radius);
            repaint();
        }

        /**
         * {@inheritDoc}
         */
        @Override
        protected void paintComponent(Graphics g) {
            createImageCache();

            Graphics2D g2;
            if (blurPaint) {
                // re-render the blurred copy with the current radius
                g2 = blurred.createGraphics();
                g2.setComposite(AlphaComposite.Clear);
                g2.fillRect(0, 0, blurred.getWidth(), blurred.getHeight());
                g2.setComposite(AlphaComposite.SrcOver);
                if (radius > 0) {
                    g2.drawImage(image, blurFilter, 0, 0);
                } else {
                    g2.drawImage(image, null, 0, 0);
                }
                g2.dispose();
            }

            // center the logo within the component
            int x = (getWidth() - image.getWidth()) / 2;
            int y = (getHeight() - image.getHeight()) / 2;

            g2 = (Graphics2D) g.create();
            if (blurPaint) {
                g2.drawImage(blurred, x, y, null);
            } else if (glowPaint) {
                // draw the white glow with the animated alpha, then the sharp logo on top
                g2.setComposite(AlphaComposite.SrcOver.derive(getAlpha()));
                g2.drawImage(glow, x, y, null);
                g2.setComposite(AlphaComposite.SrcOver);
                g2.drawImage(image, x, y, null);
            }
            g2.dispose();
        }

        /*
         * create an image cache of the blurred image and glow image for speed up our custom painting code
         */
        private void createImageCache() {
            if (blurred == null) {
                blurred = GraphicsUtilities.createCompatibleTranslucentImage(image.getWidth(), image.getHeight());
            }
            if (glow == null) {
                // glow = heavily blurred, white-tinted copy of the logo
                glow = GraphicsUtilities.createCompatibleImage(image);
                Graphics2D g2 = glow.createGraphics();
                g2.drawImage(image, 0, 0, null);
                g2.dispose();

                BufferedImageOp filter = new GaussianBlurFilter(24);
                glow = filter.filter(glow, null);

                filter = new ColorTintFilter(Color.WHITE, 1.0f);
                glow = filter.filter(glow, null);
            }
        }

        /*
         * starting the animation process
         */
        public void startAnimator() {
            startBlurAnimator();
        }

        /*
         * starting the blur animation
         */
        private void startBlurAnimator() {
            blurPaint = true;  // we should paint blur effect
            glowPaint = false; // but not pulse right now
            final int oldValue = getRadius();
            Animator animator = new Animator(2800, new TimingTargetAdapter() {
                @Override
                public void end() {
                    blurPaint = false; // after blur was done, just switch it off in painting code
                    radius = oldValue;
                    startGlowAnimator(); // starting pulse animation
                }

                @Override
                public void timingEvent(float fraction) {
                    // linear interpolation from oldValue down to 0, clamped to [0, oldValue]
                    int value = (int) (oldValue + fraction * (-oldValue));
                    if (value > oldValue) value = oldValue;
                    else if (value < 0) value = 0;
                    setRadius(value); // setting the new radius and repaint
                }
            });
            animator.start();
        }

        /*
         * starting the pulse animation. The 2nd and last animation in the circle
         */
        private void startGlowAnimator() {
            glowPaint = true;
            glowAnimator.start();
        }
    }
}
| |
package com.wroclawstudio.weddinggame.models.envioremnt;
import android.content.Context;
import android.graphics.drawable.Drawable;
import com.wroclawstudio.weddinggame.R;
import com.wroclawstudio.weddinggame.utils.ViewUtils;
import java.util.ArrayList;
import java.util.List;
/**
 * Builds the game world as a list of columns, each column holding the game
 * objects (ground, platforms, clouds, castle, enemy, princess, grass, texts)
 * stacked at that x position. Configure via the fluent setters, then call
 * {@link #build()}.
 */
public class EnvironmentBuilder {
    private final int height;
    private final int length;
    private final Context context;

    // All element lists default to empty so build() works without every setter being called.
    private List<Integer> holes = new ArrayList<>();
    private List<PlaneObject> grass = new ArrayList<>();
    private List<PlaneObject> platform = new ArrayList<>();
    private int castlePosition;
    // BUGFIX: 'cloud' and 'texts' were previously left null, so build() threw a
    // NullPointerException whenever setCloud()/setText() had not been called.
    private ArrayList<PlaneObject> cloud = new ArrayList<>();
    private int enemyPosition;
    private ArrayList<PlaneObject> texts = new ArrayList<>();

    /**
     * @param context Android context used to resolve drawables
     * @param height  ground height (in tiles)
     * @param length  world length (in columns); build() appends 60 extra columns
     */
    public EnvironmentBuilder(Context context, int height, int length) {
        this.context = context;
        this.height = height;
        this.length = length;
    }

    /** @param holes column indexes that get no ground (pits) */
    public EnvironmentBuilder setHoles(List<Integer> holes) {
        this.holes = holes;
        return this;
    }

    /** @param grass horizontal grass spans */
    public EnvironmentBuilder setGrass(List<PlaneObject> grass) {
        this.grass = grass;
        return this;
    }

    /** @param platform horizontal brick-platform spans */
    public EnvironmentBuilder setPlatform(List<PlaneObject> platform) {
        this.platform = platform;
        return this;
    }

    /** @param castlePosition column index of the castle's center */
    public EnvironmentBuilder setCastlePosition(int castlePosition) {
        this.castlePosition = castlePosition;
        return this;
    }

    /** @param cloud horizontal cloud spans (two tiles tall: startY bottom, endY top) */
    public EnvironmentBuilder setCloud(ArrayList<PlaneObject> cloud) {
        this.cloud = cloud;
        return this;
    }

    /**
     * Assembles the world column by column.
     *
     * @return one BaseGameObject[] per column, for {@code length + 60} columns
     */
    public List<BaseGameObject[]> build() {
        List<BaseGameObject[]> list = new ArrayList<>();

        Drawable groundDrawable = ViewUtils.getDrawable(context, R.drawable.ground);
        Drawable startGrassDrawable = ViewUtils.getDrawable(context, R.drawable.grass_left);
        Drawable endGrassDrawable = ViewUtils.getDrawable(context, R.drawable.grass_right);
        Drawable middleGrassDrawable = ViewUtils.getDrawable(context, R.drawable.grass_mid);
        Drawable platformDrawable = ViewUtils.getDrawable(context, R.drawable.brick);
        Drawable castleBrickDrawale = ViewUtils.getDrawable(context, R.drawable.castle_brick);
        Drawable castleRoofSkyDrawable = ViewUtils.getDrawable(context, R.drawable.castle_brick_roof_sky);
        Drawable castleRoofDrawable = ViewUtils.getDrawable(context, R.drawable.castle_brick_roof);
        Drawable castleDoorBottom = ViewUtils.getDrawable(context, R.drawable.door_bottom);
        Drawable castleWindowRightDrawable = ViewUtils.getDrawable(context, R.drawable.castle_window_right);
        Drawable castleWindowLeftDrawable = ViewUtils.getDrawable(context, R.drawable.castle_window_left);
        Drawable castleDoorTop = ViewUtils.getDrawable(context, R.drawable.door_top);
        Drawable cloudStartBottom = ViewUtils.getDrawable(context, R.drawable.cloud_l_bot);
        Drawable cloudStartTop = ViewUtils.getDrawable(context, R.drawable.cloud_l_top);
        Drawable cloudEndBottom = ViewUtils.getDrawable(context, R.drawable.cloud_r_bot);
        Drawable cloudEndTop = ViewUtils.getDrawable(context, R.drawable.cloud_r_top);
        Drawable cloudMiddleBottom = ViewUtils.getDrawable(context, R.drawable.cloud_m_bot);
        Drawable cloudMiddleTop = ViewUtils.getDrawable(context, R.drawable.cloud_m_top);
        Drawable enemyTopLeft = ViewUtils.getDrawable(context, R.drawable.enemy_t_l);
        Drawable enemyTopRight = ViewUtils.getDrawable(context, R.drawable.enemy_t_r);
        Drawable enemyBottomLeft = ViewUtils.getDrawable(context, R.drawable.enemy_b_l);
        Drawable enemyBottomRight = ViewUtils.getDrawable(context, R.drawable.enemy_b_r);
        Drawable princessTop = ViewUtils.getDrawable(context, R.drawable.princess_top);
        Drawable princessBottom = ViewUtils.getDrawable(context, R.drawable.princess_bot);

        for (int index = 0; index < length + 60; index++) {
            List<BaseGameObject> column = new ArrayList<>();

            // ground, unless this column is a hole
            if (!holes.contains(index)) {
                for (int i = 0; i < height; i++) {
                    column.add(new GroundObject(groundDrawable, i));
                }
            }

            // platforms whose span covers this column
            for (int i = 0; i < platform.size(); i++) {
                PlaneObject object = platform.get(i);
                if (new EnvironmentInteger(index).equals(object)) {
                    column.add(new PlatformObject(platformDrawable, object.startY));
                }
            }

            // floating message texts
            for (int i = 0; i < texts.size(); i++) {
                PlaneObject object = texts.get(i);
                if (new EnvironmentInteger(index).equals(object)) {
                    column.add(new MessageObject(object.getText(), object.startY));
                }
            }

            // clouds: left cap, right cap or middle segment (two tiles tall)
            for (int i = 0; i < cloud.size(); i++) {
                PlaneObject object = cloud.get(i);
                if (new EnvironmentInteger(index).equals(object)) {
                    if (object.startX == index) {
                        column.add(new CloudObject(cloudStartBottom, object.startY));
                        column.add(new CloudObject(cloudStartTop, object.endY));
                    } else if (object.endX == index) {
                        column.add(new CloudObject(cloudEndBottom, object.startY));
                        column.add(new CloudObject(cloudEndTop, object.endY));
                    } else {
                        column.add(new CloudObject(cloudMiddleBottom, object.startY));
                        column.add(new CloudObject(cloudMiddleTop, object.endY));
                    }
                }
            }

            // enemy occupies two columns, two tiles tall
            if (enemyPosition - 1 == index) {
                column.add(new EnemyObject(enemyBottomLeft, height));
                column.add(new EnemyObject(enemyTopLeft, height + 1));
            }
            if (enemyPosition == index) {
                column.add(new EnemyObject(enemyBottomRight, height));
                column.add(new EnemyObject(enemyTopRight, height + 1));
            }

            // castle: 5 columns wide, taller inner 3 columns, door in the center
            if (castlePosition - 3 < index && castlePosition + 3 > index) {
                int baseLevel = height;
                int level = 3;
                int secondLevel = 2;
                int firstLevel = baseLevel + level;
                boolean inner = castlePosition - 2 < index && castlePosition + 2 > index;
                int maxForPosition = (inner ? secondLevel : 0) + firstLevel;
                for (int currentCastleLevel = height; currentCastleLevel < maxForPosition; currentCastleLevel++) {
                    if (castlePosition == index &&
                            (currentCastleLevel == height || currentCastleLevel == height + 1)) {
                        if (currentCastleLevel == height) {
                            //bottom door
                            column.add(new CastleObject(castleDoorBottom, currentCastleLevel));
                        } else {
                            // top door
                            column.add(new CastleObject(castleDoorTop, currentCastleLevel));
                        }
                    } else if (inner && currentCastleLevel == firstLevel - 1) {
                        column.add(new CastleObject(castleRoofDrawable, currentCastleLevel));
                    } else if (currentCastleLevel == maxForPosition - 1) {
                        // sky roof
                        column.add(new CastleObject(castleRoofSkyDrawable, currentCastleLevel));
                    } else if ((index == castlePosition + 1 || index == castlePosition - 1)
                            && currentCastleLevel == firstLevel) {
                        if (index == castlePosition + 1) {
                            column.add(new CastleObject(castleWindowRightDrawable, currentCastleLevel));
                        } else {
                            column.add(new CastleObject(castleWindowLeftDrawable, currentCastleLevel));
                        }
                    } else {
                        column.add(new CastleObject(castleBrickDrawale, currentCastleLevel));
                    }
                }
            }

            // princess stands just right of the castle center
            if (castlePosition + 1 == index) {
                column.add(new PlatformObject(princessBottom, height));
                column.add(new PlatformObject(princessTop, height + 1));
            }

            // grass: indexOf relies on EnvironmentInteger.equals(PlaneObject) range matching
            int position = grass.indexOf(new EnvironmentInteger(index));
            if (position != -1) {
                PlaneObject object = grass.get(position);
                if (object.startX == index) {
                    column.add(new GrassObject(startGrassDrawable, object.startY));
                } else if (object.endX == index) {
                    column.add(new GrassObject(endGrassDrawable, object.startY));
                } else {
                    column.add(new GrassObject(middleGrassDrawable, object.startY));
                }
            }

            list.add(column.toArray(new BaseGameObject[column.size()]));
        }
        return list;
    }

    /** @param enemyPosition column index of the enemy's right half */
    public EnvironmentBuilder setEnemy(int enemyPosition) {
        this.enemyPosition = enemyPosition;
        return this;
    }

    /** @param texts message objects, one column wide each */
    public EnvironmentBuilder setText(ArrayList<PlaneObject> texts) {
        this.texts = texts;
        return this;
    }

    /**
     * A horizontal span [startX, endX] at rows startY/endY, optionally carrying a text.
     * NOTE(review): equals() here is intentionally asymmetric (matches an Integer
     * falling inside the span) and hashCode() is not overridden — these objects
     * must not be used as hash-based collection keys.
     */
    public static class PlaneObject {
        final int startX;
        final int endX;
        final int startY;
        final int endY;
        final String text;

        public PlaneObject(int startX, int endX, int startY, int endY) {
            this.startX = startX;
            this.endX = endX;
            this.startY = startY;
            this.endY = endY;
            this.text = null;
        }

        public PlaneObject(int startX, int y, String text) {
            this.startX = startX;
            this.endX = startX;
            this.startY = y;
            this.endY = y;
            this.text = text;
        }

        public PlaneObject(int startX, int endX, int y) {
            this.startX = startX;
            this.endX = endX;
            this.startY = y;
            this.endY = y;
            this.text = null;
        }

        @Override
        public boolean equals(Object x) {
            // range-membership check against a boxed column index
            if (x instanceof Integer) {
                int xValue = (Integer) x;
                return xValue >= startX && xValue <= endX;
            }
            return false;
        }

        public String getText() {
            return text;
        }
    }

    /**
     * A column index wrapper whose equals() reports whether the index falls
     * inside a PlaneObject's [startX, endX] span. Used with List.indexOf and
     * direct equals() calls in build(); not suitable as a hash key.
     */
    public static class EnvironmentInteger {
        private final int value;

        public EnvironmentInteger(int value) {
            this.value = value;
        }

        @Override
        public boolean equals(Object o) {
            if (o instanceof PlaneObject) {
                return value >= ((PlaneObject) o).startX && value <= ((PlaneObject) o).endX;
            }
            return false;
        }
    }
}
| |
/*
* Copyright 2017-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.routing.config.impl;
import com.google.common.collect.ImmutableSet;
import com.googlecode.concurrenttrees.radix.node.concrete.DefaultByteArrayNodeFactory;
import com.googlecode.concurrenttrees.radixinverted.ConcurrentInvertedRadixTree;
import com.googlecode.concurrenttrees.radixinverted.InvertedRadixTree;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.Ip6Address;
import org.onlab.packet.IpAddress;
import org.onlab.packet.IpPrefix;
import org.onlab.packet.MacAddress;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.incubator.net.intf.InterfaceService;
import org.onosproject.net.ConnectPoint;
import org.onosproject.net.config.ConfigFactory;
import org.onosproject.net.config.NetworkConfigEvent;
import org.onosproject.net.config.NetworkConfigListener;
import org.onosproject.net.config.NetworkConfigRegistry;
import org.onosproject.net.config.NetworkConfigService;
import org.onosproject.net.config.basics.SubjectFactories;
import org.onosproject.routing.RoutingService;
import org.onosproject.routing.config.BgpConfig;
import org.onosproject.routing.config.LocalIpPrefixEntry;
import org.onosproject.routing.config.ReactiveRoutingConfig;
import org.onosproject.routing.config.RouterConfig;
import org.onosproject.routing.config.RoutersConfig;
import org.onosproject.routing.config.RoutingConfigurationService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Implementation of {@code RoutingConfigurationService} which reads routing
 * configuration from the network configuration service.
 * <p>
 * On activation it registers config factories for the "bgp", "router",
 * "routers" and "reactiveRouting" subjects, loads the reactive-routing
 * configuration, and keeps the derived state (local prefix tables, gateway
 * addresses, BGP peer connect points) refreshed via a network-config
 * listener.
 */
@Component(immediate = true)
@Service
public class RoutingConfigurationImpl implements RoutingConfigurationService {

    private final Logger log = LoggerFactory.getLogger(getClass());

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected NetworkConfigRegistry registry;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected NetworkConfigService configService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected CoreService coreService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected InterfaceService interfaceService;

    // Virtual gateway IP addresses, collected from the local prefix entries.
    private Set<IpAddress> gatewayIpAddresses = new HashSet<>();
    // Connect points of the configured BGP peers; rebuilt on config updates.
    private Set<ConnectPoint> bgpPeerConnectPoints = new HashSet<>();

    // Prefix tables keyed by the binary-string form of each prefix (see
    // createBinaryString()), one tree per IP version. The inverted radix
    // tree lets us find all stored prefixes that contain a given address.
    private InvertedRadixTree<LocalIpPrefixEntry>
            localPrefixTable4 = new ConcurrentInvertedRadixTree<>(
                    new DefaultByteArrayNodeFactory());
    private InvertedRadixTree<LocalIpPrefixEntry>
            localPrefixTable6 = new ConcurrentInvertedRadixTree<>(
                    new DefaultByteArrayNodeFactory());

    private MacAddress virtualGatewayMacAddress;

    private final InternalNetworkConfigListener configListener =
            new InternalNetworkConfigListener();

    // Factories telling the network-config subsystem how to instantiate each
    // config type for an application subject.
    private ConfigFactory<ApplicationId, BgpConfig> bgpConfigFactory =
            new ConfigFactory<ApplicationId, BgpConfig>(
                    SubjectFactories.APP_SUBJECT_FACTORY, BgpConfig.class, "bgp") {
                @Override
                public BgpConfig createConfig() {
                    return new BgpConfig();
                }
            };

    private ConfigFactory<ApplicationId, RouterConfig> routerConfigFactory =
            new ConfigFactory<ApplicationId, RouterConfig>(
                    SubjectFactories.APP_SUBJECT_FACTORY, RouterConfig.class, "router") {
                @Override
                public RouterConfig createConfig() {
                    return new RouterConfig();
                }
            };

    // The "routers" factory is registered with the extra boolean argument set
    // to true (unlike the others) — see ConfigFactory for its meaning.
    private ConfigFactory<ApplicationId, RoutersConfig> routersConfigFactory =
            new ConfigFactory<ApplicationId, RoutersConfig>(
                    SubjectFactories.APP_SUBJECT_FACTORY, RoutersConfig.class, "routers", true) {
                @Override
                public RoutersConfig createConfig() {
                    return new RoutersConfig();
                }
            };

    private ConfigFactory<ApplicationId, ReactiveRoutingConfig>
            reactiveRoutingConfigFactory =
            new ConfigFactory<ApplicationId, ReactiveRoutingConfig>(
                    SubjectFactories.APP_SUBJECT_FACTORY,
                    ReactiveRoutingConfig.class, "reactiveRouting") {
                @Override
                public ReactiveRoutingConfig createConfig() {
                    return new ReactiveRoutingConfig();
                }
            };

    /**
     * OSGi activation: subscribe to config events, register the config
     * factories, and load the initial configuration.
     */
    @Activate
    public void activate() {
        configService.addListener(configListener);
        registry.registerConfigFactory(bgpConfigFactory);
        registry.registerConfigFactory(routerConfigFactory);
        registry.registerConfigFactory(routersConfigFactory);
        registry.registerConfigFactory(reactiveRoutingConfigFactory);
        setUpConfiguration();
        log.info("Routing configuration service started");
    }

    /** OSGi deactivation: mirror of {@link #activate()}. */
    @Deactivate
    public void deactivate() {
        registry.unregisterConfigFactory(bgpConfigFactory);
        registry.unregisterConfigFactory(routerConfigFactory);
        registry.unregisterConfigFactory(routersConfigFactory);
        registry.unregisterConfigFactory(reactiveRoutingConfigFactory);
        configService.removeListener(configListener);
        log.info("Routing configuration service stopped");
    }

    /**
     * Set up reactive routing information from configuration.
     * <p>
     * Populates the v4/v6 local prefix tables and the gateway address set
     * from the reactive-routing config, then derives the BGP peer connect
     * points from the BGP config. Re-run by the config listener on every
     * relevant add/update/remove event.
     */
    private void setUpConfiguration() {
        ReactiveRoutingConfig config = configService.getConfig(
                coreService.registerApplication(RoutingConfigurationService
                        .REACTIVE_ROUTING_APP_ID),
                RoutingConfigurationService.CONFIG_CLASS);
        if (config == null) {
            log.warn("No reactive routing config available!");
            return;
        }
        for (LocalIpPrefixEntry entry : config.localIp4PrefixEntries()) {
            localPrefixTable4.put(createBinaryString(entry.ipPrefix()), entry);
            gatewayIpAddresses.add(entry.getGatewayIpAddress());
        }
        for (LocalIpPrefixEntry entry : config.localIp6PrefixEntries()) {
            localPrefixTable6.put(createBinaryString(entry.ipPrefix()), entry);
            gatewayIpAddresses.add(entry.getGatewayIpAddress());
        }
        virtualGatewayMacAddress = config.virtualGatewayMacAddress();

        // Setup BGP peer connect points
        ApplicationId routerAppId = coreService.getAppId(RoutingService.ROUTER_APP_ID);
        if (routerAppId == null) {
            log.info("Router application ID is null!");
            return;
        }
        BgpConfig bgpConfig = configService.getConfig(routerAppId, BgpConfig.class);
        if (bgpConfig == null) {
            log.info("BGP config is null!");
            return;
        } else {
            // One connect point per peer that resolves to a known interface;
            // peers without a matching interface are silently skipped.
            bgpPeerConnectPoints =
                    bgpConfig.bgpSpeakers().stream()
                            .flatMap(speaker -> speaker.peers().stream())
                            .map(peer -> interfaceService.getMatchingInterface(peer))
                            .filter(Objects::nonNull)
                            .map(intf -> intf.connectPoint())
                            .collect(Collectors.toSet());
        }
    }

    @Override
    public boolean isIpAddressLocal(IpAddress ipAddress) {
        // The address is local iff at least one stored prefix's binary string
        // is a prefix of the address's full-length binary string.
        if (ipAddress.isIp4()) {
            return localPrefixTable4.getValuesForKeysPrefixing(
                    createBinaryString(
                            IpPrefix.valueOf(ipAddress, Ip4Address.BIT_LENGTH)))
                    .iterator().hasNext();
        } else {
            return localPrefixTable6.getValuesForKeysPrefixing(
                    createBinaryString(
                            IpPrefix.valueOf(ipAddress, Ip6Address.BIT_LENGTH)))
                    .iterator().hasNext();
        }
    }

    @Override
    public boolean isIpPrefixLocal(IpPrefix ipPrefix) {
        // Exact-match lookup only; checks both tables since the prefix's IP
        // version is not inspected here.
        return (localPrefixTable4.getValueForExactKey(
                createBinaryString(ipPrefix)) != null ||
                localPrefixTable6.getValueForExactKey(
                        createBinaryString(ipPrefix)) != null);
    }

    @Override
    public boolean isVirtualGatewayIpAddress(IpAddress ipAddress) {
        return gatewayIpAddresses.contains(ipAddress);
    }

    @Override
    public MacAddress getVirtualGatewayMacAddress() {
        return virtualGatewayMacAddress;
    }

    @Override
    public Set<ConnectPoint> getBgpPeerConnectPoints() {
        // Defensive snapshot so callers cannot mutate internal state.
        return ImmutableSet.copyOf(bgpPeerConnectPoints);
    }

    /**
     * Creates the binary string representation of an IP prefix.
     * The prefix can be either IPv4 or IPv6.
     * The string length is equal to the prefix length + 1.
     *
     * For each string, we put a extra "0" in the front. The purpose of
     * doing this is to store the default route inside InvertedRadixTree.
     *
     * @param ipPrefix the IP prefix to use
     * @return the binary string representation
     */
    private static String createBinaryString(IpPrefix ipPrefix) {
        if (ipPrefix.prefixLength() == 0) {
            // Default route: represented by the lone leading "0".
            return "0";
        }
        byte[] octets = ipPrefix.address().toOctets();
        StringBuilder result = new StringBuilder(ipPrefix.prefixLength());
        for (int i = 0; i < ipPrefix.prefixLength(); i++) {
            // Emit bit i of the address, most-significant bit first.
            int byteOffset = i / Byte.SIZE;
            int bitOffset = i % Byte.SIZE;
            int mask = 1 << (Byte.SIZE - 1 - bitOffset);
            byte value = octets[byteOffset];
            boolean isSet = ((value & mask) != 0);
            result.append(isSet ? "1" : "0");
        }
        return "0" + result.toString();
    }

    /**
     * Re-runs {@link #setUpConfiguration()} whenever the reactive-routing
     * config is added, updated or removed.
     */
    private class InternalNetworkConfigListener implements NetworkConfigListener {
        @Override
        public void event(NetworkConfigEvent event) {
            switch (event.type()) {
            case CONFIG_REGISTERED:
                break;
            case CONFIG_UNREGISTERED:
                break;
            case CONFIG_ADDED:
            case CONFIG_UPDATED:
            case CONFIG_REMOVED:
                if (event.configClass() == RoutingConfigurationService.CONFIG_CLASS) {
                    setUpConfiguration();
                }
                break;
            default:
                break;
            }
        }
    }
}
| |
/*
* Copyright 2016 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel;
import io.netty.channel.embedded.EmbeddedChannel;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.function.Executable;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.TimeUnit;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Tests for {@code CombinedChannelDuplexHandler}: construction constraints,
 * removal of the wrapped inbound/outbound halves, and propagation of inbound
 * and outbound pipeline events through an {@link EmbeddedChannel}.
 */
public class CombinedChannelDuplexHandlerTest {

    // Shared sentinel objects asserted on by identity in the handlers below.
    private static final Object MSG = new Object();
    private static final SocketAddress LOCAL_ADDRESS = new InetSocketAddress(0);
    private static final SocketAddress REMOTE_ADDRESS = new InetSocketAddress(0);
    private static final Throwable CAUSE = new Throwable();
    private static final Object USER_EVENT = new Object();

    /** Every pipeline event the recording handlers can observe. */
    private enum Event {
        REGISTERED,
        UNREGISTERED,
        ACTIVE,
        INACTIVE,
        CHANNEL_READ,
        CHANNEL_READ_COMPLETE,
        EXCEPTION_CAUGHT,
        USER_EVENT_TRIGGERED,
        CHANNEL_WRITABILITY_CHANGED,
        HANDLER_ADDED,
        HANDLER_REMOVED,
        BIND,
        CONNECT,
        WRITE,
        FLUSH,
        READ,
        REGISTER,
        DEREGISTER,
        CLOSE,
        DISCONNECT
    }

    // Removing the inbound half before the combined handler was added to a
    // pipeline must fail.
    @Test
    public void testInboundRemoveBeforeAdded() {
        final CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler> handler =
                new CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler>(
                        new ChannelInboundHandlerAdapter(), new ChannelOutboundHandlerAdapter());
        assertThrows(IllegalStateException.class, new Executable() {
            @Override
            public void execute() {
                handler.removeInboundHandler();
            }
        });
    }

    // Same as above for the outbound half.
    @Test
    public void testOutboundRemoveBeforeAdded() {
        final CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler> handler =
                new CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler>(
                        new ChannelInboundHandlerAdapter(), new ChannelOutboundHandlerAdapter());
        assertThrows(IllegalStateException.class, new Executable() {
            @Override
            public void execute() {
                handler.removeOutboundHandler();
            }
        });
    }

    // The inbound half must not also implement ChannelOutboundHandler
    // (ChannelDuplexHandler implements both).
    @Test
    public void testInboundHandlerImplementsOutboundHandler() {
        assertThrows(IllegalArgumentException.class, new Executable() {
            @Override
            public void execute() {
                new CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler>(
                        new ChannelDuplexHandler(), new ChannelOutboundHandlerAdapter());
            }
        });
    }

    // Mirror case: the outbound half must not also implement
    // ChannelInboundHandler.
    @Test
    public void testOutboundHandlerImplementsInboundHandler() {
        assertThrows(IllegalArgumentException.class, new Executable() {
            @Override
            public void execute() {
                new CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler>(
                        new ChannelInboundHandlerAdapter(), new ChannelDuplexHandler());
            }
        });
    }

    // A subclass created via the no-arg constructor must call init(...) before
    // it can be added to a pipeline.
    @Test
    public void testInitNotCalledBeforeAdded() {
        final CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler> handler =
                new CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler>() { };
        assertThrows(IllegalStateException.class, new Executable() {
            @Override
            public void execute() throws Throwable {
                handler.handlerAdded(null);
            }
        });
    }

    // An exception fired on the pipeline must visit the inbound half, then the
    // outbound half, then the next handler — in that order.
    @Test
    public void testExceptionCaughtBothCombinedHandlers() {
        final Exception exception = new Exception();
        final Queue<ChannelHandler> queue = new ArrayDeque<ChannelHandler>();
        ChannelInboundHandler inboundHandler = new ChannelInboundHandlerAdapter() {
            @Override
            public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
                assertSame(exception, cause);
                queue.add(this);
                ctx.fireExceptionCaught(cause);
            }
        };
        ChannelOutboundHandler outboundHandler = new ChannelOutboundHandlerAdapter() {
            @SuppressWarnings("deprecation")
            @Override
            public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
                assertSame(exception, cause);
                queue.add(this);
                ctx.fireExceptionCaught(cause);
            }
        };
        ChannelInboundHandler lastHandler = new ChannelInboundHandlerAdapter() {
            @Override
            public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
                assertSame(exception, cause);
                queue.add(this);
            }
        };
        EmbeddedChannel channel = new EmbeddedChannel(
                new CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler>(
                        inboundHandler, outboundHandler), lastHandler);
        channel.pipeline().fireExceptionCaught(exception);
        assertFalse(channel.finish());
        assertSame(inboundHandler, queue.poll());
        assertSame(outboundHandler, queue.poll());
        assertSame(lastHandler, queue.poll());
        assertTrue(queue.isEmpty());
    }

    // Inbound events reach the wrapped inbound handler until it is removed via
    // removeInboundHandler(); after that they bypass it.
    @Test
    public void testInboundEvents() {
        InboundEventHandler inboundHandler = new InboundEventHandler();
        CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler> handler =
                new CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler>(
                        inboundHandler, new ChannelOutboundHandlerAdapter());
        EmbeddedChannel channel = new EmbeddedChannel();
        channel.pipeline().addLast(handler);
        assertEquals(Event.HANDLER_ADDED, inboundHandler.pollEvent());
        doInboundOperations(channel);
        assertInboundOperations(inboundHandler);
        handler.removeInboundHandler();
        assertEquals(Event.HANDLER_REMOVED, inboundHandler.pollEvent());
        // These should not be handled by the inboundHandler anymore as it was removed before
        doInboundOperations(channel);
        // Should have not received any more events as it was removed before via removeInboundHandler()
        assertNull(inboundHandler.pollEvent());
        // CAUSE was fired twice into the pipeline; it must surface via
        // checkException().
        try {
            channel.checkException();
            fail();
        } catch (Throwable cause) {
            assertSame(CAUSE, cause);
        }
        assertTrue(channel.finish());
        assertNull(inboundHandler.pollEvent());
    }

    // Outbound events reach the wrapped outbound handler until it is removed
    // via removeOutboundHandler(); after that they bypass it.
    @Test
    public void testOutboundEvents() {
        ChannelInboundHandler inboundHandler = new ChannelInboundHandlerAdapter();
        OutboundEventHandler outboundHandler = new OutboundEventHandler();
        CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler> handler =
                new CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler>(
                        inboundHandler, outboundHandler);
        EmbeddedChannel channel = new EmbeddedChannel();
        // Extra handler closer to the head completes the outbound promises.
        channel.pipeline().addLast(new OutboundEventHandler());
        channel.pipeline().addLast(handler);
        assertEquals(Event.HANDLER_ADDED, outboundHandler.pollEvent());
        doOutboundOperations(channel);
        assertOutboundOperations(outboundHandler);
        handler.removeOutboundHandler();
        assertEquals(Event.HANDLER_REMOVED, outboundHandler.pollEvent());
        // These should not be handled by the outboundHandler anymore as it was removed before
        doOutboundOperations(channel);
        // Should have not received any more events as it was removed before via removeOutboundHandler()
        assertNull(outboundHandler.pollEvent());
        assertFalse(channel.finish());
        assertNull(outboundHandler.pollEvent());
    }

    /** Fires one of each outbound operation through the pipeline. */
    private static void doOutboundOperations(Channel channel) {
        channel.pipeline().bind(LOCAL_ADDRESS).syncUninterruptibly();
        channel.pipeline().connect(REMOTE_ADDRESS, LOCAL_ADDRESS).syncUninterruptibly();
        channel.pipeline().write(MSG).syncUninterruptibly();
        channel.pipeline().flush();
        channel.pipeline().read();
        channel.pipeline().disconnect().syncUninterruptibly();
        channel.pipeline().close().syncUninterruptibly();
        channel.pipeline().deregister().syncUninterruptibly();
    }

    /**
     * Asserts the outbound events recorded after doOutboundOperations().
     * NOTE(review): CLOSE is expected twice and DISCONNECT never —
     * EmbeddedChannel does not support disconnect, so the pipeline translates
     * disconnect() into close() before it reaches the handlers (confirm
     * against ChannelMetadata.hasDisconnect()).
     */
    private static void assertOutboundOperations(OutboundEventHandler outboundHandler) {
        assertEquals(Event.BIND, outboundHandler.pollEvent());
        assertEquals(Event.CONNECT, outboundHandler.pollEvent());
        assertEquals(Event.WRITE, outboundHandler.pollEvent());
        assertEquals(Event.FLUSH, outboundHandler.pollEvent());
        assertEquals(Event.READ, outboundHandler.pollEvent());
        assertEquals(Event.CLOSE, outboundHandler.pollEvent());
        assertEquals(Event.CLOSE, outboundHandler.pollEvent());
        assertEquals(Event.DEREGISTER, outboundHandler.pollEvent());
    }

    /** Fires one of each inbound event through the pipeline. */
    private static void doInboundOperations(Channel channel) {
        channel.pipeline().fireChannelRegistered();
        channel.pipeline().fireChannelActive();
        channel.pipeline().fireChannelRead(MSG);
        channel.pipeline().fireChannelReadComplete();
        channel.pipeline().fireExceptionCaught(CAUSE);
        channel.pipeline().fireUserEventTriggered(USER_EVENT);
        channel.pipeline().fireChannelWritabilityChanged();
        channel.pipeline().fireChannelInactive();
        channel.pipeline().fireChannelUnregistered();
    }

    /** Asserts the inbound events recorded after doInboundOperations(). */
    private static void assertInboundOperations(InboundEventHandler handler) {
        assertEquals(Event.REGISTERED, handler.pollEvent());
        assertEquals(Event.ACTIVE, handler.pollEvent());
        assertEquals(Event.CHANNEL_READ, handler.pollEvent());
        assertEquals(Event.CHANNEL_READ_COMPLETE, handler.pollEvent());
        assertEquals(Event.EXCEPTION_CAUGHT, handler.pollEvent());
        assertEquals(Event.USER_EVENT_TRIGGERED, handler.pollEvent());
        assertEquals(Event.CHANNEL_WRITABILITY_CHANGED, handler.pollEvent());
        assertEquals(Event.INACTIVE, handler.pollEvent());
        assertEquals(Event.UNREGISTERED, handler.pollEvent());
    }

    // Outbound promises passed into the pipeline must be completed even when a
    // combined handler sits in the middle. Timeout guards against a promise
    // never completing.
    @Test
    @Timeout(value = 3000, unit = TimeUnit.MILLISECONDS)
    public void testPromisesPassed() {
        OutboundEventHandler outboundHandler = new OutboundEventHandler();
        EmbeddedChannel ch = new EmbeddedChannel(outboundHandler,
                new CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler>(
                        new ChannelInboundHandlerAdapter(), new ChannelOutboundHandlerAdapter()));
        ChannelPipeline pipeline = ch.pipeline();
        ChannelPromise promise = ch.newPromise();
        pipeline.bind(LOCAL_ADDRESS, promise);
        promise.syncUninterruptibly();
        promise = ch.newPromise();
        pipeline.connect(REMOTE_ADDRESS, LOCAL_ADDRESS, promise);
        promise.syncUninterruptibly();
        promise = ch.newPromise();
        pipeline.close(promise);
        promise.syncUninterruptibly();
        promise = ch.newPromise();
        pipeline.disconnect(promise);
        promise.syncUninterruptibly();
        promise = ch.newPromise();
        pipeline.write(MSG, promise);
        promise.syncUninterruptibly();
        promise = ch.newPromise();
        pipeline.deregister(promise);
        promise.syncUninterruptibly();
        ch.finish();
    }

    // A combined handler may never claim to be sharable.
    @Test
    public void testNotSharable() {
        assertThrows(IllegalStateException.class, new Executable() {
            @Override
            public void execute() {
                new CombinedChannelDuplexHandler<ChannelInboundHandler, ChannelOutboundHandler>() {
                    @Override
                    public boolean isSharable() {
                        return true;
                    }
                };
            }
        });
    }

    /** Records every inbound callback into a queue for later assertion. */
    private static final class InboundEventHandler extends ChannelInboundHandlerAdapter {
        private final Queue<Object> queue = new ArrayDeque<Object>();

        @Override
        public void handlerAdded(ChannelHandlerContext ctx) {
            queue.add(Event.HANDLER_ADDED);
        }

        @Override
        public void handlerRemoved(ChannelHandlerContext ctx) {
            queue.add(Event.HANDLER_REMOVED);
        }

        @Override
        public void channelRegistered(ChannelHandlerContext ctx) {
            queue.add(Event.REGISTERED);
        }

        @Override
        public void channelUnregistered(ChannelHandlerContext ctx) {
            queue.add(Event.UNREGISTERED);
        }

        @Override
        public void channelActive(ChannelHandlerContext ctx) {
            queue.add(Event.ACTIVE);
        }

        @Override
        public void channelInactive(ChannelHandlerContext ctx) {
            queue.add(Event.INACTIVE);
        }

        @Override
        public void channelRead(ChannelHandlerContext ctx, Object msg) {
            queue.add(Event.CHANNEL_READ);
        }

        @Override
        public void channelReadComplete(ChannelHandlerContext ctx) {
            queue.add(Event.CHANNEL_READ_COMPLETE);
        }

        @Override
        public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
            queue.add(Event.USER_EVENT_TRIGGERED);
        }

        @Override
        public void channelWritabilityChanged(ChannelHandlerContext ctx) {
            queue.add(Event.CHANNEL_WRITABILITY_CHANGED);
        }

        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
            queue.add(Event.EXCEPTION_CAUGHT);
        }

        /** Returns the next recorded event, rethrowing any queued assertion failure. */
        Event pollEvent() {
            Object o = queue.poll();
            if (o instanceof AssertionError) {
                throw (AssertionError) o;
            }
            return (Event) o;
        }
    }

    /**
     * Records every outbound callback into a queue; argument mismatches are
     * turned into failed promises rather than thrown, so they surface where
     * the promise is synced.
     */
    private static final class OutboundEventHandler extends ChannelOutboundHandlerAdapter {
        private final Queue<Object> queue = new ArrayDeque<Object>();

        @Override
        public void handlerAdded(ChannelHandlerContext ctx) {
            queue.add(Event.HANDLER_ADDED);
        }

        @Override
        public void handlerRemoved(ChannelHandlerContext ctx) {
            queue.add(Event.HANDLER_REMOVED);
        }

        @Override
        public void bind(ChannelHandlerContext ctx, SocketAddress localAddress, ChannelPromise promise) {
            try {
                assertSame(LOCAL_ADDRESS, localAddress);
                queue.add(Event.BIND);
                promise.setSuccess();
            } catch (AssertionError e) {
                promise.setFailure(e);
            }
        }

        @Override
        public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress,
                            SocketAddress localAddress, ChannelPromise promise) {
            try {
                assertSame(REMOTE_ADDRESS, remoteAddress);
                assertSame(LOCAL_ADDRESS, localAddress);
                queue.add(Event.CONNECT);
                promise.setSuccess();
            } catch (AssertionError e) {
                promise.setFailure(e);
            }
        }

        @Override
        public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) {
            queue.add(Event.DISCONNECT);
            promise.setSuccess();
        }

        @Override
        public void close(ChannelHandlerContext ctx, ChannelPromise promise) {
            queue.add(Event.CLOSE);
            promise.setSuccess();
        }

        @Override
        public void deregister(ChannelHandlerContext ctx, ChannelPromise promise) {
            queue.add(Event.DEREGISTER);
            promise.setSuccess();
        }

        @Override
        public void read(ChannelHandlerContext ctx) {
            queue.add(Event.READ);
        }

        @Override
        public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) {
            try {
                assertSame(MSG, msg);
                queue.add(Event.WRITE);
                promise.setSuccess();
            } catch (AssertionError e) {
                promise.setFailure(e);
            }
        }

        @Override
        public void flush(ChannelHandlerContext ctx) {
            queue.add(Event.FLUSH);
        }

        /** Returns the next recorded event, rethrowing any queued assertion failure. */
        Event pollEvent() {
            Object o = queue.poll();
            if (o instanceof AssertionError) {
                throw (AssertionError) o;
            }
            return (Event) o;
        }
    }
}
| |
/*
* Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* $Id: FSUtils.java 4975 2009-02-02 08:30:52Z lsantha $
*
* Copyright (C) 2003-2009 JNode.org
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library; If not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.jnode.fs.util;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.logging.Level;
import com.sun.max.ve.logging.*;
import org.jnode.fs.FSDirectory;
import org.jnode.fs.FSEntry;
import org.jnode.fs.FSFile;
/**
 * File-system helper routines: path-name splitting, diagnostic
 * {@code toString} renderings of FS entries/files/directories, and hex dumps
 * of byte buffers.
 *
 * @author Fabien DUMINY
 */
public class FSUtils {
    private static final Logger log = Logger.getLogger(FSUtils.class.getName());

    /** Maximum number of bytes rendered by the hex-dump methods. */
    private static final int MAX_DUMP_SIZE = 256;
    /** Number of bytes shown per hex-dump line. */
    private static final int LINE_SIZE = 16;

    // NOTE(review): SimpleDateFormat is not thread-safe; concurrent calls to
    // toString(String, Date) may produce corrupted output. Kept as a shared
    // protected field because it is part of this class's public surface.
    protected static DateFormat dateFormat = new SimpleDateFormat();

    /**
     * Returns the last component of a path.
     *
     * @param path the path to split
     * @param separator the path separator character
     * @return the portion of {@code path} after the last separator, or the
     *         whole path if it contains no separator
     */
    public static String getName(String path, char separator) {
        int idx = path.lastIndexOf(separator);
        return (idx >= 0) ? path.substring(idx + 1) : path;
    }

    /**
     * Returns the parent portion of a path.
     *
     * @param path the path to split
     * @param separator the path separator character
     * @return the portion of {@code path} before the last separator, or the
     *         empty string if it contains no separator
     */
    public static String getParentName(String path, char separator) {
        int idx = path.lastIndexOf(separator);
        return (idx < 0) ? "" : path.substring(0, idx);
    }

    /**
     * Renders an {@link FSEntry} as a diagnostic string. I/O errors raised by
     * the entry's accessors are embedded in the output (between "###") and
     * logged rather than propagated.
     *
     * @param entry the entry to render (may be null)
     * @param deep if true, also render the entry's file or directory content
     * @return the diagnostic string
     */
    public static String toString(FSEntry entry, boolean deep) {
        if (entry == null) {
            return "<FSEntry>NULL</FSEntry>";
        }
        StringBuilder sb = new StringBuilder(2048);
        sb.append("<FSEntry>");
        sb.append(" name=").append(entry.getName());
        try {
            sb.append(toStringDate(" lastModified=", entry.getLastModified()));
        } catch (IOException e) {
            sb.append(" lastModified=###").append(e.getMessage()).append("###");
            log.log(Level.SEVERE, "error in lastModified", e);
        }
        try {
            sb.append(" isDirty=").append(entry.isDirty());
        } catch (IOException e1) {
            sb.append(" isDirty=###").append(e1.getMessage()).append("###");
            log.log(Level.SEVERE, "error in isDirty", e1);
        }
        sb.append(" isValid=").append(entry.isValid());
        sb.append(" isFile=").append(entry.isFile());
        if (deep && entry.isFile()) {
            try {
                sb.append(toString(entry.getFile()));
            } catch (IOException e2) {
                sb.append(" getFile=###").append(e2.getMessage()).append("###");
                log.log(Level.SEVERE, "error in getFile", e2);
            }
        }
        sb.append(" isDir=").append(entry.isDirectory());
        if (deep && entry.isDirectory()) {
            try {
                sb.append(toString(entry.getDirectory()));
            } catch (IOException e3) {
                sb.append(" getDirectory=###").append(e3.getMessage()).append("###");
                log.log(Level.SEVERE, "error in getDirectory", e3);
            }
        }
        sb.append("</FSEntry>");
        return sb.toString();
    }

    /**
     * Renders an {@link FSDirectory} as a shallow diagnostic string.
     *
     * @param dir the directory to render (may be null)
     * @return the diagnostic string
     * @throws IOException never thrown for the shallow form; declared for
     *         signature compatibility with the deep variant
     */
    public static String toString(FSDirectory dir) throws IOException {
        return toString(dir, false);
    }

    /**
     * Renders an {@link FSDirectory} as a diagnostic string.
     *
     * @param dir the directory to render (may be null)
     * @param deep if true, also append the directory's own toString()
     *        (which prints its entry table)
     * @return the diagnostic string
     * @throws IOException if reading the directory fails
     */
    public static String toString(FSDirectory dir, boolean deep) throws IOException {
        if (dir == null) {
            return "<FSDirectory>NULL</FSDirectory>";
        }
        String str = "<FSDirectory>isValid=" + dir.isValid() + "</FSDirectory>";
        if (deep) {
            str += "\n" + dir.toString(); // also print entry table
        }
        return str;
    }

    /**
     * Renders an {@link FSFile} as a diagnostic string.
     *
     * @param file the file to render (may be null)
     * @return the diagnostic string
     */
    public static String toString(FSFile file) {
        if (file == null) {
            // Fixed: previously printed the mismatched tag "<FSEntry>NULL</FSEntry>".
            return "<FSFile>NULL</FSFile>";
        }
        StringBuilder sb = new StringBuilder(32);
        sb.append("<FSFile>");
        // Fixed: '=' separators added for consistency with the other
        // renderings (previously printed " isValid<value>" with no separator).
        sb.append(" isValid=").append(file.isValid());
        sb.append(" length=").append(file.getLength());
        sb.append("</FSFile>");
        return sb.toString();
    }

    /**
     * Formats an epoch-millisecond timestamp after the given label.
     *
     * @param str the label to prepend
     * @param date the timestamp in milliseconds since the epoch
     * @return the label followed by the formatted date
     */
    public static String toStringDate(String str, long date) {
        return toString(str, new Date(date));
    }

    /**
     * Formats a {@link Date} after the given label using the shared
     * {@link #dateFormat}.
     *
     * @param str the label to prepend
     * @param date the date to format
     * @return the label followed by the formatted date
     */
    public static String toString(String str, Date date) {
        return str + dateFormat.format(date);
    }

    /**
     * Hex-dumps an entire byte array (capped at {@link #MAX_DUMP_SIZE} bytes).
     *
     * @param data the bytes to dump
     * @return the formatted hex dump
     */
    public static String toString(byte[] data) {
        return toString(data, 0, data.length);
    }

    /**
     * Hex-dumps a region of a byte array: one line per {@link #LINE_SIZE}
     * bytes, each line showing the offset, the hex bytes and the raw
     * characters. At most {@link #MAX_DUMP_SIZE} bytes are rendered.
     *
     * @param data the bytes to dump
     * @param offset the index of the first byte to dump
     * @param length assumed to be an end bound, not a count — the dumped size
     *        is derived from {@code length - offset}; TODO confirm against callers
     * @return the formatted hex dump
     */
    public static String toString(byte[] data, int offset, int length) {
        StringBuilder sb = new StringBuilder(1024);
        StringBuilder chars = new StringBuilder(LINE_SIZE);
        int l = Math.min(Math.min(length - offset, data.length - offset), MAX_DUMP_SIZE);
        // Round up to a whole line so the final partial line is still emitted.
        int mod = l % LINE_SIZE;
        if (mod != 0) {
            l += LINE_SIZE - mod;
        }
        for (int i = 0; i < l; i++) {
            if ((i % LINE_SIZE) == 0) {
                sb.append(lpad(Integer.toHexString(i), 4)).append(" - ");
                chars.setLength(0); // start a fresh character column
            }
            int idx = offset + i;
            boolean end = (idx >= data.length);
            if (!end) {
                // Mask with 0xff: Integer.toHexString sign-extends negative
                // bytes to eight hex digits, which broke column alignment.
                sb.append(lpad(Integer.toHexString(data[idx] & 0xff), 2)).append(' ');
                chars.append((char) data[idx]);
            }
            if (((i % LINE_SIZE) == LINE_SIZE - 1) || end) {
                sb.append(" ").append(chars.toString()).append('\n');
                if (end) {
                    // Past the end of the data: stop after flushing the final
                    // partial line (previously this re-appended the line on
                    // every remaining iteration).
                    break;
                }
            }
        }
        return sb.toString();
    }

    /**
     * Left-pads a string with spaces to the given size.
     *
     * @param str the string to pad
     * @param size the minimum result length
     * @return {@code str} left-padded with spaces, or {@code str} unchanged
     *         if it is already at least {@code size} characters long
     */
    public static String lpad(String str, int size) {
        if (str.length() >= size) {
            return str;
        }
        // StringBuilder instead of repeated String concatenation in a loop.
        StringBuilder padded = new StringBuilder(size);
        for (int i = size - str.length(); i > 0; i--) {
            padded.append(' ');
        }
        return padded.append(str).toString();
    }

    /**
     * Renders a region of a byte array as raw characters (one char per byte,
     * no charset decoding).
     *
     * @param data the bytes to render
     * @param offset the index of the first byte
     * @param length the number of bytes to render (clamped to the array end)
     * @return the resulting string
     */
    public static String toStringAsChars(byte[] data, int offset, int length) {
        int l = Math.min(offset + length, data.length);
        StringBuilder sb = new StringBuilder(l);
        for (int i = offset; i < l; i++) {
            sb.append((char) data[i]);
        }
        return sb.toString();
    }
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.skylark.parser;
import com.facebook.buck.core.exceptions.BuckUncheckedExecutionException;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.filesystems.AbsPath;
import com.facebook.buck.core.filesystems.ForwardRelPath;
import com.facebook.buck.core.model.label.Label;
import com.facebook.buck.core.model.label.LabelSyntaxException;
import com.facebook.buck.core.model.label.PackageIdentifier;
import com.facebook.buck.core.model.label.PathFragment;
import com.facebook.buck.core.model.label.RepositoryName;
import com.facebook.buck.core.starlark.compatible.BuckStarlark;
import com.facebook.buck.core.starlark.compatible.BuckStarlarkPrintHandler;
import com.facebook.buck.core.starlark.compatible.StarlarkExportable;
import com.facebook.buck.core.starlark.eventhandler.Event;
import com.facebook.buck.core.starlark.eventhandler.EventHandler;
import com.facebook.buck.core.starlark.rule.SkylarkUserDefinedRule;
import com.facebook.buck.core.util.immutables.BuckStyleValue;
import com.facebook.buck.parser.api.FileManifest;
import com.facebook.buck.parser.api.FileParser;
import com.facebook.buck.parser.exceptions.BuildFileParseException;
import com.facebook.buck.parser.implicit.ImplicitInclude;
import com.facebook.buck.parser.implicit.ImplicitIncludePath;
import com.facebook.buck.parser.implicit.PackageImplicitIncludesFinder;
import com.facebook.buck.parser.options.ProjectBuildFileParserOptions;
import com.facebook.buck.skylark.function.LoadSymbolsContext;
import com.facebook.buck.skylark.io.Globber;
import com.facebook.buck.skylark.packages.PackageContext;
import com.facebook.buck.skylark.parser.context.ParseContext;
import com.facebook.buck.skylark.parser.context.ReadConfigContext;
import com.facebook.buck.util.types.Either;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.UncheckedExecutionException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.util.ArrayDeque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.IntStream;
import javax.annotation.Nullable;
import net.starlark.java.eval.EvalException;
import net.starlark.java.eval.LoadedModule;
import net.starlark.java.eval.Module;
import net.starlark.java.eval.Mutability;
import net.starlark.java.eval.Starlark;
import net.starlark.java.eval.StarlarkThread;
import net.starlark.java.syntax.Location;
import net.starlark.java.syntax.ParserInput;
import net.starlark.java.syntax.Program;
import net.starlark.java.syntax.ResolverModule;
import net.starlark.java.syntax.StarlarkFile;
import net.starlark.java.syntax.SyntaxError;
import org.immutables.value.Value;
/** Abstract parser for files written using Skylark syntax. */
abstract class AbstractSkylarkFileParser<T extends FileManifest> implements FileParser<T> {
// Parser configuration: project root, cell roots, default and package implicit includes,
// and the raw .buckconfig values.
protected final ProjectBuildFileParserOptions options;
// Sink for parse-time diagnostics (syntax errors, replayed events, export errors).
protected final EventHandler eventHandler;
// Built-in symbols predeclared for BUCK build files and .bzl extension files.
protected final BuckGlobals buckGlobals;
// Per-path cache of extensions: left = parsed-but-not-evaluated Program,
// right = fully evaluated ExtensionData.
private final ConcurrentHashMap<AbsPath, Either<Program, ExtensionData>> extensionCache;
// Cache of transitive include information, keyed by extension label.
private final ConcurrentHashMap<Label, IncludesData> includesDataCache;
// Resolves which package-local implicit include (if any) applies to a build-file directory.
private final PackageImplicitIncludesFinder packageImplicitIncludeFinder;
// Lazily computed merge of all cell-wide default includes; null until first computed.
private final AtomicReference<ImplicitlyLoadedExtension> globalImplicitIncludes =
    new AtomicReference<>();
/**
 * @param options parser configuration (project root, cells, implicit includes, raw config)
 * @param buckGlobals built-in symbols exposed to BUCK and .bzl files
 * @param eventHandler sink for parse-time diagnostics
 */
AbstractSkylarkFileParser(
    ProjectBuildFileParserOptions options, BuckGlobals buckGlobals, EventHandler eventHandler) {
  this.options = options;
  this.buckGlobals = buckGlobals;
  this.eventHandler = eventHandler;
  this.extensionCache = new ConcurrentHashMap<>();
  this.includesDataCache = new ConcurrentHashMap<>();
  this.packageImplicitIncludeFinder =
      PackageImplicitIncludesFinder.fromConfiguration(options.getPackageImplicitIncludes());
}
/** Whether this parser handles BUCK build files or PACKAGE files. */
abstract BuckOrPackage getBuckOrPackage();

/**
 * Assembles the parser-specific result after {@code parseFile} has been evaluated.
 *
 * @param parseFile absolute path of the file that was parsed
 * @param context parse context populated during evaluation
 * @param readConfigContext context recording config reads made during evaluation
 * @param globber globber used while evaluating the file
 * @param loadedPaths all files read to evaluate {@code parseFile}, including itself
 */
abstract ParseResult getParseResult(
    AbsPath parseFile,
    ParseContext context,
    ReadConfigContext readConfigContext,
    Globber globber,
    ImmutableList<String> loadedPaths);

/** Creates the globber used to evaluate {@code glob()} calls in {@code parseFile}. */
abstract Globber getGlobber(ForwardRelPath parseFile);
/**
 * Loads a single configured implicit include and reduces it to its transitive load closure
 * plus the symbols it exports.
 */
private ImplicitlyLoadedExtension loadImplicitInclude(ImplicitIncludePath path)
    throws IOException, InterruptedException {
  LoadImport implicitImport =
      ImmutableLoadImport.ofImpl(
          implicitIncludeContainingLabel(), path.reconstructWithAtAndColon(), Location.BUILTIN);
  ExtensionData data = loadExtensionFromImport(implicitImport, LoadStack.EMPTY);
  return ImmutableImplicitlyLoadedExtension.ofImpl(
      data.getLoadTransitiveClosure(), data.getExtension().getSymbols());
}
/**
 * Loads (at most once) the extensions configured as default includes for every build file.
 *
 * <p>Thread-safe: concurrent callers may each compute the merged extension, but only the
 * first publisher wins; every caller returns the single canonical instance. This is correct
 * because the reference only ever transitions from null to non-null.
 */
private ImplicitlyLoadedExtension loadGlobalImplicitIncludes()
    throws IOException, InterruptedException {
  ImplicitlyLoadedExtension cached = this.globalImplicitIncludes.get();
  if (cached != null) {
    return cached;
  }
  ImmutableList.Builder<ImplicitlyLoadedExtension> loaded = ImmutableList.builder();
  for (ImplicitIncludePath include : options.getDefaultIncludes()) {
    loaded.add(loadImplicitInclude(include));
  }
  ImplicitlyLoadedExtension merged = ImplicitlyLoadedExtension.merge(loaded.build());
  // Publish; if another thread won the race, return its (equivalent) instance instead.
  if (this.globalImplicitIncludes.compareAndSet(null, merged)) {
    return merged;
  }
  return this.globalImplicitIncludes.get();
}
/**
 * Loads the extension implicitly in scope for the build file under {@code basePath}.
 *
 * <p>PACKAGE files get no implicit loads. When a package-local implicit include is configured
 * for this directory, its requested symbols are merged on top of the global implicit
 * includes; otherwise only the global implicit includes apply.
 */
private ImplicitlyLoadedExtension loadImplicitExtension(
    ForwardRelPath basePath, LoadStack loadStack) throws IOException, InterruptedException {
  if (getBuckOrPackage() != BuckOrPackage.BUCK) {
    return ImplicitlyLoadedExtension.empty();
  }
  Optional<ImplicitInclude> implicitInclude =
      packageImplicitIncludeFinder.findIncludeForBuildFile(basePath);
  if (!implicitInclude.isPresent()) {
    return loadGlobalImplicitIncludes();
  }
  // Only export requested symbols, and ensure that all requested symbols are present.
  ExtensionData data =
      loadExtensionFromImport(
          ImmutableLoadImport.ofImpl(
              implicitIncludeContainingLabel(),
              implicitInclude.get().getRawImportLabel().reconstructWithAtAndColon(),
              Location.BUILTIN),
          loadStack);
  LoadedModule symbols = data.getExtension();
  ImmutableMap<String, String> expectedSymbols = implicitInclude.get().getSymbols();
  // The include maps local name (key) -> name exported by the extension (value).
  Builder<String, Object> loaded = ImmutableMap.builderWithExpectedSize(expectedSymbols.size());
  for (Entry<String, String> kvp : expectedSymbols.entrySet()) {
    Object symbol = symbols.getGlobal(kvp.getValue());
    if (symbol == null) {
      throw BuildFileParseException.createForUnknownParseError(
          String.format(
              "Could not find symbol '%s' in implicitly loaded extension '%s'",
              kvp.getValue(), implicitInclude.get().getLoadPath()));
    }
    loaded.put(kvp.getKey(), symbol);
  }
  return ImplicitlyLoadedExtension.merge(
      ImmutableList.of(
          loadGlobalImplicitIncludes(),
          ImmutableImplicitlyLoadedExtension.ofImpl(
              data.getLoadTransitiveClosure(), loaded.build())));
}
/** Synthetic label used as the "containing file" when resolving implicit-include imports. */
private Label implicitIncludeContainingLabel() {
  RepositoryName cell = RepositoryName.createFromValidStrippedName(options.getCellName());
  PackageIdentifier rootPackage = PackageIdentifier.create(cell, PathFragment.EMPTY_FRAGMENT);
  return Label.createUnvalidated(rootPackage, "BUCK");
}
/**
 * Parses and evaluates one build/package file.
 *
 * @param parseFile project-relative path of the file to parse
 * @return The parsed result defined in {@code parseFile}.
 */
protected ParseResult parse(ForwardRelPath parseFile)
    throws IOException, BuildFileParseException, InterruptedException {
  AbsPath parseFileAbs = options.getProjectRoot().resolve(parseFile);
  ForwardRelPath basePath = parseFile.getParentButEmptyForSingleSegment();
  Label containingLabel = createContainingLabel(basePath);
  LoadStack loadStack = LoadStack.top(Location.fromFile(parseFile.toString()));
  // Implicit includes are loaded first: their symbols are predeclared for compilation below.
  ImplicitlyLoadedExtension implicitLoad = loadImplicitExtension(basePath, loadStack);
  Program buildFileAst =
      parseSkylarkFile(
          parseFileAbs, loadStack, getBuckOrPackage().fileKind, implicitLoad.getLoadedSymbols());
  Globber globber = getGlobber(parseFile);
  PackageContext packageContext =
      createPackageContext(basePath, globber, implicitLoad.getLoadedSymbols());
  ParseContext parseContext = new ParseContext(packageContext);
  ReadConfigContext readConfigContext = new ReadConfigContext(packageContext.getRawConfig());
  // The Mutability scopes all values created while executing this file; closing it freezes them.
  try (Mutability mutability = Mutability.create("parsing " + parseFile)) {
    EnvironmentData envData =
        createBuildFileEvaluationEnvironment(
            parseFileAbs,
            containingLabel,
            buildFileAst,
            mutability,
            parseContext,
            readConfigContext,
            implicitLoad.getLoadTransitiveClosure());
    Module module = new Module(buildFileAst.getModule());
    exec(loadStack, buildFileAst, module, envData.getEnvironment(), "file %s", parseFile);
    // Loaded paths: the file itself first, then everything pulled in via load()/implicit loads.
    ImmutableList.Builder<String> loadedPaths =
        ImmutableList.builderWithExpectedSize(envData.getLoadedPaths().size() + 1);
    loadedPaths.add(parseFileAbs.toString());
    loadedPaths.addAll(envData.getLoadedPaths());
    return getParseResult(
        parseFileAbs, parseContext, readConfigContext, globber, loadedPaths.build());
  }
}
/**
 * Executes a compiled Starlark program in {@code module} on {@code thread}, translating
 * failures into {@link BuildFileParseException}s carrying the load stack.
 *
 * @param what format string describing what is being evaluated (used in error messages)
 * @param whatArgs arguments for {@code what}
 */
private void exec(
    LoadStack loadStack,
    Program program,
    Module module,
    StarlarkThread thread,
    String what,
    Object... whatArgs)
    throws InterruptedException {
  try {
    Starlark.execFileProgram(program, module, thread);
  } catch (EvalException e) {
    String whatFormatted = String.format(what, whatArgs);
    throw new BuildFileParseException(
        e,
        loadStack.toDependencyStack(),
        "Cannot evaluate " + whatFormatted + "\n" + e.getMessageWithStack());
  } catch (InterruptedException | BuildFileParseException e) {
    // Already the right abstraction for callers; rethrow unchanged.
    throw e;
  } catch (Exception e) {
    if (e instanceof Starlark.UncheckedEvalException
        && e.getCause() instanceof BuildFileParseException) {
      // thrown by post-assign hook
      throw (BuildFileParseException) e.getCause();
    }
    throw new BuckUncheckedExecutionException(e, "When evaluating " + what, whatArgs);
  }
}
/**
 * @return The environment that can be used for evaluating build files. It includes built-in
 *     functions like {@code glob} and native rules like {@code java_library}.
 */
private EnvironmentData createBuildFileEvaluationEnvironment(
    AbsPath buildFilePath,
    Label containingLabel,
    Program buildFileAst,
    Mutability mutability,
    ParseContext parseContext,
    ReadConfigContext readConfigContext,
    ImmutableSet<String> implicitLoadExtensionTransitiveClosure)
    throws IOException, InterruptedException, BuildFileParseException {
  // Evaluate every extension this build file load()s (transitively) before executing it.
  ImmutableMap<String, ExtensionData> dependencies =
      loadExtensions(containingLabel, getImports(buildFileAst, containingLabel), LoadStack.EMPTY);
  StarlarkThread env = new StarlarkThread(mutability, BuckStarlark.BUCK_STARLARK_SEMANTICS);
  env.setPrintHandler(new BuckStarlarkPrintHandler(eventHandler));
  // load() statements executed by the interpreter resolve against the preloaded extensions.
  env.setLoader(Maps.transformValues(dependencies, ExtensionData::getExtension)::get);
  parseContext.setup(env);
  readConfigContext.setup(env);
  return ImmutableEnvironmentData.ofImpl(
      env,
      toLoadedPaths(
          buildFilePath, dependencies.values(), implicitLoadExtensionTransitiveClosure));
}
/** Creates the per-package context handed to native functions while evaluating one file. */
private PackageContext createPackageContext(
    ForwardRelPath basePath,
    Globber globber,
    ImmutableMap<String, Object> implicitlyLoadedSymbols) {
  String cellName = options.getCellName();
  return PackageContext.of(
      globber, options.getRawConfig(), cellName, basePath, eventHandler, implicitlyLoadedSymbols);
}
/** Builds the label of the BUCK file for the package rooted at {@code basePath}. */
protected Label createContainingLabel(ForwardRelPath basePath) {
  RepositoryName cell = RepositoryName.createFromValidStrippedName(options.getCellName());
  PathFragment packagePath = PathFragment.createAlreadyNormalized(basePath.toString());
  return Label.createUnvalidated(PackageIdentifier.create(cell, packagePath), "BUCK");
}
/**
 * @param containingPath the path of the build or extension file that has provided dependencies.
 * @param dependencies the list of extension dependencies that {@code containingPath} has.
 * @return transitive closure of all paths loaded during parsing of {@code containingPath}
 *     including {@code containingPath} itself as the first element.
 */
private ImmutableList<String> toLoadedPaths(
    AbsPath containingPath,
    ImmutableCollection<ExtensionData> dependencies,
    ImmutableSet<String> implicitLoadExtensionTransitiveClosure) {
  // expected size is used to reduce the number of unnecessary resize invocations
  int expectedSize = 1;
  expectedSize += implicitLoadExtensionTransitiveClosure.size();
  for (ExtensionData dependency : dependencies) {
    expectedSize += dependency.getLoadTransitiveClosure().size();
  }
  ImmutableList.Builder<String> loadedPathsBuilder =
      ImmutableList.builderWithExpectedSize(expectedSize);
  // The containing file is always the first element of the result.
  // (A stale comment here claimed indexed loops were used over foreach for performance,
  // but these are enhanced-for loops; kept as-is since behavior is identical.)
  loadedPathsBuilder.add(containingPath.toString());
  for (ExtensionData dependency : dependencies) {
    loadedPathsBuilder.addAll(dependency.getLoadTransitiveClosure());
  }
  loadedPathsBuilder.addAll(implicitLoadExtensionTransitiveClosure);
  return loadedPathsBuilder.build();
}
/**
 * Reads a file and returns the abstract syntax tree for that file.
 *
 * <p>Syntax errors are replayed on the parser's event handler; this method itself does not
 * throw on syntax errors — callers must inspect {@code errors()}.
 *
 * @param path file path to read the data from.
 * @return abstract syntax tree; does not handle any errors.
 */
@VisibleForTesting
protected StarlarkFile readSkylarkAST(AbsPath path) throws IOException {
  byte[] bytes = Files.readAllBytes(path.getPath());
  StarlarkFile ast = StarlarkFile.parse(ParserInput.fromUTF8(bytes, path.toString()));
  Event.replayEventsOn(eventHandler, ast.errors());
  return ast;
}
/**
 * Chooses the predeclared-symbol module for the given file kind. Implicit includes are only
 * legal for build files, never for .bzl extensions.
 */
private ResolverModule makeModule(
    FileKind fileKind, ImmutableMap<String, Object> implicitIncludes) {
  if (fileKind != FileKind.BZL) {
    return buckGlobals.makeBuckBuildFileContextGlobals(implicitIncludes);
  }
  Preconditions.checkArgument(
      implicitIncludes.isEmpty(), "cannot use implicit includes when loading .bzl");
  return buckGlobals.makeBuckLoadContextGlobals();
}
/**
 * Reads and compiles a Starlark file into a {@link Program}.
 *
 * @param path file to parse
 * @param loadStack chain of load() locations, used for error reporting
 * @param fileKind whether the file is a build file or a .bzl extension
 * @param implicitIncludes implicitly loaded symbols (must be empty for .bzl files)
 * @throws BuildFileParseException if the file is missing, fails to parse, or fails to compile
 */
private Program parseSkylarkFile(
    AbsPath path,
    LoadStack loadStack,
    FileKind fileKind,
    ImmutableMap<String, Object> implicitIncludes)
    throws BuildFileParseException, IOException {
  StarlarkFile starlarkFile;
  try {
    starlarkFile = readSkylarkAST(path);
  } catch (NoSuchFileException e) {
    throw BuildFileParseException.createForUnknownParseError(
        loadStack.toDependencyStack(), "%s cannot be loaded because it does not exist", path);
  }
  if (!starlarkFile.errors().isEmpty()) {
    throw BuildFileParseException.createForUnknownParseError(
        loadStack.toDependencyStack(), "Cannot parse %s", path);
  }
  // NOTE(review): readSkylarkAST already replayed syntax errors, and the check above throws
  // when any exist, so this replay/check pair looks redundant — confirm before removing.
  Event.replayEventsOn(eventHandler, starlarkFile.errors());
  if (!starlarkFile.errors().isEmpty()) {
    throw BuildFileParseException.createForUnknownParseError(
        loadStack.toDependencyStack(), "Cannot parse %s", path);
  }
  if (fileKind != FileKind.BZL) {
    // Build files are restricted to a subset of Starlark syntax.
    if (!StarlarkBuckFileSyntax.checkBuildSyntax(starlarkFile, eventHandler)) {
      throw BuildFileParseException.createForUnknownParseError(
          loadStack.toDependencyStack(), "Cannot parse %s", path);
    }
  }
  Program result;
  try {
    ResolverModule module = makeModule(fileKind, implicitIncludes);
    result = Program.compileFile(starlarkFile, module);
    module.freeze();
  } catch (SyntaxError.Exception e) {
    Event.replayEventsOn(eventHandler, starlarkFile.errors());
    throw BuildFileParseException.createForUnknownParseError(
        loadStack.toDependencyStack(), "Cannot parse %s", path);
  }
  // Re-check after compilation — presumably resolution can append errors to the file;
  // TODO confirm against Program.compileFile semantics.
  if (!starlarkFile.errors().isEmpty()) {
    throw BuildFileParseException.createForUnknownParseError(
        loadStack.toDependencyStack(), "Cannot parse %s", path);
  }
  return result;
}
/**
 * Extracts the load() statements of {@code file} as {@link LoadImport}s anchored at
 * {@code fileLabel}.
 */
private static ImmutableList<LoadImport> getImports(Program file, Label fileLabel) {
  List<String> loads = file.getLoads();
  ImmutableList.Builder<LoadImport> imports =
      ImmutableList.builderWithExpectedSize(loads.size());
  for (int i = 0; i < loads.size(); i++) {
    imports.add(ImmutableLoadImport.ofImpl(fileLabel, loads.get(i), file.getLoadLocation(i)));
  }
  return imports.build();
}
/**
 * Creates an {@code IncludesData} object from a {@code path}.
 *
 * <p>Parses the extension (without evaluating it) and recursively collects the includes of
 * its own load() statements.
 *
 * @param loadImport an import label representing an extension to load.
 */
private IncludesData loadIncludeImpl(LoadImport loadImport, LoadStack loadStack)
    throws IOException, BuildFileParseException, InterruptedException {
  Label label = loadImport.getLabel();
  AbsPath filePath = getImportPath(label, loadImport.getImport());
  Program fileAst = parseSkylarkFile(filePath, loadStack, FileKind.BZL, ImmutableMap.of());
  ImmutableList<IncludesData> dependencies =
      loadIncludes(label, getImports(fileAst, label), loadStack);
  return ImmutableIncludesData.ofImpl(
      filePath, dependencies, toIncludedPaths(filePath, dependencies, ImmutableSet.of()));
}
/**
 * Creates an {@code IncludesData} object from a {@code path}, memoized in
 * {@link #includesDataCache}.
 *
 * @param loadImport an import label representing an extension to load.
 */
private IncludesData loadInclude(LoadImport loadImport, LoadStack loadStack)
    throws IOException, BuildFileParseException, InterruptedException {
  IncludesData includesData = includesDataCache.get(loadImport.getLabel());
  if (includesData != null) {
    return includesData;
  }
  // Plain get/put rather than computeIfAbsent — likely because the loader throws checked
  // exceptions; a rare duplicate computation is tolerated (last writer wins).
  includesData = loadIncludeImpl(loadImport, loadStack);
  includesDataCache.put(loadImport.getLabel(), includesData);
  return includesData;
}
/** Collects all the included files identified by corresponding Starlark imports. */
private ImmutableList<IncludesData> loadIncludes(
    Label containingLabel, ImmutableList<LoadImport> skylarkImports, LoadStack loadStack)
    throws BuildFileParseException, IOException, InterruptedException {
  // Tracks import strings already handled so duplicate load()s are processed once.
  Set<String> processed = new HashSet<>(skylarkImports.size());
  ImmutableList.Builder<IncludesData> includes =
      ImmutableList.builderWithExpectedSize(skylarkImports.size());
  // foreach is not used to avoid iterator overhead
  for (int i = 0; i < skylarkImports.size(); ++i) {
    LoadImport skylarkImport = skylarkImports.get(i);
    Preconditions.checkState(containingLabel.equals(skylarkImport.getContainingLabel()));
    // sometimes users include the same extension multiple times...
    if (!processed.add(skylarkImport.getImport())) continue;
    try {
      includes.add(
          loadInclude(skylarkImport, loadStack.child(skylarkImport.getImportLocation())));
    } catch (UncheckedExecutionException e) {
      // Unwraps the real parse/IO/interrupt cause and rethrows it.
      propagateRootCause(e);
    }
  }
  return includes.build();
}
/**
 * Computes the set of files transitively included by {@code containingPath}, including
 * {@code containingPath} itself and any implicitly loaded extensions.
 */
private ImmutableSet<String> toIncludedPaths(
    AbsPath containingPath,
    ImmutableList<IncludesData> dependencies,
    ImmutableSet<String> implicitLoadExtensionTransitiveClosure) {
  ImmutableSet.Builder<String> included = ImmutableSet.builder();
  included.add(containingPath.toString());
  for (IncludesData dependency : dependencies) {
    included.addAll(dependency.getLoadTransitiveClosure());
  }
  included.addAll(implicitLoadExtensionTransitiveClosure);
  return included.build();
}
/** Loads all extensions identified by corresponding imports. */
protected ImmutableMap<String, ExtensionData> loadExtensions(
    Label containingLabel, ImmutableList<LoadImport> skylarkImports, LoadStack loadStack)
    throws BuildFileParseException, IOException, InterruptedException {
  // Tracks import strings already handled; duplicate load()s collapse to a single entry.
  Set<String> processed = new HashSet<>(skylarkImports.size());
  ImmutableMap.Builder<String, ExtensionData> extensions =
      ImmutableMap.builderWithExpectedSize(skylarkImports.size());
  // foreach is not used to avoid iterator overhead
  for (int i = 0; i < skylarkImports.size(); ++i) {
    LoadImport skylarkImport = skylarkImports.get(i);
    Preconditions.checkState(containingLabel.equals(skylarkImport.getContainingLabel()));
    // sometimes users include the same extension multiple times...
    if (!processed.add(skylarkImport.getImport())) continue;
    try {
      extensions.put(
          skylarkImport.getImport(),
          loadExtensionFromImport(
              skylarkImport, loadStack.child(skylarkImport.getImportLocation())));
    } catch (UncheckedExecutionException e) {
      // Unwraps the real parse/IO/interrupt cause and rethrows it.
      propagateRootCause(e);
    }
  }
  return extensions.build();
}
/**
 * Propagates the underlying parse exception from {@link UncheckedExecutionException}.
 *
 * <p>This is an unfortunate consequence of having to use {@link
 * LoadingCache#getUnchecked(Object)} when using stream transformations :(
 *
 * <p>TODO(ttsugrii): the logic of extracting root causes to make them user-friendly should be
 * happening somewhere in {@link com.facebook.buck.cli.MainRunner}, since this behavior is not
 * unique to parsing.
 */
private void propagateRootCause(UncheckedExecutionException e)
    throws IOException, InterruptedException {
  Throwable rootCause = Throwables.getRootCause(e);
  if (rootCause instanceof BuildFileParseException) {
    throw (BuildFileParseException) rootCause;
  }
  if (rootCause instanceof IOException) {
    throw (IOException) rootCause;
  }
  if (rootCause instanceof InterruptedException) {
    throw (InterruptedException) rootCause;
  }
  // Not one of the known checked causes: rethrow the unchecked wrapper unchanged.
  throw e;
}
/**
 * Mutable bookkeeping for a single extension being processed by the iterative (work-queue)
 * extension loader: the import that requested it, its resolved path, its load stack, its
 * parsed AST, and the imports it declares.
 */
@VisibleForTesting
static class LocalExtensionLoadState {
  // Extension key being loaded.
  private final LoadImport load;
  // Resolved filesystem path of the extension.
  private final AbsPath path;
  // Chain of load() locations leading to this extension (for error reporting).
  private final LoadStack loadStack;
  // Imports this extension declares; populated via addDependency.
  private final Set<LoadImport> dependencies = new HashSet<>();
  // Parsed program for this extension; set exactly once via setAST.
  private @Nullable Program ast;

  private LocalExtensionLoadState(LoadImport load, AbsPath extensionPath, LoadStack loadStack) {
    this.load = load;
    this.path = extensionPath;
    this.loadStack = loadStack;
  }

  public AbsPath getPath() {
    return path;
  }

  /** True once {@link #setAST} has been called. */
  public boolean haveAST() {
    return ast != null;
  }

  public void setAST(Program ast) {
    Preconditions.checkArgument(!haveAST(), "AST can be set only once");
    this.ast = ast;
  }

  public Program getAST() {
    Preconditions.checkNotNull(ast);
    return ast;
  }

  /** Records a single import declared by this extension. */
  public void addDependency(LoadImport dependency) {
    Preconditions.checkArgument(dependency.getContainingLabel().equals(load.getLabel()));
    dependencies.add(dependency);
  }

  /** Returns the imports recorded so far for this extension. */
  public Set<LoadImport> getDependencies() {
    return dependencies;
  }

  /** Returns the label of the file that load()ed this extension. */
  public Label getParentLabel() {
    return load.getContainingLabel();
  }

  /** Returns this extension's own label. */
  public Label getLabel() {
    return load.getLabel();
  }
}
/**
 * Given the list of load imports, returns the extension data corresponding to those loads,
 * keyed by import string. Requires all of the extensions to already be present in the
 * extension data cache.
 *
 * @param label {@link Label} identifying the extension with dependencies
 * @param dependencies load import dependencies
 * @return map from import string to {@link ExtensionData}
 * @throws BuildFileParseException if any dependency has not been evaluated yet
 */
private ImmutableMap<String, ExtensionData> getDependenciesExtensionData(
    Label label, Set<LoadImport> dependencies) throws BuildFileParseException {
  HashMap<String, ExtensionData> depBuilder = new HashMap<>();
  for (LoadImport dependency : dependencies) {
    ExtensionData extension =
        lookupExtensionForImport(getImportPath(dependency.getLabel(), dependency.getImport()));
    if (extension == null) {
      throw BuildFileParseException.createForUnknownParseError(
          "Cannot evaluate extension file %s; missing dependency is %s",
          label, dependency.getLabel());
    }
    // putIfAbsent: keep the first entry if the same import string appears twice.
    depBuilder.putIfAbsent(dependency.getImport(), extension);
  }
  return ImmutableMap.copyOf(depBuilder);
}
/**
 * Looks up fully evaluated extension data in the cache.
 *
 * @param path a path for the extension to look up
 * @return the cached {@link ExtensionData} for {@code path}, or null if the extension has
 *     not been evaluated yet (the cache may still hold only its parsed program)
 */
private @Nullable ExtensionData lookupExtensionForImport(AbsPath path) {
  Either<Program, ExtensionData> cached = extensionCache.get(path);
  if (cached == null || !cached.isRight()) {
    return null;
  }
  return cached.getRight();
}
/**
 * Returns the cache entry for {@code path}, parsing (but not evaluating) the extension
 * program and caching it if nothing is cached yet.
 *
 * @param path a path for the extension to look up
 * @return either the parsed {@link Program} (not yet evaluated) or the fully evaluated
 *     {@link ExtensionData} for the extension at {@code path}
 */
private Either<Program, ExtensionData> lookupExtensionForImportOrLoadProgram(
    AbsPath path, LoadStack loadStack) throws IOException {
  Either<Program, ExtensionData> either = extensionCache.get(path);
  if (either != null) {
    return either;
  }
  Program program = parseSkylarkFile(path, loadStack, FileKind.BZL, ImmutableMap.of());
  either = Either.ofLeft(program);
  // putIfAbsent: a concurrent caller may have raced us; keep whichever entry won.
  Either<Program, ExtensionData> prev = extensionCache.putIfAbsent(path, either);
  if (prev != null) {
    return prev;
  } else {
    return either;
  }
}
/**
 * Stores {@code extensionData} for {@code path} unless an evaluated extension is already
 * cached, and returns whichever extension ends up in the cache.
 */
private ExtensionData cacheExtension(AbsPath path, ExtensionData extensionData) {
  Either<Program, ExtensionData> stored =
      extensionCache.compute(
          path,
          (k, oldValue) ->
              // An already-evaluated entry wins; otherwise replace the parsed program.
              oldValue != null && oldValue.isRight() ? oldValue : Either.ofRight(extensionData));
  return stored.getRight();
}
/**
 * Updates extension load state with the list of its dependencies, and schedules any
 * unsatisfied dependencies to be loaded by adding those dependencies to the work queue.
 *
 * @param load {@link LocalExtensionLoadState} representing extension currently loaded
 * @param queue a work queue of extensions that still need to be loaded.
 * @return true if this extension has any unsatisfied dependencies
 */
private boolean processExtensionDependencies(
    LocalExtensionLoadState load, ArrayDeque<LocalExtensionLoadState> queue) {
  // Update this load state with the list of its dependencies.
  // Schedule missing dependencies to be loaded.
  boolean haveUnsatisfiedDeps = false;
  ImmutableList<LoadImport> imports = getImports(load.getAST(), load.getLabel());
  for (int i = 0; i < imports.size(); ++i) {
    LoadImport dependency = imports.get(i);
    // Record dependency for this load.
    load.addDependency(dependency);
    AbsPath extensionPath = getImportPath(dependency.getLabel(), dependency.getImport());
    // Only not-yet-evaluated dependencies need to go on the queue.
    if (lookupExtensionForImport(extensionPath) == null) {
      // Schedule dependency to be loaded if needed.
      haveUnsatisfiedDeps = true;
      queue.push(
          new LocalExtensionLoadState(
              dependency, extensionPath, load.loadStack.child(dependency.getImportLocation())));
    }
  }
  return haveUnsatisfiedDeps;
}
/**
 * Given a fully loaded extension represented by {@link LocalExtensionLoadState}, evaluates
 * the extension and returns {@link ExtensionData}.
 *
 * @param load {@link LocalExtensionLoadState} representing the loaded extension
 * @return {@link ExtensionData} for this extension.
 */
@VisibleForTesting
protected ExtensionData buildExtensionData(LocalExtensionLoadState load)
    throws InterruptedException {
  ImmutableMap<String, ExtensionData> dependencies =
      getDependenciesExtensionData(load.getLabel(), load.getDependencies());
  BuckStarlarkLoadedModule loadedExtension;
  try (Mutability mutability = Mutability.create("importing extension")) {
    // Create this extension.
    StarlarkThread extensionEnv =
        new StarlarkThread(mutability, BuckStarlark.BUCK_STARLARK_SEMANTICS);
    extensionEnv.setPrintHandler(new BuckStarlarkPrintHandler(eventHandler));
    // load() statements resolve against the already-evaluated dependency extensions.
    extensionEnv.setLoader(Maps.transformValues(dependencies, ExtensionData::getExtension)::get);
    ReadConfigContext readConfigContext = new ReadConfigContext(options.getRawConfig());
    readConfigContext.setup(extensionEnv);
    LoadSymbolsContext loadSymbolsContext = new LoadSymbolsContext();
    loadSymbolsContext.setup(extensionEnv);
    // Name any exportable (rule/provider) as soon as it is assigned to a global.
    extensionEnv.setPostAssignHook(
        (n, v) -> {
          try {
            ensureExportedIfExportable(load.getLabel(), n, v);
          } catch (EvalException e) {
            // TODO(nga): what about stack trace
            eventHandler.handle(Event.error(e.getDeprecatedLocation(), e.getMessage()));
            throw new BuildFileParseException(e, e.getMessage());
          }
        });
    Program ast = load.getAST();
    // Drop any rule types previously registered for this extension before re-evaluating.
    buckGlobals.getKnownUserDefinedRuleTypes().invalidateExtension(load.getLabel());
    ResolverModule resolverModule = ast.getModule();
    // Must be already frozen, but freeze again to be safe.
    resolverModule.freeze();
    Module module = new Module(resolverModule);
    exec(
        load.loadStack,
        ast,
        module,
        extensionEnv,
        "extension %s referenced from %s",
        load.getLabel(),
        load.getParentLabel());
    extensionEnv.mutability().freeze();
    loadedExtension = new BuckStarlarkLoadedModule(module, loadSymbolsContext.getLoadedSymbols());
  }
  return ImmutableExtensionData.ofImpl(
      loadedExtension,
      load.getPath(),
      dependencies.values(),
      toLoadedPaths(load.getPath(), dependencies.values(), ImmutableSet.of()));
}
/**
 * Calls {@link StarlarkExportable#export(Label, String)} on any objects that are assigned to.
 *
 * <p>This is primarily used to make sure that {@link SkylarkUserDefinedRule} and {@link
 * com.facebook.buck.core.rules.providers.impl.UserDefinedProvider} instances set their name
 * properly upon assignment.
 *
 * @param extensionLabel label of the extension being evaluated
 * @param identifier the name of the variable
 * @param lookedUp exported value
 */
private void ensureExportedIfExportable(Label extensionLabel, String identifier, Object lookedUp)
    throws BuildFileParseException, EvalException {
  if (!(lookedUp instanceof StarlarkExportable)) {
    return;
  }
  StarlarkExportable exportable = (StarlarkExportable) lookedUp;
  if (exportable.isExported()) {
    return;
  }
  Preconditions.checkState(extensionLabel != null);
  exportable.export(extensionLabel, identifier);
  if (exportable instanceof SkylarkUserDefinedRule) {
    this.buckGlobals.getKnownUserDefinedRuleTypes().addRule((SkylarkUserDefinedRule) exportable);
  }
}
/**
 * Creates an extension from a {@code path}.
 *
 * <p>Iterative worklist algorithm: a load stays at the head of the stack until all of its
 * transitive dependencies have been parsed and evaluated; it is then evaluated and cached
 * itself.
 *
 * @param loadImport an import label representing an extension to load.
 */
private ExtensionData loadExtensionFromImport(LoadImport loadImport, LoadStack loadStack)
    throws IOException, BuildFileParseException, InterruptedException {
  ExtensionData extension = null;
  ArrayDeque<LocalExtensionLoadState> work = new ArrayDeque<>();
  AbsPath extensionPath = getImportPath(loadImport.getLabel(), loadImport.getImport());
  work.push(new LocalExtensionLoadState(loadImport, extensionPath, loadStack));
  while (!work.isEmpty()) {
    LocalExtensionLoadState load = work.peek();
    Either<Program, ExtensionData> either =
        lookupExtensionForImportOrLoadProgram(load.getPath(), load.loadStack);
    extension = either.getRightOption().orElse(null);
    if (extension != null) {
      // It's possible that some lower level dependencies already loaded
      // this work item. We're done with it, so pop the queue.
      work.pop();
      continue;
    }
    // Load BuildFileAST if needed.
    boolean astLoaded;
    if (load.haveAST()) {
      astLoaded = false;
    } else {
      load.setAST(either.getLeft());
      astLoaded = true;
    }
    // Dependencies are only scanned on the first visit (when the AST was just attached).
    boolean haveUnsatisfiedDeps = astLoaded && processExtensionDependencies(load, work);
    // NB: If we have unsatisfied dependencies, we don't do anything;
    // more importantly we do not pop the work queue in this case.
    // This load is kept on the queue until all of its dependencies are satisfied.
    if (!haveUnsatisfiedDeps) {
      // We are done with this load; build it and cache it.
      // (removeFirst() is equivalent to pop() on ArrayDeque — both act on the head.)
      work.removeFirst();
      extension = buildExtensionData(load);
      extension = cacheExtension(load.getPath(), extension);
    }
  }
  Preconditions.checkNotNull(extension);
  return extension;
}
/**
 * @return The path to a Skylark extension. For example, for {@code load("//pkg:foo.bzl", "foo")}
 *     import it would return {@code /path/to/repo/pkg/foo.bzl} and for {@code
 *     load("@repo//pkg:foo.bzl", "foo")} it would return {@code /repo/pkg/foo.bzl} assuming that
 *     {@code repo} is located at {@code /repo}.
 * @throws BuildFileParseException for relative loads that reach into nested directories, or
 *     for imports referencing an unknown cell
 */
private AbsPath getImportPath(Label containingLabel, String skylarkImport)
    throws BuildFileParseException {
  if (isRelativeLoad(skylarkImport) && skylarkImport.contains("/")) {
    throw BuildFileParseException.createForUnknownParseError(
        "Relative loads work only for files in the same directory but "
            + skylarkImport
            + " is trying to load a file from a nested directory. "
            + "Please use absolute label instead ([cell]//pkg[/pkg]:target).");
  }
  PathFragment relativeExtensionPath = containingLabel.toPathFragment();
  RepositoryName repository = containingLabel.getPackageIdentifier().getRepository();
  if (repository.isMain()) {
    return options.getProjectRoot().resolve(relativeExtensionPath.toString());
  }
  // Skylark repositories have an "@" prefix, but Buck roots do not, so ignore it
  String repositoryName = repository.getName().substring(1);
  @Nullable AbsPath repositoryPath = options.getCellRoots().get(repositoryName);
  if (repositoryPath == null) {
    throw BuildFileParseException.createForUnknownParseError(
        skylarkImport + " references an unknown repository " + repositoryName);
  }
  return repositoryPath.resolve(relativeExtensionPath.toString());
}
/** Returns true for relative load labels of the form {@code :extension.bzl}. */
private boolean isRelativeLoad(String skylarkImport) {
  return skylarkImport.startsWith(":");
}
/**
 * @return The base path of the provided {@code buildFile}. For example, for {@code
 *     /Users/foo/repo/src/bar/BUCK}, where {@code /Users/foo/repo} is the path to the repo,
 *     it would return {@code src/bar}. Files directly in the project root map to the empty
 *     path.
 */
protected ForwardRelPath getBasePath(AbsPath buildFile) {
  return Optional.ofNullable(options.getProjectRoot().relativize(buildFile).getParent())
      .map(ForwardRelPath::ofRelPath)
      .orElse(ForwardRelPath.EMPTY);
}
/**
 * Returns the sorted set of every file read to evaluate {@code parseFile}: the file itself,
 * its transitive load() includes, and any implicitly loaded extensions.
 */
@Override
public ImmutableSortedSet<String> getIncludedFiles(ForwardRelPath parseFile)
    throws BuildFileParseException, InterruptedException, IOException {
  AbsPath parseFileAbs = options.getProjectRoot().resolve(parseFile);
  ForwardRelPath basePath = parseFile.getParentButEmptyForSingleSegment();
  Label containingLabel = createContainingLabel(basePath);
  ImplicitlyLoadedExtension implicitLoad = loadImplicitExtension(basePath, LoadStack.EMPTY);
  Program buildFileAst =
      parseSkylarkFile(
          parseFileAbs,
          LoadStack.EMPTY,
          getBuckOrPackage().fileKind,
          implicitLoad.getLoadedSymbols());
  ImmutableList<IncludesData> dependencies =
      loadIncludes(containingLabel, getImports(buildFileAst, containingLabel), LoadStack.EMPTY);
  // it might be potentially faster to keep sorted sets for each dependency separately and just
  // merge sorted lists as we aggregate transitive close up
  // But Guava does not seem to have a built-in way of merging sorted lists/sets
  return ImmutableSortedSet.copyOf(
      toIncludedPaths(parseFileAbs, dependencies, implicitLoad.getLoadTransitiveClosure()));
}
/** This parser holds no closeable resources, so closing is a no-op. */
@Override
public void close() throws BuildFileParseException {
  // nothing to do
}
/**
 * A value object for information about load function import, since import string does not provide
 * enough context. For instance, the same import string can represent different logical imports
 * depending on which repository it is resolved in.
 */
@BuckStyleValue
abstract static class LoadImport {
  /** Returns a label of the file containing this import. */
  abstract Label getContainingLabel();

  /** Returns a Skylark import. */
  abstract String getImport();

  /** Returns the source location of the {@code load()} statement, used in error messages. */
  abstract Location getImportLocation();

  /** Returns a label of file being imported. */
  @Value.Derived
  Label getLabel() {
    try {
      // Resolve the import string relative to the containing file's label, honoring any
      // repository remapping in effect.
      return getContainingLabel().getRelativeWithRemapping(getImport());
    } catch (LabelSyntaxException e) {
      throw BuildFileParseException.createForUnknownParseError(
          "Incorrect load location in %s: %s", getImportLocation(), e.getMessage());
    }
  }
}
/**
 * A value object for information about implicit loads. This allows us to both validate implicit
 * import information, and return some additional information needed to setup build file
 * environments in one swoop.
 */
@BuckStyleValue
abstract static class ImplicitlyLoadedExtension {
  /** Returns the transitive closure of files loaded while evaluating the implicit include. */
  abstract ImmutableSet<String> getLoadTransitiveClosure();

  /** Returns the symbols exported by the implicit include, keyed by name. */
  abstract ImmutableMap<String, Object> getLoadedSymbols();

  /** Initialization-on-demand holder so the shared empty instance is created lazily. */
  private static class EmptyHolder {
    // final: this is a shared constant and must never be reassigned.
    static final ImplicitlyLoadedExtension EMPTY =
        ImmutableImplicitlyLoadedExtension.ofImpl(ImmutableSet.of(), ImmutableMap.of());
  }

  /** Returns the shared extension with no loads and no symbols. */
  static ImplicitlyLoadedExtension empty() {
    return EmptyHolder.EMPTY;
  }

  /**
   * Merges several implicit extensions into one, unioning their transitive closures and
   * combining their symbol tables.
   *
   * @throws HumanReadableException if two extensions export the same symbol name.
   */
  static ImplicitlyLoadedExtension merge(List<ImplicitlyLoadedExtension> extensions) {
    if (extensions.isEmpty()) {
      return empty();
    } else if (extensions.size() == 1) {
      // Single element: no merging needed, reuse it as-is.
      return extensions.get(0);
    } else {
      ImmutableSet.Builder<String> loadTransitiveClosure = ImmutableSet.builder();
      HashMap<String, Object> loadedSymbols = new HashMap<>();
      for (ImplicitlyLoadedExtension extension : extensions) {
        loadTransitiveClosure.addAll(extension.getLoadTransitiveClosure());
        for (Entry<String, Object> entry : extension.getLoadedSymbols().entrySet()) {
          // Map.put returns the previous value; a non-null result means a symbol collision.
          Object prevValue = loadedSymbols.put(entry.getKey(), entry.getValue());
          if (prevValue != null) {
            throw new HumanReadableException(
                "non-unique symbol in implicit include: %s", entry.getKey());
          }
        }
      }
      return ImmutableImplicitlyLoadedExtension.ofImpl(
          loadTransitiveClosure.build(), ImmutableMap.copyOf(loadedSymbols));
    }
  }
}
}
| |
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package admanager.axis.v202108.creativeservice;
import static com.google.api.ads.common.lib.utils.Builder.DEFAULT_CONFIGURATION_FILENAME;
import com.beust.jcommander.Parameter;
import com.google.api.ads.admanager.axis.factory.AdManagerServices;
import com.google.api.ads.admanager.axis.v202108.ApiError;
import com.google.api.ads.admanager.axis.v202108.ApiException;
import com.google.api.ads.admanager.axis.v202108.AssetCreativeTemplateVariableValue;
import com.google.api.ads.admanager.axis.v202108.BaseCreativeTemplateVariableValue;
import com.google.api.ads.admanager.axis.v202108.Creative;
import com.google.api.ads.admanager.axis.v202108.CreativeAsset;
import com.google.api.ads.admanager.axis.v202108.CreativeServiceInterface;
import com.google.api.ads.admanager.axis.v202108.LongCreativeTemplateVariableValue;
import com.google.api.ads.admanager.axis.v202108.Size;
import com.google.api.ads.admanager.axis.v202108.StringCreativeTemplateVariableValue;
import com.google.api.ads.admanager.axis.v202108.TemplateCreative;
import com.google.api.ads.admanager.axis.v202108.UrlCreativeTemplateVariableValue;
import com.google.api.ads.admanager.lib.client.AdManagerSession;
import com.google.api.ads.admanager.lib.utils.examples.ArgumentNames;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.ads.common.lib.conf.ConfigurationLoadException;
import com.google.api.ads.common.lib.exception.OAuthException;
import com.google.api.ads.common.lib.exception.ValidationException;
import com.google.api.ads.common.lib.utils.Media;
import com.google.api.ads.common.lib.utils.examples.CodeSampleParams;
import com.google.api.client.auth.oauth2.Credential;
import java.io.IOException;
import java.rmi.RemoteException;
import java.util.Random;
/**
* This example creates a new template creative for a given advertiser. To determine which creatives
* already exist, run GetAllCreatives.java. To determine which creative templates exist, run
* GetAllCreativeTemplates.java.
*
* <p>Credentials and properties in {@code fromFile()} are pulled from the "ads.properties" file.
* See README for more info.
*/
public class CreateCreativesFromTemplates {

  // Single shared RNG; creating a fresh Random per value is wasteful and can produce
  // identical seeds when called in quick succession.
  private static final Random RANDOM = new Random();

  private static class CreateCreativesFromTemplatesParams extends CodeSampleParams {
    @Parameter(
        names = ArgumentNames.ADVERTISER_ID,
        required = true,
        description =
            "The ID of the advertiser (company) that all creatives will be assigned" + " to.")
    private Long advertiserId;
  }

  /**
   * Runs the example.
   *
   * @param adManagerServices the services factory.
   * @param session the session.
   * @param advertiserId the ID of the advertiser (company) that all creatives will be assigned to.
   * @throws ApiException if the API request failed with one or more service errors.
   * @throws RemoteException if the API request failed due to other errors.
   * @throws IOException if unable to get media data from the URL.
   */
  public static void runExample(
      AdManagerServices adManagerServices, AdManagerSession session, long advertiserId)
      throws IOException {
    // Get the CreativeService.
    CreativeServiceInterface creativeService =
        adManagerServices.get(session, CreativeServiceInterface.class);

    // Create creative size.
    Size size = new Size();
    size.setWidth(600);
    size.setHeight(315);
    size.setIsAspectRatio(false);

    // Use the image banner with optional third party tracking template.
    // To determine what other creative templates exist,
    // run GetAllCreativeTemplates.java.
    long creativeTemplateId = 10000680L;

    // Create a template creative.
    TemplateCreative templateCreative = new TemplateCreative();
    templateCreative.setName("Template creative #" + RANDOM.nextInt(Integer.MAX_VALUE));
    templateCreative.setAdvertiserId(advertiserId);
    templateCreative.setCreativeTemplateId(creativeTemplateId);
    templateCreative.setSize(size);

    // Create the asset variable value.
    AssetCreativeTemplateVariableValue assetVariableValue =
        new AssetCreativeTemplateVariableValue();
    assetVariableValue.setUniqueName("Imagefile");
    CreativeAsset asset = new CreativeAsset();
    asset.setAssetByteArray(Media.getMediaDataFromUrl("https://goo.gl/3b9Wfh"));
    // Filenames must be unique.
    asset.setFileName(String.format("image%s.jpg", RANDOM.nextInt(Integer.MAX_VALUE)));
    assetVariableValue.setAsset(asset);

    // Create the image width variable value.
    LongCreativeTemplateVariableValue imageWidthVariableValue =
        new LongCreativeTemplateVariableValue();
    imageWidthVariableValue.setUniqueName("Imagewidth");
    imageWidthVariableValue.setValue(300L);

    // Create the image height variable value.
    LongCreativeTemplateVariableValue imageHeightVariableValue =
        new LongCreativeTemplateVariableValue();
    imageHeightVariableValue.setUniqueName("Imageheight");
    imageHeightVariableValue.setValue(250L);

    // Create the URL variable value.
    UrlCreativeTemplateVariableValue urlVariableValue = new UrlCreativeTemplateVariableValue();
    urlVariableValue.setUniqueName("ClickthroughURL");
    urlVariableValue.setValue("www.google.com");

    // Create the target window variable value.
    StringCreativeTemplateVariableValue targetWindowVariableValue =
        new StringCreativeTemplateVariableValue();
    targetWindowVariableValue.setUniqueName("Targetwindow");
    // "_blank" is the HTML target keyword for opening in a new window; the previous
    // value "__blank" (double underscore) is not a valid target keyword.
    targetWindowVariableValue.setValue("_blank");

    // Set the creative template variables.
    templateCreative.setCreativeTemplateVariableValues(
        new BaseCreativeTemplateVariableValue[] {
          assetVariableValue,
          imageWidthVariableValue,
          imageHeightVariableValue,
          urlVariableValue,
          targetWindowVariableValue
        });

    // Create the creative on the server.
    Creative[] creatives = creativeService.createCreatives(new Creative[] {templateCreative});

    for (Creative createdCreative : creatives) {
      System.out.printf(
          "A creative with ID %d, name '%s', and type '%s'"
              + " was created and can be previewed at: %s%n",
          createdCreative.getId(),
          createdCreative.getName(),
          createdCreative.getClass().getSimpleName(),
          createdCreative.getPreviewUrl());
    }
  }

  public static void main(String[] args) {
    AdManagerSession session;
    try {
      // Generate a refreshable OAuth2 credential.
      Credential oAuth2Credential =
          new OfflineCredentials.Builder()
              .forApi(Api.AD_MANAGER)
              .fromFile()
              .build()
              .generateCredential();

      // Construct a AdManagerSession.
      session =
          new AdManagerSession.Builder().fromFile().withOAuth2Credential(oAuth2Credential).build();
    } catch (ConfigurationLoadException cle) {
      System.err.printf(
          "Failed to load configuration from the %s file. Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, cle);
      return;
    } catch (ValidationException ve) {
      System.err.printf(
          "Invalid configuration in the %s file. Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, ve);
      return;
    } catch (OAuthException oe) {
      System.err.printf(
          "Failed to create OAuth credentials. Check OAuth settings in the %s file. "
              + "Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, oe);
      return;
    }

    AdManagerServices adManagerServices = new AdManagerServices();
    CreateCreativesFromTemplatesParams params = new CreateCreativesFromTemplatesParams();
    if (!params.parseArguments(args)) {
      // Either pass the required parameters for this example on the command line, or insert them
      // into the code here. See the parameter class definition above for descriptions.
      params.advertiserId = Long.parseLong("INSERT_ADVERTISER_ID_HERE");
    }

    try {
      runExample(adManagerServices, session, params.advertiserId);
    } catch (ApiException apiException) {
      // ApiException is the base class for most exceptions thrown by an API request. Instances
      // of this exception have a message and a collection of ApiErrors that indicate the
      // type and underlying cause of the exception. Every exception object in the admanager.axis
      // packages will return a meaningful value from toString
      //
      // ApiException extends RemoteException, so this catch block must appear before the
      // catch block for RemoteException.
      System.err.println("Request failed due to ApiException. Underlying ApiErrors:");
      if (apiException.getErrors() != null) {
        int i = 0;
        for (ApiError apiError : apiException.getErrors()) {
          System.err.printf("  Error %d: %s%n", i++, apiError);
        }
      }
    } catch (RemoteException re) {
      System.err.printf("Request failed unexpectedly due to RemoteException: %s%n", re);
    } catch (IOException ioe) {
      System.err.printf("Example failed due to IOException: %s%n", ioe);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.schema;
import static org.apache.solr.common.params.CommonParams.NAME;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CharFilterFactory;
import org.apache.lucene.analysis.TokenFilterFactory;
import org.apache.lucene.analysis.TokenizerFactory;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.util.Version;
import org.apache.solr.analysis.TokenizerChain;
import org.apache.solr.common.ConfigNode;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.SolrClassLoader;
import org.apache.solr.common.util.DOMUtil;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.util.plugin.AbstractPluginLoader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Loads {@code <fieldType>} definitions from a Solr schema, instantiating each {@link FieldType}
 * together with its index-time, query-time, and (for text fields) multi-term analyzers.
 */
public final class FieldTypePluginLoader extends AbstractPluginLoader<FieldType> {

  private static final String LUCENE_MATCH_VERSION_PARAM = IndexSchema.LUCENE_MATCH_VERSION_PARAM;

  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  /**
   * @param schema The schema that will be used to initialize the FieldTypes
   * @param fieldTypes All FieldTypes that are instantiated by this Plugin Loader will be added to
   *     this Map
   * @param schemaAware Any SchemaAware objects that are instantiated by this Plugin Loader will be
   *     added to this collection.
   */
  public FieldTypePluginLoader(
      final IndexSchema schema,
      final Map<String, FieldType> fieldTypes,
      final Collection<SchemaAware> schemaAware) {
    super("[schema.xml] fieldType", FieldType.class, true, true);
    this.schema = schema;
    this.fieldTypes = fieldTypes;
    this.schemaAware = schemaAware;
  }

  private final IndexSchema schema;
  private final Map<String, FieldType> fieldTypes;
  private final Collection<SchemaAware> schemaAware;

  /**
   * Instantiates a FieldType from a {@code <fieldType>} config node and wires up its analyzers
   * and optional similarity factory.
   */
  @Override
  protected FieldType create(SolrClassLoader loader, String name, String className, ConfigNode node)
      throws Exception {

    FieldType ft = loader.newInstance(className, FieldType.class);
    ft.setTypeName(name);

    // Read the three possible analyzer declarations: type="query", type="multiterm",
    // and the index-time analyzer (no type attribute, or type="index").
    ConfigNode anode = node.child(it -> "query".equals(it.attributes().get("type")), "analyzer");
    Analyzer queryAnalyzer = readAnalyzer(anode);

    anode = node.child(it -> "multiterm".equals(it.attributes().get("type")), "analyzer");
    Analyzer multiAnalyzer = readAnalyzer(anode);

    // An analyzer without a type specified, or with type="index"
    anode =
        node.child(
            it ->
                (it.attributes().get("type") == null
                    || "index".equals(it.attributes().get("type"))),
            "analyzer");
    Analyzer analyzer = readAnalyzer(anode);

    // a custom similarity[Factory]
    anode = node.child("similarity");
    SimilarityFactory simFactory = IndexSchema.readSimilarity(loader, anode);
    if (null != simFactory) {
      ft.setSimilarity(simFactory);
    }

    if (ft instanceof HasImplicitIndexAnalyzer) {
      // Field types with an implicit index analyzer only accept a query-time analyzer.
      ft.setIsExplicitAnalyzer(false);
      if (null != queryAnalyzer && null != analyzer) {
        log.warn("Ignoring index-time analyzer for field: {}", name);
      } else if (null == queryAnalyzer) { // Accept non-query-time analyzer as a query-time analyzer
        queryAnalyzer = analyzer;
      }
      if (null != queryAnalyzer) {
        ft.setIsExplicitQueryAnalyzer(true);
        ft.setQueryAnalyzer(queryAnalyzer);
      }
    } else {
      // When only one of index/query analyzers is declared, use it for both, while
      // recording which one was explicit in the schema.
      if (null == queryAnalyzer) {
        queryAnalyzer = analyzer;
        ft.setIsExplicitQueryAnalyzer(false);
      } else {
        ft.setIsExplicitQueryAnalyzer(true);
      }
      if (null == analyzer) {
        analyzer = queryAnalyzer;
        ft.setIsExplicitAnalyzer(false);
      } else {
        ft.setIsExplicitAnalyzer(true);
      }

      if (null != analyzer) {
        ft.setIndexAnalyzer(analyzer);
        ft.setQueryAnalyzer(queryAnalyzer);
        if (ft instanceof TextField) {
          // Text fields also carry a multi-term analyzer; derive one from the query
          // analyzer when the schema does not declare it explicitly.
          if (null == multiAnalyzer) {
            multiAnalyzer = constructMultiTermAnalyzer(queryAnalyzer);
            ((TextField) ft).setIsExplicitMultiTermAnalyzer(false);
          } else {
            ((TextField) ft).setIsExplicitMultiTermAnalyzer(true);
          }
          ((TextField) ft).setMultiTermAnalyzer(multiAnalyzer);
        }
      }
    }
    if (ft instanceof SchemaAware) {
      schemaAware.add((SchemaAware) ft);
    }
    return ft;
  }

  /** Passes all node attributes except {@code name} to the field type as init args. */
  @Override
  protected void init(FieldType plugin, ConfigNode node) throws Exception {
    Map<String, String> params = DOMUtil.toMapExcept(node, NAME);
    plugin.setArgs(schema, params);
  }

  /** Registers the type by name; returns the previously registered type it replaces, if any. */
  @Override
  protected FieldType register(String name, FieldType plugin) throws Exception {
    log.trace("fieldtype defined: {}", plugin);
    return fieldTypes.put(name, plugin);
  }

  // The point here is that, if no multiterm analyzer was specified in the schema file, do one of
  // several things:
  // 1> If legacyMultiTerm == false, assemble a new analyzer composed of all of the charfilters,
  // lowercase filters and asciifoldingfilter.
  // 2> If legacyMultiTerm == true just construct the analyzer from a KeywordTokenizer. That should
  // mimic current behavior.
  // Do the same if they've specified that the old behavior is required (legacyMultiTerm="true")
  private Analyzer constructMultiTermAnalyzer(Analyzer queryAnalyzer) {
    if (queryAnalyzer == null) return null;

    // Non-TokenizerChain analyzers cannot be introspected, so fall back to keyword analysis.
    if (!(queryAnalyzer instanceof TokenizerChain)) {
      return new KeywordAnalyzer();
    }
    return ((TokenizerChain) queryAnalyzer).getMultiTermAnalyzer();
  }

  //
  // <analyzer><tokenizer class="...."/><tokenizer class="...." arg="....">
  //
  //
  /**
   * Builds an Analyzer from an {@code <analyzer>} config node; returns null when the node is
   * absent. Supports either an explicit analyzer class (which may not be combined with nested
   * factories) or a charFilter/tokenizer/filter chain assembled into a TokenizerChain.
   */
  private Analyzer readAnalyzer(ConfigNode node) {
    final SolrClassLoader loader = schema.getSolrClassLoader();

    // parent node used to be passed in as "fieldtype"
    if (node == null) return null;
    String analyzerName = DOMUtil.getAttr(node, "class", null);

    // check for all of these up front, so we can error if used in
    // conjunction with an explicit analyzer class.
    List<ConfigNode> charFilterNodes = node.getAll("charFilter");
    List<ConfigNode> tokenizerNodes = node.getAll("tokenizer");
    List<ConfigNode> tokenFilterNodes = node.getAll("filter");

    if (analyzerName != null) {

      // explicitly check for child analysis factories instead of
      // just any child nodes, because the user might have their
      // own custom nodes (ie: <description> or something like that)
      if (0 != charFilterNodes.size()
          || 0 != tokenizerNodes.size()
          || 0 != tokenFilterNodes.size()) {
        throw new SolrException(
            SolrException.ErrorCode.SERVER_ERROR,
            "Configuration Error: Analyzer class='"
                + analyzerName
                + "' can not be combined with nested analysis factories");
      }

      try {
        // No need to be core-aware as Analyzers are not in the core-aware list
        final Class<? extends Analyzer> clazz = loader.findClass(analyzerName, Analyzer.class);
        Analyzer analyzer = clazz.getConstructor().newInstance();

        // Validate that a usable luceneMatchVersion can be determined for this analyzer,
        // either from the node's attribute or from the schema default.
        final String matchVersionStr = DOMUtil.getAttr(node, LUCENE_MATCH_VERSION_PARAM, null);
        final Version luceneMatchVersion =
            (matchVersionStr == null)
                ? schema.getDefaultLuceneMatchVersion()
                : SolrConfig.parseLuceneVersionString(matchVersionStr);
        if (luceneMatchVersion == null) {
          throw new SolrException(
              SolrException.ErrorCode.SERVER_ERROR,
              "Configuration Error: Analyzer '"
                  + clazz.getName()
                  + "' needs a '"
                  + IndexSchema.LUCENE_MATCH_VERSION_PARAM
                  + "' parameter");
        }
        return analyzer;
      } catch (Exception e) {
        log.error("Cannot load analyzer: {}", analyzerName, e);
        throw new SolrException(
            SolrException.ErrorCode.SERVER_ERROR, "Cannot load analyzer: " + analyzerName, e);
      }
    }

    // Load the CharFilters
    final ArrayList<CharFilterFactory> charFilters = new ArrayList<>();
    AbstractPluginLoader<CharFilterFactory> charFilterLoader =
        new AbstractPluginLoader<CharFilterFactory>(
            "[schema.xml] analyzer/charFilter", CharFilterFactory.class, false, false) {

          @Override
          protected CharFilterFactory create(
              SolrClassLoader loader, String name, String className, ConfigNode node)
              throws Exception {
            final Map<String, String> params = DOMUtil.toMapExcept(node);
            String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
            params.put(
                LUCENE_MATCH_VERSION_PARAM,
                parseConfiguredVersion(configuredVersion, CharFilterFactory.class.getSimpleName())
                    .toString());
            CharFilterFactory factory;
            // Exactly one of SPI name or explicit class may be given.
            if (Objects.nonNull(name)) {
              factory = CharFilterFactory.forName(name, params);
              if (Objects.nonNull(className)) {
                log.error(
                    "Both of name: {} and className: {} are specified for charFilter.",
                    name,
                    className);
                throw new SolrException(
                    SolrException.ErrorCode.SERVER_ERROR,
                    "Cannot create charFilter: Both of name and className are specified.");
              }
            } else if (Objects.nonNull(className)) {
              factory =
                  loader.newInstance(
                      className,
                      CharFilterFactory.class,
                      getDefaultPackages(),
                      new Class<?>[] {Map.class},
                      new Object[] {params});
            } else {
              log.error("Neither of name or className is specified for charFilter.");
              throw new SolrException(
                  SolrException.ErrorCode.SERVER_ERROR,
                  "Cannot create charFilter: Neither of name or className is specified.");
            }
            factory.setExplicitLuceneMatchVersion(null != configuredVersion);
            return factory;
          }

          @Override
          protected void init(CharFilterFactory plugin, ConfigNode node) throws Exception {
            if (plugin != null) {
              charFilters.add(plugin);
            }
          }

          @Override
          protected CharFilterFactory register(String name, CharFilterFactory plugin) {
            return null; // used for map registration
          }
        };

    charFilterLoader.load(loader, charFilterNodes);

    // Load the Tokenizer
    // Although an analyzer only allows a single Tokenizer, we load a list to make sure
    // the configuration is ok
    final ArrayList<TokenizerFactory> tokenizers = new ArrayList<>(1);
    AbstractPluginLoader<TokenizerFactory> tokenizerLoader =
        new AbstractPluginLoader<TokenizerFactory>(
            "[schema.xml] analyzer/tokenizer", TokenizerFactory.class, false, false) {

          @Override
          protected TokenizerFactory create(
              SolrClassLoader loader, String name, String className, ConfigNode node)
              throws Exception {
            final Map<String, String> params = DOMUtil.toMap(node);
            String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
            params.put(
                LUCENE_MATCH_VERSION_PARAM,
                parseConfiguredVersion(configuredVersion, TokenizerFactory.class.getSimpleName())
                    .toString());
            TokenizerFactory factory;
            // Exactly one of SPI name or explicit class may be given.
            if (Objects.nonNull(name)) {
              factory = TokenizerFactory.forName(name, params);
              if (Objects.nonNull(className)) {
                log.error(
                    "Both of name: {} and className: {} are specified for tokenizer.",
                    name,
                    className);
                throw new SolrException(
                    SolrException.ErrorCode.SERVER_ERROR,
                    "Cannot create tokenizer: Both of name and className are specified.");
              }
            } else if (Objects.nonNull(className)) {
              factory =
                  loader.newInstance(
                      className,
                      TokenizerFactory.class,
                      getDefaultPackages(),
                      new Class<?>[] {Map.class},
                      new Object[] {params});
            } else {
              log.error("Neither of name or className is specified for tokenizer.");
              throw new SolrException(
                  SolrException.ErrorCode.SERVER_ERROR,
                  "Cannot create tokenizer: Neither of name or className is specified.");
            }
            factory.setExplicitLuceneMatchVersion(null != configuredVersion);
            return factory;
          }

          @Override
          protected void init(TokenizerFactory plugin, ConfigNode node) throws Exception {
            // Reject a second tokenizer: an analyzer chain allows exactly one.
            if (!tokenizers.isEmpty()) {
              throw new SolrException(
                  SolrException.ErrorCode.SERVER_ERROR,
                  "The schema defines multiple tokenizers for: " + node);
            }
            tokenizers.add(plugin);
          }

          @Override
          protected TokenizerFactory register(String name, TokenizerFactory plugin) {
            return null; // used for map registration
          }
        };

    tokenizerLoader.load(loader, tokenizerNodes);

    // Make sure something was loaded
    if (tokenizers.isEmpty()) {
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR, "analyzer without class or tokenizer");
    }

    // Load the Filters
    final ArrayList<TokenFilterFactory> filters = new ArrayList<>();

    AbstractPluginLoader<TokenFilterFactory> filterLoader =
        new AbstractPluginLoader<TokenFilterFactory>(
            "[schema.xml] analyzer/filter", TokenFilterFactory.class, false, false) {

          @Override
          protected TokenFilterFactory create(
              SolrClassLoader loader, String name, String className, ConfigNode node)
              throws Exception {
            final Map<String, String> params = DOMUtil.toMap(node);
            String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
            params.put(
                LUCENE_MATCH_VERSION_PARAM,
                parseConfiguredVersion(configuredVersion, TokenFilterFactory.class.getSimpleName())
                    .toString());
            TokenFilterFactory factory;
            // Exactly one of SPI name or explicit class may be given.
            if (Objects.nonNull(name)) {
              factory = TokenFilterFactory.forName(name, params);
              if (Objects.nonNull(className)) {
                log.error(
                    "Both of name: {} and className: {} are specified for tokenFilter.",
                    name,
                    className);
                throw new SolrException(
                    SolrException.ErrorCode.SERVER_ERROR,
                    "Cannot create tokenFilter: Both of name and className are specified.");
              }
            } else if (Objects.nonNull(className)) {
              factory =
                  loader.newInstance(
                      className,
                      TokenFilterFactory.class,
                      getDefaultPackages(),
                      new Class<?>[] {Map.class},
                      new Object[] {params});
            } else {
              log.error("Neither of name or className is specified for tokenFilter.");
              throw new SolrException(
                  SolrException.ErrorCode.SERVER_ERROR,
                  "Cannot create tokenFilter: Neither of name or className is specified.");
            }
            factory.setExplicitLuceneMatchVersion(null != configuredVersion);
            return factory;
          }

          @Override
          protected void init(TokenFilterFactory plugin, ConfigNode node) throws Exception {
            if (plugin != null) {
              filters.add(plugin);
            }
          }

          @Override
          protected TokenFilterFactory register(String name, TokenFilterFactory plugin)
              throws Exception {
            return null; // used for map registration
          }
        };
    filterLoader.load(loader, tokenFilterNodes);

    return new TokenizerChain(
        charFilters.toArray(new CharFilterFactory[charFilters.size()]),
        tokenizers.get(0),
        filters.toArray(new TokenFilterFactory[filters.size()]));
  }

  /**
   * Resolves the effective Lucene match version for a plugin, preferring an explicitly
   * configured value over the schema default, and warns when deprecated version emulation
   * is in effect.
   */
  private Version parseConfiguredVersion(String configuredVersion, String pluginClassName) {
    Version version =
        (configuredVersion != null)
            ? SolrConfig.parseLuceneVersionString(configuredVersion)
            : schema.getDefaultLuceneMatchVersion();

    if (!version.onOrAfter(Version.LUCENE_8_0_0)) {
      log.warn(
          "{} is using deprecated {}"
              + " emulation. You should at some point declare and reindex to at least 8.0, because "
              + "7.x emulation is deprecated and will be removed in 9.0",
          pluginClassName,
          version);
    }
    return version;
  }
}
| |
package com.actian.ilabs.dataflow.stringtemplate.runner;
/*
Copyright 2015 Actian Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import static com.pervasive.datarush.io.WriteMode.OVERWRITE;
import static com.pervasive.datarush.types.TokenTypeConstant.*;
import static com.pervasive.datarush.types.TypeUtil.mergeTypes;
import java.util.*;
import javax.xml.bind.DatatypeConverter;
import com.pervasive.datarush.encoding.text.DateFormatter;
import com.pervasive.datarush.encoding.text.TimeFormatter;
import com.pervasive.datarush.encoding.text.TimestampFormatter;
import com.pervasive.datarush.graphs.LogicalGraph;
import com.pervasive.datarush.graphs.LogicalGraphFactory;
import com.pervasive.datarush.operators.*;
import com.pervasive.datarush.operators.io.textfile.FieldDelimiterSettings;
import com.pervasive.datarush.operators.io.textfile.ReadDelimitedText;
import com.pervasive.datarush.operators.io.textfile.WriteDelimitedText;
import com.pervasive.datarush.ports.physical.*;
import com.pervasive.datarush.ports.record.*;
import com.pervasive.datarush.tokens.TokenUtils;
import com.pervasive.datarush.tokens.scalar.*;
import com.pervasive.datarush.types.RecordTokenType;
import com.pervasive.datarush.types.ScalarTokenType;
import com.pervasive.datarush.types.TokenTypeConstant;
import com.pervasive.datarush.types.TypeUtil;
import org.apache.commons.lang.StringUtils;
import org.stringtemplate.v4.*;
public class RunStringTemplate extends ExecutableOperator implements RecordPipelineOperator {
// Maps DataRush scalar token types to their XML Schema datatype names.
// NOTE(review): populated but not referenced in the visible portion of this class —
// presumably consumed by the template-rendering logic further down; confirm before removing.
private static final Map<ScalarTokenType, String> typeMap;
static {
  Map<ScalarTokenType, String> map = new HashMap<ScalarTokenType, String>();
  map.put(STRING, "xsd:string");
  map.put(BINARY, "xsd:string");
  map.put(BOOLEAN, "xsd:boolean");
  map.put(NUMERIC, "xsd:decimal");
  map.put(DOUBLE, "xsd:double");
  map.put(FLOAT, "xsd:double");
  map.put(LONG, "xsd:integer");
  map.put(INT, "xsd:integer");
  map.put(TIME, "xsd:time");
  map.put(DATE, "xsd:date");
  map.put(TIMESTAMP, "xsd:dateTime");
  typeMap = map;
}

// Record ports for the operator's single input and single output.
private final RecordPort input = newRecordInput("input");
private final RecordPort output = newRecordOutput("output");

// The StringTemplate group (template text) to apply to each input record.
private String stg;

/** Returns the operator's input record port. */
public RecordPort getInput() {
  return input;
}

/** Returns the operator's output record port. */
public RecordPort getOutput() {
  return output;
}

/** Returns the StringTemplate group definition. */
public String getStg() {
  return stg;
}

/** Sets the StringTemplate group definition to apply. */
public void setStg(String stg) {
  this.stg = stg;
}

/** Default constructor required for operator serialization/instantiation. */
public RunStringTemplate() {
}
/**
 * Declares this operator's parallelism, output record type, and output data ordering.
 * The output consists of a single string field, {@code stResult}, holding the rendered
 * template for each input record. (An earlier revision merged the input type into the
 * output via {@code mergeTypes}; the current contract is result-only.)
 */
@Override
protected void computeMetadata(StreamingMetadataContext context) {
  // Use source parallelism as a hint for our parallelism.
  context.parallelize(ParallelismStrategy.NEGOTIATE_BASED_ON_SOURCE);

  // Declare the output type: a single string field containing the template result.
  RecordTokenType outputType = record(STRING("stResult"));
  getOutput().setType(context, outputType);

  // Rows are generated independently from a single field, so no ordering is guaranteed.
  // (A previously computed, unused RecordMetadata local was removed here.)
  output.setOutputDataOrdering(context, DataOrdering.UNSPECIFIED);
}
private String FormatFieldValue(ScalarInputField field) {
ScalarTokenType type = field.getType();
String valueString = "";
if (type.equals(TokenTypeConstant.BOOLEAN)) {
BooleanInputField boolField = (BooleanInputField) field;
valueString = DatatypeConverter.printBoolean(boolField.asBoolean());
}
else if (type.equals(TokenTypeConstant.BINARY)) {
BinaryInputField binField = (BinaryInputField) field;
valueString = DatatypeConverter.printHexBinary(binField.asBinary());
}
else if (type.equals(TokenTypeConstant.CHAR)) {
CharInputField charField = (CharInputField) field;
valueString = charField.toString();
}
else if (type.equals(TokenTypeConstant.DATE)) {
DateInputField dateField = (DateInputField) field;
DateFormatter dateFormatter = new DateFormatter("yyyyMMdd");
dateFormatter.setSource(dateField);
valueString = dateFormatter.format();
}
else if (type.equals(TokenTypeConstant.DOUBLE)) {
DoubleInputField doubleField = (DoubleInputField) field;
valueString = DatatypeConverter.printDouble(doubleField.asDouble());
}
else if (type.equals(TokenTypeConstant.FLOAT)) {
FloatInputField floatField = (FloatInputField) field;
valueString = DatatypeConverter.printFloat(floatField.asFloat());
}
else if (type.equals(TokenTypeConstant.INT)) {
IntInputField intField = (IntInputField) field;
valueString = DatatypeConverter.printInt(intField.asInt());
}
else if (type.equals(TokenTypeConstant.LONG)) {
LongInputField longField = (LongInputField) field;
valueString = DatatypeConverter.printLong(longField.asLong());
}
else if (type.equals(TokenTypeConstant.NUMERIC)) {
NumericInputField numericField = (NumericInputField) field;
valueString = DatatypeConverter.printDecimal(numericField.asBigDecimal());
}
else if (type.equals(TokenTypeConstant.STRING)) {
StringInputField stringField = (StringInputField) field;
valueString = DatatypeConverter.printString(stringField.asString());
}
else if (type.equals(TokenTypeConstant.TIME)) {
TimeInputField timeField = (TimeInputField) field;
TimeFormatter timeFormatter = new TimeFormatter("HHmmss.SSSZ");
timeFormatter.setSource(timeField);
valueString = timeFormatter.format();
}
else if (type.equals(TokenTypeConstant.TIMESTAMP)) {
TimestampInputField timestampField = (TimestampInputField) field;
TimestampFormatter timestampFormatter = new TimestampFormatter("yyyyMMdd'T'HHmmss.SSSZ");
timestampFormatter.setSource(timestampField);
valueString = timestampFormatter.format();
}
else {
valueString = "";
}
return valueString;
}
@Override
protected void execute(ExecutionContext context) {
try {
STGroup group = new STGroupString(getStg());
RecordInput recordInput = getInput().getInput(context);
RecordOutput recordOutput = getOutput().getOutput(context);
//best practice: get handle to output fields
// resultField is a handle to the result output field
// outputs is an array of all outputs *except* for stResult; corresponds by index to allInputs
StringSettable resultField = (StringSettable) recordOutput.getField("stResult");
ScalarSettable[] outputs = TokenUtils.selectFields(recordOutput, getInput().getType(context).getNames());
ScalarValued[] allInputs = recordInput.getFields();
ScalarInputField [] fields = recordInput.getFields();
ArrayList<String> fnameList = new ArrayList<String>(64);
for (int i = 0; i < fields.length; i++) {
String fname = fields[i].getName().replaceAll("[^A-Za-z0-9_]","");
fnameList.add(fname);
}
String fnames = StringUtils.join(fnameList, ", ");
ST headerTemplate = group.getInstanceOf("/HEADER");
if (headerTemplate != null /* && context.getPartitionInfo().getPartitionID() == 0 */) {
for (int i = 0; i < fields.length; i++) {
String typeString = typeMap.containsKey(fields[i].getType()) ? typeMap.get(fields[i].getType()) : "";
headerTemplate.addAggr("__metadata.{ name, type }", fields[i].getName(), typeString);
}
}
ST footerTemplate = group.getInstanceOf("/FOOTER");
if (footerTemplate != null) {
for (int i = 0; i < fields.length; i++) {
String typeString = typeMap.containsKey(fields[i].getType()) ? typeMap.get(fields[i].getType()) : "";
footerTemplate.addAggr("__metadata.{ name, type }", fields[i].getName(), typeString);
}
}
long recordCount = 0;
while(recordInput.stepNext()) {
ST recordTemplate = group.getInstanceOf("/RECORD");
if (recordTemplate == null)
continue;
recordCount++;
// Only output the results of the header template if there are records in this partition
if (headerTemplate != null && recordCount == 1) {
String result = headerTemplate.render();
resultField.set(result);
recordOutput.push();
}
ArrayList<String> valueList = new ArrayList<String>(64);
ArrayList<String> typeList = new ArrayList<String>(64);
for (int i = 0; i < fields.length; i++) {
String valueString = FormatFieldValue(fields[i]);
String typeString = typeMap.containsKey(fields[i].getType()) ? typeMap.get(fields[i].getType()) : "";
recordTemplate.addAggr("__data.{ name, type, value }", fields[i].getName(), typeString, valueString);
// recordTemplate.addAggr("__values.{ " + fname + " }", valueString);
valueList.add(valueString);
typeList.add(typeString);
// recordTemplate.addAggr("__types.{ " + fname + " }", typeString);
}
recordTemplate.addAggr("__values.{ " + fnames + " }", valueList.toArray());
recordTemplate.addAggr("__types.{ " + fnames + " }", typeList.toArray());
String result = recordTemplate.render();
resultField.set(result);
recordOutput.push();
}
if (footerTemplate != null && recordCount > 0) {
String result = footerTemplate.render();
resultField.set(result);
recordOutput.push();
}
//required: signal end-of-data on output
recordOutput.pushEndOfData();
} finally {
}
}
public static void main(String[] args) {
LogicalGraph graph = LogicalGraphFactory.newLogicalGraph();
// Use weather alert data from NOAA as the source
ReadDelimitedText reader = graph.add(new ReadDelimitedText("http://www.ncdc.noaa.gov/swdiws/csv/warn/id=533623"));
reader.setHeader(true);
RunStringTemplate runner = graph.add(new RunStringTemplate());
WriteDelimitedText writer = graph.add(new WriteDelimitedText());
String templateGroup = ""
+ "HEADER(__metadata) ::=<<\n"
+ "<! Generate a comma separated list of input field names !>\n"
+ "<trunc(__metadata):{it|<it.name>,}>\n"
+ "<last(__metadata).name>\n"
+ ">>\n"
+ "\n"
+ "FOOTER(__metadata) ::=<<\n"
+ "<! Generate a line of text as the footer !>\n"
+ "\"This is a footer record\"\n"
+ ">>\n"
+ "\n"
+ "RECORD(__data, __types, __values) ::= <<\n"
+ "\n"
+ "<! Generate a record with the value of input field 'field0' !>\n"
+ "<__values.field0>\n"
+ ">>\n";
runner.setStg(templateGroup);
writer.setFieldEndDelimiter("]]");
writer.setFieldStartDelimiter("[[");
writer.setFieldDelimiter("|");
writer.setHeader(false);
writer.setTarget("stdout:");
writer.setMode(OVERWRITE);
graph.connect(reader.getOutput(), runner.getInput());
graph.connect(runner.getOutput(), writer.getInput());
graph.compile().run();
}
}
| |
/**
*============================================================================
* The Ohio State University Research Foundation, Emory University,
* the University of Minnesota Supercomputing Institute
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cagrid-grid-incubation/LICENSE.txt for details.
*============================================================================
**/
/**
*============================================================================
*============================================================================
**/
package org.cagrid.gaards.cds.service.policy;
import gov.nih.nci.cagrid.common.FaultHelper;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.cagrid.gaards.cds.common.AllowedParties;
import org.cagrid.gaards.cds.common.DelegationIdentifier;
import org.cagrid.gaards.cds.common.DelegationPolicy;
import org.cagrid.gaards.cds.common.IdentityDelegationPolicy;
import org.cagrid.gaards.cds.stubs.types.CDSInternalFault;
import org.cagrid.gaards.cds.stubs.types.InvalidPolicyFault;
import org.cagrid.tools.database.Database;
/**
 * {@link PolicyHandler} implementation backed by a relational table that
 * stores {@link IdentityDelegationPolicy} instances: for each delegation id,
 * the set of grid identities allowed to obtain delegated credentials.
 *
 * <p>The backing table is created lazily on first use. All unexpected
 * persistence errors are logged and surfaced as {@link CDSInternalFault}.
 */
public class IdentityPolicyHandler implements PolicyHandler {

    private final static String TABLE = "identity_policies";
    private final static String DELEGATION_ID = "delegation_id";
    private final static String GRID_IDENTITY = "grid_identity";

    // Set once the backing table is known to exist; avoids re-checking on
    // every call.
    private boolean dbBuilt = false;
    private final Database db;
    private final Log log;

    /**
     * @param db the database used to persist policies
     */
    public IdentityPolicyHandler(Database db) {
        this.log = LogFactory.getLog(this.getClass().getName());
        this.db = db;
    }

    /**
     * Logs an unexpected persistence error and wraps it in a
     * {@link CDSInternalFault}. Centralizes the fault-building boilerplate
     * previously duplicated in every catch block.
     */
    private CDSInternalFault handleDatabaseError(Exception e) {
        log.error(e.getMessage(), e);
        CDSInternalFault f = new CDSInternalFault();
        f.setFaultString("Unexpected Database Error.");
        FaultHelper helper = new FaultHelper(f);
        helper.addFaultCause(e);
        return (CDSInternalFault) helper.getFault();
    }

    /** Closes a statement in a finally block without masking the primary error. */
    private void closeQuietly(PreparedStatement s) {
        if (s != null) {
            try {
                s.close();
            } catch (Exception e) {
                log.warn("Failed to close statement.", e);
            }
        }
    }

    /** Closes a result set in a finally block without masking the primary error. */
    private void closeQuietly(ResultSet rs) {
        if (rs != null) {
            try {
                rs.close();
            } catch (Exception e) {
                log.warn("Failed to close result set.", e);
            }
        }
    }

    /**
     * Removes all stored policy rows for the given delegation.
     *
     * @param id the delegation whose policy should be removed
     * @throws CDSInternalFault on any database error
     */
    public void removePolicy(DelegationIdentifier id) throws CDSInternalFault {
        buildDatabase();
        Connection c = null;
        PreparedStatement s = null;
        try {
            c = this.db.getConnection();
            s = c.prepareStatement("DELETE FROM " + TABLE + " WHERE "
                    + DELEGATION_ID + "= ?");
            s.setLong(1, id.getDelegationId());
            s.execute();
        } catch (Exception e) {
            throw handleDatabaseError(e);
        } finally {
            // Close on all paths, not just the happy path.
            closeQuietly(s);
            this.db.releaseConnection(c);
        }
    }

    /**
     * Loads the policy stored for the given delegation.
     *
     * @param id the delegation whose policy is requested
     * @return the stored {@link IdentityDelegationPolicy}
     * @throws CDSInternalFault on any database error
     * @throws InvalidPolicyFault if no policy exists for the delegation
     */
    public DelegationPolicy getPolicy(DelegationIdentifier id)
        throws CDSInternalFault, InvalidPolicyFault {
        if (!policyExists(id)) {
            InvalidPolicyFault f = new InvalidPolicyFault();
            f.setFaultString("The requested policy does not exist.");
            throw f;
        }
        List<String> parties = new ArrayList<String>();
        Connection c = null;
        PreparedStatement s = null;
        ResultSet rs = null;
        try {
            c = this.db.getConnection();
            s = c.prepareStatement("select " + GRID_IDENTITY + " from "
                    + TABLE + " WHERE " + DELEGATION_ID + "= ? ");
            s.setLong(1, id.getDelegationId());
            rs = s.executeQuery();
            while (rs.next()) {
                parties.add(rs.getString(1));
            }
            IdentityDelegationPolicy policy = new IdentityDelegationPolicy();
            AllowedParties ap = new AllowedParties();
            String[] identities = new String[parties.size()];
            ap.setGridIdentity(parties.toArray(identities));
            policy.setAllowedParties(ap);
            return policy;
        } catch (Exception e) {
            throw handleDatabaseError(e);
        } finally {
            // Close on all paths, not just the happy path.
            closeQuietly(rs);
            closeQuietly(s);
            this.db.releaseConnection(c);
        }
    }

    /**
     * Determines whether a grid identity is allowed by the policy stored for
     * the given delegation.
     *
     * @param id the delegation to check
     * @param gridIdentity the identity requesting access
     * @return {@code true} if a matching policy row exists
     * @throws CDSInternalFault on any database error
     */
    public boolean isAuthorized(DelegationIdentifier id, String gridIdentity)
        throws CDSInternalFault {
        boolean isAuthorized = false;
        Connection c = null;
        PreparedStatement s = null;
        ResultSet rs = null;
        try {
            c = this.db.getConnection();
            s = c.prepareStatement("select count(*) " + " from " + TABLE
                    + " WHERE " + DELEGATION_ID + "= ? AND "
                    + GRID_IDENTITY + "= ?");
            s.setLong(1, id.getDelegationId());
            s.setString(2, gridIdentity);
            rs = s.executeQuery();
            if (rs.next()) {
                if (rs.getInt(1) > 0) {
                    isAuthorized = true;
                }
            }
        } catch (Exception e) {
            throw handleDatabaseError(e);
        } finally {
            // Close on all paths, not just the happy path.
            closeQuietly(rs);
            closeQuietly(s);
            this.db.releaseConnection(c);
        }
        return isAuthorized;
    }

    /**
     * Stores a policy for the given delegation, one row per allowed party.
     * On failure, any partially inserted rows are removed (best effort).
     *
     * @param id the delegation to attach the policy to
     * @param pol the policy; must be an {@link IdentityDelegationPolicy}
     * @throws CDSInternalFault on any database error
     * @throws InvalidPolicyFault if the policy type is unsupported, a policy
     *         already exists for the delegation, or no allowed parties were
     *         provided
     */
    public void storePolicy(DelegationIdentifier id, DelegationPolicy pol)
        throws CDSInternalFault, InvalidPolicyFault {
        this.buildDatabase();
        if (!isSupported(pol.getClass().getName())) {
            InvalidPolicyFault f = new InvalidPolicyFault();
            f.setFaultString("The policy handler " + getClass().getName()
                    + " does not support the policy "
                    + pol.getClass().getName() + ".");
            throw f;
        }
        if (this.policyExists(id)) {
            InvalidPolicyFault f = new InvalidPolicyFault();
            f.setFaultString("A policy already exists for the delegation "
                    + id.getDelegationId());
            throw f;
        }
        IdentityDelegationPolicy policy = (IdentityDelegationPolicy) pol;
        Connection c = null;
        PreparedStatement s = null;
        boolean policyStored = false;
        try {
            c = this.db.getConnection();
            AllowedParties ap = policy.getAllowedParties();
            if (ap != null) {
                String[] parties = ap.getGridIdentity();
                if (parties != null) {
                    // Prepare once and reuse for every party instead of
                    // re-preparing inside the loop.
                    s = c.prepareStatement("INSERT INTO " + TABLE + " SET "
                            + DELEGATION_ID + "= ?, " + GRID_IDENTITY + "= ?");
                    for (int i = 0; i < parties.length; i++) {
                        s.setLong(1, id.getDelegationId());
                        s.setString(2, parties[i]);
                        s.execute();
                        policyStored = true;
                    }
                }
            }
        } catch (Exception e) {
            // Best-effort cleanup of partially inserted rows; log rather than
            // silently swallow a cleanup failure.
            try {
                this.removePolicy(id);
            } catch (Exception ex) {
                log.warn("Failed to clean up partially stored policy for "
                        + "delegation " + id.getDelegationId() + ".", ex);
            }
            throw handleDatabaseError(e);
        } finally {
            closeQuietly(s);
            this.db.releaseConnection(c);
        }
        if (!policyStored) {
            InvalidPolicyFault f = new InvalidPolicyFault();
            f.setFaultString("No allowed parties provided.");
            throw f;
        }
    }

    /**
     * Deletes every stored policy row.
     *
     * @throws CDSInternalFault on any database error
     */
    public void removeAllStoredPolicies() throws CDSInternalFault {
        buildDatabase();
        try {
            this.db.update("DELETE FROM " + TABLE);
            // Preserved from the original implementation: force the table
            // existence check to run again on next use.
            dbBuilt = false;
        } catch (Exception e) {
            throw handleDatabaseError(e);
        }
    }

    /**
     * @param policyClassName fully qualified policy class name
     * @return {@code true} only for {@link IdentityDelegationPolicy}
     */
    public boolean isSupported(String policyClassName) {
        return policyClassName.equals(IdentityDelegationPolicy.class.getName());
    }

    /**
     * @param id the delegation to check
     * @return {@code true} if at least one policy row exists for the delegation
     * @throws CDSInternalFault on any database error
     */
    public boolean policyExists(DelegationIdentifier id)
        throws CDSInternalFault {
        buildDatabase();
        try {
            return db.exists(TABLE, DELEGATION_ID, id.getDelegationId());
        } catch (Exception e) {
            throw handleDatabaseError(e);
        }
    }

    /**
     * Creates the backing table if it does not exist yet. Cheap after the
     * first successful call thanks to the {@code dbBuilt} flag.
     *
     * @throws CDSInternalFault on any database error
     */
    private void buildDatabase() throws CDSInternalFault {
        if (!dbBuilt) {
            try {
                if (!this.db.tableExists(TABLE)) {
                    String table = "CREATE TABLE " + TABLE + " ("
                            + DELEGATION_ID + " BIGINT NOT NULL,"
                            + GRID_IDENTITY
                            + " VARCHAR(255) NOT NULL, INDEX document_index ("
                            + DELEGATION_ID + "));";
                    this.db.update(table);
                }
                dbBuilt = true;
            } catch (Exception e) {
                throw handleDatabaseError(e);
            }
        }
    }
}
| |
/* Copyright (c) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gdata.data.contacts;
import com.google.gdata.data.AbstractExtension;
import com.google.gdata.data.AttributeGenerator;
import com.google.gdata.data.AttributeHelper;
import com.google.gdata.data.ExtensionDescription;
import com.google.gdata.util.ParseException;
/**
* Contact's jot.
*
*
*/
@ExtensionDescription.Default(
    nsAlias = ContactsNamespace.GCONTACT_ALIAS,
    nsUri = ContactsNamespace.GCONTACT,
    localName = Jot.XML_NAME)
public class Jot extends AbstractExtension {

  /** Name of the XML element backing this extension. */
  static final String XML_NAME = "jot";

  /** Name of the XML "rel" attribute. */
  private static final String REL = "rel";

  /** Translates {@link Rel} constants to and from lower-case attribute text. */
  private static final AttributeHelper.EnumToAttributeValue<Rel>
      REL_ENUM_TO_ATTRIBUTE_VALUE =
          new AttributeHelper.LowerCaseEnumToAttributeValue<Rel>();

  /** The kind of jot; {@code null} until set. */
  private Rel rel = null;

  /** The jot's text content; {@code null} until set. */
  private String value = null;

  /** Enumerates the supported jot kinds. */
  public enum Rel {
    /** Home jot. */
    HOME,
    /** Keywords jot. */
    KEYWORDS,
    /** Other jot. */
    OTHER,
    /** User jot. */
    USER,
    /** Work jot. */
    WORK
  }

  /** Creates an empty, mutable jot. */
  public Jot() {
    super();
  }

  /**
   * Creates a fully initialized, immutable jot.
   *
   * @param rel jot type
   * @param value jot content
   */
  public Jot(Rel rel, String value) {
    super();
    setRel(rel);
    setValue(value);
    setImmutable(true);
  }

  /**
   * Returns the jot type.
   *
   * @return jot type, or {@code null} if unset
   */
  public Rel getRel() {
    return rel;
  }

  /**
   * Sets the jot type.
   *
   * @param rel new jot type, or {@code null} to clear it
   */
  public void setRel(Rel rel) {
    throwExceptionIfImmutable();
    this.rel = rel;
  }

  /**
   * Returns whether a jot type has been set.
   *
   * @return {@code true} when the jot type is present
   */
  public boolean hasRel() {
    return rel != null;
  }

  /**
   * Returns the jot content.
   *
   * @return content, or {@code null} if unset
   */
  public String getValue() {
    return value;
  }

  /**
   * Sets the jot content.
   *
   * @param value new content, or {@code null} to clear it
   */
  public void setValue(String value) {
    throwExceptionIfImmutable();
    this.value = value;
  }

  /**
   * Returns whether content has been set.
   *
   * @return {@code true} when content is present
   */
  public boolean hasValue() {
    return value != null;
  }

  @Override
  protected void validate() {
    // "rel" is the only mandatory attribute.
    if (rel == null) {
      throwExceptionForMissingAttribute(REL);
    }
  }

  /**
   * Builds the extension description for this class.
   *
   * @param required whether the extension is required
   * @param repeatable whether the extension may repeat
   * @return the configured extension description
   */
  public static ExtensionDescription getDefaultDescription(boolean required,
      boolean repeatable) {
    ExtensionDescription description =
        ExtensionDescription.getDefaultDescription(Jot.class);
    description.setRequired(required);
    description.setRepeatable(repeatable);
    return description;
  }

  @Override
  protected void putAttributes(AttributeGenerator generator) {
    generator.put(REL, rel, REL_ENUM_TO_ATTRIBUTE_VALUE);
    generator.setContent(value);
  }

  @Override
  protected void consumeAttributes(AttributeHelper helper) throws ParseException
  {
    rel = helper.consumeEnum(REL, true, Rel.class, null,
        REL_ENUM_TO_ATTRIBUTE_VALUE);
    value = helper.consume(null, false);
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!sameClassAs(obj)) {
      return false;
    }
    Jot that = (Jot) obj;
    return eq(rel, that.rel) && eq(value, that.value);
  }

  @Override
  public int hashCode() {
    // Same 37-based combination as before; skips unset attributes.
    int hash = getClass().hashCode();
    if (rel != null) {
      hash = 37 * hash + rel.hashCode();
    }
    if (value != null) {
      hash = 37 * hash + value.hashCode();
    }
    return hash;
  }

  @Override
  public String toString() {
    StringBuilder text = new StringBuilder("{Jot rel=");
    text.append(rel).append(" value=").append(value).append("}");
    return text.toString();
  }
}
| |
package ru.dionisius.control;
import ru.dionisius.model.Address;
import ru.dionisius.model.MusicType;
import ru.dionisius.model.Role;
import ru.dionisius.model.User;
import org.junit.Assert;
import org.junit.Test;
import java.util.LinkedList;
import java.util.List;
/**
* Created by Dionisius on 12.09.2017.
* Tests DbManager class methods:
*
* String createUser (final String userName, final String userSurname, final String userlogin,
* final String userPassword, final Address userAddress, final Role userRole,
* final List<MusicType> userMusicTypeList);
*
* List<User> getAllUsers ();
*
* User getUserById (final String id);
*
* User getUserByAddress (final Address address);
*
* List<User> getUserByRole (final Role role);
*
* User getUserIdByLoginAndPassword(final String userLogin, final String userPassword);
*
* void deleteUser(final String userId);
*
* void deleteAllUsers();
*
* void updateUser(final String oldUserId, final String userName, final String userSurname,
* final String userlogin, final String userPassword, final Address userAddress,
* final Role userRole, final List<MusicType> userMusicTypeList);
*/
public class DbManagerTest {
// Fixture data for the test users created by the createUser* helpers below.
final String user1Name = "Ivan";
final String user2Name = "Alexey";
final String user1Surname = "Petrov";
final String user2Surname = "Sidorov";
final String user1login = "aaa";
final String user2login = "bbb";
final String user1Password = "777";
final String user2Password = "hello";
final Address user1Address = new Address("191000", "Russia",
"Moskow", "Pushkina", "15", "176");
final Address user2Address = new Address("185678", "Belorus",
"Minsk", "Lukashenko", "17/7", "88");
// NOTE(review): user3Address is not referenced by the visible tests — kept
// for future cases; confirm before removing.
final Address user3Address = new Address("222222", "Germany",
"Berlin", "RottenBluemen str.", "122", "71");
final Role user1Role = Role.ADMIN;
final Role user2Role = Role.USER;
// Music type lists are cleared and refilled by each createUser* helper.
final List<MusicType> user1MusicTypeList = new LinkedList<>();
final List<MusicType> user2MusicTypeList = new LinkedList<>();
// Singleton manager under test.
private final IDbManager manager = DbManager.getInstance();
// Ids assigned by the most recent createUser1()/createUser2() calls.
private String createdUser1Id = null;
private String createdUser2Id = null;
/**
* creates first user.
*/
/**
 * Inserts the first fixture user (a punk/rock fan) and remembers the id
 * assigned by the manager in {@code createdUser1Id}.
 */
public void createUser1() {
    user1MusicTypeList.clear();
    user1MusicTypeList.add(MusicType.PUNK);
    user1MusicTypeList.add(MusicType.ROCK);
    createdUser1Id = manager.createUser(user1Name, user1Surname, user1login,
            user1Password, user1Address, user1Role, user1MusicTypeList);
}
/**
* Creates second user.
*/
/**
 * Inserts the second fixture user (a disco/jazz/rock fan) and remembers the
 * id assigned by the manager in {@code createdUser2Id}.
 */
public void createUser2() {
    user2MusicTypeList.clear();
    user2MusicTypeList.add(MusicType.DISCO);
    user2MusicTypeList.add(MusicType.JAZZ);
    user2MusicTypeList.add(MusicType.ROCK);
    createdUser2Id = manager.createUser(user2Name, user2Surname, user2login,
            user2Password, user2Address, user2Role, user2MusicTypeList);
}
/**
* Creates second user with the first user's address.
*/
/**
 * Inserts the second fixture user, but registered at the first user's
 * address, to exercise address sharing between users.
 */
public void createUser2WithUser1Address() {
    user2MusicTypeList.clear();
    user2MusicTypeList.add(MusicType.DISCO);
    user2MusicTypeList.add(MusicType.SOUL);
    createdUser2Id = manager.createUser(user2Name, user2Surname, user2login,
            user2Password, user1Address, user2Role, user2MusicTypeList);
}
/**
* Creates a second user with the first user's login and password.
*/
/**
 * Attempts to insert the second fixture user reusing the first user's login
 * and password, to exercise the login/password uniqueness constraint.
 */
public void createUser2WithUser1LoginAndUser1Password() {
    user2MusicTypeList.clear();
    user2MusicTypeList.add(MusicType.DISCO);
    user2MusicTypeList.add(MusicType.JAZZ);
    user2MusicTypeList.add(MusicType.ROCK);
    createdUser2Id = manager.createUser(user2Name, user2Surname, user1login,
            user1Password, user2Address, user2Role, user2MusicTypeList);
}
/**
* Tests createUser() method.
* @throws Exception if exception occurs.
*/
/**
 * createUser(): inserting one user into an empty DB leaves exactly that
 * user, with all persisted attributes intact.
 * JUnit assertEquals takes (expected, actual) — order fixed throughout so
 * failure messages report correctly.
 * @throws Exception if exception occurs.
 */
@Test
public void whenCreateUserInEmptyDBThenOnlyThisUserIsInDB() throws Exception {
    this.manager.deleteAllUsers();
    this.createUser1();
    User createdUser = this.manager.getUserById(this.createdUser1Id);
    Assert.assertEquals(1, this.manager.getAllUsers().size());
    Assert.assertEquals(this.user1Name, createdUser.getName());
    Assert.assertEquals(this.user1login, createdUser.getLogin());
    Assert.assertEquals(this.user1Password, createdUser.getPassword());
    Assert.assertEquals(this.user1Address.getZip_code(), createdUser.getAddress().getZip_code());
    Assert.assertEquals(this.user1Address.getCountry(), createdUser.getAddress().getCountry());
    Assert.assertEquals(this.user1Address.getCity(), createdUser.getAddress().getCity());
    Assert.assertEquals(this.user1Address.getStreet(), createdUser.getAddress().getStreet());
    Assert.assertEquals(this.user1Address.getHouse(), createdUser.getAddress().getHouse());
    Assert.assertEquals(this.user1Address.getFlat(), createdUser.getAddress().getFlat());
    Assert.assertEquals(this.user1Role, createdUser.getRole());
    Assert.assertEquals(this.user1MusicTypeList, createdUser.getMusicTypes());
}
/**
* Tests createUser() method.
* @throws Exception if exception occurs.
*/
/**
 * createUser(): inserting the same user twice must not create a duplicate —
 * exactly one row remains, with all attributes intact.
 * assertEquals argument order fixed to (expected, actual).
 * @throws Exception if exception occurs.
 */
@Test
public void whenCreateTwoEqualUsersInEmptyDBThenOnlyOneUserIsInDB() throws Exception {
    this.manager.deleteAllUsers();
    this.createUser1();
    this.createUser1();
    User createdUser = this.manager.getUserById(this.createdUser1Id);
    Assert.assertEquals(1, this.manager.getAllUsers().size());
    Assert.assertEquals(this.user1Name, createdUser.getName());
    Assert.assertEquals(this.user1login, createdUser.getLogin());
    Assert.assertEquals(this.user1Password, createdUser.getPassword());
    Assert.assertEquals(this.user1Address.getZip_code(), createdUser.getAddress().getZip_code());
    Assert.assertEquals(this.user1Address.getCountry(), createdUser.getAddress().getCountry());
    Assert.assertEquals(this.user1Address.getCity(), createdUser.getAddress().getCity());
    Assert.assertEquals(this.user1Address.getStreet(), createdUser.getAddress().getStreet());
    Assert.assertEquals(this.user1Address.getHouse(), createdUser.getAddress().getHouse());
    Assert.assertEquals(this.user1Address.getFlat(), createdUser.getAddress().getFlat());
    Assert.assertEquals(this.user1Role, createdUser.getRole());
    Assert.assertEquals(this.user1MusicTypeList, createdUser.getMusicTypes());
}
/**
* Tests createUser() method.
* @throws Exception if exception occurs.
*/
/**
 * createUser(): two distinct users may share the same address — both rows
 * must be present with their own attributes.
 * Fixes: getAllUsers() fetched once and reused instead of three separate
 * round-trips; assertEquals argument order corrected to (expected, actual).
 * @throws Exception if exception occurs.
 */
@Test
public void whenCreateTwoUsersWithTheSameAddressesInEmptyDBThenThisTwoUsersAreInDB() throws Exception {
    this.manager.deleteAllUsers();
    this.createUser1();
    this.createUser2WithUser1Address();
    List<User> users = this.manager.getAllUsers();
    Assert.assertEquals(2, users.size());
    User createdUser = users.get(0);
    User createdUser2 = users.get(1);
    Assert.assertEquals(this.user1Name, createdUser.getName());
    Assert.assertEquals(this.user1login, createdUser.getLogin());
    Assert.assertEquals(this.user1Password, createdUser.getPassword());
    Assert.assertEquals(this.user1Address.getZip_code(), createdUser.getAddress().getZip_code());
    Assert.assertEquals(this.user1Address.getCountry(), createdUser.getAddress().getCountry());
    Assert.assertEquals(this.user1Address.getCity(), createdUser.getAddress().getCity());
    Assert.assertEquals(this.user1Address.getStreet(), createdUser.getAddress().getStreet());
    Assert.assertEquals(this.user1Address.getHouse(), createdUser.getAddress().getHouse());
    Assert.assertEquals(this.user1Address.getFlat(), createdUser.getAddress().getFlat());
    Assert.assertEquals(this.user1Role, createdUser.getRole());
    Assert.assertEquals(this.user1MusicTypeList, createdUser.getMusicTypes());
    Assert.assertEquals(this.user2Name, createdUser2.getName());
    Assert.assertEquals(this.user2login, createdUser2.getLogin());
    Assert.assertEquals(this.user2Password, createdUser2.getPassword());
    // Both users share user1's address.
    Assert.assertEquals(this.user1Address.getZip_code(), createdUser2.getAddress().getZip_code());
    Assert.assertEquals(this.user1Address.getCountry(), createdUser2.getAddress().getCountry());
    Assert.assertEquals(this.user1Address.getCity(), createdUser2.getAddress().getCity());
    Assert.assertEquals(this.user1Address.getStreet(), createdUser2.getAddress().getStreet());
    Assert.assertEquals(this.user1Address.getHouse(), createdUser2.getAddress().getHouse());
    Assert.assertEquals(this.user1Address.getFlat(), createdUser2.getAddress().getFlat());
    Assert.assertEquals(this.user2Role, createdUser2.getRole());
    Assert.assertEquals(this.user2MusicTypeList, createdUser2.getMusicTypes());
}
/**
* Tests createUser() method.
* @throws Exception if exception occurs.
*/
/**
 * createUser(): a second user reusing an existing login/password pair must
 * be rejected — only the first user remains, unmodified.
 * assertEquals argument order fixed to (expected, actual).
 * @throws Exception if exception occurs.
 */
@Test
public void whenCreateTwoUsersInEmptyDBWithTheSameLoginAndPasswordThenTheSecondUserIsNotInDB() throws Exception {
    this.manager.deleteAllUsers();
    this.createUser1();
    this.createUser2WithUser1LoginAndUser1Password();
    User createdUser = this.manager.getUserById(this.createdUser1Id);
    Assert.assertEquals(1, this.manager.getAllUsers().size());
    Assert.assertEquals(this.user1Name, createdUser.getName());
    Assert.assertEquals(this.user1login, createdUser.getLogin());
    Assert.assertEquals(this.user1Password, createdUser.getPassword());
    Assert.assertEquals(this.user1Address.getZip_code(), createdUser.getAddress().getZip_code());
    Assert.assertEquals(this.user1Address.getCountry(), createdUser.getAddress().getCountry());
    Assert.assertEquals(this.user1Address.getCity(), createdUser.getAddress().getCity());
    Assert.assertEquals(this.user1Address.getStreet(), createdUser.getAddress().getStreet());
    Assert.assertEquals(this.user1Address.getHouse(), createdUser.getAddress().getHouse());
    Assert.assertEquals(this.user1Address.getFlat(), createdUser.getAddress().getFlat());
    Assert.assertEquals(this.user1Role, createdUser.getRole());
    Assert.assertEquals(this.user1MusicTypeList, createdUser.getMusicTypes());
}
/**
* Tests getAllUsers() method.
* @throws Exception if exception occurs.
*/
/**
 * getAllUsers(): both created users are returned with all their attributes.
 * Fixes: getAllUsers() fetched once and reused instead of three separate
 * round-trips; assertEquals argument order corrected to (expected, actual).
 * @throws Exception if exception occurs.
 */
@Test
public void whenGetAllUsersThenExpectedUsersAreReturned() throws Exception {
    this.manager.deleteAllUsers();
    this.createUser1();
    this.createUser2();
    List<User> users = this.manager.getAllUsers();
    Assert.assertEquals(2, users.size());
    User createdUser = users.get(0);
    User createdUser2 = users.get(1);
    Assert.assertEquals(this.user1Name, createdUser.getName());
    Assert.assertEquals(this.user1login, createdUser.getLogin());
    Assert.assertEquals(this.user1Password, createdUser.getPassword());
    Assert.assertEquals(this.user1Address.getZip_code(), createdUser.getAddress().getZip_code());
    Assert.assertEquals(this.user1Address.getCountry(), createdUser.getAddress().getCountry());
    Assert.assertEquals(this.user1Address.getCity(), createdUser.getAddress().getCity());
    Assert.assertEquals(this.user1Address.getStreet(), createdUser.getAddress().getStreet());
    Assert.assertEquals(this.user1Address.getHouse(), createdUser.getAddress().getHouse());
    Assert.assertEquals(this.user1Address.getFlat(), createdUser.getAddress().getFlat());
    Assert.assertEquals(this.user1Role, createdUser.getRole());
    Assert.assertEquals(this.user1MusicTypeList, createdUser.getMusicTypes());
    Assert.assertEquals(this.user2Name, createdUser2.getName());
    Assert.assertEquals(this.user2login, createdUser2.getLogin());
    Assert.assertEquals(this.user2Password, createdUser2.getPassword());
    Assert.assertEquals(this.user2Address.getZip_code(), createdUser2.getAddress().getZip_code());
    Assert.assertEquals(this.user2Address.getCountry(), createdUser2.getAddress().getCountry());
    Assert.assertEquals(this.user2Address.getCity(), createdUser2.getAddress().getCity());
    Assert.assertEquals(this.user2Address.getStreet(), createdUser2.getAddress().getStreet());
    Assert.assertEquals(this.user2Address.getHouse(), createdUser2.getAddress().getHouse());
    Assert.assertEquals(this.user2Address.getFlat(), createdUser2.getAddress().getFlat());
    Assert.assertEquals(this.user2Role, createdUser2.getRole());
    Assert.assertEquals(this.user2MusicTypeList, createdUser2.getMusicTypes());
}
/**
* Tests getUserById() method.
* @throws Exception if exception occurs.
*/
/**
 * getUserById(): a freshly created user can be fetched back by the id
 * returned from createUser(), with its key attributes intact.
 * Fix: the test previously delegated to the getAllUsers() test and never
 * called getUserById() directly, so a getUserById() regression would have
 * been reported under the wrong test name.
 * @throws Exception if exception occurs.
 */
@Test
public void whenGetUserByIdThenExpectedUserIsReturned() throws Exception {
    this.manager.deleteAllUsers();
    this.createUser1();
    User createdUser = this.manager.getUserById(this.createdUser1Id);
    Assert.assertNotNull(createdUser);
    Assert.assertEquals(this.user1Name, createdUser.getName());
    Assert.assertEquals(this.user1login, createdUser.getLogin());
    Assert.assertEquals(this.user1Password, createdUser.getPassword());
    Assert.assertEquals(this.user1Role, createdUser.getRole());
    Assert.assertEquals(this.user1MusicTypeList, createdUser.getMusicTypes());
}
/**
* Tests getUserByRole() method.
* @throws Exception if exception occurs.
*/
/**
 * getUserByRole(): the single user with the requested role is returned with
 * all its attributes.
 * Fixes: assertions now check the user actually returned by getUserByRole()
 * (the original asserted against getAllUsers().get(0) instead); assertEquals
 * argument order corrected to (expected, actual).
 * @throws Exception if exception occurs.
 */
@Test
public void whenGetUserByRoleThenExpectedUserIsReturned() throws Exception {
    this.manager.deleteAllUsers();
    this.createUser1();
    List<User> createdUsers = this.manager.getUserByRole(this.user1Role);
    Assert.assertEquals(1, createdUsers.size());
    User createdUser = createdUsers.get(0);
    Assert.assertEquals(this.user1Name, createdUser.getName());
    Assert.assertEquals(this.user1login, createdUser.getLogin());
    Assert.assertEquals(this.user1Password, createdUser.getPassword());
    Assert.assertEquals(this.user1Address.getZip_code(), createdUser.getAddress().getZip_code());
    Assert.assertEquals(this.user1Address.getCountry(), createdUser.getAddress().getCountry());
    Assert.assertEquals(this.user1Address.getCity(), createdUser.getAddress().getCity());
    Assert.assertEquals(this.user1Address.getStreet(), createdUser.getAddress().getStreet());
    Assert.assertEquals(this.user1Address.getHouse(), createdUser.getAddress().getHouse());
    Assert.assertEquals(this.user1Address.getFlat(), createdUser.getAddress().getFlat());
    Assert.assertEquals(this.user1Role, createdUser.getRole());
    Assert.assertEquals(this.user1MusicTypeList, createdUser.getMusicTypes());
}
/**
* Tests getUserByLogin() method.
* @throws Exception if exception occurs.
*/
/**
 * getUserIdByLoginAndPassword(): the user matching the given login/password
 * pair is returned with all its attributes.
 * assertEquals argument order fixed to (expected, actual).
 * @throws Exception if exception occurs.
 */
@Test
public void whenGetUserByLoginThenExpectedUser() throws Exception {
    this.manager.deleteAllUsers();
    this.createUser1();
    this.createUser2();
    User expectedUser2 = this.manager.getUserIdByLoginAndPassword(this.user2login, this.user2Password);
    Assert.assertEquals(this.user2Name, expectedUser2.getName());
    Assert.assertEquals(this.user2login, expectedUser2.getLogin());
    Assert.assertEquals(this.user2Password, expectedUser2.getPassword());
    Assert.assertEquals(this.user2Address.getZip_code(), expectedUser2.getAddress().getZip_code());
    Assert.assertEquals(this.user2Address.getCountry(), expectedUser2.getAddress().getCountry());
    Assert.assertEquals(this.user2Address.getCity(), expectedUser2.getAddress().getCity());
    Assert.assertEquals(this.user2Address.getStreet(), expectedUser2.getAddress().getStreet());
    Assert.assertEquals(this.user2Address.getHouse(), expectedUser2.getAddress().getHouse());
    Assert.assertEquals(this.user2Address.getFlat(), expectedUser2.getAddress().getFlat());
    Assert.assertEquals(this.user2Role, expectedUser2.getRole());
    Assert.assertEquals(this.user2MusicTypeList, expectedUser2.getMusicTypes());
}
/**
 * Tests deleteUser() method: after deleting user1 by id, only one user remains
 * and looking user1 up by id yields nothing.
 * @throws Exception if exception occurs.
 */
@Test
public void whenDeleteUserThenThisUserIsNotInDB() throws Exception {
    this.manager.deleteAllUsers();
    this.createUser1();
    this.createUser2();
    Assert.assertEquals(2, this.manager.getAllUsers().size());
    this.manager.deleteUser(this.createdUser1Id);
    Assert.assertEquals(1, this.manager.getAllUsers().size());
    User expectedUser = this.manager.getUserById(this.createdUser1Id);
    // assertNull reports a clearer failure than assertEquals(null, ...).
    Assert.assertNull(expectedUser);
}
/**
 * Tests deleteAllUsers() method: a table holding two users must be empty after
 * deleteAllUsers() is invoked.
 * @throws Exception if exception occurs.
 */
@Test
public void whenDBIsNotEmptyAndDeleteAllUsersThenUserTableIsEmpty() throws Exception {
    this.manager.deleteAllUsers();
    this.createUser1();
    this.createUser2();
    // Precondition: exactly the two freshly created users are present.
    List<User> usersBeforeDeletion = this.manager.getAllUsers();
    Assert.assertEquals(2, usersBeforeDeletion.size());
    this.manager.deleteAllUsers();
    // Postcondition: the user table is empty.
    List<User> usersAfterDeletion = this.manager.getAllUsers();
    Assert.assertEquals(0, usersAfterDeletion.size());
}
/**
 * Tests updateUser() method: updating the single stored user with user2's data
 * must replace its fields in place without creating a second row.
 * @throws Exception if exception occurs.
 */
@Test
public void whenUpdateUserThenOldUserIsNotInDBAndNewUserIsInDB() throws Exception {
    this.manager.deleteAllUsers();
    this.createUser1();
    Assert.assertEquals(1, this.manager.getAllUsers().size());
    // Overwrite user1's record with user2's data.
    this.manager.updateUser(this.createdUser1Id, this.user2Name, this.user2Surname, this.user2login, this.user2Password,
            this.user2Address, this.user2Role, this.user2MusicTypeList);
    User updatedUser = this.manager.getAllUsers().get(0);
    // The update must replace, not add: still exactly one user in the table.
    Assert.assertEquals(1, this.manager.getAllUsers().size());
    Assert.assertEquals(updatedUser.getName(), this.user2Name);
    Assert.assertEquals(updatedUser.getLogin(), this.user2login);
    Assert.assertEquals(updatedUser.getPassword(), this.user2Password);
    Assert.assertEquals(updatedUser.getAddress().getZip_code(), this.user2Address.getZip_code());
    Assert.assertEquals(updatedUser.getAddress().getCountry(), this.user2Address.getCountry());
    Assert.assertEquals(updatedUser.getAddress().getCity(), this.user2Address.getCity());
    Assert.assertEquals(updatedUser.getAddress().getStreet(), this.user2Address.getStreet());
    Assert.assertEquals(updatedUser.getAddress().getHouse(), this.user2Address.getHouse());
    Assert.assertEquals(updatedUser.getAddress().getFlat(), this.user2Address.getFlat());
    Assert.assertEquals(updatedUser.getRole(), this.user2Role);
    Assert.assertEquals(updatedUser.getMusicTypes(), this.user2MusicTypeList);
}
}
| |
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.index;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.collection.OCompositeKey;
import com.orientechnologies.orient.core.db.record.OMultiValueChangeEvent;
import com.orientechnologies.orient.core.db.record.ORecordElement;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
/**
* Index that consist of several indexDefinitions like {@link OPropertyIndexDefinition}.
*/
/**
 * Index definition composed of several sub-definitions such as {@link OPropertyIndexDefinition}.
 * Keys produced by this definition are {@link OCompositeKey} instances assembled from the
 * sub-definitions' values in order. At most one sub-definition may be multi-value
 * ({@link OIndexDefinitionMultiValue}); in that case a single document may map to several
 * composite keys (one per collection element).
 */
public class OCompositeIndexDefinition extends OAbstractIndexDefinition {
  /** Ordered sub-definitions; composite keys are assembled in this order. */
  private final List<OIndexDefinition> indexDefinitions;
  /** Name of the class that owns this index; may be null until assigned or deserialized. */
  private String               className;
  /** Index of the single multi-value sub-definition, or -1 when none is present. */
  private int                  multiValueDefinitionIndex = -1;

  public OCompositeIndexDefinition() {
    indexDefinitions = new ArrayList<OIndexDefinition>(5);
  }

  /**
   * Constructor for new index creation.
   *
   * @param iClassName
   *          - name of class which is owner of this index
   */
  public OCompositeIndexDefinition(final String iClassName) {
    indexDefinitions = new ArrayList<OIndexDefinition>(5);
    className = iClassName;
  }

  /**
   * Constructor for new index creation.
   *
   * @param iClassName
   *          - name of class which is owner of this index
   * @param iIndexes
   *          List of indexDefinitions to add in given index.
   */
  public OCompositeIndexDefinition(final String iClassName, final List<? extends OIndexDefinition> iIndexes) {
    indexDefinitions = new ArrayList<OIndexDefinition>(5);
    // Reuse addIndex() so the single-multi-value invariant is enforced in one place.
    for (OIndexDefinition indexDefinition : iIndexes)
      addIndex(indexDefinition);
    className = iClassName;
  }

  /**
   * {@inheritDoc}
   */
  public String getClassName() {
    return className;
  }

  /**
   * Add new indexDefinition in current composite.
   *
   * @param indexDefinition
   *          Index to add.
   * @throws OIndexException
   *           if a multi-value sub-definition is already present and another one is added
   */
  public void addIndex(final OIndexDefinition indexDefinition) {
    indexDefinitions.add(indexDefinition);
    if (indexDefinition instanceof OIndexDefinitionMultiValue) {
      if (multiValueDefinitionIndex == -1)
        multiValueDefinitionIndex = indexDefinitions.size() - 1;
      else
        throw new OIndexException("Composite key can not contain more than one collection item");
    }
  }

  /**
   * {@inheritDoc}
   */
  public List<String> getFields() {
    final List<String> fields = new LinkedList<String>();
    for (final OIndexDefinition indexDefinition : indexDefinitions) {
      fields.addAll(indexDefinition.getFields());
    }
    return Collections.unmodifiableList(fields);
  }

  /**
   * {@inheritDoc}
   */
  public List<String> getFieldsToIndex() {
    final List<String> fields = new LinkedList<String>();
    for (final OIndexDefinition indexDefinition : indexDefinitions) {
      fields.addAll(indexDefinition.getFieldsToIndex());
    }
    return Collections.unmodifiableList(fields);
  }

  /**
   * {@inheritDoc}
   * <p>
   * Returns null as soon as any sub-definition yields null (documents with partial keys are
   * not indexed). When a multi-value sub-definition contributes a collection, the result is a
   * List of composite keys (one per element); otherwise a single {@link OCompositeKey}.
   */
  public Object getDocumentValueToIndex(final ODocument iDocument) {
    final List<OCompositeKey> compositeKeys = new ArrayList<OCompositeKey>(10);
    final OCompositeKey firstKey = new OCompositeKey();
    boolean containsCollection = false;
    compositeKeys.add(firstKey);
    for (final OIndexDefinition indexDefinition : indexDefinitions) {
      final Object result = indexDefinition.getDocumentValueToIndex(iDocument);
      if (result == null)
        return null;
      containsCollection = addKey(firstKey, compositeKeys, containsCollection, result);
    }
    if (!containsCollection)
      return firstKey;
    return compositeKeys;
  }

  public int getMultiValueDefinitionIndex() {
    return multiValueDefinitionIndex;
  }

  /** Returns the first field of the multi-value sub-definition, or null if there is none. */
  public String getMultiValueField() {
    if (multiValueDefinitionIndex >= 0)
      return indexDefinitions.get(multiValueDefinitionIndex).getFields().get(0);
    return null;
  }

  /**
   * {@inheritDoc}
   * <p>
   * Parameters are consumed left to right, each sub-definition taking its own paramCount
   * worth of values. Returns null when any sub-definition produces a null key.
   */
  public Object createValue(final List<?> params) {
    int currentParamIndex = 0;
    final OCompositeKey firstKey = new OCompositeKey();
    final List<OCompositeKey> compositeKeys = new ArrayList<OCompositeKey>(10);
    compositeKeys.add(firstKey);
    boolean containsCollection = false;
    for (final OIndexDefinition indexDefinition : indexDefinitions) {
      if (currentParamIndex + 1 > params.size())
        break; // fewer params than sub-definitions: build a prefix key
      final int endIndex;
      if (currentParamIndex + indexDefinition.getParamCount() > params.size())
        endIndex = params.size();
      else
        endIndex = currentParamIndex + indexDefinition.getParamCount();
      final List<?> indexParams = params.subList(currentParamIndex, endIndex);
      currentParamIndex += indexDefinition.getParamCount();
      final Object keyValue = indexDefinition.createValue(indexParams);
      if (keyValue == null)
        return null;
      containsCollection = addKey(firstKey, compositeKeys, containsCollection, keyValue);
    }
    if (!containsCollection)
      return firstKey;
    return compositeKeys;
  }

  /** Returns the multi-value sub-definition, or null if this composite has none. */
  public OIndexDefinitionMultiValue getMultiValueDefinition() {
    if (multiValueDefinitionIndex > -1)
      return (OIndexDefinitionMultiValue) indexDefinitions.get(multiValueDefinitionIndex);
    return null;
  }

  /**
   * Builds exactly one composite key from the given parameters; a multi-value
   * sub-definition contributes a single value (via createSingleValue) instead of
   * fanning out into several keys. Returns null when any sub-definition yields null.
   */
  public OCompositeKey createSingleValue(final List<?> params) {
    final OCompositeKey compositeKey = new OCompositeKey();
    int currentParamIndex = 0;
    for (final OIndexDefinition indexDefinition : indexDefinitions) {
      if (currentParamIndex + 1 > params.size())
        break;
      final int endIndex;
      if (currentParamIndex + indexDefinition.getParamCount() > params.size())
        endIndex = params.size();
      else
        endIndex = currentParamIndex + indexDefinition.getParamCount();
      final List<?> indexParams = params.subList(currentParamIndex, endIndex);
      currentParamIndex += indexDefinition.getParamCount();
      final Object keyValue;
      if (indexDefinition instanceof OIndexDefinitionMultiValue)
        keyValue = ((OIndexDefinitionMultiValue) indexDefinition).createSingleValue(indexParams.toArray());
      else
        keyValue = indexDefinition.createValue(indexParams);
      if (keyValue == null)
        return null;
      compositeKey.addKey(keyValue);
    }
    return compositeKey;
  }

  /**
   * Folds the next key value into the composite key(s) being built.
   * <p>
   * A Collection value fans the single prefix key out into one composite key per element;
   * only one collection is allowed per composite. Scalar values are appended to every key
   * built so far.
   *
   * @return the updated containsCollection flag
   */
  private static boolean addKey(OCompositeKey firstKey, List<OCompositeKey> compositeKeys, boolean containsCollection,
      Object keyValue) {
    if (keyValue instanceof Collection) {
      final Collection<?> collectionKey = (Collection<?>) keyValue;
      if (!containsCollection) {
        // Clone the prefix built so far once per extra collection element.
        for (int i = 1; i < collectionKey.size(); i++) {
          final OCompositeKey compositeKey = new OCompositeKey(firstKey.getKeys());
          compositeKeys.add(compositeKey);
        }
      } else {
        throw new OIndexException("Composite key can not contain more than one collection item");
      }
      int compositeIndex = 0;
      for (final Object keyItem : collectionKey) {
        final OCompositeKey compositeKey = compositeKeys.get(compositeIndex);
        compositeKey.addKey(keyItem);
        compositeIndex++;
      }
      containsCollection = true;
    } else if (containsCollection) {
      for (final OCompositeKey compositeKey : compositeKeys)
        compositeKey.addKey(keyValue);
    } else {
      firstKey.addKey(keyValue);
    }
    return containsCollection;
  }

  /**
   * {@inheritDoc}
   */
  public Object createValue(final Object... params) {
    return createValue(Arrays.asList(params));
  }

  /**
   * Translates a change event on the multi-value field into composite-key additions and
   * removals, wrapping the target maps so single values are expanded to full composite keys.
   * <p>
   * Precondition: a multi-value sub-definition must be present (multiValueDefinitionIndex >= 0).
   */
  public void processChangeEvent(OMultiValueChangeEvent<?, ?> changeEvent, Map<OCompositeKey, Integer> keysToAdd,
      Map<OCompositeKey, Integer> keysToRemove, Object... params) {
    final OIndexDefinitionMultiValue indexDefinitionMultiValue = (OIndexDefinitionMultiValue) indexDefinitions
        .get(multiValueDefinitionIndex);
    final CompositeWrapperMap compositeWrapperKeysToAdd = new CompositeWrapperMap(keysToAdd, indexDefinitions, params,
        multiValueDefinitionIndex);
    final CompositeWrapperMap compositeWrapperKeysToRemove = new CompositeWrapperMap(keysToRemove, indexDefinitions, params,
        multiValueDefinitionIndex);
    indexDefinitionMultiValue.processChangeEvent(changeEvent, compositeWrapperKeysToAdd, compositeWrapperKeysToRemove);
  }

  /**
   * {@inheritDoc}
   */
  public int getParamCount() {
    int total = 0;
    for (final OIndexDefinition indexDefinition : indexDefinitions)
      total += indexDefinition.getParamCount();
    return total;
  }

  /**
   * {@inheritDoc}
   */
  public OType[] getTypes() {
    final List<OType> types = new LinkedList<OType>();
    for (final OIndexDefinition indexDefinition : indexDefinitions)
      Collections.addAll(types, indexDefinition.getTypes());
    return types.toArray(new OType[types.size()]);
  }

  @Override
  public boolean equals(final Object o) {
    if (this == o)
      return true;
    if (o == null || getClass() != o.getClass())
      return false;
    final OCompositeIndexDefinition that = (OCompositeIndexDefinition) o;
    // Null-safe: className is null when the no-arg constructor was used and
    // fromStream() has not run yet.
    if (className != null ? !className.equals(that.className) : that.className != null)
      return false;
    if (!indexDefinitions.equals(that.indexDefinitions))
      return false;
    return true;
  }

  @Override
  public int hashCode() {
    int result = indexDefinitions.hashCode();
    // Null-safe to stay consistent with equals().
    result = 31 * result + (className != null ? className.hashCode() : 0);
    return result;
  }

  @Override
  public String toString() {
    return "OCompositeIndexDefinition{" + "indexDefinitions=" + indexDefinitions + ", className='" + className + '\'' + '}';
  }

  /**
   * {@inheritDoc}
   * <p>
   * Serializes the class name, each sub-definition's document plus its concrete class name
   * (needed by fromStream() to re-instantiate it), and the collate name.
   */
  @Override
  public ODocument toStream() {
    document.setInternalStatus(ORecordElement.STATUS.UNMARSHALLING);
    final List<ODocument> inds = new ArrayList<ODocument>(indexDefinitions.size());
    final List<String> indClasses = new ArrayList<String>(indexDefinitions.size());
    try {
      document.field("className", className);
      for (final OIndexDefinition indexDefinition : indexDefinitions) {
        final ODocument indexDocument = indexDefinition.toStream();
        inds.add(indexDocument);
        indClasses.add(indexDefinition.getClass().getName());
      }
      document.field("indexDefinitions", inds, OType.EMBEDDEDLIST);
      document.field("indClasses", indClasses, OType.EMBEDDEDLIST);
    } finally {
      document.setInternalStatus(ORecordElement.STATUS.LOADED);
    }
    // NOTE(review): collate is written after the status reset, unlike the other
    // fields; kept as-is to preserve the original serialization behavior.
    document.field("collate", collate.getName());
    return document;
  }

  /**
   * {@inheritDoc}
   * <p>
   * Type names are only listed when there is no multi-value sub-definition.
   */
  public String toCreateIndexDDL(final String indexName, final String indexType) {
    final StringBuilder ddl = new StringBuilder("create index ");
    ddl.append(indexName).append(" on ").append(className).append(" ( ");
    final Iterator<String> fieldIterator = getFieldsToIndex().iterator();
    if (fieldIterator.hasNext()) {
      ddl.append(fieldIterator.next());
      while (fieldIterator.hasNext()) {
        ddl.append(", ").append(fieldIterator.next());
      }
    }
    ddl.append(" ) ").append(indexType).append(' ');
    if (multiValueDefinitionIndex == -1) {
      boolean first = true;
      for (OType oType : getTypes()) {
        if (first)
          first = false;
        else
          ddl.append(", ");
        ddl.append(oType.name());
      }
    }
    return ddl.toString();
  }

  /**
   * {@inheritDoc}
   * <p>
   * Re-instantiates each sub-definition from its recorded class name via reflection.
   */
  @Override
  protected void fromStream() {
    try {
      className = document.field("className");
      final List<ODocument> inds = document.field("indexDefinitions");
      final List<String> indClasses = document.field("indClasses");
      indexDefinitions.clear();
      // Reset alongside the cleared list so a previous multi-value position
      // cannot survive a re-deserialization that has no multi-value definition.
      multiValueDefinitionIndex = -1;
      for (int i = 0; i < indClasses.size(); i++) {
        final Class<?> clazz = Class.forName(indClasses.get(i));
        final ODocument indDoc = inds.get(i);
        final OIndexDefinition indexDefinition = (OIndexDefinition) clazz.getDeclaredConstructor().newInstance();
        indexDefinition.fromStream(indDoc);
        indexDefinitions.add(indexDefinition);
        if (indexDefinition instanceof OIndexDefinitionMultiValue)
          multiValueDefinitionIndex = indexDefinitions.size() - 1;
      }
      setCollate((String) document.field("collate"));
    } catch (final ReflectiveOperationException e) {
      // Covers ClassNotFoundException, NoSuchMethodException, InvocationTargetException,
      // InstantiationException and IllegalAccessException (all reflection failures).
      throw new OIndexException("Error during composite index deserialization", e);
    }
  }

  /**
   * Map view that accepts single multi-value keys and transparently expands them into full
   * composite keys (using the fixed params for the non-multi-value positions) before
   * delegating to the underlying map.
   */
  private static final class CompositeWrapperMap implements Map<Object, Integer> {
    private final Map<OCompositeKey, Integer> underlying;
    private final Object[]                    params;
    private final List<OIndexDefinition>      indexDefinitions;
    private final int                         multiValueIndex;

    private CompositeWrapperMap(Map<OCompositeKey, Integer> underlying, List<OIndexDefinition> indexDefinitions, Object[] params,
        int multiValueIndex) {
      this.underlying = underlying;
      this.params = params;
      this.multiValueIndex = multiValueIndex;
      this.indexDefinitions = indexDefinitions;
    }

    public int size() {
      return underlying.size();
    }

    public boolean isEmpty() {
      return underlying.isEmpty();
    }

    public boolean containsKey(Object key) {
      final OCompositeKey compositeKey = convertToCompositeKey(key);
      return underlying.containsKey(compositeKey);
    }

    public boolean containsValue(Object value) {
      return underlying.containsValue(value);
    }

    public Integer get(Object key) {
      return underlying.get(convertToCompositeKey(key));
    }

    public Integer put(Object key, Integer value) {
      final OCompositeKey compositeKey = convertToCompositeKey(key);
      return underlying.put(compositeKey, value);
    }

    public Integer remove(Object key) {
      return underlying.remove(convertToCompositeKey(key));
    }

    public void putAll(Map<? extends Object, ? extends Integer> m) {
      throw new UnsupportedOperationException("Unsupported because of performance reasons");
    }

    public void clear() {
      underlying.clear();
    }

    public Set<Object> keySet() {
      throw new UnsupportedOperationException("Unsupported because of performance reasons");
    }

    public Collection<Integer> values() {
      return underlying.values();
    }

    public Set<Map.Entry<Object, Integer>> entrySet() {
      throw new UnsupportedOperationException();
    }

    /**
     * Builds a full composite key: fixed params fill the non-multi-value positions,
     * the given key fills the multi-value position.
     */
    private OCompositeKey convertToCompositeKey(Object key) {
      final OCompositeKey compositeKey = new OCompositeKey();
      int paramsIndex = 0;
      for (int i = 0; i < indexDefinitions.size(); i++) {
        final OIndexDefinition indexDefinition = indexDefinitions.get(i);
        if (i != multiValueIndex) {
          compositeKey.addKey(indexDefinition.createValue(params[paramsIndex]));
          paramsIndex++;
        } else
          compositeKey.addKey(((OIndexDefinitionMultiValue) indexDefinition).createSingleValue(key));
      }
      return compositeKey;
    }
  }

  @Override
  public boolean isAutomatic() {
    return indexDefinitions.get(0).isAutomatic();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.client.impl;
import static java.util.UUID.randomUUID;
import static org.apache.pulsar.broker.service.BrokerService.BROKER_SERVICE_CONFIGURATION_PATH;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.lang.reflect.Field;
import java.net.URI;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.pulsar.broker.PulsarService;
import org.apache.pulsar.broker.namespace.OwnershipCache;
import org.apache.pulsar.broker.service.Topic;
import org.apache.pulsar.client.admin.PulsarAdminException;
import org.apache.pulsar.client.api.Consumer;
import org.apache.pulsar.client.api.ConsumerBuilder;
import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.MessageId;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.ProducerConsumerBase;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.client.api.SubscriptionType;
import org.apache.pulsar.client.impl.HandlerState.State;
import org.apache.pulsar.common.api.PulsarHandler;
import org.apache.pulsar.common.naming.NamespaceBundle;
import org.apache.pulsar.common.naming.TopicName;
import org.apache.pulsar.common.policies.data.ClusterData;
import org.apache.pulsar.common.policies.data.RetentionPolicies;
import org.apache.pulsar.common.util.FutureUtil;
import org.apache.pulsar.common.util.ObjectMapperFactory;
import org.apache.pulsar.common.util.collections.ConcurrentLongHashMap;
import org.apache.pulsar.zookeeper.ZooKeeperDataCache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
public class BrokerClientIntegrationTest extends ProducerConsumerBase {
private static final Logger log = LoggerFactory.getLogger(BrokerClientIntegrationTest.class);
/**
 * Starts the embedded broker and prepares the shared producer/consumer fixture
 * before every test method.
 *
 * @throws Exception if broker or fixture initialization fails
 */
@BeforeMethod
@Override
protected void setup() throws Exception {
    super.internalSetup();
    producerBaseSetup();
}
/**
 * Tears down the embedded broker and test fixture after every test method.
 *
 * @throws Exception if shutdown fails
 */
@AfterMethod
@Override
protected void cleanup() throws Exception {
    super.internalCleanup();
}
/**
 * Supplies the subscription types (Shared and Failover) exercised by the
 * parameterized tests.
 */
@DataProvider
public Object[][] subType() {
    final Object[][] subscriptionTypes = new Object[2][];
    subscriptionTypes[0] = new Object[] { SubscriptionType.Shared };
    subscriptionTypes[1] = new Object[] { SubscriptionType.Failover };
    return subscriptionTypes;
}
/**
 * Verifies that unloading a namespace-bundle doesn't close the shared connection used by another namespace-bundle.
 *
 * <pre>
 * 1. after disabling broker from the loadbalancer
 * 2. unload namespace-bundle "my-ns1" which disconnects clients (producer/consumer) connected on that namespace-bundle
 * 3. but doesn't close the connection for namespace-bundle "my-ns2" and its clients stay connected
 * 4. verifies unloaded "my-ns1" should not connect again with the broker as the broker is disabled
 * 5. unload "my-ns2" which closes the connection as the broker doesn't have any more clients connected on that connection
 * 6. all namespace-bundles are in "connecting" state and waiting for an available broker
 * </pre>
 *
 * @throws Exception
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Test
public void testDisconnectClientWithoutClosingConnection() throws Exception {
    final String ns1 = "my-property/con-ns1";
    final String ns2 = "my-property/con-ns2";
    admin.namespaces().createNamespace(ns1, Sets.newHashSet("test"));
    admin.namespaces().createNamespace(ns2, Sets.newHashSet("test"));
    final String topic1 = "persistent://" + ns1 + "/my-topic";
    final String topic2 = "persistent://" + ns2 + "/my-topic";
    // Clients on both namespaces; they share the same broker connection.
    ConsumerImpl<byte[]> cons1 = (ConsumerImpl<byte[]>) pulsarClient.newConsumer().topic(topic1)
            .subscriptionName("my-subscriber-name").subscribe();
    ProducerImpl<byte[]> prod1 = (ProducerImpl<byte[]>) pulsarClient.newProducer().topic(topic1).create();
    ProducerImpl<byte[]> prod2 = (ProducerImpl<byte[]>) pulsarClient.newProducer().topic(topic2).create();
    // Wrap each client in a Mockito spy that delegates state/connection accessors and
    // connectionClosed() to the real instance, so the close signal can be verified
    // without changing client behavior.
    ConsumerImpl<byte[]> consumer1 = spy(cons1);
    doAnswer(invocationOnMock -> cons1.getState()).when(consumer1).getState();
    doAnswer(invocationOnMock -> cons1.getClientCnx()).when(consumer1).getClientCnx();
    doAnswer(invocationOnMock -> cons1.cnx()).when(consumer1).cnx();
    doAnswer(invocationOnMock -> {
        cons1.connectionClosed((ClientCnx) invocationOnMock.getArguments()[0]);
        return null;
    }).when(consumer1).connectionClosed(anyObject());
    ProducerImpl<byte[]> producer1 = spy(prod1);
    doAnswer(invocationOnMock -> prod1.getState()).when(producer1).getState();
    doAnswer(invocationOnMock -> prod1.getClientCnx()).when(producer1).getClientCnx();
    doAnswer(invocationOnMock -> prod1.cnx()).when(producer1).cnx();
    doAnswer(invocationOnMock -> {
        prod1.connectionClosed((ClientCnx) invocationOnMock.getArguments()[0]);
        return null;
    }).when(producer1).connectionClosed(anyObject());
    ProducerImpl<byte[]> producer2 = spy(prod2);
    doAnswer(invocationOnMock -> prod2.getState()).when(producer2).getState();
    doAnswer(invocationOnMock -> prod2.getClientCnx()).when(producer2).getClientCnx();
    doAnswer(invocationOnMock -> prod2.cnx()).when(producer2).cnx();
    doAnswer(invocationOnMock -> {
        prod2.connectionClosed((ClientCnx) invocationOnMock.getArguments()[0]);
        return null;
    }).when(producer2).connectionClosed(anyObject());
    // Swap the spies into the connection's internal producer/consumer registries via
    // reflection so the connection-close callbacks are dispatched to the spies.
    ClientCnx clientCnx = producer1.getClientCnx();
    Field pfield = ClientCnx.class.getDeclaredField("producers");
    pfield.setAccessible(true);
    Field cfield = ClientCnx.class.getDeclaredField("consumers");
    cfield.setAccessible(true);
    ConcurrentLongHashMap<ProducerImpl<byte[]>> producers = (ConcurrentLongHashMap) pfield.get(clientCnx);
    ConcurrentLongHashMap<ConsumerImpl<byte[]>> consumers = (ConcurrentLongHashMap) cfield.get(clientCnx);
    // Keep the original (non-spy) instances registered under spare ids before the entries
    // at ids 0/1 are overwritten with the spies — presumably so the originals also observe
    // the close signal; TODO(review): confirm intent.
    producers.put(2, producers.get(0));
    producers.put(3, producers.get(1));
    consumers.put(1, consumers.get(0));
    producers.put(0, producer1);
    producers.put(1, producer2);
    consumers.put(0, consumer1);
    // disable this broker to avoid any new requests
    pulsar.getLoadManager().get().disableBroker();
    NamespaceBundle bundle1 = pulsar.getNamespaceService().getBundle(TopicName.get(topic1));
    NamespaceBundle bundle2 = pulsar.getNamespaceService().getBundle(TopicName.get(topic2));
    // unload ns-bundle:1
    pulsar.getNamespaceService().unloadNamespaceBundle((NamespaceBundle) bundle1);
    // let server send signal to close-connection and client close the connection
    Thread.sleep(1000);
    // [1] Verify: producer1 must get connectionClosed signal
    verify(producer1, atLeastOnce()).connectionClosed(anyObject());
    // [2] Verify: consumer1 must get connectionClosed signal
    verify(consumer1, atLeastOnce()).connectionClosed(anyObject());
    // [3] Verify: producer2 should have not received connectionClosed signal
    verify(producer2, never()).connectionClosed(anyObject());
    // sleep for some time to let the disconnected producer and consumer try to connect again:
    // but they should not get connected with the same broker as that broker is already out of
    // the active-broker list
    Thread.sleep(200);
    // producer1 must not be able to connect again
    assertTrue(prod1.getClientCnx() == null);
    assertTrue(prod1.getState().equals(State.Connecting));
    // consumer1 must not be able to connect again
    assertTrue(cons1.getClientCnx() == null);
    assertTrue(cons1.getState().equals(State.Connecting));
    // producer2 must have a live connection
    assertTrue(prod2.getClientCnx() != null);
    assertTrue(prod2.getState().equals(State.Ready));
    // unload ns-bundle2 as well: this was the last bundle on the connection
    pulsar.getNamespaceService().unloadNamespaceBundle((NamespaceBundle) bundle2);
    // give producer2 some time to get the disconnect signal and get disconnected
    Thread.sleep(200);
    verify(producer2, atLeastOnce()).connectionClosed(anyObject());
    // producer1 must not be able to connect again
    assertTrue(prod1.getClientCnx() == null);
    assertTrue(prod1.getState().equals(State.Connecting));
    // consumer1 must not be able to connect again
    assertTrue(cons1.getClientCnx() == null);
    assertTrue(cons1.getState().equals(State.Connecting));
    // producer2 must not be able to connect again
    assertTrue(prod2.getClientCnx() == null);
    assertTrue(prod2.getState().equals(State.Connecting));
    producer1.close();
    producer2.close();
    consumer1.close();
    prod1.close();
    prod2.close();
    cons1.close();
}
/**
 * Verifies that closing the broker service unloads all owned bundles gracefully:
 * no bundle remains owned and every connected client is disconnected and left in
 * Connecting state.
 *
 * @throws Exception if setup or broker shutdown fails
 */
@Test
public void testCloseBrokerService() throws Exception {
    final String ns1 = "my-property/brok-ns1";
    final String ns2 = "my-property/brok-ns2";
    admin.namespaces().createNamespace(ns1, Sets.newHashSet("test"));
    admin.namespaces().createNamespace(ns2, Sets.newHashSet("test"));
    final String topic1 = "persistent://" + ns1 + "/my-topic";
    final String topic2 = "persistent://" + ns2 + "/my-topic";
    ConsumerImpl<byte[]> consumer1 = (ConsumerImpl<byte[]>) pulsarClient.newConsumer().topic(topic1)
            .subscriptionName("my-subscriber-name").subscribe();
    ProducerImpl<byte[]> producer1 = (ProducerImpl<byte[]>) pulsarClient.newProducer().topic(topic1).create();
    ProducerImpl<byte[]> producer2 = (ProducerImpl<byte[]>) pulsarClient.newProducer().topic(topic2).create();
    // Closing the broker service must unload every owned namespace bundle.
    pulsar.getBrokerService().close();
    // [1] OwnershipCache should not contain any namespaces any more.
    OwnershipCache ownershipCache = pulsar.getNamespaceService().getOwnershipCache();
    assertTrue(ownershipCache.getOwnedBundles().keySet().isEmpty());
    // Retry until all three clients have observed the disconnect (5 attempts, 100ms apart).
    retryStrategically((test) -> (producer1.getClientCnx() == null && consumer1.getClientCnx() == null
            && producer2.getClientCnx() == null), 5, 100);
    // [2] All clients must be disconnected and stuck in Connecting state.
    // assertNull/assertEquals give clearer failure output than assertTrue(x == null).
    assertNull(producer1.getClientCnx());
    Assert.assertEquals(producer1.getState(), State.Connecting);
    assertNull(consumer1.getClientCnx());
    Assert.assertEquals(consumer1.getState(), State.Connecting);
    assertNull(producer2.getClientCnx());
    Assert.assertEquals(producer2.getState(), State.Connecting);
    producer1.close();
    producer2.close();
    consumer1.close();
}
/**
 * It verifies that for a consumer which doesn't support batch-messages:
 * <p>
 * 1. the broker disconnects that consumer
 * <p>
 * 2. all those messages are redelivered to another batch-capable consumer under the same subscription
 *
 * @param subType subscription type under test (Shared or Failover)
 * @throws Exception
 */
@Test(timeOut = 7000, dataProvider = "subType")
public void testUnsupportedBatchMessageConsumer(SubscriptionType subType) throws Exception {
    log.info("-- Starting {} test --", methodName);
    final String topicName = "persistent://my-property/my-ns/my-topic1";
    final String subscriptionName = "my-subscriber-name" + subType;
    ConsumerImpl<byte[]> consumer1 = (ConsumerImpl<byte[]>) pulsarClient.newConsumer().topic(topicName)
            .subscriptionName(subscriptionName).subscriptionType(subType).subscribe();
    final int numMessagesPerBatch = 10;
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
    // Batching producer: effectively infinite publish delay, so messages accumulate into
    // one batch of numMessagesPerBatch and are only sent on flush().
    Producer<byte[]> batchProducer = pulsarClient.newProducer().topic(topicName).enableBatching(true)
            .batchingMaxPublishDelay(Long.MAX_VALUE, TimeUnit.SECONDS)
            .batchingMaxMessages(numMessagesPerBatch).create();
    // update consumer's version to incompatible batch-message version = Version.V3
    // (done via reflection on the broker-side connection handler, so the broker treats
    // consumer1 as a client that cannot decode batch messages)
    Topic topic = pulsar.getBrokerService().getOrCreateTopic(topicName).get();
    org.apache.pulsar.broker.service.Consumer brokerConsumer = topic.getSubscriptions().get(subscriptionName)
            .getConsumers().get(0);
    Field cnxField = org.apache.pulsar.broker.service.Consumer.class.getDeclaredField("cnx");
    cnxField.setAccessible(true);
    PulsarHandler cnx = (PulsarHandler) cnxField.get(brokerConsumer);
    Field versionField = PulsarHandler.class.getDeclaredField("remoteEndpointProtocolVersion");
    versionField.setAccessible(true);
    versionField.set(cnx, 3);
    // (1) send non-batch messages: consumer should be able to consume them
    for (int i = 0; i < numMessagesPerBatch; i++) {
        String message = "my-message-" + i;
        producer.send(message.getBytes());
    }
    Set<String> messageSet = Sets.newHashSet();
    Message<byte[]> msg = null;
    for (int i = 0; i < numMessagesPerBatch; i++) {
        msg = consumer1.receive(1, TimeUnit.SECONDS);
        String receivedMessage = new String(msg.getData());
        String expectedMessage = "my-message-" + i;
        testMessageOrderAndDuplicates(messageSet, receivedMessage, expectedMessage);
        consumer1.acknowledge(msg);
    }
    // Also set clientCnx of the consumer to null so it avoids reconnection, letting the
    // other consumer consume for verification
    consumer1.setClientCnx(null);
    // (2) send a batch-message which should not be consumable: the broker will disconnect
    // the incompatible consumer instead
    for (int i = 0; i < numMessagesPerBatch; i++) {
        String message = "my-message-" + i;
        batchProducer.sendAsync(message.getBytes());
    }
    batchProducer.flush();
    // consumer should have received no message as it should have been disconnected
    msg = consumer1.receive(2, TimeUnit.SECONDS);
    assertNull(msg);
    // subscribe consumer2 with a batch-supporting protocol version; the batch messages
    // must be redelivered to it under the same subscription
    Consumer<byte[]> consumer2 = pulsarClient.newConsumer().topic(topicName).subscriptionName(subscriptionName)
            .subscribe();
    messageSet.clear();
    for (int i = 0; i < numMessagesPerBatch; i++) {
        msg = consumer2.receive(1, TimeUnit.SECONDS);
        String receivedMessage = new String(msg.getData());
        log.debug("Received message: [{}]", receivedMessage);
        String expectedMessage = "my-message-" + i;
        testMessageOrderAndDuplicates(messageSet, receivedMessage, expectedMessage);
        consumer2.acknowledge(msg);
    }
    consumer2.close();
    producer.close();
    batchProducer.close();
    log.info("-- Exiting {} test --", methodName);
}
@Test(timeOut = 10000, dataProvider = "subType")
public void testResetCursor(SubscriptionType subType) throws Exception {
    // Publishes a warmup batch plus testSize timestamped messages, resets the subscription
    // cursor to (roughly) the publish-time midpoint via the admin API, and verifies that
    // exactly the messages published at or after that timestamp are replayed.
    final RetentionPolicies policy = new RetentionPolicies(60, 52 * 1024);
    final TopicName topicName = TopicName.get("persistent://my-property/my-ns/unacked-topic");
    final int warmup = 20;
    final int testSize = 150;
    final List<Message<byte[]>> received = new ArrayList<>();
    final String subsId = "sub";
    final NavigableMap<Long, TimestampEntryCount> publishTimeIdMap = new ConcurrentSkipListMap<>();
    // set delay time to start dispatching messages to active consumer in order to avoid message duplication
    conf.setActiveConsumerFailoverDelayTimeMillis(500);
    restartBroker();
    admin.namespaces().setRetention(topicName.getNamespace(), policy);
    // Listener records every received message, keyed by its publish time, so the test can
    // later compare the replayed set against the expected tail of the timeline.
    ConsumerBuilder<byte[]> consumerBuilder = pulsarClient.newConsumer().topic(topicName.toString())
            .subscriptionName(subsId).subscriptionType(subType).messageListener((consumer, msg) -> {
                try {
                    synchronized (received) {
                        received.add(msg);
                    }
                    consumer.acknowledge(msg);
                    long publishTime = ((MessageImpl<?>) msg).getPublishTime();
                    log.info(" publish time is " + publishTime + "," + msg.getMessageId());
                    TimestampEntryCount timestampEntryCount = publishTimeIdMap.computeIfAbsent(publishTime,
                            (k) -> new TimestampEntryCount(publishTime));
                    timestampEntryCount.incrementAndGet();
                } catch (final PulsarClientException e) {
                    log.warn("Failed to ack!");
                }
            });
    Consumer<byte[]> consumer1 = consumerBuilder.subscribe();
    Consumer<byte[]> consumer2 = consumerBuilder.subscribe();
    final Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName.toString()).create();
    log.info("warm up started for " + topicName.toString());
    // send warmup msgs
    byte[] msgBytes = new byte[1000];
    // fix: use primitive loop indices here and below (the original boxed `Integer`
    // indices forced an autobox on every iteration for no benefit)
    for (int i = 0; i < warmup; i++) {
        producer.send(msgBytes);
    }
    log.info("warm up finished.");
    // sleep to ensure receiving of msgs
    for (int n = 0; n < 10 && received.size() < warmup; n++) {
        Thread.sleep(200);
    }
    // validate received msgs
    Assert.assertEquals(received.size(), warmup);
    received.clear();
    // publish testSize num of msgs
    log.info("Sending more messages.");
    for (int n = 0; n < testSize; n++) {
        producer.send(msgBytes);
        Thread.sleep(1);
    }
    log.info("Sending more messages done.");
    Thread.sleep(3000);
    long begints = publishTimeIdMap.firstEntry().getKey();
    long endts = publishTimeIdMap.lastEntry().getKey();
    // find reset timestamp: midpoint of the observed publish-time window, snapped down to
    // an actual observed publish time
    long timestamp = (endts - begints) / 2 + begints;
    timestamp = publishTimeIdMap.floorKey(timestamp);
    NavigableMap<Long, TimestampEntryCount> expectedMessages = new ConcurrentSkipListMap<>();
    expectedMessages.putAll(publishTimeIdMap.tailMap(timestamp, true));
    received.clear();
    log.info("reset cursor to " + timestamp + " for topic " + topicName.toString() + " for subs " + subsId);
    log.info("issuing admin operation on " + admin.getServiceUrl());
    List<String> subList = admin.topics().getSubscriptions(topicName.toString());
    for (String subs : subList) {
        log.info("got sub " + subs);
    }
    publishTimeIdMap.clear();
    // reset the cursor to this timestamp
    Assert.assertTrue(subList.contains(subsId));
    admin.topics().resetCursor(topicName.toString(), subsId, timestamp);
    Thread.sleep(3000);
    int totalExpected = 0;
    for (TimestampEntryCount tec : expectedMessages.values()) {
        totalExpected += tec.numMessages;
    }
    // validate that replay happens after the timestamp
    Assert.assertTrue(publishTimeIdMap.firstEntry().getKey() >= timestamp);
    consumer1.close();
    consumer2.close();
    producer.close();
    // validate that expected and received counts match
    int totalReceived = 0;
    for (TimestampEntryCount tec : publishTimeIdMap.values()) {
        totalReceived += tec.numMessages;
    }
    Assert.assertEquals(totalReceived, totalExpected, "did not receive all messages on replay after reset");
    resetConfig();
    restartBroker();
}
/**
 * <pre>
 * Verifies: that client-cnx gets closed when server gives TooManyRequestException in certain time frame
 * 1. Client1: which has set MaxNumberOfRejectedRequestPerConnection=0
 * 2. Client2: which has set MaxNumberOfRejectedRequestPerConnection=100
 * 3. create multiple producer and make lookup-requests simultaneously
 * 4. Client1 receives TooManyLookupException and should close connection
 * </pre>
 *
 * @throws Exception
 */
@Test(timeOut = 5000)
public void testCloseConnectionOnBrokerRejectedRequest() throws Exception {
    final PulsarClient pulsarClient;
    final PulsarClient pulsarClient2;
    final String topicName = "persistent://prop/usw/my-ns/newTopic";
    // remember the original broker setting so it can be restored in the finally block
    // (fix: also corrected the "Conccurent" typo in this local's name)
    final int maxConcurrentLookupRequest = pulsar.getConfiguration().getMaxConcurrentLookupRequest();
    ExecutorService executor = null;
    try {
        final int concurrentLookupRequests = 20;
        // restart the broker allowing only a single concurrent lookup so that the flood
        // of lookups below reliably triggers TooManyRequests rejections
        stopBroker();
        pulsar.getConfiguration().setMaxConcurrentLookupRequest(1);
        startBroker();
        String lookupUrl = new URI("pulsar://localhost:" + BROKER_PORT).toString();
        // client1 tolerates zero rejected requests per connection, so it must close its cnx
        pulsarClient = PulsarClient.builder().serviceUrl(lookupUrl).statsInterval(0, TimeUnit.SECONDS)
                .maxNumberOfRejectedRequestPerConnection(0).build();
        // client2 keeps the default (higher) rejection tolerance and many connections
        pulsarClient2 = PulsarClient.builder().serviceUrl(lookupUrl).statsInterval(0, TimeUnit.SECONDS)
                .ioThreads(concurrentLookupRequests).connectionsPerBroker(20).build();
        ProducerImpl<byte[]> producer = (ProducerImpl<byte[]>) pulsarClient.newProducer().topic(topicName).create();
        ClientCnx cnx = producer.cnx();
        assertTrue(cnx.channel().isActive());
        executor = Executors.newFixedThreadPool(concurrentLookupRequests);
        final int totalProducer = 100;
        CountDownLatch latch = new CountDownLatch(totalProducer * 2);
        AtomicInteger failed = new AtomicInteger(0);
        for (int i = 0; i < totalProducer; i++) {
            executor.submit(() -> {
                pulsarClient2.newProducer().topic(topicName).createAsync().handle((ok, e) -> {
                    if (e != null) {
                        failed.set(1);
                    }
                    latch.countDown();
                    return null;
                });
                pulsarClient.newProducer().topic(topicName).createAsync().handle((ok, e) -> {
                    if (e != null) {
                        failed.set(1);
                    }
                    latch.countDown();
                    return null;
                });
            });
        }
        latch.await(10, TimeUnit.SECONDS);
        // at least one creation must have failed, which implies the connection was closed
        assertTrue(failed.get() == 1);
        try {
            pulsarClient.close();
            pulsarClient2.close();
        } catch (Exception e) {
            // Ok
        }
    } finally {
        // fix: shut down the worker pool (it was previously leaked) and restore the
        // broker configuration regardless of test outcome
        if (executor != null) {
            executor.shutdownNow();
        }
        pulsar.getConfiguration().setMaxConcurrentLookupRequest(maxConcurrentLookupRequest);
    }
}
/**
 * It verifies that broker throttles down configured concurrent topic loading requests
 *
 * <pre>
 * 1. Start broker with N maxConcurrentTopicLoadRequest
 * 2. create concurrent producers on different topics which makes broker to load topics concurrently
 * 3. Producer operationtimeout = 1 ms so, if producers creation will fail for throttled topics
 * 4. verify all producers should have connected
 * </pre>
 *
 * @throws Exception
 */
@Test(timeOut = 5000)
public void testMaxConcurrentTopicLoading() throws Exception {
    final PulsarClientImpl pulsarClient;
    final PulsarClientImpl pulsarClient2;
    final String topicName = "persistent://prop/usw/my-ns/cocurrentLoadingTopic";
    int concurrentTopic = pulsar.getConfiguration().getMaxConcurrentTopicLoadRequest();
    // fix: remember the original authorization setting so it can be restored in the
    // finally block (it was previously left disabled after the test)
    final boolean authorizationEnabled = pulsar.getConfiguration().isAuthorizationEnabled();
    ExecutorService executor = null;
    try {
        pulsar.getConfiguration().setAuthorizationEnabled(false);
        final int concurrentLookupRequests = 20;
        // restart the broker allowing only one concurrent topic-load so that the
        // concurrent producer creations below exercise the throttling path
        stopBroker();
        pulsar.getConfiguration().setMaxConcurrentTopicLoadRequest(1);
        startBroker();
        String lookupUrl = new URI("pulsar://localhost:" + BROKER_PORT).toString();
        pulsarClient = (PulsarClientImpl) PulsarClient.builder().serviceUrl(lookupUrl)
                .statsInterval(0, TimeUnit.SECONDS).maxNumberOfRejectedRequestPerConnection(0).build();
        pulsarClient2 = (PulsarClientImpl) PulsarClient.builder().serviceUrl(lookupUrl)
                .statsInterval(0, TimeUnit.SECONDS).ioThreads(concurrentLookupRequests).connectionsPerBroker(20)
                .build();
        ProducerImpl<byte[]> producer = (ProducerImpl<byte[]>) pulsarClient.newProducer().topic(topicName).create();
        ClientCnx cnx = producer.cnx();
        assertTrue(cnx.channel().isActive());
        executor = Executors.newFixedThreadPool(concurrentLookupRequests);
        final List<CompletableFuture<Producer<byte[]>>> futures = Lists.newArrayList();
        final int totalProducers = 10;
        CountDownLatch latch = new CountDownLatch(totalProducers);
        for (int i = 0; i < totalProducers; i++) {
            executor.submit(() -> {
                final String randomTopicName1 = topicName + randomUUID().toString();
                final String randomTopicName2 = topicName + randomUUID().toString();
                // pass producer-name to avoid exception: producer is already connected to topic
                synchronized (futures) {
                    futures.add(pulsarClient2.newProducer().topic(randomTopicName1).createAsync());
                    futures.add(pulsarClient.newProducer().topic(randomTopicName2).createAsync());
                }
                latch.countDown();
            });
        }
        latch.await();
        // every producer creation must eventually succeed despite the throttling
        synchronized (futures) {
            FutureUtil.waitForAll(futures).get();
        }
        pulsarClient.close();
        pulsarClient2.close();
    } finally {
        // fix: shut down the worker pool (it was previously leaked)
        if (executor != null) {
            executor.shutdownNow();
        }
        // revert back to original values
        pulsar.getConfiguration().setAuthorizationEnabled(authorizationEnabled);
        pulsar.getConfiguration().setMaxConcurrentTopicLoadRequest(concurrentTopic);
    }
}
/**
 * It verifies that client closes the connection on internalServerError which is "ServiceNotReady" from Broker-side
 *
 * @throws Exception
 */
@Test(timeOut = 5000)
public void testCloseConnectionOnInternalServerError() throws Exception {
    final PulsarClient pulsarClient;
    final String topicName = "persistent://prop/usw/my-ns/newTopic";
    String lookupUrl = new URI("pulsar://localhost:" + BROKER_PORT).toString();
    pulsarClient = PulsarClient.builder().serviceUrl(lookupUrl).statsInterval(0, TimeUnit.SECONDS).build();
    ProducerImpl<byte[]> producer = (ProducerImpl<byte[]>) pulsarClient.newProducer().topic(topicName).create();
    ClientCnx cnx = producer.cnx();
    assertTrue(cnx.channel().isActive());
    // Need broker to throw InternalServerError. so, make global-zk unavailable by
    // nulling out PulsarService's private globalZkCache field via reflection.
    Field globalZkCacheField = PulsarService.class.getDeclaredField("globalZkCache");
    globalZkCacheField.setAccessible(true);
    globalZkCacheField.set(pulsar, null);
    // the next producer creation requires a lookup, which now fails server-side
    try {
        pulsarClient.newProducer().topic(topicName).create();
        fail("it should have fail with lookup-exception:");
    } catch (Exception e) {
        // ok
    }
    // connection must be closed
    assertFalse(cnx.channel().isActive());
    pulsarClient.close();
}
@Test
public void testInvalidDynamicConfiguration() throws Exception {
    // Verifies dynamic-configuration validation: a bogus loadManagerClassName is rejected
    // by the admin API, a valid one is accepted, and a bogus value written directly into
    // ZooKeeper (bypassing admin validation) prevents the broker from restarting.
    // (1) try to update invalid loadManagerClass name
    try {
        admin.brokers().updateDynamicConfiguration("loadManagerClassName", "org.apache.pulsar.invalid.loadmanager");
        fail("it should have failed due to invalid argument");
    } catch (PulsarAdminException e) {
        // Ok: should have failed due to invalid config value
    }
    // (2) try to update with valid loadManagerClass name
    try {
        admin.brokers().updateDynamicConfiguration("loadManagerClassName",
                "org.apache.pulsar.broker.loadbalance.ModularLoadManager");
    } catch (PulsarAdminException e) {
        fail("it should have failed due to invalid argument", e);
    }
    // (3) restart broker with invalid config value written straight into ZooKeeper,
    // which skips the admin-side validation exercised above
    ZooKeeperDataCache<Map<String, String>> dynamicConfigurationCache = pulsar.getBrokerService()
            .getDynamicConfigurationCache();
    Map<String, String> configurationMap = dynamicConfigurationCache.get(BROKER_SERVICE_CONFIGURATION_PATH).get();
    configurationMap.put("loadManagerClassName", "org.apache.pulsar.invalid.loadmanager");
    byte[] content = ObjectMapperFactory.getThreadLocal().writeValueAsBytes(configurationMap);
    // invalidate the cache so the broker re-reads the (now invalid) ZK data on restart
    dynamicConfigurationCache.invalidate(BROKER_SERVICE_CONFIGURATION_PATH);
    mockZookKeeper.setData(BROKER_SERVICE_CONFIGURATION_PATH, content, -1);
    try {
        stopBroker();
        startBroker();
        fail("it should have failed due to invalid argument");
    } catch (Exception e) {
        // Ok: should have failed due to invalid config value
    }
}
/**
 * Counts how many messages were observed for a single publish timestamp.
 */
static class TimestampEntryCount {
    // The publish timestamp this bucket represents; fixed at construction.
    private final long timestamp;
    // Running message count; read directly (as a field) by sibling test methods.
    private int numMessages;

    public TimestampEntryCount(long ts) {
        this.timestamp = ts;
        this.numMessages = 0;
    }

    /** Bumps the count by one and returns the new value. */
    public int incrementAndGet() {
        numMessages += 1;
        return numMessages;
    }

    /** Returns the publish timestamp of this bucket. */
    public long getTimestamp() {
        return this.timestamp;
    }
}
@Test
public void testCleanProducer() throws Exception {
    // Verifies that after an async producer-creation attempt completes (here against the
    // "global" cluster, with a short operation timeout), the client's internal producer
    // registry is left empty — i.e. no half-created producer is leaked.
    // NOTE(review): this relies on the createAsync() attempt failing/timing out rather
    // than succeeding — confirm against the broker setup if this test flakes.
    log.info("-- Starting {} test --", methodName);
    admin.clusters().createCluster("global", new ClusterData());
    admin.namespaces().createNamespace("my-property/global/lookup");
    final int operationTimeOut = 500;
    PulsarClient pulsarClient = PulsarClient.builder().serviceUrl(lookupUrl.toString())
            .statsInterval(0, TimeUnit.SECONDS).operationTimeout(operationTimeOut, TimeUnit.MILLISECONDS).build();
    CountDownLatch latch = new CountDownLatch(1);
    pulsarClient.newProducer().topic("persistent://my-property/global/lookup/my-topic1").createAsync()
            .handle((producer, e) -> {
                latch.countDown();
                return null;
            });
    latch.await(operationTimeOut + 1000, TimeUnit.MILLISECONDS);
    // inspect the client's private "producers" registry via reflection
    Field prodField = PulsarClientImpl.class.getDeclaredField("producers");
    prodField.setAccessible(true);
    @SuppressWarnings("unchecked")
    IdentityHashMap<ProducerBase<byte[]>, Boolean> producers = (IdentityHashMap<ProducerBase<byte[]>, Boolean>) prodField
            .get(pulsarClient);
    assertTrue(producers.isEmpty());
    pulsarClient.close();
    log.info("-- Exiting {} test --", methodName);
}
}
| |
/*
* Copyright 2017 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
package io.flutter.run.test;
import com.google.common.annotations.VisibleForTesting;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.configurations.RuntimeConfigurationError;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.execution.ParametersListUtil;
import io.flutter.pub.PubRoot;
import io.flutter.run.FlutterDevice;
import io.flutter.run.MainFile;
import io.flutter.run.common.RunMode;
import io.flutter.run.daemon.DeviceService;
import io.flutter.sdk.FlutterCommandStartResult;
import io.flutter.sdk.FlutterSdk;
import io.flutter.utils.ElementIO;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* Settings for running a Flutter test.
*/
/**
 * Settings for running a Flutter test.
 * <p>
 * An instance targets either a single test by name, all tests in a file, or all tests
 * in a directory (see {@link Scope}); the private constructor enforces that exactly one
 * valid combination of fields is supplied.
 */
public class TestFields {
  // Name filter; non-null only when running a single named test within testFile.
  @Nullable
  private final String testName;

  // Path of the Dart file under test; mutually exclusive with testDir.
  @Nullable
  private final String testFile;

  // Path of the directory under test; mutually exclusive with testFile.
  @Nullable
  private final String testDir;

  // Extra command-line arguments forwarded to the test runner.
  @Nullable
  private String additionalArgs;

  // Whether testName is matched as a regular expression rather than a literal string.
  private boolean useRegexp = false;

  private TestFields(@Nullable String testName, @Nullable String testFile, @Nullable String testDir, @Nullable String additionalArgs) {
    if (testFile == null && testDir == null) {
      throw new IllegalArgumentException("either testFile or testDir must be non-null");
    }
    else if (testFile != null && testDir != null) {
      throw new IllegalArgumentException("either testFile or testDir must be null");
    }
    else if (testName != null && testFile == null) {
      throw new IllegalArgumentException("testName can only be specified along with a testFile");
    }
    this.testName = testName;
    this.testFile = testFile;
    this.testDir = testDir;
    this.additionalArgs = additionalArgs;
  }

  // Builder-style setter; mutates and returns this same instance (not a copy).
  public TestFields useRegexp(boolean useRegexp) {
    this.useRegexp = useRegexp;
    return this;
  }

  @VisibleForTesting
  public boolean getUseRegexp() {
    return useRegexp;
  }

  public TestFields copy() {
    return new TestFields(testName, testFile, testDir, additionalArgs).useRegexp(useRegexp);
  }

  /**
   * Creates settings for running tests with the given name within a Dart file.
   */
  public static TestFields forTestName(String testName, String path) {
    return new TestFields(testName, path, null, null);
  }

  /**
   * Creates settings for running all the tests in a Dart file.
   */
  public static TestFields forFile(String path) {
    return new TestFields(null, path, null, null);
  }

  /**
   * Creates settings for running all the tests in directory.
   */
  public static TestFields forDir(String path) {
    return new TestFields(null, null, path, null);
  }

  /**
   * Returns a value indicating whether we're running tests in a file or in a directory.
   */
  @NotNull
  public Scope getScope() {
    if (testName != null) {
      return Scope.NAME;
    }
    else if (testFile != null) {
      return Scope.FILE;
    }
    else {
      return Scope.DIRECTORY;
    }
  }

  /**
   * If not null, tests will only be run if their name contains this string.
   */
  @Nullable
  public String getTestName() {
    return testName;
  }

  /**
   * The Dart file containing the tests to run, or null if we are running tests in a directory.
   */
  @Nullable
  public String getTestFile() {
    return testFile;
  }

  /**
   * The directory containing the tests to run, or null if we are running tests in a file.
   */
  @Nullable
  public String getTestDir() {
    return testDir;
  }

  /**
   * The additional arguments to pass to the test runner.
   */
  @Nullable
  public String getAdditionalArgs() {
    return additionalArgs;
  }

  public void setAdditionalArgs(@Nullable String args) {
    additionalArgs = args;
  }

  /**
   * Returns the file or directory containing the tests to run, or null if it doesn't exist.
   */
  @Nullable
  public VirtualFile getFileOrDir() {
    final String path = testFile != null ? testFile : testDir;
    if (path == null) return null;
    return LocalFileSystem.getInstance().findFileByPath(path);
  }

  /**
   * Returns the PubRoot containing the file or directory being tested, or null if none.
   */
  @Nullable
  public PubRoot getPubRoot(@NotNull Project project) {
    final VirtualFile dir = getFileOrDir();
    if (dir == null) return null;
    // Try the target itself first (it may be the pub root), then search its ancestors.
    final PubRoot root = PubRoot.forFile(dir);
    if (root != null) return root;
    return PubRoot.forDescendant(dir, project);
  }

  /**
   * Returns the relative path to the file or directory from the pub root, or null if not in a pub root.
   */
  @Nullable
  public String getRelativePath(@NotNull Project project) {
    final PubRoot root = getPubRoot(project);
    if (root == null) return null;
    final VirtualFile fileOrDir = getFileOrDir();
    if (fileOrDir == null) return null;
    return root.getRelativePath(fileOrDir);
  }

  /**
   * Generates a name for these test settings, if they are valid.
   */
  @NotNull
  public String getSuggestedName(@NotNull Project project, @NotNull String defaultName) {
    switch (getScope()) {
      case NAME:
        final String name = getTestName();
        if (name == null) return defaultName;
        return name;
      case FILE:
        final VirtualFile file = getFileOrDir();
        if (file == null) return defaultName;
        return "tests in " + file.getName();
      case DIRECTORY:
        final String relativePath = getRelativePath(project);
        if (relativePath != null) return "tests in " + relativePath;

        // check if it's the pub root itself.
        final PubRoot root = getPubRoot(project);
        if (root != null && root.getRoot().equals(getFileOrDir())) {
          return "all tests in " + root.getRoot().getName();
        }
    }
    // Fallback (reached when the DIRECTORY case finds no resolvable path).
    return defaultName;
  }

  // Serializes these fields into the run-configuration XML element.
  void writeTo(Element elt) {
    ElementIO.addOption(elt, "testName", testName);
    ElementIO.addOption(elt, "testFile", testFile);
    ElementIO.addOption(elt, "testDir", testDir);
    ElementIO.addOption(elt, "useRegexp", useRegexp ? "true" : "false");
    ElementIO.addOption(elt, "additionalArgs", additionalArgs);
  }

  /**
   * Reads the fields from an XML Element, if available.
   *
   * @throws InvalidDataException if the persisted combination of options is invalid
   */
  @NotNull
  static TestFields readFrom(Element elt) throws InvalidDataException {
    final Map<String, String> options = ElementIO.readOptions(elt);

    final String testName = options.get("testName");
    final String testFile = options.get("testFile");
    final String testDir = options.get("testDir");
    final String useRegexp = options.get("useRegexp");
    final String additionalArgs = options.get("additionalArgs");
    try {
      return new TestFields(testName, testFile, testDir, additionalArgs).useRegexp("true".equals(useRegexp));
    }
    catch (IllegalArgumentException e) {
      // The constructor's invariants double as validation of the persisted XML.
      throw new InvalidDataException(e.getMessage());
    }
  }

  /**
   * Reports any errors that the user should correct.
   * <p>
   * This will be called while the user is typing; see RunConfiguration.checkConfiguration.
   */
  void checkRunnable(@NotNull Project project) throws RuntimeConfigurationError {
    checkSdk(project);
    getScope().checkRunnable(this, project);
  }

  /**
   * Starts running the tests.
   */
  @NotNull
  FlutterCommandStartResult run(@NotNull Project project, @NotNull RunMode mode) throws ExecutionException {
    final FlutterSdk sdk = FlutterSdk.getFlutterSdk(project);
    if (sdk == null) {
      throw new ExecutionException("The Flutter SDK is not configured");
    }

    final VirtualFile fileOrDir = getFileOrDir();
    if (fileOrDir == null) {
      throw new ExecutionException("File or directory not found");
    }

    final String testName = getTestName();

    final PubRoot root = getPubRoot(project);
    if (root == null) {
      throw new ExecutionException("Test file isn't within a Flutter pub root");
    }

    final String args = adjustArgs(root, fileOrDir, project);
    return sdk.flutterTest(root, fileOrDir, testName, mode, args, getScope(), useRegexp).startProcess(project);
  }

  // For integration tests (targets under the pub root's integration test directory),
  // prepends "-d <deviceId>" for the currently selected device unless the user already
  // supplied a device argument; otherwise returns additionalArgs unchanged.
  @Nullable
  private String adjustArgs(@NotNull PubRoot root, @NotNull VirtualFile fileOrDir, @NotNull Project project) {
    final VirtualFile testDir = root.getIntegrationTestDir();
    if (testDir == null || !VfsUtilCore.isAncestor(testDir, fileOrDir, false)) {
      return additionalArgs;
    }
    final List<String> args = additionalArgs == null
                              ? new ArrayList<>()
                              : ParametersListUtil.parse(additionalArgs);
    if (args.contains("-d") || args.contains("--device-id")) {
      return additionalArgs;
    }
    final FlutterDevice device = DeviceService.getInstance(project).getSelectedDevice();
    if (device == null) {
      return additionalArgs;
    }
    args.add(0, "-d");
    args.add(1, device.deviceId());
    return String.join(" ", args);
  }

  private void checkSdk(@NotNull Project project) throws RuntimeConfigurationError {
    if (FlutterSdk.getFlutterSdk(project) == null) {
      throw new RuntimeConfigurationError("Flutter SDK isn't set");
    }
  }

  /**
   * Selects which tests to run.
   */
  public enum Scope {
    NAME("Tests in file, filtered by name") {
      @Override
      public void checkRunnable(@NotNull TestFields fields, @NotNull Project project) throws RuntimeConfigurationError {
        final FlutterSdk sdk = FlutterSdk.getFlutterSdk(project);
        if (sdk != null && !sdk.getVersion().flutterTestSupportsFiltering()) {
          throw new RuntimeConfigurationError("Flutter SDK is too old to filter tests by name");
        }
        // A name-based run must also pass the file-based checks.
        FILE.checkRunnable(fields, project);
      }
    },

    FILE("All in file") {
      @Override
      public void checkRunnable(@NotNull TestFields fields, @NotNull Project project) throws RuntimeConfigurationError {
        final MainFile.Result main = MainFile.verify(fields.testFile, project);
        if (!main.canLaunch()) {
          throw new RuntimeConfigurationError(main.getError());
        }
        final PubRoot root = PubRoot.forDirectory(main.get().getAppDir());
        if (root == null) {
          throw new RuntimeConfigurationError("Test file isn't within a Flutter pub root");
        }
      }
    },

    DIRECTORY("All in directory") {
      @Override
      public void checkRunnable(@NotNull TestFields fields, @NotNull Project project) throws RuntimeConfigurationError {
        final VirtualFile dir = fields.getFileOrDir();
        if (dir == null) {
          throw new RuntimeConfigurationError("Directory not found");
        }
        final PubRoot root = PubRoot.forDescendant(dir, project);
        if (root == null) {
          throw new RuntimeConfigurationError("Directory is not in a pub root");
        }
      }
    };

    // Human-readable label shown in the run-configuration UI.
    private final String displayName;

    Scope(String displayName) {
      this.displayName = displayName;
    }

    public String getDisplayName() {
      return displayName;
    }

    /**
     * Throws a RuntimeConfigurationError if these fields are not runnable in this scope.
     */
    public abstract void checkRunnable(@NotNull TestFields fields, @NotNull Project project) throws RuntimeConfigurationError;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.processing.loading.sort.unsafe.holder;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Comparator;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.impl.FileFactory;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.processing.loading.row.IntermediateSortTempRow;
import org.apache.carbondata.processing.loading.sort.SortStepRowHandler;
import org.apache.carbondata.processing.sort.exception.CarbonSortKeyAndGroupByException;
import org.apache.carbondata.processing.sort.sortdata.IntermediateSortTempRowComparator;
import org.apache.carbondata.processing.sort.sortdata.SortParameters;
import org.apache.carbondata.processing.sort.sortdata.TableFieldStat;
import org.apache.log4j.Logger;
public class UnsafeSortTempFileChunkHolder implements SortTempChunkHolder {
/**
* LOGGER
*/
private static final Logger LOGGER =
LogServiceFactory.getLogService(UnsafeSortTempFileChunkHolder.class.getName());
/**
* temp file
*/
private File tempFile;
/**
* read stream
*/
private DataInputStream stream;
/**
* entry count
*/
private int entryCount;
/**
* return row
*/
private IntermediateSortTempRow returnRow;
private int readBufferSize;
private String compressorName;
private IntermediateSortTempRow[] currentBuffer;
private IntermediateSortTempRow[] backupBuffer;
private boolean isBackupFilled;
private boolean prefetch;
private int bufferSize;
private int bufferRowCounter;
private ExecutorService executorService;
private Future<Void> submit;
private int prefetchRecordsProceesed;
/**
* totalRecordFetch
*/
private int totalRecordFetch;
private int numberOfObjectRead;
private TableFieldStat tableFieldStat;
private SortStepRowHandler sortStepRowHandler;
private Comparator<IntermediateSortTempRow> comparator;
private boolean convertNoSortFields;
/**
* Constructor to initialize
*/
public UnsafeSortTempFileChunkHolder(File tempFile, SortParameters parameters,
boolean convertNoSortFields) {
// set temp file
this.tempFile = tempFile;
this.readBufferSize = parameters.getBufferSize();
this.compressorName = parameters.getSortTempCompressorName();
this.tableFieldStat = new TableFieldStat(parameters);
this.sortStepRowHandler = new SortStepRowHandler(tableFieldStat);
this.executorService = Executors.newFixedThreadPool(1);
comparator = new IntermediateSortTempRowComparator(parameters.getNoDictionarySortColumn(),
parameters.getNoDictDataType());
this.convertNoSortFields = convertNoSortFields;
initialize();
}
/**
* This method will be used to initialize
*
* @throws CarbonSortKeyAndGroupByException problem while initializing
*/
public void initialize() {
prefetch = Boolean.parseBoolean(CarbonProperties.getInstance()
.getProperty(CarbonCommonConstants.CARBON_MERGE_SORT_PREFETCH,
CarbonCommonConstants.CARBON_MERGE_SORT_PREFETCH_DEFAULT));
bufferSize = Integer.parseInt(CarbonProperties.getInstance()
.getProperty(CarbonCommonConstants.CARBON_PREFETCH_BUFFERSIZE,
CarbonCommonConstants.CARBON_PREFETCH_BUFFERSIZE_DEFAULT));
initialise();
}
private void initialise() {
try {
stream = FileFactory.getDataInputStream(tempFile.getPath(), FileFactory.FileType.LOCAL,
readBufferSize, compressorName);
this.entryCount = stream.readInt();
LOGGER.info("Processing unsafe mode file rows with size : " + entryCount);
if (prefetch) {
new DataFetcher(false).call();
totalRecordFetch += currentBuffer.length;
if (totalRecordFetch < this.entryCount) {
submit = executorService.submit(new DataFetcher(true));
}
}
} catch (FileNotFoundException e) {
LOGGER.error(e);
throw new RuntimeException(tempFile + " No Found", e);
} catch (IOException e) {
LOGGER.error(e);
throw new RuntimeException(tempFile + " No Found", e);
} catch (Exception e) {
LOGGER.error(e);
throw new RuntimeException(tempFile + " Problem while reading", e);
}
}
/**
* This method will be used to read new row from file
*
* @throws CarbonSortKeyAndGroupByException problem while reading
*/
@Override
public void readRow() throws CarbonSortKeyAndGroupByException {
if (prefetch) {
fillDataForPrefetch();
} else {
try {
if (convertNoSortFields) {
this.returnRow = sortStepRowHandler.readWithNoSortFieldConvert(stream);
} else {
this.returnRow = sortStepRowHandler.readWithoutNoSortFieldConvert(stream);
}
this.numberOfObjectRead++;
} catch (IOException e) {
throw new CarbonSortKeyAndGroupByException("Problems while reading row", e);
}
}
}
private void fillDataForPrefetch() {
if (bufferRowCounter >= bufferSize) {
if (isBackupFilled) {
bufferRowCounter = 0;
currentBuffer = backupBuffer;
totalRecordFetch += currentBuffer.length;
isBackupFilled = false;
if (totalRecordFetch < this.entryCount) {
submit = executorService.submit(new DataFetcher(true));
}
} else {
try {
submit.get();
} catch (Exception e) {
LOGGER.error(e);
}
bufferRowCounter = 0;
currentBuffer = backupBuffer;
isBackupFilled = false;
totalRecordFetch += currentBuffer.length;
if (totalRecordFetch < this.entryCount) {
submit = executorService.submit(new DataFetcher(true));
}
}
}
prefetchRecordsProceesed++;
returnRow = currentBuffer[bufferRowCounter++];
}
/**
* get a batch of row, this interface is used in reading compressed sort temp files
*
* @param expected expected number in a batch
* @return a batch of row
* @throws IOException if error occurs while reading from stream
*/
private IntermediateSortTempRow[] readBatchedRowFromStream(int expected)
throws IOException {
IntermediateSortTempRow[] holders = new IntermediateSortTempRow[expected];
for (int i = 0; i < expected; i++) {
if (convertNoSortFields) {
holders[i] = sortStepRowHandler.readWithNoSortFieldConvert(stream);
} else {
holders[i] = sortStepRowHandler.readWithoutNoSortFieldConvert(stream);
}
}
this.numberOfObjectRead += expected;
return holders;
}
/**
* below method will be used to get the row
*
* @return row
*/
public IntermediateSortTempRow getRow() {
return this.returnRow;
}
/**
* below method will be used to check whether any more records are present
* in file or not
*
* @return more row present in file
*/
public boolean hasNext() {
if (prefetch) {
return this.prefetchRecordsProceesed < this.entryCount;
}
return this.numberOfObjectRead < this.entryCount;
}
/**
* Below method will be used to close streams
*/
public void close() {
CarbonUtil.closeStreams(stream);
if (null != executorService && !executorService.isShutdown()) {
executorService.shutdownNow();
}
}
/**
* This method will number of entries
*
* @return entryCount
*/
public int numberOfRows() {
return entryCount;
}
@Override public int compareTo(SortTempChunkHolder other) {
return comparator.compare(returnRow, other.getRow());
}
@Override public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof UnsafeSortTempFileChunkHolder)) {
return false;
}
UnsafeSortTempFileChunkHolder o = (UnsafeSortTempFileChunkHolder) obj;
return this == o;
}
/**
 * Hash derived from the table field statistics and the temp file. Valid
 * alongside identity-based equals: only identical instances are equal, and
 * identical instances always produce the same hash.
 */
@Override public int hashCode() {
  return tableFieldStat.hashCode() + tempFile.hashCode();
}
/**
 * Asynchronous task that reads the next batch of rows from the sort temp
 * file into either the current buffer or the backup buffer.
 */
private final class DataFetcher implements Callable<Void> {
  /** True when this task fills the backup buffer, false for the current buffer. */
  private final boolean fillsBackupBuffer;
  /** Number of rows this task will read: a full buffer, or whatever remains. */
  private final int recordsToFetch;

  private DataFetcher(boolean backUp) {
    this.fillsBackupBuffer = backUp;
    this.recordsToFetch = Math.min(bufferSize, entryCount - totalRecordFetch);
  }

  @Override public Void call() throws Exception {
    try {
      if (fillsBackupBuffer) {
        backupBuffer = prefetchRecordsFromFile(recordsToFetch);
        // Flag consumers that the backup buffer is ready to be swapped in.
        isBackupFilled = true;
      } else {
        currentBuffer = prefetchRecordsFromFile(recordsToFetch);
      }
    } catch (Exception e) {
      LOGGER.error(e);
    }
    return null;
  }
}
/**
 * Reads the next batch of records from the sort temp file so they can be
 * buffered ahead of consumption by the prefetch machinery.
 *
 * @param numberOfRecords number of records to read
 * @return batch of intermediate sort temp rows
 * @throws IOException if an error occurs while reading from the file
 */
private IntermediateSortTempRow[] prefetchRecordsFromFile(int numberOfRecords)
    throws IOException {
  return readBatchedRowFromStream(numberOfRecords);
}
}
| |
/*******************************************************************************
*
* Copyright (c) 2004-2011 Oracle Corporation.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
*
* Inc., Kohsuke Kawaguchi, Winston Prakash
*
*
*******************************************************************************/
package hudson.lifecycle;
import hudson.Launcher.LocalLauncher;
import hudson.Util;
import hudson.model.TaskListener;
import hudson.remoting.Callable;
import hudson.remoting.Engine;
import hudson.remoting.jnlp.MainDialog;
import hudson.remoting.jnlp.MainMenu;
import hudson.util.StreamTaskListener;
import org.eclipse.hudson.jna.NativeAccessException;
import org.eclipse.hudson.jna.NativeUtils;
import java.util.logging.Logger;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.output.ByteArrayOutputStream;
import javax.swing.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URL;
import java.util.logging.Level;
import static javax.swing.JOptionPane.*;
/**
 * Installs the JNLP slave agent as a Windows service. Runs on the slave via
 * remoting ({@link #call()}), where it adds an "install as service" item to
 * the JNLP main dialog's File menu; the actual installation happens in
 * {@link #actionPerformed(ActionEvent)}.
 *
 * @author Kohsuke Kawaguchi
 */
public class WindowsSlaveInstaller implements Callable<Void, RuntimeException>, ActionListener {

    /**
     * Root directory of this slave. String, not File because the platform can
     * be different.
     */
    private final String rootDir;
    /** Remoting engine captured in {@link #call()}; used to build URLs back to the master. */
    private transient Engine engine;
    /** JNLP main window; the menu item is attached to it and dialogs are parented to it. */
    private transient MainDialog dialog;
    /** Native (JNA) helper used for .NET detection and elevated execution. */
    private NativeUtils nativeUtils = NativeUtils.getInstance();

    public WindowsSlaveInstaller(String rootDir, NativeUtils nativeUtils) {
        this.rootDir = rootDir;
        // BUG FIX: the supplied NativeUtils was previously ignored, leaving
        // the field on its NativeUtils.getInstance() default.
        this.nativeUtils = nativeUtils;
    }

    public WindowsSlaveInstaller(String rootDir) {
        this.rootDir = rootDir;
    }

    /**
     * Runs on the slave. When running from JNLP on Windows with a GUI, adds
     * the service-installation menu item to the main dialog's File menu.
     *
     * @return always null
     */
    public Void call() {
        if (File.separatorChar == '/') {
            return null; // not Windows
        }
        if (System.getProperty("hudson.showWindowsServiceInstallLink") == null) {
            return null; // only show this when it makes sense, which is when we run from JNLP
        }
        dialog = MainDialog.get();
        if (dialog == null) {
            return null; // can't find the main window. Maybe not running with GUI
        }
        // capture the engine
        engine = Engine.current();
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                MainMenu mainMenu = dialog.getMainMenu();
                JMenu m = mainMenu.getFileMenu();
                JMenuItem menu = new JMenuItem(Messages.WindowsInstallerLink_DisplayName(), KeyEvent.VK_W);
                menu.addActionListener(WindowsSlaveInstaller.this);
                m.add(menu);
                mainMenu.commit();
            }
        });
        return null;
    }

    /**
     * Invokes slave.exe with a SCM management command.
     *
     * <p> If it fails in a way that indicates the presence of UAC, retry in an
     * UAC compatible manner.
     */
    static int runElevated(File slaveExe, String command, TaskListener out, File pwd, NativeUtils nativeUtils) throws IOException, InterruptedException {
        try {
            return new LocalLauncher(out).launch().cmds(slaveExe, command).stdout(out).pwd(pwd).join();
        } catch (IOException e) {
            // Win32 error 740 (ERROR_ELEVATION_REQUIRED) means UAC blocked the
            // plain launch; fall through and retry via the native helper.
            if (e.getMessage().contains("CreateProcess") && e.getMessage().contains("=740")) {
                // fall through
            } else {
                throw e;
            }
        }
        String logFile = "redirect.log";
        try {
            return nativeUtils.windowsExec(slaveExe, command, logFile, pwd);
        } catch (NativeAccessException ex) {
            Logger.getLogger(WindowsSlaveInstaller.class.getName()).log(Level.SEVERE, null, ex);
            return -1;
        } finally {
            // Replay the captured output of the elevated process into the
            // listener. (Uses the logFile variable instead of duplicating the
            // "redirect.log" literal.)
            FileInputStream fin = null;
            try {
                fin = new FileInputStream(new File(pwd, logFile));
                IOUtils.copy(fin, out.getLogger());
            } finally {
                IOUtils.closeQuietly(fin);
            }
        }
    }

    /**
     * Called when the install menu is selected. Confirms with the user, copies
     * the service wrapper and descriptor into the slave root, installs the
     * service elevated, and finally exits the JNLP agent; the service itself
     * is started from a shutdown hook so it does not conflict with the
     * still-open connection to the master.
     */
    public void actionPerformed(ActionEvent e) {
        try {
            int r = JOptionPane.showConfirmDialog(dialog,
                    Messages.WindowsSlaveInstaller_ConfirmInstallation(),
                    Messages.WindowsInstallerLink_DisplayName(), OK_CANCEL_OPTION);
            if (r != JOptionPane.OK_OPTION) {
                return;
            }
            // The Windows service wrapper requires the .NET 2.0 runtime.
            if (!nativeUtils.isDotNetInstalled(2, 0)) {
                JOptionPane.showMessageDialog(dialog, Messages.WindowsSlaveInstaller_DotNetRequired(),
                        Messages.WindowsInstallerLink_DisplayName(), ERROR_MESSAGE);
                return;
            }
            final File dir = new File(rootDir);
            if (!dir.exists()) {
                JOptionPane.showMessageDialog(dialog, Messages.WindowsSlaveInstaller_RootFsDoesntExist(rootDir),
                        Messages.WindowsInstallerLink_DisplayName(), ERROR_MESSAGE);
                return;
            }
            final File slaveExe = new File(dir, "hudson-slave.exe");
            FileUtils.copyURLToFile(getClass().getResource("/windows-service/hudson.exe"), slaveExe);
            // write out the descriptor
            URL jnlp = new URL(engine.getHudsonUrl(), "computer/" + Util.rawEncode(engine.slaveName) + "/slave-agent.jnlp");
            String xml = generateSlaveXml(
                    generateServiceId(rootDir),
                    System.getProperty("java.home") + "\\bin\\java.exe", "-jnlpUrl " + jnlp.toExternalForm());
            FileUtils.writeStringToFile(new File(dir, "hudson-slave.xml"), xml, "UTF-8");
            // copy slave.jar
            URL slaveJar = new URL(engine.getHudsonUrl(), "jnlpJars/remoting.jar");
            File dstSlaveJar = new File(dir, "slave.jar").getCanonicalFile();
            if (!dstSlaveJar.exists()) // perhaps slave.jar is already there?
            {
                FileUtils.copyURLToFile(slaveJar, dstSlaveJar);
            }
            // install as a service
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            StreamTaskListener task = new StreamTaskListener(baos);
            r = runElevated(slaveExe, "install", task, dir, nativeUtils);
            if (r != 0) {
                JOptionPane.showMessageDialog(
                        dialog, baos.toString(), "Error", ERROR_MESSAGE);
                return;
            }
            r = JOptionPane.showConfirmDialog(dialog,
                    Messages.WindowsSlaveInstaller_InstallationSuccessful(),
                    Messages.WindowsInstallerLink_DisplayName(), OK_CANCEL_OPTION);
            if (r != JOptionPane.OK_OPTION) {
                return;
            }
            // let the service start after we close our connection, to avoid conflicts
            Runtime.getRuntime().addShutdownHook(new Thread("service starter") {
                public void run() {
                    try {
                        StreamTaskListener task = StreamTaskListener.fromStdout();
                        int r = runElevated(slaveExe, "start", task, dir, nativeUtils);
                        task.getLogger().println(r == 0 ? "Successfully started" : "start service failed. Exit code=" + r);
                    } catch (IOException e) {
                        e.printStackTrace();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            });
            System.exit(0);
        } catch (Exception t) {// this runs as a JNLP app, so if we let an exeption go, we'll never find out why it failed
            StringWriter sw = new StringWriter();
            t.printStackTrace(new PrintWriter(sw));
            JOptionPane.showMessageDialog(dialog, sw.toString(), "Error", ERROR_MESSAGE);
        }
    }

    /**
     * Derives a service id unique to the given slave root directory, so that
     * multiple slaves on one machine get distinct service names.
     */
    public static String generateServiceId(String slaveRoot) throws IOException {
        return "hudsonslave-" + slaveRoot.replace(':', '_').replace('\\', '_').replace('/', '_');
    }

    /**
     * Instantiates the bundled hudson-slave.xml descriptor template with the
     * given service id, java executable path and launch arguments.
     */
    public static String generateSlaveXml(String id, String java, String args) throws IOException {
        String xml = IOUtils.toString(WindowsSlaveInstaller.class.getResourceAsStream("/windows-service/hudson-slave.xml"), "UTF-8");
        xml = xml.replace("@ID@", id);
        xml = xml.replace("@JAVA@", java);
        xml = xml.replace("@ARGS@", args);
        return xml;
    }

    private static final long serialVersionUID = 1L;
}
| |
/*
* Copyright (C) 2009 Teleca Poland Sp. z o.o. <android@teleca.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.teleca.jamendo.api;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import android.util.Log;
/**
 * Serializable playback queue: holds the playlist entries, the currently
 * selected track, and the order in which tracks are played (sequential or
 * shuffled, optionally repeating).
 *
 * @author Lukasz Wisniewski
 */
public class Playlist implements Serializable {

    private static final long serialVersionUID = 1L;

    private static final String TAG = "Playlist";

    /** Playback modes for a playlist. */
    public enum PlaylistPlaybackMode {
        NORMAL, SHUFFLE, REPEAT, SHUFFLE_AND_REPEAT
    }

    /**
     * Keep order in which tracks will be played; each element is an index
     * into {@link #playlist}.
     */
    private ArrayList<Integer> mPlayOrder = new ArrayList<Integer>();

    /**
     * Keep playlist playback mode
     */
    private PlaylistPlaybackMode mPlaylistPlaybackMode = PlaylistPlaybackMode.NORMAL;

    /**
     * Give playlist playback mode
     *
     * @return enum with playback mode
     */
    public PlaylistPlaybackMode getPlaylistPlaybackMode() {
        return mPlaylistPlaybackMode;
    }

    /**
     * Set playlist playback mode. Crossing between the sequential modes
     * (NORMAL/REPEAT) and the shuffled modes (SHUFFLE/SHUFFLE_AND_REPEAT)
     * forces the play order to be recalculated.
     *
     * @param aPlaylistPlaybackMode the new playback mode
     */
    public void setPlaylistPlaybackMode(
            PlaylistPlaybackMode aPlaylistPlaybackMode) {
        if (Log.isLoggable(TAG, Log.DEBUG)) {
            Log.d(TAG, "(Set mode) selected = " + selected);
            // Typo fixed in log message ("Plyback" -> "Playback").
            Log.d(TAG, "Playback mode set on: " + aPlaylistPlaybackMode);
        }
        boolean force = false;
        switch (aPlaylistPlaybackMode) {
        case NORMAL:
        case REPEAT:
            if (mPlaylistPlaybackMode == PlaylistPlaybackMode.SHUFFLE
                    || mPlaylistPlaybackMode == PlaylistPlaybackMode.SHUFFLE_AND_REPEAT) {
                force = true;
            }
            break;
        case SHUFFLE:
        case SHUFFLE_AND_REPEAT:
            if (mPlaylistPlaybackMode == PlaylistPlaybackMode.NORMAL
                    || mPlaylistPlaybackMode == PlaylistPlaybackMode.REPEAT) {
                force = true;
            }
            break;
        }
        mPlaylistPlaybackMode = aPlaylistPlaybackMode;
        calculateOrder(force);
    }

    /**
     * Keeps playlist's entries
     */
    protected ArrayList<PlaylistEntry> playlist = null;

    /**
     * Keeps record of currently selected track; -1 when nothing is selected.
     */
    protected int selected = -1;

    public Playlist() {
        if (Log.isLoggable(TAG, Log.DEBUG)) {
            Log.d(TAG, "Playlist constructor start");
        }
        playlist = new ArrayList<PlaylistEntry>();
        calculateOrder(true);
        if (Log.isLoggable(TAG, Log.DEBUG)) {
            Log.d(TAG, "Playlist constructor stop");
        }
    }

    /**
     * Add single track to the playlist
     *
     * @param track
     *            <code>Track</code> instance
     * @param album
     *            <code>Album</code> instance
     */
    public void addTrack(Track track, Album album) {
        PlaylistEntry playlistEntry = new PlaylistEntry();
        playlistEntry.setAlbum(album);
        playlistEntry.setTrack(track);
        playlist.add(playlistEntry);
        // New tracks are appended to the end of the play order.
        mPlayOrder.add(size() - 1);
    }

    /**
     * Add multiple tracks from one album to the playlist
     *
     * @param album
     *            <code>Album</code> instance with loaded tracks
     */
    public void addTracks(Album album) {
        for (Track track : album.getTracks()) {
            addTrack(track, album);
        }
    }

    /**
     * Checks if the playlist is empty
     *
     * @return boolean value
     */
    public boolean isEmpty() {
        return playlist.size() == 0;
    }

    /**
     * Selects next song from the playlist, wrapping to the beginning.
     */
    public void selectNext() {
        if (!isEmpty()) {
            selected++;
            selected %= playlist.size();
            if (Log.isLoggable(TAG, Log.DEBUG)) {
                // BUG FIX: previously logged under the literal tag "TAG"
                // instead of the TAG constant.
                Log.d(TAG, "Current (next) selected = " + selected);
            }
        }
    }

    /**
     * Selects previous song from the playlist, wrapping to the end.
     */
    public void selectPrev() {
        if (!isEmpty()) {
            selected--;
            if (selected < 0) {
                selected = playlist.size() - 1;
            }
        }
        if (Log.isLoggable(TAG, Log.DEBUG)) {
            // BUG FIX: previously logged under the literal tag "TAG"
            // instead of the TAG constant.
            Log.d(TAG, "Current (prev) selected = " + selected);
        }
    }

    /**
     * Select song with a given index
     *
     * @param index playlist index of the track to select
     */
    public void select(int index) {
        if (!isEmpty()) {
            // "selected" tracks a position in the play order, so map the
            // playlist index through mPlayOrder.
            if (index >= 0 && index < playlist.size()) {
                selected = mPlayOrder.indexOf(index);
            }
        }
    }

    /**
     * Selects the given track if already present, otherwise appends and
     * selects it.
     */
    public void selectOrAdd(Track track, Album album) {
        // first search thru available tracks
        for (int i = 0; i < playlist.size(); i++) {
            if (playlist.get(i).getTrack().getId() == track.getId()) {
                select(i);
                return;
            }
        }
        // add track if necessary
        addTrack(track, album);
        select(playlist.size() - 1);
    }

    /**
     * Return index of the currently selected song
     *
     * @return int value (-1 if the playlist is empty)
     */
    public int getSelectedIndex() {
        if (isEmpty()) {
            selected = -1;
        }
        // Default to the first track once the playlist has content.
        if (selected == -1 && !isEmpty()) {
            selected = 0;
        }
        return selected;
    }

    /**
     * Return currently selected song
     *
     * @return <code>PlaylistEntry</code> instance, or null if nothing is selected
     */
    public PlaylistEntry getSelectedTrack() {
        PlaylistEntry playlistEntry = null;
        int index = getSelectedIndex();
        if (index == -1) {
            return null;
        }
        // Map the play-order position to the playlist index.
        index = mPlayOrder.get(index);
        if (index == -1) {
            return null;
        }
        playlistEntry = playlist.get(index);
        return playlistEntry;
    }

    /**
     * Adds PlaylistEntry object to the playlist
     *
     * @param playlistEntry entry to append; ignored when null
     */
    public void addPlaylistEntry(PlaylistEntry playlistEntry) {
        if (playlistEntry != null) {
            playlist.add(playlistEntry);
            mPlayOrder.add(size() - 1);
        }
    }

    /**
     * Count of playlist entries
     *
     * @return number of entries, 0 for an uninitialized playlist
     */
    public int size() {
        return playlist == null ? 0 : playlist.size();
    }

    /**
     * Given track index getter
     *
     * @param index playlist index
     * @return the entry at that index
     */
    public PlaylistEntry getTrack(int index) {
        return playlist.get(index);
    }

    /**
     * Give all entries in the playlist
     *
     * @return array snapshot of the playlist entries
     */
    public PlaylistEntry[] getAllTracks() {
        PlaylistEntry[] out = new PlaylistEntry[playlist.size()];
        playlist.toArray(out);
        return out;
    }

    /**
     * Remove a track with a given index from the playlist
     *
     * @param position playlist index to remove
     */
    public void remove(int position) {
        if (playlist != null && position < playlist.size() && position >= 0) {
            if (selected >= position) {
                selected--;
            }
            playlist.remove(position);
            // NOTE(review): mPlayOrder holds playlist indices, so removing by
            // position here is only index-correct in sequential modes; in
            // shuffle mode remaining entries are not remapped — confirm
            // intended behavior before changing.
            mPlayOrder.remove(position);
        }
    }

    /**
     * Change order playback list when it is needed
     *
     * @param force true to rebuild the order even when one already exists
     */
    private void calculateOrder(boolean force) {
        if (mPlayOrder.isEmpty() || force) {
            int oldSelected = 0;
            // BUG FIX: guard selected >= 0 — previously mPlayOrder.get(-1)
            // threw IndexOutOfBoundsException when the playback mode was
            // changed before any track had been selected.
            if (!mPlayOrder.isEmpty() && selected >= 0) {
                oldSelected = mPlayOrder.get(selected);
            }
            mPlayOrder.clear();
            for (int i = 0; i < size(); i++) {
                mPlayOrder.add(i, i);
            }
            if (mPlaylistPlaybackMode == null) {
                mPlaylistPlaybackMode = PlaylistPlaybackMode.NORMAL;
            }
            if (Log.isLoggable(TAG, Log.DEBUG)) {
                // Typo fixed in log message ("maped" -> "mapped").
                Log.d(TAG, "Playlist has been mapped in "
                        + mPlaylistPlaybackMode + " mode.");
            }
            switch (mPlaylistPlaybackMode) {
            case NORMAL:
            case REPEAT:
                selected = oldSelected;
                break;
            case SHUFFLE:
            case SHUFFLE_AND_REPEAT:
                if (Log.isLoggable(TAG, Log.DEBUG)) {
                    Log.d(TAG, "Before shuffle: "
                            + Arrays.toString(mPlayOrder.toArray()));
                }
                Collections.shuffle(mPlayOrder);
                selected = mPlayOrder.indexOf(selected);
                if (Log.isLoggable(TAG, Log.DEBUG)) {
                    Log.d(TAG, "After shuffle: "
                            + Arrays.toString(mPlayOrder.toArray()));
                }
                break;
            }
        }
    }

    /**
     * Inform whether it is the last track on the playlist
     *
     * @return true when the selected track is the final entry
     */
    public boolean isLastTrackOnList() {
        return selected == size() - 1;
    }

    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        // This method is used when the playlist is deserialized from the DB;
        // older serialized forms may lack mPlayOrder, so rebuild it.
        in.defaultReadObject();
        if (mPlayOrder == null) {
            if (Log.isLoggable(TAG, Log.DEBUG)) {
                Log.d(TAG, "mPlayOrder is NULL");
            }
            mPlayOrder = new ArrayList<Integer>();
            calculateOrder(true);
        }
    }
}
| |
package org.activiti.engine.impl.history;
import java.util.Date;
import java.util.Map;
import org.activiti.engine.impl.db.DbSqlSession;
import org.activiti.engine.impl.interceptor.Session;
import org.activiti.engine.impl.persistence.entity.ExecutionEntity;
import org.activiti.engine.impl.persistence.entity.HistoricActivityInstanceEntity;
import org.activiti.engine.impl.persistence.entity.IdentityLinkEntity;
import org.activiti.engine.impl.persistence.entity.TaskEntity;
import org.activiti.engine.impl.persistence.entity.VariableInstanceEntity;
import org.activiti.engine.impl.pvm.runtime.InterpretableExecution;
import org.flowable.common.engine.impl.history.HistoryLevel;
import org.flowable.identitylink.api.IdentityLink;
/**
 * Session-scoped facade for writing history records. Implementations decide,
 * based on the configured history level, whether each call produces an actual
 * update or is silently skipped.
 */
public interface HistoryManager extends Session {

  /**
   * @return true when the configured history level is equal to, or higher than, the given level.
   */
  boolean isHistoryLevelAtLeast(HistoryLevel level);

  /**
   * @return true when the history level is configured to anything other than "none".
   */
  boolean isHistoryEnabled();

  /**
   * Records that a process instance ended; updates the historic process instance when activity history is enabled.
   */
  void recordProcessInstanceEnd(String processInstanceId,
      String deleteReason, String activityId);

  /**
   * Records that a process instance started, including its start event, when activity history is enabled.
   */
  void recordProcessInstanceStart(
      ExecutionEntity processInstance);

  /**
   * Records a change of a process instance's name.
   */
  void recordProcessInstanceNameChange(
      String processInstanceId, String newName);

  /**
   * Records the start of a sub-process instance and sets the calledProcessInstanceId on the
   * currently active activity's historic counterpart. Only effective when activity history is enabled.
   */
  void recordSubProcessInstanceStart(
      ExecutionEntity parentExecution, ExecutionEntity subProcessInstance);

  /**
   * Records the start of an activity, when activity history is enabled.
   */
  void recordActivityStart(ExecutionEntity executionEntity);

  /**
   * Records the end of an activity, when activity history is enabled.
   */
  void recordActivityEnd(ExecutionEntity executionEntity);

  /**
   * Records the end of a start event, when activity history is enabled.
   */
  void recordStartEventEnded(ExecutionEntity execution, String activityId);

  /**
   * Finds the {@link HistoricActivityInstanceEntity} that is active in the given execution.
   * Uses the {@link DbSqlSession} cache so the right instance is returned regardless of
   * whether entities have already been flushed to the database.
   */
  HistoricActivityInstanceEntity findActivityInstance(
      ExecutionEntity execution);

  /**
   * Re-points any open historic activity instances from the replaced execution to its
   * replacement, when activity history is enabled.
   */
  void recordExecutionReplacedBy(ExecutionEntity execution,
      InterpretableExecution replacedBy);

  /**
   * Records a change of a process instance's process-definition id, when activity history is enabled.
   */
  void recordProcessDefinitionChange(
      String processInstanceId, String processDefinitionId);

  /**
   * Records the creation of a task, when audit history is enabled.
   */
  void recordTaskCreated(TaskEntity task,
      ExecutionEntity execution);

  /**
   * Records the assignment of a task, when activity history is enabled.
   */
  void recordTaskAssignment(TaskEntity task);

  /**
   * Records the claim time of a task instance, when audit history is enabled.
   *
   * @param taskId id of the claimed task
   */
  void recordTaskClaim(String taskId);

  /**
   * Records the id of the task associated with a historic activity, when activity history is enabled.
   */
  void recordTaskId(TaskEntity task);

  /**
   * Records a task as ended, when audit history is enabled.
   */
  void recordTaskEnd(String taskId, String deleteReason);

  /**
   * Records a task assignee change, when audit history is enabled.
   */
  void recordTaskAssigneeChange(String taskId, String assignee);

  /**
   * Records a task owner change, when audit history is enabled.
   */
  void recordTaskOwnerChange(String taskId, String owner);

  /**
   * Records a task name change, when audit history is enabled.
   */
  void recordTaskNameChange(String taskId, String taskName);

  /**
   * Records a task description change, when audit history is enabled.
   */
  void recordTaskDescriptionChange(String taskId,
      String description);

  /**
   * Records a task due-date change, when audit history is enabled.
   */
  void recordTaskDueDateChange(String taskId, Date dueDate);

  /**
   * Records a task priority change, when audit history is enabled.
   */
  void recordTaskPriorityChange(String taskId, int priority);

  /**
   * Records a task category change, when audit history is enabled.
   */
  void recordTaskCategoryChange(String taskId, String category);

  /**
   * Records a task form-key change, when audit history is enabled.
   */
  void recordTaskFormKeyChange(String taskId, String formKey);

  /**
   * Records a task parent-task-id change, when audit history is enabled.
   */
  void recordTaskParentTaskIdChange(String taskId, String parentTaskId);

  /**
   * Records a task execution-id change, when audit history is enabled.
   */
  void recordTaskExecutionIdChange(String taskId, String executionId);

  /**
   * Records a task definition-key change, when audit history is enabled.
   */
  void recordTaskDefinitionKeyChange(TaskEntity task, String taskDefinitionKey);

  /**
   * Records a change of a task instance's process-definition id, when activity history is enabled.
   */
  void recordTaskProcessDefinitionChange(String taskId, String processDefinitionId);

  /**
   * Records that a variable was created, when audit history is enabled.
   */
  void recordVariableCreate(VariableInstanceEntity variable);

  /**
   * Records a historic-detail entry for a created variable, when audit history is enabled.
   */
  void recordHistoricDetailVariableCreate(
      VariableInstanceEntity variable,
      ExecutionEntity sourceActivityExecution, boolean useActivityId);

  /**
   * Records that a variable was updated, when audit history is enabled.
   */
  void recordVariableUpdate(VariableInstanceEntity variable);

  /**
   * Records that a variable was deleted, when audit history is enabled.
   */
  void recordVariableRemoved(VariableInstanceEntity variable);

  /**
   * Creates a comment noting that an {@link IdentityLink} was created or deleted, when history is enabled.
   */
  void createIdentityLinkComment(String taskId,
      String userId, String groupId, String type, boolean create);

  /**
   * Creates a comment noting that a user {@link IdentityLink} was created or deleted, when history is enabled.
   */
  void createUserIdentityLinkComment(String taskId, String userId, String type, boolean create);

  /**
   * Creates a comment noting that a group {@link IdentityLink} was created or deleted, when history is enabled.
   */
  void createGroupIdentityLinkComment(String taskId, String groupId, String type, boolean create);

  /**
   * Creates a comment noting that an {@link IdentityLink} was created or deleted, when history is enabled.
   */
  void createIdentityLinkComment(String taskId, String userId, String groupId, String type, boolean create, boolean forceNullUserId);

  /**
   * Creates a comment noting that a user {@link IdentityLink} was created or deleted, when history is enabled.
   */
  void createUserIdentityLinkComment(String taskId, String userId, String type, boolean create, boolean forceNullUserId);

  /**
   * Creates a comment noting that a process-instance {@link IdentityLink} was created or deleted, when history is enabled.
   */
  void createProcessInstanceIdentityLinkComment(String processInstanceId,
      String userId, String groupId, String type, boolean create);

  /**
   * Creates a comment noting that a process-instance {@link IdentityLink} was created or deleted, when history is enabled.
   */
  void createProcessInstanceIdentityLinkComment(String processInstanceId,
      String userId, String groupId, String type, boolean create,
      boolean forceNullUserId);

  /**
   * Creates a comment noting that an attachment was created or deleted, when history is enabled.
   */
  void createAttachmentComment(String taskId,
      String processInstanceId, String attachmentName, boolean create);

  /**
   * Reports submitted form properties, when audit history is enabled.
   */
  void reportFormPropertiesSubmitted(
      ExecutionEntity processInstance, Map<String, String> properties,
      String taskId);

  // Identity link related history

  /**
   * Records the creation of a new {@link IdentityLink}, when audit history is enabled.
   */
  void recordIdentityLinkCreated(
      IdentityLinkEntity identityLink);

  /**
   * Deletes a historic identity link by id.
   */
  void deleteHistoricIdentityLink(String id);

  /**
   * Propagates a process instance's business-key change into history.
   */
  void updateProcessBusinessKeyInHistory(
      ExecutionEntity processInstance);
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.slicer;
import com.intellij.icons.AllIcons;
import com.intellij.ide.CommonActionsManager;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.actions.RefreshAction;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowAnchor;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.openapi.wm.ex.ToolWindowManagerListener;
import com.intellij.pom.Navigatable;
import com.intellij.ui.*;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewBundle;
import com.intellij.usages.Usage;
import com.intellij.usages.UsageViewPresentation;
import com.intellij.usages.UsageViewSettings;
import com.intellij.usages.impl.UsagePreviewPanel;
import com.intellij.util.EditSourceOnDoubleClickHandler;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import javax.swing.event.TreeExpansionEvent;
import javax.swing.event.TreeWillExpandListener;
import javax.swing.tree.*;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.List;
import java.util.*;
/**
* @author cdr
*/
public abstract class SlicePanel extends JPanel implements TypeSafeDataProvider, Disposable {
/** Builds the slice tree contents asynchronously. */
private final SliceTreeBuilder myBuilder;
/** Tree component showing the slice usages. */
private final JTree myTree;

/** Scrolls the editor to the selected usage when auto-scroll is enabled. */
private final AutoScrollToSourceHandler myAutoScrollToSourceHandler = new AutoScrollToSourceHandler() {
  @Override
  protected boolean isAutoScrollMode() {
    return isAutoScroll();
  }

  @Override
  protected void setAutoScrollMode(final boolean state) {
    setAutoScroll(state);
  }
};

/** Preview pane shown next to the tree in preview mode; null otherwise. */
private UsagePreviewPanel myUsagePreviewPanel;
private final Project myProject;
/** Set in dispose(); guards callbacks that may arrive after disposal. */
private boolean isDisposed;
private final ToolWindow myToolWindow;
/** Language-specific support used to obtain renderers for slice nodes. */
private final SliceLanguageSupportProvider myProvider;
/**
 * Builds the slice panel: subscribes to tool-window anchor changes (to
 * re-layout the preview splitter), creates the tree and its builder, and
 * schedules the initial expansion/selection of the root's first child.
 *
 * @param project current project
 * @param dataFlowToThis true to trace dataflow to the root element, false to trace from it
 * @param rootNode root node of the slice tree
 * @param splitByLeafExpressions whether results are grouped by leaf expression
 * @param toolWindow tool window that hosts this panel
 */
protected SlicePanel(@NotNull final Project project,
                     boolean dataFlowToThis,
                     @NotNull SliceNode rootNode,
                     boolean splitByLeafExpressions,
                     @NotNull final ToolWindow toolWindow) {
  super(new BorderLayout());
  myProvider = rootNode.getProvider();
  myToolWindow = toolWindow;
  ApplicationManager.getApplication().assertIsDispatchThread();
  myProject = project;
  // Re-layout when the tool window moves to a different edge: the preview
  // splitter orientation depends on the anchor (see layoutPanel()).
  myProject.getMessageBus().connect(this).subscribe(ToolWindowManagerListener.TOPIC, new ToolWindowManagerListener() {
    ToolWindowAnchor myAnchor = toolWindow.getAnchor();
    @Override
    public void stateChanged(@NotNull ToolWindowManager toolWindowManager) {
      if (!project.isOpen()) {
        return;
      }
      if (toolWindow.getAnchor() != myAnchor) {
        myAnchor = myToolWindow.getAnchor();
        layoutPanel();
      }
    }
  });
  myTree = createTree();
  myBuilder = new SliceTreeBuilder(myTree, project, dataFlowToThis, rootNode, splitByLeafExpressions);
  myBuilder.setCanYieldUpdate(!ApplicationManager.getApplication().isUnitTestMode());
  Disposer.register(this, myBuilder);
  // Populate the tree, then expand the root and select its first child.
  myBuilder.addSubtreeToUpdate((DefaultMutableTreeNode)myTree.getModel().getRoot(), () -> {
    if (isDisposed || myBuilder.isDisposed() || myProject.isDisposed()) return;
    final SliceNode rootNode1 = myBuilder.getRootSliceNode();
    myBuilder.expand(rootNode1, new Runnable() {
      @Override
      public void run() {
        if (isDisposed || myBuilder.isDisposed() || myProject.isDisposed()) return;
        myBuilder.select(rootNode1.myCachedChildren.get(0)); // initially there is only one child
      }
    });
    treeSelectionChanged();
  });
  layoutPanel();
}
/**
 * (Re)builds the panel layout. In preview mode the tree sits in a splitter
 * next to a usage preview pane (orientation follows the tool window anchor);
 * otherwise only the tree is shown. The toolbar is always docked on the west
 * side. Any previous preview panel is disposed before rebuilding.
 */
private void layoutPanel() {
  if (myUsagePreviewPanel != null) {
    Disposer.dispose(myUsagePreviewPanel);
  }
  removeAll();
  JScrollPane pane = ScrollPaneFactory.createScrollPane(myTree);
  if (isPreview()) {
    pane.setBorder(IdeBorderFactory.createBorder(SideBorder.LEFT | SideBorder.RIGHT));
    // Split vertically when docked left/right, horizontally when top/bottom.
    boolean vertical = myToolWindow.getAnchor() == ToolWindowAnchor.LEFT || myToolWindow.getAnchor() == ToolWindowAnchor.RIGHT;
    Splitter splitter = new Splitter(vertical, UsageViewSettings.getInstance().getPreviewUsagesSplitterProportion());
    splitter.setFirstComponent(pane);
    myUsagePreviewPanel = new UsagePreviewPanel(myProject, new UsageViewPresentation());
    myUsagePreviewPanel.setBorder(IdeBorderFactory.createBorder(SideBorder.LEFT));
    Disposer.register(this, myUsagePreviewPanel);
    splitter.setSecondComponent(myUsagePreviewPanel);
    add(splitter, BorderLayout.CENTER);
  }
  else {
    pane.setBorder(IdeBorderFactory.createBorder(SideBorder.LEFT));
    add(pane, BorderLayout.CENTER);
  }
  add(createToolbar().getComponent(), BorderLayout.WEST);
  myTree.getParent().setBackground(UIUtil.getTreeBackground());
  revalidate();
}
/**
 * Disposes the panel: persists the preview splitter proportion (read before
 * the panel is torn down), marks the panel disposed so pending callbacks
 * bail out, and unregisters the tree from the tooltip manager.
 */
@Override
public void dispose() {
  if (myUsagePreviewPanel != null) {
    UsageViewSettings.getInstance().setPreviewUsagesSplitterProportion(((Splitter)myUsagePreviewPanel.getParent()).getProportion());
    myUsagePreviewPanel = null;
  }
  isDisposed = true;
  ToolTipManager.sharedInstance().unregisterComponent(myTree);
}
/**
 * Tree cell renderer that delegates to a language-specific renderer for each
 * slice node, falling back to the root renderer for everything else.
 * Renderers are created lazily, one per language provider.
 */
static class MultiLanguageTreeCellRenderer implements TreeCellRenderer {
  @NotNull
  private final SliceUsageCellRendererBase rootRenderer;

  /** Lazily-populated cache of per-provider renderers. */
  @NotNull
  private final Map<SliceLanguageSupportProvider, SliceUsageCellRendererBase> providersToRenderers = new HashMap<>();

  MultiLanguageTreeCellRenderer(@NotNull SliceUsageCellRendererBase rootRenderer) {
    this.rootRenderer = rootRenderer;
    rootRenderer.setOpaque(false);
  }

  /**
   * Picks the renderer for the given tree node: the provider-specific one
   * when the node holds a SliceNode with a provider, otherwise the root
   * renderer.
   */
  @NotNull
  private SliceUsageCellRendererBase getRenderer(Object value) {
    if (!(value instanceof DefaultMutableTreeNode)) return rootRenderer;
    Object userObject = ((DefaultMutableTreeNode)value).getUserObject();
    if (!(userObject instanceof SliceNode)) return rootRenderer;
    SliceLanguageSupportProvider provider = ((SliceNode)userObject).getProvider();
    if (provider == null) return rootRenderer;
    // computeIfAbsent replaces the previous get/null-check/put sequence.
    return providersToRenderers.computeIfAbsent(provider, p -> {
      SliceUsageCellRendererBase renderer = p.getRenderer();
      renderer.setOpaque(false);
      return renderer;
    });
  }

  @Override
  public Component getTreeCellRendererComponent(JTree tree,
                                                Object value,
                                                boolean selected,
                                                boolean expanded,
                                                boolean leaf,
                                                int row,
                                                boolean hasFocus) {
    return getRenderer(value).getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus);
  }
}
/**
 * Creates and configures the slice tree: language-aware renderer, single
 * selection, ENTER-to-navigate keyboard handling, speed search, auto-scroll,
 * and lazy duplicate-node calculation on expansion.
 *
 * @return the configured tree component
 */
@NotNull
private JTree createTree() {
  DefaultMutableTreeNode root = new DefaultMutableTreeNode();
  final Tree tree = new Tree(new DefaultTreeModel(root))/* {
    @Override
    protected void paintComponent(Graphics g) {
      DuplicateNodeRenderer.paintDuplicateNodesBackground(g, this);
      super.paintComponent(g);
    }
  }*/;
  tree.setOpaque(false);
  // -1 disables expand/collapse on double-click, which is reserved for
  // navigating to source (EditSourceOnDoubleClickHandler below).
  tree.setToggleClickCount(-1);
  tree.setCellRenderer(new MultiLanguageTreeCellRenderer(myProvider.getRenderer()));
  tree.setRootVisible(false);
  tree.setShowsRootHandles(true);
  tree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
  tree.setSelectionPath(new TreePath(root.getPath()));
  //ActionGroup group = (ActionGroup)ActionManager.getInstance().getAction(IdeActions.GROUP_METHOD_HIERARCHY_POPUP);
  //PopupHandler.installPopupHandler(tree, group, ActionPlaces.METHOD_HIERARCHY_VIEW_POPUP, ActionManager.getInstance());
  EditSourceOnDoubleClickHandler.install(tree);
  new TreeSpeedSearch(tree);
  TreeUtil.installActions(tree);
  ToolTipManager.sharedInstance().registerComponent(tree);
  myAutoScrollToSourceHandler.install(tree);
  tree.getSelectionModel().addTreeSelectionListener(e -> treeSelectionChanged());
  tree.addKeyListener(new KeyAdapter() {
    @Override
    public void keyPressed(KeyEvent e) {
      // ENTER navigates to the source of every selected navigatable node.
      if (KeyEvent.VK_ENTER == e.getKeyCode()) {
        List<Navigatable> navigatables = getNavigatables();
        if (navigatables.isEmpty()) return;
        for (Navigatable navigatable : navigatables) {
          // Unwrap tree nodes whose value is a Usage so it can be highlighted.
          if (navigatable instanceof AbstractTreeNode && ((AbstractTreeNode)navigatable).getValue() instanceof Usage) {
            navigatable = (Usage)((AbstractTreeNode)navigatable).getValue();
          }
          if (navigatable.canNavigateToSource()) {
            navigatable.navigate(false);
            if (navigatable instanceof Usage) {
              ((Usage)navigatable).highlightInEditor();
            }
          }
        }
        e.consume();
      }
    }
  });
  tree.addTreeWillExpandListener(new TreeWillExpandListener() {
    @Override
    public void treeWillCollapse(TreeExpansionEvent event) {
    }

    @Override
    public void treeWillExpand(TreeExpansionEvent event) {
      // Duplicate detection is deferred until a node is actually expanded.
      TreePath path = event.getPath();
      SliceNode node = fromPath(path);
      node.calculateDupNode();
    }
  });
  return tree;
}
private void treeSelectionChanged() {
SwingUtilities.invokeLater(() -> {
if (isDisposed) return;
List<UsageInfo> infos = getSelectedUsageInfos();
if (infos != null && myUsagePreviewPanel != null) {
myUsagePreviewPanel.updateLayout(infos);
}
});
}
private static SliceNode fromPath(TreePath path) {
Object lastPathComponent = path.getLastPathComponent();
if (lastPathComponent instanceof DefaultMutableTreeNode) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode)lastPathComponent;
Object userObject = node.getUserObject();
if (userObject instanceof SliceNode) {
return (SliceNode)userObject;
}
}
return null;
}
@Nullable
private List<UsageInfo> getSelectedUsageInfos() {
TreePath[] paths = myTree.getSelectionPaths();
if (paths == null) return null;
final ArrayList<UsageInfo> result = new ArrayList<>();
for (TreePath path : paths) {
SliceNode sliceNode = fromPath(path);
if (sliceNode != null) {
result.add(sliceNode.getValue().getUsageInfo());
}
}
if (result.isEmpty()) return null;
return result;
}
@Override
public void calcData(@NotNull DataKey key, @NotNull DataSink sink) {
if (key == CommonDataKeys.NAVIGATABLE_ARRAY) {
List<Navigatable> navigatables = getNavigatables();
if (!navigatables.isEmpty()) {
sink.put(CommonDataKeys.NAVIGATABLE_ARRAY, navigatables.toArray(new Navigatable[0]));
}
}
}
@NotNull
private List<Navigatable> getNavigatables() {
TreePath[] paths = myTree.getSelectionPaths();
if (paths == null) return Collections.emptyList();
final ArrayList<Navigatable> navigatables = new ArrayList<>();
for (TreePath path : paths) {
Object lastPathComponent = path.getLastPathComponent();
if (lastPathComponent instanceof DefaultMutableTreeNode) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode)lastPathComponent;
Object userObject = node.getUserObject();
if (userObject instanceof Navigatable) {
navigatables.add((Navigatable)userObject);
}
else if (node instanceof Navigatable) {
navigatables.add((Navigatable)node);
}
}
}
return navigatables;
}
  /**
   * Builds the vertical panel toolbar: refresh, optional autoscroll and
   * preview toggles, provider-specific extra actions, and export-to-text.
   */
  @NotNull
  private ActionToolbar createToolbar() {
    final DefaultActionGroup actionGroup = new DefaultActionGroup();
    actionGroup.add(new MyRefreshAction(myTree));
    if (isToShowAutoScrollButton()) {
      actionGroup.add(myAutoScrollToSourceHandler.createToggleAction());
    }
    if (isToShowPreviewButton()) {
      // Toggle that shows/hides the usage preview and re-lays-out the panel.
      actionGroup.add(new ToggleAction(UsageViewBundle.message("preview.usages.action.text", "usages"), "preview", AllIcons.Actions.PreviewDetails) {
        @Override
        public boolean isSelected(@NotNull AnActionEvent e) {
          return isPreview();
        }

        @Override
        public void setSelected(@NotNull AnActionEvent e, boolean state) {
          setPreview(state);
          layoutPanel();
        }
      });
    }
    // Let language providers contribute their own actions before export.
    myProvider.registerExtraPanelActions(actionGroup, myBuilder);
    actionGroup.add(CommonActionsManager.getInstance().createExportToTextFileAction(new SliceToTextFileExporter(myBuilder, UsageViewSettings.getInstance())));
    //actionGroup.add(new ContextHelpAction(HELP_ID));
    return ActionManager.getInstance().createActionToolbar(ActionPlaces.TYPE_HIERARCHY_VIEW_TOOLBAR, actionGroup, false);
  }
  /** @return whether the toolbar should offer the autoscroll-to-source toggle. */
  public boolean isToShowAutoScrollButton() {return true;}
  /** @return whether autoscroll-to-source is currently enabled. */
  public abstract boolean isAutoScroll();
  /** Persists the autoscroll-to-source setting. */
  public abstract void setAutoScroll(boolean autoScroll);
  /** @return whether the toolbar should offer the usage-preview toggle. */
  public boolean isToShowPreviewButton() {return true;}
  /** @return whether the usage preview panel is currently shown. */
  public abstract boolean isPreview();
  /** Persists the usage-preview setting. */
  public abstract void setPreview(boolean preview);
protected void close() {
final ProgressIndicator progress = myBuilder.getUi().getProgress();
if (progress != null) {
progress.cancel();
}
}
  /**
   * Toolbar action that marks the root slice node as changed and schedules
   * a rebuild of the whole slice tree. Always enabled.
   */
  private final class MyRefreshAction extends RefreshAction {
    private MyRefreshAction(JComponent tree) {
      super(IdeBundle.message("action.refresh"), IdeBundle.message("action.refresh"), AllIcons.Actions.Refresh);
      registerShortcutOn(tree);
    }

    @Override
    public final void actionPerformed(@NotNull final AnActionEvent e) {
      // Invalidate the cached tree from the root and request an async update.
      SliceNode rootNode = (SliceNode)myBuilder.getRootNode().getUserObject();
      rootNode.setChanged();
      myBuilder.addSubtreeToUpdate(myBuilder.getRootNode());
    }

    @Override
    public final void update(@NotNull final AnActionEvent event) {
      // Refresh is applicable in every state of the slice tree.
      final Presentation presentation = event.getPresentation();
      presentation.setEnabled(true);
    }
  }
  /** Exposes the underlying slice tree builder; intended for tests only. */
  @TestOnly
  public SliceTreeBuilder getBuilder() {
    return myBuilder;
  }
}
| |
/*
* $Id$
*
* SARL is an general-purpose agent programming language.
* More details on http://www.sarl.io
*
* Copyright (C) 2014-2021 the original authors or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.sarl.lang.scoping.extensions.numbers.arithmetic;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.eclipse.xtext.xbase.lib.Inline;
import org.eclipse.xtext.xbase.lib.Pure;
/** Provide static operators for numbers of type {@code long}.
*
* @author $Author: sgalland$
* @version $FullVersion$
* @mavengroupid $GroupId$
* @mavenartifactid $ArtifactId$
* @since 0.7
* @see "https://github.com/eclipse/xtext-extras/issues/186"
*/
@SuppressWarnings("checkstyle:methodcount")
public final class PrimitiveLongArithmeticExtensions {

    private PrimitiveLongArithmeticExtensions() {
        //
    }

    /** The binary {@code minus} operator. This is the equivalent to
     * the Java {@code -} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left-right}
     */
    @Pure
    @Inline(value = "($1 - $2.doubleValue())", constantExpression = true)
    public static double operator_minus(long left, Number right) {
        return left - right.doubleValue();
    }

    /** The binary {@code minus} operator. This is the equivalent to
     * the Java {@code -} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left-right}
     */
    @Pure
    @Inline(value = "($1 - $2.longValue())", constantExpression = true)
    public static long operator_minus(long left, Long right) {
        return left - right.longValue();
    }

    /** The binary {@code minus} operator. This is the equivalent to
     * the Java {@code -} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left-right}
     */
    @Pure
    @Inline(value = "($1 - $2.byteValue())", constantExpression = true)
    public static long operator_minus(long left, Byte right) {
        return left - right.byteValue();
    }

    /** The binary {@code minus} operator. This is the equivalent to
     * the Java {@code -} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left-right}
     */
    @Pure
    @Inline(value = "($1 - $2.floatValue())", constantExpression = true)
    public static float operator_minus(long left, Float right) {
        return left - right.floatValue();
    }

    /** The binary {@code minus} operator. This is the equivalent to
     * the Java {@code -} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left-right}
     */
    @Pure
    @Inline(value = "($1 - $2.intValue())", constantExpression = true)
    public static long operator_minus(long left, Integer right) {
        return left - right.intValue();
    }

    /** The binary {@code minus} operator. This is the equivalent to
     * the Java {@code -} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left-right}
     */
    @Pure
    @Inline(value = "($1 - $2.shortValue())", constantExpression = true)
    public static long operator_minus(long left, Short right) {
        return left - right.shortValue();
    }

    /** The binary {@code minus} operator. This is the equivalent to
     * the Java {@code -} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left-right}
     */
    @Pure
    @Inline(value = "($1 - $2.intValue())", constantExpression = true)
    public static long operator_minus(long left, AtomicInteger right) {
        return left - right.intValue();
    }

    /** The binary {@code minus} operator. This is the equivalent to
     * the Java {@code -} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left-right}
     */
    @Pure
    @Inline(value = "($1 - $2.longValue())", constantExpression = true)
    public static long operator_minus(long left, AtomicLong right) {
        return left - right.longValue();
    }

    /** The binary {@code plus} operator. This is the equivalent to
     * the Java {@code +} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left+right}
     */
    @Pure
    @Inline(value = "($1 + $2.longValue())", constantExpression = true)
    public static long operator_plus(long left, Long right) {
        return left + right.longValue();
    }

    /** The binary {@code plus} operator. This is the equivalent to
     * the Java {@code +} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left+right}
     */
    @Pure
    @Inline(value = "($1 + $2.byteValue())", constantExpression = true)
    public static long operator_plus(long left, Byte right) {
        return left + right.byteValue();
    }

    /** The binary {@code plus} operator. This is the equivalent to
     * the Java {@code +} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left+right}
     */
    @Pure
    @Inline(value = "($1 + $2.floatValue())", constantExpression = true)
    public static float operator_plus(long left, Float right) {
        return left + right.floatValue();
    }

    /** The binary {@code plus} operator. This is the equivalent to
     * the Java {@code +} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left+right}
     */
    @Pure
    @Inline(value = "($1 + $2.intValue())", constantExpression = true)
    public static long operator_plus(long left, Integer right) {
        return left + right.intValue();
    }

    /** The binary {@code plus} operator. This is the equivalent to
     * the Java {@code +} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left+right}
     */
    @Pure
    @Inline(value = "($1 + $2.shortValue())", constantExpression = true)
    public static long operator_plus(long left, Short right) {
        return left + right.shortValue();
    }

    /** The binary {@code plus} operator. This is the equivalent to
     * the Java {@code +} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left+right}
     */
    @Pure
    @Inline(value = "($1 + $2.intValue())", constantExpression = true)
    public static long operator_plus(long left, AtomicInteger right) {
        return left + right.intValue();
    }

    /** The binary {@code plus} operator. This is the equivalent to
     * the Java {@code +} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left+right}
     */
    @Pure
    @Inline(value = "($1 + $2.longValue())", constantExpression = true)
    public static long operator_plus(long left, AtomicLong right) {
        return left + right.longValue();
    }

    /** The binary {@code plus} operator. This is the equivalent to
     * the Java {@code +} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left+right}
     */
    @Pure
    @Inline(value = "($1 + $2.doubleValue())", constantExpression = true)
    public static double operator_plus(long left, Number right) {
        return left + right.doubleValue();
    }

    /** The binary {@code power} operator. This is the equivalent to
     * Java's {@code Math.pow()} function. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code Math::pow(left, right)}
     */
    @Pure
    @Inline(value = "$3.pow($1, $2.doubleValue())", imported = Math.class)
    public static double operator_power(long left, Number right) {
        return Math.pow(left, right.doubleValue());
    }

    /** The binary {@code divide} operator. This is the equivalent to
     * the Java {@code /} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left/right}
     */
    @Pure
    @Inline(value = "($1 / $2.longValue())", constantExpression = true)
    public static long operator_divide(long left, Long right) {
        return left / right.longValue();
    }

    /** The binary {@code divide} operator. This is the equivalent to
     * the Java {@code /} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left/right}
     */
    @Pure
    @Inline(value = "($1 / $2.byteValue())", constantExpression = true)
    public static long operator_divide(long left, Byte right) {
        return left / right.byteValue();
    }

    /** The binary {@code divide} operator. This is the equivalent to
     * the Java {@code /} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left/right}
     */
    @Pure
    @Inline(value = "($1 / $2.floatValue())", constantExpression = true)
    public static float operator_divide(long left, Float right) {
        return left / right.floatValue();
    }

    /** The binary {@code divide} operator. This is the equivalent to
     * the Java {@code /} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left/right}
     */
    @Pure
    @Inline(value = "($1 / $2.intValue())", constantExpression = true)
    public static long operator_divide(long left, Integer right) {
        return left / right.intValue();
    }

    /** The binary {@code divide} operator. This is the equivalent to
     * the Java {@code /} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left/right}
     */
    @Pure
    @Inline(value = "($1 / $2.doubleValue())", constantExpression = true)
    public static double operator_divide(long left, Number right) {
        return left / right.doubleValue();
    }

    /** The binary {@code divide} operator. This is the equivalent to
     * the Java {@code /} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left/right}
     */
    @Pure
    @Inline(value = "($1 / $2.shortValue())", constantExpression = true)
    public static long operator_divide(long left, Short right) {
        return left / right.shortValue();
    }

    /** The binary {@code divide} operator. This is the equivalent to
     * the Java {@code /} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left/right}
     */
    @Pure
    @Inline(value = "($1 / $2.intValue())", constantExpression = true)
    public static long operator_divide(long left, AtomicInteger right) {
        return left / right.intValue();
    }

    /** The binary {@code divide} operator. This is the equivalent to
     * the Java {@code /} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left/right}
     */
    @Pure
    @Inline(value = "($1 / $2.longValue())", constantExpression = true)
    public static long operator_divide(long left, AtomicLong right) {
        return left / right.longValue();
    }

    /** The binary {@code multiply} operator. This is the equivalent to
     * the Java {@code *} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left*right}
     */
    @Pure
    @Inline(value = "($1 * $2.longValue())", constantExpression = true)
    public static long operator_multiply(long left, Long right) {
        return left * right.longValue();
    }

    /** The binary {@code multiply} operator. This is the equivalent to
     * the Java {@code *} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left*right}
     */
    @Pure
    @Inline(value = "($1 * $2.byteValue())", constantExpression = true)
    public static long operator_multiply(long left, Byte right) {
        return left * right.byteValue();
    }

    /** The binary {@code multiply} operator. This is the equivalent to
     * the Java {@code *} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left*right}
     */
    @Pure
    @Inline(value = "($1 * $2.floatValue())", constantExpression = true)
    public static float operator_multiply(long left, Float right) {
        return left * right.floatValue();
    }

    /** The binary {@code multiply} operator. This is the equivalent to
     * the Java {@code *} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left*right}
     */
    @Pure
    @Inline(value = "($1 * $2.intValue())", constantExpression = true)
    public static long operator_multiply(long left, Integer right) {
        return left * right.intValue();
    }

    /** The binary {@code multiply} operator. This is the equivalent to
     * the Java {@code *} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left*right}
     */
    @Pure
    @Inline(value = "($1 * $2.doubleValue())", constantExpression = true)
    public static double operator_multiply(long left, Number right) {
        return left * right.doubleValue();
    }

    /** The binary {@code multiply} operator. This is the equivalent to
     * the Java {@code *} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left*right}
     */
    @Pure
    @Inline(value = "($1 * $2.shortValue())", constantExpression = true)
    public static long operator_multiply(long left, Short right) {
        return left * right.shortValue();
    }

    /** The binary {@code multiply} operator. This is the equivalent to
     * the Java {@code *} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left*right}
     */
    @Pure
    @Inline(value = "($1 * $2.intValue())", constantExpression = true)
    public static long operator_multiply(long left, AtomicInteger right) {
        return left * right.intValue();
    }

    /** The binary {@code multiply} operator. This is the equivalent to
     * the Java {@code *} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left*right}
     */
    @Pure
    @Inline(value = "($1 * $2.longValue())", constantExpression = true)
    public static long operator_multiply(long left, AtomicLong right) {
        return left * right.longValue();
    }

    /** The binary {@code modulo} operator. This is the equivalent to
     * the Java {@code %} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left%right}
     */
    @Pure
    @Inline(value = "($1 % $2.longValue())", constantExpression = true)
    public static long operator_modulo(long left, Long right) {
        return left % right.longValue();
    }

    /** The binary {@code modulo} operator. This is the equivalent to
     * the Java {@code %} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left%right}
     */
    @Pure
    @Inline(value = "($1 % $2.byteValue())", constantExpression = true)
    public static long operator_modulo(long left, Byte right) {
        return left % right.byteValue();
    }

    /** The binary {@code modulo} operator. This is the equivalent to
     * the Java {@code %} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left%right}
     */
    @Pure
    @Inline(value = "($1 % $2.floatValue())", constantExpression = true)
    public static float operator_modulo(long left, Float right) {
        return left % right.floatValue();
    }

    /** The binary {@code modulo} operator. This is the equivalent to
     * the Java {@code %} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left%right}
     */
    @Pure
    @Inline(value = "($1 % $2.intValue())", constantExpression = true)
    public static long operator_modulo(long left, Integer right) {
        return left % right.intValue();
    }

    /** The binary {@code modulo} operator. This is the equivalent to
     * the Java {@code %} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left%right}
     */
    @Pure
    @Inline(value = "($1 % $2.doubleValue())", constantExpression = true)
    public static double operator_modulo(long left, Number right) {
        return left % right.doubleValue();
    }

    /** The binary {@code modulo} operator. This is the equivalent to
     * the Java {@code %} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left%right}
     */
    @Pure
    @Inline(value = "($1 % $2.shortValue())", constantExpression = true)
    public static long operator_modulo(long left, Short right) {
        return left % right.shortValue();
    }

    /** The binary {@code modulo} operator. This is the equivalent to
     * the Java {@code %} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left%right}
     */
    @Pure
    @Inline(value = "($1 % $2.intValue())", constantExpression = true)
    public static long operator_modulo(long left, AtomicInteger right) {
        return left % right.intValue();
    }

    /** The binary {@code modulo} operator. This is the equivalent to
     * the Java {@code %} operator. This function is not null-safe.
     *
     * @param left a number.
     * @param right a number.
     * @return {@code left%right}
     */
    @Pure
    @Inline(value = "($1 % $2.longValue())", constantExpression = true)
    public static long operator_modulo(long left, AtomicLong right) {
        return left % right.longValue();
    }
}
| |
/*
* Copyright (c) 2011, the Last.fm Java Project and Committers
* All rights reserved.
*
* Redistribution and use of this software in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package de.umass.lastfm;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import de.umass.util.StringUtilities;
import de.umass.xml.DomElement;
/**
 * Bean for Tag data and provides methods for global tags.
 *
 * @author Janni Kovacs
 */
public class Tag implements Comparable<Tag> {

    /**
     * Implementation of {@link ItemFactory} for this class
     */
    static final ItemFactory<Tag> FACTORY = new TagFactory();

    // Date pattern used by last.fm for the wiki "published" element.
    // SimpleDateFormat is NOT thread-safe, so a fresh instance is created per
    // parse instead of sharing one static formatter across threads.
    private static final String WIKI_DATE_PATTERN = "EEE, dd MMM yyyy HH:mm:ss ZZZZ";

    private String name;
    private String url;
    private int count;
    private boolean streamable;
    private int reach;
    private Date wikiLastChanged;
    private String wikiSummary;
    private String wikiText;

    private Tag(String name) {
        this.name = name;
    }

    /** Returns the number of taggings of this tag. */
    public int getCount() {
        return count;
    }

    /**
     * Returns the number of taggings of this specific tag. Alias for {@link #getCount()}.
     *
     * @return Number of Taggings
     * @see Tag#getInfo(String, String)
     */
    public int getTaggings() {
        return count;
    }

    /** Returns the tag name. */
    public String getName() {
        return name;
    }

    /** Returns the last.fm URL of this tag. */
    public String getUrl() {
        return url;
    }

    /** Returns whether this tag is streamable. */
    public boolean isStreamable() {
        return streamable;
    }

    /** Returns the reach (number of distinct users) of this tag. */
    public int getReach() {
        return reach;
    }

    /** Returns the date the wiki entry was last changed, or {@code null} if unknown. */
    public Date getWikiLastChanged() {
        return wikiLastChanged;
    }

    /** Returns the wiki summary, or {@code null} if not loaded. */
    public String getWikiSummary() {
        return wikiSummary;
    }

    /** Returns the full wiki text, or {@code null} if not loaded. */
    public String getWikiText() {
        return wikiText;
    }

    /**
     * Returns the sum of all <code>count</code> elements in the results.
     *
     * @param tags a list of tags
     * @return the total count of all tags
     */
    public static long getTagCountSum(Collection<Tag> tags) {
        long total = 0;
        for (Tag topTag : tags) {
            total += topTag.count;
        }
        return total;
    }

    /**
     * Filters tags from the given list; retains only those tags with a count
     * higher than the given percentage of the total sum as from
     * {@link #getTagCountSum(Collection)}.
     *
     * @param tags list of tags
     * @param percentage cut off percentage
     * @return the filtered list of tags
     */
    public static List<Tag> filter(Collection<Tag> tags, double percentage) {
        ArrayList<Tag> tops = new ArrayList<Tag>();
        long total = getTagCountSum(tags);
        double cutOff = total / 100.0 * percentage;
        for (Tag tag : tags) {
            if (tag.count > cutOff) {
                tops.add(tag);
            }
        }
        return tops;
    }

    /**
     * Search for tags similar to this one. Returns tags ranked by similarity, based on listening data.
     *
     * @param tag The tag name
     * @param apiKey A Last.fm API key
     * @return a List of <code>Tag</code>s
     */
    public static Collection<Tag> getSimilar(String tag, String apiKey) {
        Result result = Caller.getInstance().call("tag.getSimilar", apiKey, "tag", tag);
        return ResponseBuilder.buildCollection(result, Tag.class);
    }

    /** Fetches the global top tags. */
    public static Collection<Tag> getTopTags(String apiKey) {
        Result result = Caller.getInstance().call("tag.getTopTags", apiKey);
        return ResponseBuilder.buildCollection(result, Tag.class);
    }

    /** Fetches the top albums tagged with the given tag. */
    public static Collection<Album> getTopAlbums(String tag, String apiKey) {
        Result result = Caller.getInstance().call("tag.getTopAlbums", apiKey, "tag", tag);
        return ResponseBuilder.buildCollection(result, Album.class);
    }

    /** Fetches the top tracks tagged with the given tag. */
    public static Collection<Track> getTopTracks(String tag, String apiKey) {
        Result result = Caller.getInstance().call("tag.getTopTracks", apiKey, "tag", tag);
        return ResponseBuilder.buildCollection(result, Track.class);
    }

    /** Fetches the top artists tagged with the given tag. */
    public static Collection<Artist> getTopArtists(String tag, String apiKey) {
        Result result = Caller.getInstance().call("tag.getTopArtists", apiKey, "tag", tag);
        return ResponseBuilder.buildCollection(result, Artist.class);
    }

    /** Searches for tags by name with the default result limit of 30. */
    public static Collection<Tag> search(String tag, String apiKey) {
        return search(tag, 30, apiKey);
    }

    /**
     * Searches for tags by name.
     *
     * @param tag The tag name to search for
     * @param limit maximum number of results
     * @param apiKey A Last.fm API key
     * @return the matching tags
     */
    public static Collection<Tag> search(String tag, int limit, String apiKey) {
        Result result = Caller.getInstance().call("tag.search", apiKey, "tag", tag, "limit", String.valueOf(limit));
        Collection<DomElement> children = result.getContentElement().getChild("tagmatches").getChildren("tag");
        List<Tag> tags = new ArrayList<Tag>(children.size());
        for (DomElement s : children) {
            tags.add(FACTORY.createItemFromElement(s));
        }
        return tags;
    }

    /** Fetches the weekly artist chart for this tag (full date range, no limit). */
    public static Chart<Artist> getWeeklyArtistChart(String tag, String apiKey) {
        return getWeeklyArtistChart(tag, null, null, -1, apiKey);
    }

    /** Fetches the weekly artist chart for this tag, limited to {@code limit} entries. */
    public static Chart<Artist> getWeeklyArtistChart(String tag, int limit, String apiKey) {
        return getWeeklyArtistChart(tag, null, null, limit, apiKey);
    }

    /** Fetches the weekly artist chart for this tag within the given date range. */
    public static Chart<Artist> getWeeklyArtistChart(String tag, String from, String to, int limit, String apiKey) {
        return Chart.getChart("tag.getWeeklyArtistChart", "tag", tag, "artist", from, to, limit, apiKey);
    }

    /** Returns the available weekly chart date ranges for this tag. */
    public static LinkedHashMap<String, String> getWeeklyChartList(String tag, String apiKey) {
        return Chart.getWeeklyChartList("tag.getWeeklyChartList", "tag", tag, apiKey);
    }

    /** Returns the available weekly chart date ranges as {@link Chart} objects. */
    @SuppressWarnings("rawtypes")
    public static Collection<Chart> getWeeklyChartListAsCharts(String tag, String apiKey) {
        return Chart.getWeeklyChartListAsCharts("tag", tag, apiKey);
    }

    /**
     * Gets the metadata for a tag.
     *
     * @param tag The tag name
     * @param apiKey A Last.fm API key
     * @return Tag metdata such as Wiki Text, reach and tag count
     */
    public static Tag getInfo(String tag, String apiKey) {
        return getInfo(tag, null, apiKey);
    }

    /**
     * Gets the metadata for a tag.
     *
     * @param tag The tag name
     * @param locale The language to fetch info in, or <code>null</code>
     * @param apiKey A Last.fm API key
     * @return Tag metdata such as Wiki Text, reach and tag count
     */
    public static Tag getInfo(String tag, Locale locale, String apiKey) {
        Map<String, String> params = new HashMap<String, String>();
        params.put("tag", tag);
        if (locale != null && locale.getLanguage().length() != 0) {
            params.put("lang", locale.getLanguage());
        }
        Result result = Caller.getInstance().call("tag.getInfo", apiKey, params);
        return ResponseBuilder.buildItem(result, Tag.class);
    }

    /** Orders tags by descending count. */
    public int compareTo(Tag o) {
        // descending order; counts are ints, so compare as ints
        return Integer.compare(o.getCount(), this.getCount());
    }

    /**
     * This implementation of {@link ItemFactory} creates {@link Tag} objects based on the passed xml element.
     *
     * @see Tag
     * @see Tag#FACTORY
     */
    private static class TagFactory implements ItemFactory<Tag> {
        public Tag createItemFromElement(DomElement element) {
            Tag t = new Tag(element.getChildText("name"));
            t.url = element.getChildText("url");
            if (element.hasChild("count"))
                t.count = Integer.parseInt(element.getChildText("count"));
            else if (element.hasChild("taggings"))
                t.count = Integer.parseInt(element.getChildText("taggings"));
            if (element.hasChild("reach"))
                t.reach = Integer.parseInt(element.getChildText("reach"));
            if (element.hasChild("streamable"))
                t.streamable = StringUtilities.convertToBoolean(element.getChildText("streamable"));
            // wiki
            DomElement wiki = element.getChild("wiki");
            if (wiki != null) {
                String publishedText = wiki.getChildText("published");
                if (publishedText != null) {
                    try {
                        // fresh formatter per parse: SimpleDateFormat is not thread-safe
                        t.wikiLastChanged = new SimpleDateFormat(WIKI_DATE_PATTERN, Locale.ENGLISH).parse(publishedText);
                    } catch (ParseException e) {
                        // try parsing it with current locale
                        try {
                            DateFormat clFormat = new SimpleDateFormat(WIKI_DATE_PATTERN, Locale.getDefault());
                            t.wikiLastChanged = clFormat.parse(publishedText);
                        } catch (ParseException e2) {
                            // cannot parse date, wrong locale. wait for last.fm to fix.
                        }
                    }
                }
                t.wikiSummary = wiki.getChildText("summary");
                t.wikiText = wiki.getChildText("content");
            }
            return t;
        }
    }
}
| |
package org.motechproject.admin.service.impl;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.motechproject.admin.bundles.BundleDirectoryManager;
import org.motechproject.admin.bundles.ExtendedBundleInformation;
import org.motechproject.admin.bundles.ImportExportResolver;
import org.motechproject.admin.bundles.MotechBundleFilter;
import org.motechproject.admin.ex.BundleNotFoundException;
import org.motechproject.admin.service.ModuleAdminService;
import org.motechproject.commons.api.MotechException;
import org.motechproject.server.api.BundleIcon;
import org.motechproject.server.api.BundleInformation;
import org.motechproject.server.api.JarInformation;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.List;
import static org.motechproject.server.api.BundleIcon.ICON_LOCATIONS;
@Service
public class ModuleAdminServiceImpl implements ModuleAdminService {

    private static final Logger LOG = LoggerFactory.getLogger(ModuleAdminServiceImpl.class);

    private static final String DEFAULT_ICON = "/bundle_icon.png";

    @Autowired
    private BundleContext bundleContext;

    @Autowired
    private BundleDirectoryManager bundleDirectoryManager;

    @Autowired
    private ImportExportResolver importExportResolver;

    @Autowired
    private MotechBundleFilter motechBundleFilter;

    /**
     * Returns information about all installed bundles that pass the
     * {@link MotechBundleFilter} criteria.
     */
    @Override
    public List<BundleInformation> getBundles() {
        List<BundleInformation> bundles = new ArrayList<>();
        List<Bundle> motechBundles = motechBundleFilter.filter(bundleContext.getBundles());
        for (Bundle bundle : motechBundles) {
            bundles.add(new BundleInformation(bundle));
        }
        return bundles;
    }

    /**
     * Returns basic information about the bundle with the given id.
     *
     * @throws BundleNotFoundException if no matching MOTECH bundle exists
     */
    @Override
    public BundleInformation getBundleInfo(long bundleId) {
        Bundle bundle = getBundle(bundleId);
        return new BundleInformation(bundle);
    }

    /**
     * Stops the bundle and returns its post-stop state.
     *
     * @throws BundleException if the OSGi framework fails to stop the bundle
     */
    @Override
    public BundleInformation stopBundle(long bundleId) throws BundleException {
        Bundle bundle = getBundle(bundleId);
        bundle.stop();
        return new BundleInformation(bundle);
    }

    /**
     * Starts the bundle and returns its post-start state.
     *
     * @throws BundleException if the OSGi framework fails to start the bundle
     */
    @Override
    public BundleInformation startBundle(long bundleId) throws BundleException {
        Bundle bundle = getBundle(bundleId);
        bundle.start();
        return new BundleInformation(bundle);
    }

    /**
     * Stops then starts the bundle, returning its state after the restart.
     *
     * @throws BundleException if either the stop or the start fails
     */
    @Override
    public BundleInformation restartBundle(long bundleId) throws BundleException {
        Bundle bundle = getBundle(bundleId);
        bundle.stop();
        bundle.start();
        return new BundleInformation(bundle);
    }

    /**
     * Uninstalls the bundle from the OSGi framework.
     *
     * @throws BundleException if the uninstall fails
     */
    @Override
    public void uninstallBundle(long bundleId) throws BundleException {
        Bundle bundle = getBundle(bundleId);
        bundle.uninstall();
    }

    /**
     * Loads the bundle's icon from the first of the well-known
     * {@code ICON_LOCATIONS} present in the bundle, falling back to the
     * default icon shipped with this module.
     */
    @Override
    public BundleIcon getBundleIcon(long bundleId) {
        BundleIcon bundleIcon = null;
        Bundle bundle = getBundle(bundleId);
        for (String iconLocation : ICON_LOCATIONS) {
            URL iconURL = bundle.getResource(iconLocation);
            if (iconURL != null) {
                bundleIcon = loadBundleIcon(iconURL);
                break;
            }
        }
        if (bundleIcon == null) {
            // the bundle ships no icon - serve the generic one
            URL defaultIconURL = getClass().getResource(DEFAULT_ICON);
            bundleIcon = loadBundleIcon(defaultIconURL);
        }
        return bundleIcon;
    }

    /** Installs the uploaded bundle and starts it immediately. */
    @Override
    public BundleInformation installBundle(MultipartFile bundleFile) {
        return installBundle(bundleFile, true);
    }

    /**
     * Saves the uploaded file to the bundle directory, then installs it as a
     * new bundle or updates the already-installed bundle with the same
     * symbolic name and version. The bundle is started or stopped according
     * to {@code startBundle}.
     *
     * @throws MotechException if saving, installing or (re)starting fails;
     *         the saved file is removed in that case
     */
    @Override
    public BundleInformation installBundle(MultipartFile bundleFile, boolean startBundle) {
        File savedBundleFile = null;
        InputStream bundleInputStream = null;
        try {
            savedBundleFile = bundleDirectoryManager.saveBundleFile(bundleFile);
            bundleInputStream = FileUtils.openInputStream(savedBundleFile);
            JarInformation jarInformation = new JarInformation(savedBundleFile);
            Bundle bundle = findMatchingBundle(jarInformation);
            if (bundle == null) {
                bundle = bundleContext.installBundle(savedBundleFile.getAbsolutePath(), bundleInputStream);
            } else {
                // same symbolic name + version already installed: update in place
                bundle.update(bundleInputStream);
            }
            if (startBundle) {
                bundle.start();
            } else {
                bundle.stop();
            }
            return new BundleInformation(bundle);
        } catch (Exception e) {
            if (savedBundleFile != null) {
                LOG.error("Removing bundle due to exception", e);
                // deleteQuietly instead of File.delete(), whose ignored return
                // value previously hid cleanup failures
                FileUtils.deleteQuietly(savedBundleFile);
            }
            throw new MotechException("Cannot install file", e);
        } finally {
            IOUtils.closeQuietly(bundleInputStream);
        }
    }

    /**
     * Returns extended information (including resolved imports/exports) for
     * the bundle with the given id.
     */
    @Override
    public ExtendedBundleInformation getBundleDetails(long bundleId) {
        Bundle bundle = getBundle(bundleId);
        ExtendedBundleInformation bundleInfo = new ExtendedBundleInformation(bundle);
        importExportResolver.resolveBundleWiring(bundleInfo);
        return bundleInfo;
    }

    /**
     * Reads the icon bytes and MIME type from the given URL.
     *
     * @throws MotechException if the icon cannot be read
     */
    private BundleIcon loadBundleIcon(URL iconURL) {
        InputStream is = null;
        try {
            URLConnection urlConn = iconURL.openConnection();
            is = urlConn.getInputStream();
            String mime = urlConn.getContentType();
            byte[] image = IOUtils.toByteArray(is);
            return new BundleIcon(image, mime);
        } catch (IOException e) {
            throw new MotechException("Error loading icon", e);
        } finally {
            IOUtils.closeQuietly(is);
        }
    }

    /**
     * Resolves a bundle id to a MOTECH bundle.
     *
     * @throws BundleNotFoundException if the id is unknown or the bundle is
     *         not a MOTECH bundle
     */
    private Bundle getBundle(long bundleId) {
        Bundle bundle = bundleContext.getBundle(bundleId);
        if (bundle == null || !motechBundleFilter.passesCriteria(bundle)) {
            throw new BundleNotFoundException("Bundle with id [" + bundleId + "] not found");
        }
        return bundle;
    }

    /**
     * Finds an installed bundle with the same symbolic name and version as
     * the given jar, or null if none exists (or the jar carries no symbolic
     * name / version, i.e. is not a valid OSGi bundle).
     */
    private Bundle findMatchingBundle(JarInformation jarInformation) {
        String symbolicName = jarInformation.getBundleSymbolicName();
        String version = jarInformation.getBundleVersion();
        if (symbolicName == null || version == null) {
            // not an OSGi bundle jar - nothing can match
            return null;
        }
        Bundle result = null;
        for (Bundle bundle : bundleContext.getBundles()) {
            // Compare from the known-non-null side: Bundle.getSymbolicName()
            // and the Bundle-Version header may be null, which previously
            // caused a NullPointerException here.
            if (symbolicName.equals(bundle.getSymbolicName())
                    && version.equals(bundle.getHeaders().get(JarInformation.BUNDLE_VERSION))) {
                result = bundle;
                break;
            }
        }
        return result;
    }
}
| |
package com.ajlopez.ajtalk.compiler;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import com.ajlopez.ajtalk.compiler.ast.*;
/**
 * Recursive-descent parser for a Smalltalk-like language. Consumes tokens
 * from a {@link Lexer} and produces AST nodes. A single-token pushback
 * buffer ({@code next}) provides one token of lookahead.
 */
public class Parser {
    // Token source.
    private Lexer lexer;
    // One-token pushback buffer; non-null when a token has been "un-read".
    private Token next;

    /** Convenience constructor: parses directly from source text. */
    public Parser(String text) {
        this(new Lexer(text));
    }

    public Parser(Lexer lexer) {
        this.lexer = lexer;
    }

    /**
     * Parses a method definition: a selector in unary, binary or keyword
     * form, optional argument names, optional local ("temporary")
     * declarations, and the method body expression.
     *
     * @return the parsed method, or null if the input is exhausted
     * @throws ParserException on an unexpected token
     */
    public MethodNode parseMethodNode() throws ParserException, IOException, LexerException {
        Token token = this.nextToken();
        if (token == null)
            return null;
        // Unary selector: no arguments.
        if (token.getType() == TokenType.ID) {
            String selector = token.getValue();
            String[] locals = this.parseLocalNames();
            Node expr = this.parseExpressionNode();
            return new MethodNode(selector, null, locals, expr);
        }
        // Binary selector: exactly one argument.
        if (token.getType() == TokenType.BINSELECTOR) {
            String selector = token.getValue();
            String[] arguments = new String[] { this.parseId() };
            String[] locals = this.parseLocalNames();
            Node expr = this.parseExpressionNode();
            return new MethodNode(selector, arguments, locals, expr);
        }
        // Keyword selector: one argument per keyword part; the full selector
        // is the concatenation of the keyword tokens.
        if (token.getType() == TokenType.KEYSELECTOR) {
            String selector = token.getValue();
            List<String> ids = new ArrayList<String>();
            ids.add(this.parseId());
            for (token = this.nextToken(); token != null && token.getType() == TokenType.KEYSELECTOR; token = this.nextToken()) {
                selector += token.getValue();
                ids.add(this.parseId());
            }
            // Push back the first non-keyword token for the body parse.
            this.pushToken(token);
            String[] arguments = new String[ids.size()];
            arguments = ids.toArray(arguments);
            String[] locals = this.parseLocalNames();
            Node expr = this.parseExpressionNode();
            return new MethodNode(selector, arguments, locals, expr);
        }
        throw new ParserException("Unexpected '" + token.getValue() + "'");
    }

    /**
     * Parses an expression, possibly a sequence of '.'-separated expressions
     * and/or preceded by primitive declarations. A single expression is
     * returned as-is; multiple expressions are wrapped in a
     * {@link CompositeExpressionNode}.
     *
     * @return the expression, or null if the input is exhausted
     */
    public Node parseExpressionNode() throws ParserException, IOException, LexerException {
        Token token = this.nextToken();
        if (token == null)
            return null;
        this.pushToken(token);
        Node node = this.parseKeywordExpression();
        List<Node> list = null;
        // Collect leading primitive declarations (<primitive: n>).
        // NOTE(review): when list is already non-null, subsequent
        // PrimitiveNodes are never added to it and appear to be dropped -
        // confirm whether more than one leading primitive is legal here.
        while (node != null && node instanceof PrimitiveNode) {
            if (list == null) {
                list = new ArrayList<Node>();
                list.add(node);
            }
            node = this.parseKeywordExpression();
        }
        // NOTE(review): if node and list were both null this would throw a
        // NullPointerException; it looks unreachable because the first
        // parseKeywordExpression() call sees the pushed-back non-null token,
        // but confirm.
        if (node == null && list.size() == 1)
            return list.get(0);
        if (list != null && node != null)
            list.add(node);
        if (node != null) {
            // Keep consuming '.'-separated expressions until a non-'.' token.
            for (token = this.nextToken(); token != null && token.getType() == TokenType.SEPARATOR && token.getValue().equals("."); token = this.nextToken()) {
                if (list == null) {
                    list = new ArrayList<Node>();
                    list.add(node);
                }
                node = this.parseKeywordExpression();
                if (node == null)
                    break;
                list.add(node);
            }
        }
        // NOTE(review): when node == null the loop above was skipped and
        // token still holds a token that was pushed back and consumed earlier
        // in this method; re-pushing it here may inject a stale token -
        // confirm against the lexer's expectations.
        this.pushToken(token);
        if (list == null)
            return node;
        Node[] nodes = new Node[list.size()];
        nodes = list.toArray(nodes);
        return new CompositeExpressionNode(nodes);
    }

    /**
     * Parses a keyword message send (lowest-precedence message form): a
     * receiver followed by one or more 'keyword: argument' pairs. The full
     * selector is the concatenation of the keyword tokens.
     */
    private Node parseKeywordExpression() throws ParserException, IOException, LexerException {
        Node expression = this.parseBinaryExpression();
        Token token = null;
        List<Node> arguments = null;
        String selector = null;
        for (token = this.nextToken(); token != null && token.getType() == TokenType.KEYSELECTOR; token = this.nextToken()) {
            if (selector == null) {
                arguments = new ArrayList<Node>();
                selector = token.getValue();
            }
            else
                selector += token.getValue();
            arguments.add(this.parseBinaryExpression());
        }
        // Push back the token that terminated the keyword chain.
        this.pushToken(token);
        if (selector != null) {
            Node[] args = new Node[arguments.size()];
            expression = new KeywordMessageNode(expression, selector, arguments.toArray(args));
        }
        return expression;
    }

    /**
     * Parses a chain of left-associative binary message sends
     * (e.g. {@code a + b * c} parses as {@code (a + b) * c}).
     */
    private Node parseBinaryExpression() throws ParserException, IOException, LexerException {
        Node expression = this.parseUnaryExpression();
        Token token = null;
        for (token = this.nextToken(); token != null && token.getType() == TokenType.BINSELECTOR; token = this.nextToken())
            expression = new BinaryMessageNode(expression, token.getValue(), this.parseUnaryExpression());
        this.pushToken(token);
        return expression;
    }

    /** Parses a term followed by any number of unary message sends. */
    private Node parseUnaryExpression() throws ParserException, IOException, LexerException {
        Node expression = this.parseTerm();
        Token token = null;
        for (token = this.nextToken(); token != null && token.getType() == TokenType.ID; token = this.nextToken())
            expression = new UnaryMessageNode(expression, token.getValue());
        this.pushToken(token);
        return expression;
    }

    /**
     * Parses a primary term: a literal, an identifier (possibly an
     * assignment via ':='), a return '^', a parenthesized expression, a
     * block '[...]', a literal array '#(...)', an expression array '{...}'
     * or a primitive declaration '<primitive: n>'.
     */
    private Node parseTerm() throws ParserException, IOException, LexerException
    {
        Token token = this.nextToken();
        if (token == null)
            return null;
        switch (token.getType()) {
        case INTEGER:
            return new IntegerNode(Integer.parseInt(token.getValue()));
        case STRING:
            return new StringNode(token.getValue());
        case SYMBOL:
            return new SymbolNode(token.getValue());
        case CHARACTER:
            return new CharacterNode(token.getValue().charAt(0));
        case ID:
            String name = token.getValue();
            token = this.nextToken();
            // 'name := expr' is an assignment; otherwise a plain reference.
            if (token != null && token.getType() == TokenType.BINSELECTOR && token.getValue().equals(":="))
                return new AssignmentNode(name, this.parseKeywordExpression());
            this.pushToken(token);
            return new IdNode(name);
        }
        // Non-literal token types fall through the switch to the checks below.
        if (token.getType() == TokenType.SEPARATOR && token.getValue().equals("^")) {
            Node expr = this.parseKeywordExpression();
            return new ReturnNode(expr);
        }
        if (token.getType() == TokenType.SEPARATOR && token.getValue().equals("(")) {
            Node expr = this.parseKeywordExpression();
            this.parseToken(")", TokenType.SEPARATOR);
            return expr;
        }
        if (token.getType() == TokenType.SEPARATOR && token.getValue().equals("[")) {
            // Block: optional ':arg' names, optional '|locals|', body, ']'.
            String[] arguments = this.parseArgumentNames();
            String[] locals = this.parseLocalNames();
            Node expr = this.parseExpressionNode();
            this.parseToken("]", TokenType.SEPARATOR);
            return new BlockNode(arguments, locals, expr);
        }
        if (token.getType() == TokenType.SEPARATOR && token.getValue().equals("#(")) {
            Node[] elements = this.parseLiteralArrayElements();
            return new LiteralArrayNode(elements);
        }
        if (token.getType() == TokenType.SEPARATOR && token.getValue().equals("{")) {
            Node[] expressions = this.parseExpressionArray();
            this.parseToken("}", TokenType.SEPARATOR);
            return new ExpressionArrayNode(expressions);
        }
        if (token.getType() == TokenType.BINSELECTOR && token.getValue().equals("<")) {
            // '<primitive: n>' declaration.
            this.parseToken("primitive:", TokenType.KEYSELECTOR);
            int value = this.parseInteger();
            this.parseToken(">", TokenType.BINSELECTOR);
            return new PrimitiveNode(value);
        }
        throw new ParserException("Unexpected '" + token.getValue() + "'");
    }

    /**
     * Parses the expressions inside '{...}', flattening a composite
     * expression into its component expressions.
     */
    private Node[] parseExpressionArray() throws ParserException, IOException, LexerException {
        Node node = this.parseExpressionNode();
        if (node instanceof CompositeExpressionNode)
            return ((CompositeExpressionNode)node).getExpressions();
        return new Node[] { node };
    }

    /**
     * Parses the literal elements inside '#(...)' up to the closing ')'.
     * Only character, integer, string and symbol literals are allowed.
     */
    private Node[] parseLiteralArrayElements() throws IOException, LexerException, ParserException
    {
        List<Node> nodes = new ArrayList<Node>();
        Token token;
        for (token = this.nextToken(); token != null; token = this.nextToken()) {
            if (token.getType() == TokenType.SEPARATOR && token.getValue().equals(")"))
                break;
            switch (token.getType()) {
            case CHARACTER:
                nodes.add(new CharacterNode(token.getValue().charAt(0)));
                break;
            case INTEGER:
                nodes.add(new IntegerNode(Integer.parseInt(token.getValue())));
                break;
            case STRING:
                nodes.add(new StringNode(token.getValue()));
                break;
            case SYMBOL:
                nodes.add(new SymbolNode(token.getValue()));
                break;
            default:
                throw new ParserException("Unexpected '" + token.getValue() + "'");
            }
        }
        Node[] elements = new Node[nodes.size()];
        return nodes.toArray(elements);
    }

    /**
     * Parses an optional '|a b c|' temporary-variable declaration.
     *
     * @return the declared names, or null if no declaration is present
     * @throws ParserException if the declaration is not closed with '|'
     */
    private String[] parseLocalNames() throws IOException, LexerException, ParserException {
        Token token = this.nextToken();
        if (!(token != null && token.getType()==TokenType.SEPARATOR && token.getValue().equals("|"))) {
            // No declaration: un-read the token and report "no locals".
            this.pushToken(token);
            return null;
        }
        List<String> names = new ArrayList<String>();
        for (token = this.nextToken(); token != null && token.getType()==TokenType.ID; token = this.nextToken())
            names.add(token.getValue());
        if (token == null || token.getType() != TokenType.SEPARATOR || !token.getValue().equals("|"))
            throw new ParserException("Expected '|'");
        String[] localnames = new String[names.size()];
        return names.toArray(localnames);
    }

    /**
     * Parses optional block argument names ':a :b' followed by the required
     * '|' separator, as in '[:a :b | body]'.
     *
     * @return the argument names, or null if the block declares none
     * @throws ParserException if the argument list is not followed by '|'
     */
    private String[] parseArgumentNames() throws IOException, LexerException, ParserException {
        Token token = this.nextToken();
        if (!(token != null && token.getType()==TokenType.SEPARATOR && token.getValue().equals(":"))) {
            // No arguments: un-read the token and report "no args".
            this.pushToken(token);
            return null;
        }
        List<String> names = new ArrayList<String>();
        names.add(this.parseId());
        for (token = this.nextToken(); token != null && token.getType()==TokenType.SEPARATOR && token.getValue().equals(":"); token = this.nextToken())
            names.add(this.parseId());
        if (token == null || token.getType() != TokenType.SEPARATOR || !token.getValue().equals("|"))
            throw new ParserException("Expected '|'");
        String[] argnames = new String[names.size()];
        return names.toArray(argnames);
    }

    /** Consumes and returns an identifier token's text, or fails. */
    private String parseId() throws IOException, LexerException, ParserException {
        Token token = this.nextToken();
        if (token == null || token.getType() != TokenType.ID)
            throw new ParserException("Expected name");
        return token.getValue();
    }

    /** Consumes and returns an integer literal's value, or fails. */
    private int parseInteger() throws IOException, LexerException, ParserException {
        Token token = this.nextToken();
        if (token == null || token.getType() != TokenType.INTEGER)
            throw new ParserException("Expected integer");
        return Integer.parseInt(token.getValue());
    }

    /** Consumes one token and requires it to match the given value and type. */
    private void parseToken(String value, TokenType type) throws IOException, LexerException, ParserException {
        Token token = this.nextToken();
        if (token == null || token.getType() != type || !token.getValue().equals(value))
            throw new ParserException("Expected '" + value + "'");
    }

    /** Returns the pushed-back token if any, otherwise reads from the lexer. */
    private Token nextToken() throws IOException, LexerException {
        if (next != null) {
            Token token = next;
            next = null;
            return token;
        }
        return this.lexer.nextToken();
    }

    /** Un-reads a single token; the next nextToken() call will return it. */
    private void pushToken(Token token) {
        this.next = token;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager;
import java.io.IOException;
import java.net.ConnectException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.VersionUtil;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.server.api.ResourceManagerConstants;
import org.apache.hadoop.yarn.server.api.ResourceTracker;
import org.apache.hadoop.yarn.server.api.ServerRMProxy;
import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse;
import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.apache.hadoop.yarn.server.api.records.NodeAction;
import org.apache.hadoop.yarn.server.api.records.NodeHealthStatus;
import org.apache.hadoop.yarn.server.api.records.NodeStatus;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager.NMContext;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.util.YarnVersionInfo;
import com.google.common.annotations.VisibleForTesting;
public class NodeStatusUpdaterImpl extends AbstractService implements
NodeStatusUpdater {
public static final String YARN_NODEMANAGER_DURATION_TO_TRACK_STOPPED_CONTAINERS =
YarnConfiguration.NM_PREFIX + "duration-to-track-stopped-containers";
private static final Log LOG = LogFactory.getLog(NodeStatusUpdaterImpl.class);
private final Object heartbeatMonitor = new Object();
private final Context context;
private final Dispatcher dispatcher;
private NodeId nodeId;
private long nextHeartBeatInterval;
private ResourceTracker resourceTracker;
private Resource totalResource;
private int httpPort;
private String nodeManagerVersionId;
private String minimumResourceManagerVersion;
private volatile boolean isStopped;
private boolean tokenKeepAliveEnabled;
private long tokenRemovalDelayMs;
/** Keeps track of when the next keep alive request should be sent for an app*/
private Map<ApplicationId, Long> appTokenKeepAliveMap =
new HashMap<ApplicationId, Long>();
private Random keepAliveDelayRandom = new Random();
// It will be used to track recently stopped containers on node manager, this
// is to avoid the misleading no-such-container exception messages on NM, when
// the AM finishes it informs the RM to stop the may-be-already-completed
// containers.
private final Map<ContainerId, Long> recentlyStoppedContainers;
// Duration for which to track recently stopped container.
private long durationToTrackStoppedContainers;
private final NodeHealthCheckerService healthChecker;
private final NodeManagerMetrics metrics;
private Runnable statusUpdaterRunnable;
private Thread statusUpdater;
private long rmIdentifier = ResourceManagerConstants.RM_INVALID_IDENTIFIER;
Set<ContainerId> pendingContainersToRemove = new HashSet<ContainerId>();
public NodeStatusUpdaterImpl(Context context, Dispatcher dispatcher,
NodeHealthCheckerService healthChecker, NodeManagerMetrics metrics) {
super(NodeStatusUpdaterImpl.class.getName());
this.healthChecker = healthChecker;
this.context = context;
this.dispatcher = dispatcher;
this.metrics = metrics;
this.recentlyStoppedContainers =
new LinkedHashMap<ContainerId, Long>();
}
  /**
   * Reads the node's advertised resource capacity (memory, vcores,
   * accelerators) and the token / stopped-container tracking settings from
   * configuration.
   *
   * @throws YarnException if the stopped-container tracking duration is
   *         configured with a negative value
   */
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    // Physical memory (MB) advertised to the RM.
    int memoryMb =
        conf.getInt(
        YarnConfiguration.NM_PMEM_MB, YarnConfiguration.DEFAULT_NM_PMEM_MB);
    float vMemToPMem =
        conf.getFloat(
        YarnConfiguration.NM_VMEM_PMEM_RATIO,
        YarnConfiguration.DEFAULT_NM_VMEM_PMEM_RATIO);
    // Virtual memory is derived from physical memory via the configured ratio.
    int virtualMemoryMb = (int)Math.ceil(memoryMb * vMemToPMem);
    int virtualCores =
        conf.getInt(
        YarnConfiguration.NM_VCORES, YarnConfiguration.DEFAULT_NM_VCORES);
    // NOTE(review): NM_ACCS looks like a non-standard accelerator-count
    // setting added to this YARN fork - confirm its semantics.
    int accs =
        conf.getInt(
        YarnConfiguration.NM_ACCS, YarnConfiguration.DEFAULT_NM_ACCS);
    this.totalResource = Resource.newInstance(memoryMb, virtualCores, accs);
    metrics.addResource(totalResource);
    this.tokenKeepAliveEnabled = isTokenKeepAliveEnabled(conf);
    this.tokenRemovalDelayMs =
        conf.getInt(YarnConfiguration.RM_NM_EXPIRY_INTERVAL_MS,
            YarnConfiguration.DEFAULT_RM_NM_EXPIRY_INTERVAL_MS);
    this.minimumResourceManagerVersion = conf.get(
        YarnConfiguration.NM_RESOURCEMANAGER_MINIMUM_VERSION,
        YarnConfiguration.DEFAULT_NM_RESOURCEMANAGER_MINIMUM_VERSION);
    // Default duration to track stopped containers on nodemanager is 10Min.
    // This should not be assigned very large value as it will remember all the
    // containers stopped during that time.
    durationToTrackStoppedContainers =
        conf.getLong(YARN_NODEMANAGER_DURATION_TO_TRACK_STOPPED_CONTAINERS,
          600000);
    if (durationToTrackStoppedContainers < 0) {
      String message = "Invalid configuration for "
        + YARN_NODEMANAGER_DURATION_TO_TRACK_STOPPED_CONTAINERS + " default "
          + "value is 10Min(600000).";
      LOG.error(message);
      throw new YarnException(message);
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug(YARN_NODEMANAGER_DURATION_TO_TRACK_STOPPED_CONTAINERS + " :"
        + durationToTrackStoppedContainers);
    }
    super.serviceInit(conf);
    // NOTE(review): nodeId is not assigned until serviceStart(), so this
    // logs "null" for the node id - confirm whether that is intended.
    LOG.info("Initialized nodemanager for " + nodeId + ":" +
        " physical-memory=" + memoryMb + " virtual-memory=" + virtualMemoryMb +
        " virtual-cores=" + virtualCores +
        " number-accelerator=" + accs);
  }
@Override
protected void serviceStart() throws Exception {
// NodeManager is the last service to start, so NodeId is available.
this.nodeId = this.context.getNodeId();
this.httpPort = this.context.getHttpPort();
this.nodeManagerVersionId = YarnVersionInfo.getVersion();
try {
// Registration has to be in start so that ContainerManager can get the
// perNM tokens needed to authenticate ContainerTokens.
this.resourceTracker = getRMClient();
registerWithRM();
super.serviceStart();
startStatusUpdater();
} catch (Exception e) {
String errorMessage = "Unexpected error starting NodeStatusUpdater";
LOG.error(errorMessage, e);
throw new YarnRuntimeException(e);
}
}
@Override
protected void serviceStop() throws Exception {
// Interrupt the updater.
this.isStopped = true;
stopRMProxy();
super.serviceStop();
}
protected void rebootNodeStatusUpdaterAndRegisterWithRM() {
// Interrupt the updater.
this.isStopped = true;
try {
statusUpdater.join();
registerWithRM();
statusUpdater = new Thread(statusUpdaterRunnable, "Node Status Updater");
this.isStopped = false;
statusUpdater.start();
LOG.info("NodeStatusUpdater thread is reRegistered and restarted");
} catch (Exception e) {
String errorMessage = "Unexpected error rebooting NodeStatusUpdater";
LOG.error(errorMessage, e);
throw new YarnRuntimeException(e);
}
}
@VisibleForTesting
protected void stopRMProxy() {
if(this.resourceTracker != null) {
RPC.stopProxy(this.resourceTracker);
}
}
@Private
protected boolean isTokenKeepAliveEnabled(Configuration conf) {
return conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)
&& UserGroupInformation.isSecurityEnabled();
}
@VisibleForTesting
protected ResourceTracker getRMClient() throws IOException {
Configuration conf = getConfig();
return ServerRMProxy.createRMProxy(conf, ResourceTracker.class);
}
  /**
   * Registers this node with the ResourceManager: sends the node's total
   * resources plus any recovered container statuses, handles a SHUTDOWN
   * response, enforces the minimum acceptable RM version, and stores the
   * container/NM token master keys returned by the RM.
   *
   * @throws YarnRuntimeException if the RM orders a shutdown or its version
   *         is missing/too old
   */
  @VisibleForTesting
  protected void registerWithRM()
      throws YarnException, IOException {
    List<NMContainerStatus> containerReports = getNMContainerStatuses();
    RegisterNodeManagerRequest request =
        RegisterNodeManagerRequest.newInstance(nodeId, httpPort, totalResource,
          nodeManagerVersionId, containerReports, getRunningApplications());
    if (containerReports != null) {
      LOG.info("Registering with RM using containers :" + containerReports);
    }
    RegisterNodeManagerResponse regNMResponse =
        resourceTracker.registerNodeManager(request);
    // Remember the RM identity so stale heartbeats can be detected.
    this.rmIdentifier = regNMResponse.getRMIdentifier();
    // if the Resourcemanager instructs NM to shutdown.
    if (NodeAction.SHUTDOWN.equals(regNMResponse.getNodeAction())) {
      String message =
          "Message from ResourceManager: "
              + regNMResponse.getDiagnosticsMessage();
      throw new YarnRuntimeException(
        "Recieved SHUTDOWN signal from Resourcemanager ,Registration of NodeManager failed, "
            + message);
    }
    // if ResourceManager version is too old then shutdown.
    // "EqualToNM" means the RM must be at least this NM's own version.
    if (!minimumResourceManagerVersion.equals("NONE")){
      if (minimumResourceManagerVersion.equals("EqualToNM")){
        minimumResourceManagerVersion = nodeManagerVersionId;
      }
      String rmVersion = regNMResponse.getRMVersion();
      if (rmVersion == null) {
        String message = "The Resource Manager's did not return a version. "
            + "Valid version cannot be checked.";
        throw new YarnRuntimeException("Shutting down the Node Manager. "
            + message);
      }
      if (VersionUtil.compareVersions(rmVersion,minimumResourceManagerVersion) < 0) {
        String message = "The Resource Manager's version ("
            + rmVersion +") is less than the minimum "
            + "allowed version " + minimumResourceManagerVersion;
        throw new YarnRuntimeException("Shutting down the Node Manager on RM "
            + "version error, " + message);
      }
    }
    MasterKey masterKey = regNMResponse.getContainerTokenMasterKey();
    // do this now so that its set before we start heartbeating to RM
    // It is expected that status updater is started by this point and
    // RM gives the shared secret in registration during
    // StatusUpdater#start().
    if (masterKey != null) {
      this.context.getContainerTokenSecretManager().setMasterKey(masterKey);
    }
    masterKey = regNMResponse.getNMTokenMasterKey();
    if (masterKey != null) {
      this.context.getNMTokenSecretManager().setMasterKey(masterKey);
    }
    LOG.info("Registered with ResourceManager as " + this.nodeId
        + " with total resource of " + this.totalResource);
    LOG.info("Notifying ContainerManager to unblock new container-requests");
    ((ContainerManagerImpl) this.context.getContainerManager())
      .setBlockNewContainerRequests(false);
  }
private List<ApplicationId> createKeepAliveApplicationList() {
if (!tokenKeepAliveEnabled) {
return Collections.emptyList();
}
List<ApplicationId> appList = new ArrayList<ApplicationId>();
for (Iterator<Entry<ApplicationId, Long>> i =
this.appTokenKeepAliveMap.entrySet().iterator(); i.hasNext();) {
Entry<ApplicationId, Long> e = i.next();
ApplicationId appId = e.getKey();
Long nextKeepAlive = e.getValue();
if (!this.context.getApplications().containsKey(appId)) {
// Remove if the application has finished.
i.remove();
} else if (System.currentTimeMillis() > nextKeepAlive) {
// KeepAlive list for the next hearbeat.
appList.add(appId);
trackAppForKeepAlive(appId);
}
}
return appList;
}
private NodeStatus getNodeStatus(int responseId) throws IOException {
NodeHealthStatus nodeHealthStatus = this.context.getNodeHealthStatus();
nodeHealthStatus.setHealthReport(healthChecker.getHealthReport());
nodeHealthStatus.setIsNodeHealthy(healthChecker.isHealthy());
nodeHealthStatus.setLastHealthReportTime(healthChecker
.getLastHealthReportTime());
if (LOG.isDebugEnabled()) {
LOG.debug("Node's health-status : " + nodeHealthStatus.getIsNodeHealthy()
+ ", " + nodeHealthStatus.getHealthReport());
}
List<ContainerStatus> containersStatuses = getContainerStatuses();
NodeStatus nodeStatus =
NodeStatus.newInstance(nodeId, responseId, containersStatuses,
createKeepAliveApplicationList(), nodeHealthStatus);
return nodeStatus;
}
  // Iterate through the NMContext and clone and get all the containers'
  // statuses. If it's a completed container, add into the
  // recentlyStoppedContainers collections.
  /**
   * Builds the container-status list sent with each heartbeat. Completed
   * containers of finishing applications are dropped from the context
   * immediately; other completed containers are remembered in the
   * recently-stopped cache for durationToTrackStoppedContainers ms.
   */
  @VisibleForTesting
  protected List<ContainerStatus> getContainerStatuses() throws IOException {
    List<ContainerStatus> containerStatuses = new ArrayList<ContainerStatus>();
    for (Container container : this.context.getContainers().values()) {
      ContainerId containerId = container.getContainerId();
      ApplicationId applicationId = container.getContainerId()
          .getApplicationAttemptId().getApplicationId();
      org.apache.hadoop.yarn.api.records.ContainerStatus containerStatus =
          container.cloneAndGetContainerStatus();
      containerStatuses.add(containerStatus);
      if (containerStatus.getState() == ContainerState.COMPLETE) {
        if (isApplicationStopped(applicationId)) {
          if (LOG.isDebugEnabled()) {
            LOG.debug(applicationId + " is completing, " + " remove "
                + containerId + " from NM context.");
          }
          // NOTE(review): removing while iterating values() presumes the
          // containers map is a ConcurrentMap - confirm in NMContext.
          context.getContainers().remove(containerId);
        } else {
          // Adding to finished containers cache. Cache will keep it around at
          // least for #durationToTrackStoppedContainers duration. In the
          // subsequent call to stop container it will get removed from cache.
          addCompletedContainer(container.getContainerId());
        }
      }
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Sending out " + containerStatuses.size()
          + " container statuses: " + containerStatuses);
    }
    return containerStatuses;
  }
private List<ApplicationId> getRunningApplications() {
List<ApplicationId> runningApplications = new ArrayList<ApplicationId>();
runningApplications.addAll(this.context.getApplications().keySet());
return runningApplications;
}
  // These NMContainerStatus are sent on NM registration and used by YARN only.
  /**
   * Builds the container-recovery report sent once at registration.
   * Containers whose application is no longer known are dropped from the
   * context; completed containers are added to the recently-stopped cache.
   */
  private List<NMContainerStatus> getNMContainerStatuses() throws IOException {
    List<NMContainerStatus> containerStatuses =
        new ArrayList<NMContainerStatus>();
    for (Container container : this.context.getContainers().values()) {
      ContainerId containerId = container.getContainerId();
      ApplicationId applicationId = container.getContainerId()
          .getApplicationAttemptId().getApplicationId();
      if (!this.context.getApplications().containsKey(applicationId)) {
        // Orphaned container: its application is gone, so drop it.
        // NOTE(review): removal while iterating values() presumes the
        // containers map is a ConcurrentMap - confirm in NMContext.
        context.getContainers().remove(containerId);
        continue;
      }
      NMContainerStatus status =
          container.getNMContainerStatus();
      containerStatuses.add(status);
      if (status.getContainerState() == ContainerState.COMPLETE) {
        // Adding to finished containers cache. Cache will keep it around at
        // least for #durationToTrackStoppedContainers duration. In the
        // subsequent call to stop container it will get removed from cache.
        addCompletedContainer(container.getContainerId());
      }
    }
    LOG.info("Sending out " + containerStatuses.size()
        + " NM container statuses: " + containerStatuses);
    return containerStatuses;
  }
private boolean isApplicationStopped(ApplicationId applicationId) {
if (!this.context.getApplications().containsKey(applicationId)) {
return true;
}
ApplicationState applicationState = this.context.getApplications().get(
applicationId).getApplicationState();
if (applicationState == ApplicationState.FINISHING_CONTAINERS_WAIT
|| applicationState == ApplicationState.APPLICATION_RESOURCES_CLEANINGUP
|| applicationState == ApplicationState.FINISHED) {
return true;
} else {
return false;
}
}
@Override
public void addCompletedContainer(ContainerId containerId) {
synchronized (recentlyStoppedContainers) {
removeVeryOldStoppedContainersFromCache();
if (!recentlyStoppedContainers.containsKey(containerId)) {
recentlyStoppedContainers.put(containerId,
System.currentTimeMillis() + durationToTrackStoppedContainers);
}
}
}
/**
 * Removes from the NM context every container acked by the RM that has
 * reached the DONE state. Containers not yet DONE stay in
 * {@code pendingContainersToRemove} and are retried on later calls.
 *
 * @param containerIds containers the RM has asked the NM to forget
 * @throws IOException declared for interface compatibility
 */
@VisibleForTesting
@Private
public void removeOrTrackCompletedContainersFromContext(
    List<ContainerId> containerIds) throws IOException {
  pendingContainersToRemove.addAll(containerIds);
  Set<ContainerId> actuallyRemoved = new HashSet<ContainerId>();
  for (Iterator<ContainerId> it = pendingContainersToRemove.iterator();
      it.hasNext();) {
    ContainerId cid = it.next();
    Container nmContainer = context.getContainers().get(cid);
    // Remove the container only once it is at DONE state; otherwise keep
    // it pending for a later heartbeat.
    boolean done = nmContainer != null && nmContainer.getContainerState().equals(
        org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState.DONE);
    if (done) {
      context.getContainers().remove(cid);
      actuallyRemoved.add(cid);
      it.remove();
    }
  }
  if (!actuallyRemoved.isEmpty()) {
    LOG.info("Removed completed containers from NM context: "
        + actuallyRemoved);
  }
}
/**
 * Starts keep-alive tracking for each given application, if token
 * keep-alive is enabled. A null or empty list is a no-op.
 */
private void trackAppsForKeepAlive(List<ApplicationId> appIds) {
  if (!tokenKeepAliveEnabled || appIds == null || appIds.isEmpty()) {
    return;
  }
  for (ApplicationId appId : appIds) {
    trackAppForKeepAlive(appId);
  }
}
// Schedules the next keep-alive time for this app's tokens with jitter:
// 0.7 * delay plus 0.2 * delay * rand(0..99) / 100, i.e. roughly
// [0.7, 0.9) of the token removal delay, so keep-alives for different
// apps do not cluster.
private void trackAppForKeepAlive(ApplicationId appId) {
  // Next keepAlive request for app between 0.7 & 0.9 of when the token will
  // likely expire.
  long nextTime = System.currentTimeMillis()
      + (long) (0.7 * tokenRemovalDelayMs + (0.2 * tokenRemovalDelayMs
          * keepAliveDelayRandom.nextInt(100))/100);
  appTokenKeepAliveMap.put(appId, nextTime);
}
// Wakes the status-updater thread, which waits on heartbeatMonitor between
// heartbeats (see the finally block in startStatusUpdater), so the next
// heartbeat is sent immediately instead of after the full interval.
@Override
public void sendOutofBandHeartBeat() {
  synchronized (this.heartbeatMonitor) {
    this.heartbeatMonitor.notify();
  }
}
// Returns true while containerId is still tracked in the recently-stopped
// cache (entries are added by addCompletedContainer and evicted by
// removeVeryOldStoppedContainersFromCache).
public boolean isContainerRecentlyStopped(ContainerId containerId) {
  synchronized (recentlyStoppedContainers) {
    return recentlyStoppedContainers.containsKey(containerId);
  }
}
// Drops every entry from the recently-stopped container cache.
@Override
public void clearFinishedContainersFromCache() {
  synchronized (recentlyStoppedContainers) {
    recentlyStoppedContainers.clear();
  }
}
// Evicts entries whose tracking deadline has passed. Relies on
// recentlyStoppedContainers being iteration-ordered oldest-first: the loop
// stops at the first unexpired entry (the `break` below).
@Private
@VisibleForTesting
public void removeVeryOldStoppedContainersFromCache() {
  synchronized (recentlyStoppedContainers) {
    long currentTime = System.currentTimeMillis();
    Iterator<ContainerId> i =
        recentlyStoppedContainers.keySet().iterator();
    while (i.hasNext()) {
      ContainerId cid = i.next();
      if (recentlyStoppedContainers.get(cid) < currentTime) {
        // Even when expired, keep the entry while the container is still
        // present in the NM context; it is only dropped (and purged from
        // the state store) once the context no longer knows it.
        if (!context.getContainers().containsKey(cid)) {
          i.remove();
          try {
            context.getNMStateStore().removeContainer(cid);
          } catch (IOException e) {
            LOG.error("Unable to remove container " + cid + " in store", e);
          }
        }
      } else {
        break;
      }
    }
  }
}
// Identifier of the RM this NM registered with; reset to
// ResourceManagerConstants.RM_INVALID_IDENTIFIER while a resync is in
// progress (see the RESYNC branch of the heartbeat loop).
@Override
public long getRMIdentifier() {
  return this.rmIdentifier;
}
/**
 * Deserializes the per-application credential blobs sent by the RM into
 * {@link Credentials} objects.
 *
 * @param systemCredentials serialized token storage, keyed by app id
 * @return a new map with the deserialized credentials per app
 * @throws IOException if a credential blob cannot be read
 */
private static Map<ApplicationId, Credentials> parseCredentials(
    Map<ApplicationId, ByteBuffer> systemCredentials) throws IOException {
  Map<ApplicationId, Credentials> parsed =
      new HashMap<ApplicationId, Credentials>();
  for (Map.Entry<ApplicationId, ByteBuffer> entry : systemCredentials.entrySet()) {
    ByteBuffer raw = entry.getValue();
    // Rewind so the buffer can be re-read even if previously consumed.
    raw.rewind();
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(raw);
    Credentials credentials = new Credentials();
    credentials.readTokenStorageStream(in);
    parsed.put(entry.getKey(), credentials);
  }
  if (LOG.isDebugEnabled()) {
    for (Map.Entry<ApplicationId, Credentials> entry : parsed.entrySet()) {
      LOG.debug("Retrieved credentials form RM for " + entry.getKey() + ": "
          + entry.getValue().getAllTokens());
    }
  }
  return parsed;
}
// Builds and starts the "Node Status Updater" thread: a loop that
// heartbeats the RM until the NM is stopped or the RM orders SHUTDOWN or
// RESYNC, dispatching cleanup events and key roll-overs from each response.
protected void startStatusUpdater() {
  statusUpdaterRunnable = new Runnable() {
    @Override
    @SuppressWarnings("unchecked")
    public void run() {
      int lastHeartBeatID = 0;
      while (!isStopped) {
        // Send heartbeat
        try {
          NodeHeartbeatResponse response = null;
          NodeStatus nodeStatus = getNodeStatus(lastHeartBeatID);
          NodeHeartbeatRequest request =
              NodeHeartbeatRequest.newInstance(nodeStatus,
                  NodeStatusUpdaterImpl.this.context
                      .getContainerTokenSecretManager().getCurrentKey(),
                  NodeStatusUpdaterImpl.this.context.getNMTokenSecretManager()
                      .getCurrentKey());
          response = resourceTracker.nodeHeartbeat(request);
          //get next heartbeat interval from response
          nextHeartBeatInterval = response.getNextHeartBeatInterval();
          updateMasterKeys(response);
          if (response.getNodeAction() == NodeAction.SHUTDOWN) {
            LOG
                .warn("Recieved SHUTDOWN signal from Resourcemanager as part of heartbeat,"
                    + " hence shutting down.");
            LOG.warn("Message from ResourceManager: "
                + response.getDiagnosticsMessage());
            context.setDecommissioned(true);
            dispatcher.getEventHandler().handle(
                new NodeManagerEvent(NodeManagerEventType.SHUTDOWN));
            break;
          }
          if (response.getNodeAction() == NodeAction.RESYNC) {
            LOG.warn("Node is out of sync with ResourceManager,"
                + " hence resyncing.");
            LOG.warn("Message from ResourceManager: "
                + response.getDiagnosticsMessage());
            // Invalidate the RMIdentifier while resync
            NodeStatusUpdaterImpl.this.rmIdentifier =
                ResourceManagerConstants.RM_INVALID_IDENTIFIER;
            dispatcher.getEventHandler().handle(
                new NodeManagerEvent(NodeManagerEventType.RESYNC));
            break;
          }
          // Explicitly put this method after checking the resync response. We
          // don't want to remove the completed containers before resync
          // because these completed containers will be reported back to RM
          // when NM re-registers with RM.
          // Only remove the cleanedup containers that are acked
          removeOrTrackCompletedContainersFromContext(response
              .getContainersToBeRemovedFromNM());
          lastHeartBeatID = response.getResponseId();
          List<ContainerId> containersToCleanup = response
              .getContainersToCleanup();
          if (!containersToCleanup.isEmpty()) {
            dispatcher.getEventHandler().handle(
                new CMgrCompletedContainersEvent(containersToCleanup,
                    CMgrCompletedContainersEvent.Reason.BY_RESOURCEMANAGER));
          }
          List<ApplicationId> appsToCleanup =
              response.getApplicationsToCleanup();
          //Only start tracking for keepAlive on FINISH_APP
          trackAppsForKeepAlive(appsToCleanup);
          if (!appsToCleanup.isEmpty()) {
            dispatcher.getEventHandler().handle(
                new CMgrCompletedAppsEvent(appsToCleanup,
                    CMgrCompletedAppsEvent.Reason.BY_RESOURCEMANAGER));
          }
          Map<ApplicationId, ByteBuffer> systemCredentials =
              response.getSystemCredentialsForApps();
          if (systemCredentials != null && !systemCredentials.isEmpty()) {
            ((NMContext) context)
                .setSystemCrendentialsForApps(parseCredentials(systemCredentials));
          }
        } catch (ConnectException e) {
          //catch and throw the exception if tried MAX wait time to connect RM
          dispatcher.getEventHandler().handle(
              new NodeManagerEvent(NodeManagerEventType.SHUTDOWN));
          throw new YarnRuntimeException(e);
        } catch (Throwable e) {
          // TODO Better error handling. Thread can die with the rest of the
          // NM still running.
          LOG.error("Caught exception in status-updater", e);
        } finally {
          // Sleep until the next heartbeat or until woken out-of-band via
          // sendOutofBandHeartBeat(). A non-positive interval from the RM
          // falls back to the configured default.
          synchronized (heartbeatMonitor) {
            nextHeartBeatInterval = nextHeartBeatInterval <= 0 ?
                YarnConfiguration.DEFAULT_RM_NM_HEARTBEAT_INTERVAL_MS :
                nextHeartBeatInterval;
            try {
              heartbeatMonitor.wait(nextHeartBeatInterval);
            } catch (InterruptedException e) {
              // Do Nothing
            }
          }
        }
      }
    }

    // Applies rolled-over container-token / NM-token master keys from the
    // heartbeat response.
    private void updateMasterKeys(NodeHeartbeatResponse response) {
      // See if the master-key has rolled over
      MasterKey updatedMasterKey = response.getContainerTokenMasterKey();
      if (updatedMasterKey != null) {
        // Will be non-null only on roll-over on RM side
        context.getContainerTokenSecretManager().setMasterKey(updatedMasterKey);
      }
      updatedMasterKey = response.getNMTokenMasterKey();
      if (updatedMasterKey != null) {
        context.getNMTokenSecretManager().setMasterKey(updatedMasterKey);
      }
    }
  };
  statusUpdater =
      new Thread(statusUpdaterRunnable, "Node Status Updater");
  statusUpdater.start();
}
}
| |
/*
* Copyright 2017 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.plugin.access.scm;
import com.thoughtworks.go.plugin.access.common.settings.PluginSettingsConfiguration;
import com.thoughtworks.go.plugin.access.common.settings.PluginSettingsConstants;
import com.thoughtworks.go.plugin.access.common.settings.PluginSettingsJsonMessageHandler1_0;
import com.thoughtworks.go.plugin.access.scm.material.MaterialPollResult;
import com.thoughtworks.go.plugin.access.scm.revision.SCMRevision;
import com.thoughtworks.go.plugin.api.request.GoPluginApiRequest;
import com.thoughtworks.go.plugin.api.response.DefaultGoPluginApiResponse;
import com.thoughtworks.go.plugin.api.response.Result;
import com.thoughtworks.go.plugin.api.response.validation.ValidationResult;
import com.thoughtworks.go.plugin.infra.PluginManager;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import java.util.HashMap;
import java.util.Map;
import static java.util.Arrays.asList;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
/**
 * Unit tests for {@code SCMExtension}: verifies that each extension call
 * builds the expected {@code GoPluginApiRequest} (extension name, version,
 * request name, body) and routes the plugin's response through the
 * version-specific JSON message handlers.
 */
public class SCMExtensionTest {
    public static final String PLUGIN_ID = "plugin-id";

    @Mock
    private PluginManager pluginManager;
    @Mock
    private PluginSettingsJsonMessageHandler1_0 pluginSettingsJSONMessageHandler;
    @Mock
    private JsonMessageHandler1_0 jsonMessageHandler;
    private SCMExtension scmExtension;
    private String requestBody = "expected-request";
    private String responseBody = "expected-response";
    private PluginSettingsConfiguration pluginSettingsConfiguration;
    private SCMPropertyConfiguration scmPropertyConfiguration;
    private Map<String, String> materialData;
    // Captures the request submitted to the plugin manager so each test can
    // assert on its contents via assertRequest().
    private ArgumentCaptor<GoPluginApiRequest> requestArgumentCaptor;

    @Before
    public void setUp() throws Exception {
        initMocks(this);
        scmExtension = new SCMExtension(pluginManager);
        // Register the mocked 1.0 handlers so every call is routed to them.
        scmExtension.getPluginSettingsMessageHandlerMap().put("1.0", pluginSettingsJSONMessageHandler);
        scmExtension.getMessageHandlerMap().put("1.0", jsonMessageHandler);
        pluginSettingsConfiguration = new PluginSettingsConfiguration();
        scmPropertyConfiguration = new SCMPropertyConfiguration();
        materialData = new HashMap<>();
        requestArgumentCaptor = ArgumentCaptor.forClass(GoPluginApiRequest.class);
        when(pluginManager.resolveExtensionVersion(PLUGIN_ID, asList("1.0"))).thenReturn("1.0");
        when(pluginManager.isPluginOfType(SCMExtension.EXTENSION_NAME, PLUGIN_ID)).thenReturn(true);
        when(pluginManager.submitTo(eq(PLUGIN_ID), requestArgumentCaptor.capture())).thenReturn(DefaultGoPluginApiResponse.success(responseBody));
    }

    @Test
    public void shouldTalkToPluginToGetPluginSettingsConfiguration() throws Exception {
        PluginSettingsConfiguration deserializedResponse = new PluginSettingsConfiguration();
        when(pluginSettingsJSONMessageHandler.responseMessageForPluginSettingsConfiguration(responseBody)).thenReturn(deserializedResponse);

        PluginSettingsConfiguration response = scmExtension.getPluginSettingsConfiguration(PLUGIN_ID);

        assertRequest(requestArgumentCaptor.getValue(), SCMExtension.EXTENSION_NAME, "1.0", PluginSettingsConstants.REQUEST_PLUGIN_SETTINGS_CONFIGURATION, null);
        verify(pluginSettingsJSONMessageHandler).responseMessageForPluginSettingsConfiguration(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldTalkToPluginToGetPluginSettingsView() throws Exception {
        String deserializedResponse = "";
        when(pluginSettingsJSONMessageHandler.responseMessageForPluginSettingsView(responseBody)).thenReturn(deserializedResponse);

        String response = scmExtension.getPluginSettingsView(PLUGIN_ID);

        assertRequest(requestArgumentCaptor.getValue(), SCMExtension.EXTENSION_NAME, "1.0", PluginSettingsConstants.REQUEST_PLUGIN_SETTINGS_VIEW, null);
        verify(pluginSettingsJSONMessageHandler).responseMessageForPluginSettingsView(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldTalkToPluginToGetValidatePluginSettings() throws Exception {
        when(pluginSettingsJSONMessageHandler.requestMessageForPluginSettingsValidation(pluginSettingsConfiguration)).thenReturn(requestBody);
        ValidationResult deserializedResponse = new ValidationResult();
        when(pluginSettingsJSONMessageHandler.responseMessageForPluginSettingsValidation(responseBody)).thenReturn(deserializedResponse);

        ValidationResult response = scmExtension.validatePluginSettings(PLUGIN_ID, pluginSettingsConfiguration);

        assertRequest(requestArgumentCaptor.getValue(), SCMExtension.EXTENSION_NAME, "1.0", PluginSettingsConstants.REQUEST_VALIDATE_PLUGIN_SETTINGS, requestBody);
        verify(pluginSettingsJSONMessageHandler).responseMessageForPluginSettingsValidation(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldTalkToPluginToGetSCMConfiguration() throws Exception {
        SCMPropertyConfiguration deserializedResponse = new SCMPropertyConfiguration();
        when(jsonMessageHandler.responseMessageForSCMConfiguration(responseBody)).thenReturn(deserializedResponse);

        SCMPropertyConfiguration response = scmExtension.getSCMConfiguration(PLUGIN_ID);

        assertRequest(requestArgumentCaptor.getValue(), SCMExtension.EXTENSION_NAME, "1.0", SCMExtension.REQUEST_SCM_CONFIGURATION, null);
        verify(jsonMessageHandler).responseMessageForSCMConfiguration(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldTalkToPluginToGetSCMView() throws Exception {
        // Anonymous SCMView: only identity matters for the assertSame check.
        SCMView deserializedResponse = new SCMView() {
            @Override
            public String displayValue() {
                return null;
            }

            @Override
            public String template() {
                return null;
            }
        };
        when(jsonMessageHandler.responseMessageForSCMView(responseBody)).thenReturn(deserializedResponse);

        SCMView response = scmExtension.getSCMView(PLUGIN_ID);

        assertRequest(requestArgumentCaptor.getValue(), SCMExtension.EXTENSION_NAME, "1.0", SCMExtension.REQUEST_SCM_VIEW, null);
        verify(jsonMessageHandler).responseMessageForSCMView(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldTalkToPluginToCheckIfSCMConfigurationIsValid() throws Exception {
        when(jsonMessageHandler.requestMessageForIsSCMConfigurationValid(scmPropertyConfiguration)).thenReturn(requestBody);
        ValidationResult deserializedResponse = new ValidationResult();
        when(jsonMessageHandler.responseMessageForIsSCMConfigurationValid(responseBody)).thenReturn(deserializedResponse);

        ValidationResult response = scmExtension.isSCMConfigurationValid(PLUGIN_ID, scmPropertyConfiguration);

        assertRequest(requestArgumentCaptor.getValue(), SCMExtension.EXTENSION_NAME, "1.0", SCMExtension.REQUEST_VALIDATE_SCM_CONFIGURATION, requestBody);
        verify(jsonMessageHandler).requestMessageForIsSCMConfigurationValid(scmPropertyConfiguration);
        verify(jsonMessageHandler).responseMessageForIsSCMConfigurationValid(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldTalkToPluginToCheckSCMConnectionSuccessful() throws Exception {
        when(jsonMessageHandler.requestMessageForCheckConnectionToSCM(scmPropertyConfiguration)).thenReturn(requestBody);
        Result deserializedResponse = new Result();
        when(jsonMessageHandler.responseMessageForCheckConnectionToSCM(responseBody)).thenReturn(deserializedResponse);

        Result response = scmExtension.checkConnectionToSCM(PLUGIN_ID, scmPropertyConfiguration);

        assertRequest(requestArgumentCaptor.getValue(), SCMExtension.EXTENSION_NAME, "1.0", SCMExtension.REQUEST_CHECK_SCM_CONNECTION, requestBody);
        verify(jsonMessageHandler).requestMessageForCheckConnectionToSCM(scmPropertyConfiguration);
        verify(jsonMessageHandler).responseMessageForCheckConnectionToSCM(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldTalkToPluginToGetLatestModification() throws Exception {
        String flyweight = "flyweight";
        when(jsonMessageHandler.requestMessageForLatestRevision(scmPropertyConfiguration, materialData, flyweight)).thenReturn(requestBody);
        MaterialPollResult deserializedResponse = new MaterialPollResult();
        when(jsonMessageHandler.responseMessageForLatestRevision(responseBody)).thenReturn(deserializedResponse);

        MaterialPollResult response = scmExtension.getLatestRevision(PLUGIN_ID, scmPropertyConfiguration, materialData, flyweight);

        assertRequest(requestArgumentCaptor.getValue(), SCMExtension.EXTENSION_NAME, "1.0", SCMExtension.REQUEST_LATEST_REVISION, requestBody);
        verify(jsonMessageHandler).requestMessageForLatestRevision(scmPropertyConfiguration, materialData, flyweight);
        verify(jsonMessageHandler).responseMessageForLatestRevision(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldTalkToPluginToGetLatestModificationSinceLastRevision() throws Exception {
        String flyweight = "flyweight";
        SCMRevision previouslyKnownRevision = new SCMRevision();
        when(jsonMessageHandler.requestMessageForLatestRevisionsSince(scmPropertyConfiguration, materialData, flyweight, previouslyKnownRevision)).thenReturn(requestBody);
        MaterialPollResult deserializedResponse = new MaterialPollResult();
        when(jsonMessageHandler.responseMessageForLatestRevisionsSince(responseBody)).thenReturn(deserializedResponse);

        MaterialPollResult response = scmExtension.latestModificationSince(PLUGIN_ID, scmPropertyConfiguration, materialData, flyweight, previouslyKnownRevision);

        assertRequest(requestArgumentCaptor.getValue(), SCMExtension.EXTENSION_NAME, "1.0", SCMExtension.REQUEST_LATEST_REVISIONS_SINCE, requestBody);
        verify(jsonMessageHandler).requestMessageForLatestRevisionsSince(scmPropertyConfiguration, materialData, flyweight, previouslyKnownRevision);
        verify(jsonMessageHandler).responseMessageForLatestRevisionsSince(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldTalkToPluginToCheckout() throws Exception {
        String destination = "destination";
        SCMRevision revision = new SCMRevision();
        when(jsonMessageHandler.requestMessageForCheckout(scmPropertyConfiguration, destination, revision)).thenReturn(requestBody);
        Result deserializedResponse = new Result();
        when(jsonMessageHandler.responseMessageForCheckout(responseBody)).thenReturn(deserializedResponse);

        Result response = scmExtension.checkout(PLUGIN_ID, scmPropertyConfiguration, destination, revision);

        assertRequest(requestArgumentCaptor.getValue(), SCMExtension.EXTENSION_NAME, "1.0", SCMExtension.REQUEST_CHECKOUT, requestBody);
        verify(jsonMessageHandler).requestMessageForCheckout(scmPropertyConfiguration, destination, revision);
        verify(jsonMessageHandler).responseMessageForCheckout(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldHandleExceptionDuringPluginInteraction() throws Exception {
        when(pluginManager.submitTo(eq(PLUGIN_ID), requestArgumentCaptor.capture())).thenThrow(new RuntimeException("exception-from-plugin"));

        try {
            scmExtension.checkConnectionToSCM(PLUGIN_ID, scmPropertyConfiguration);
            // Fix: without this, the test passed silently when no exception
            // was thrown at all.
            fail("expected the plugin exception to be wrapped and rethrown");
        } catch (Exception e) {
            assertThat(e.getMessage(), is("Interaction with plugin with id 'plugin-id' implementing 'scm' extension failed while requesting for 'check-scm-connection'. Reason: [exception-from-plugin]"));
        }
    }

    // Asserts all four identifying fields of the request sent to the plugin.
    private void assertRequest(GoPluginApiRequest goPluginApiRequest, String extensionName, String version, String requestName, String requestBody) {
        assertThat(goPluginApiRequest.extension(), is(extensionName));
        assertThat(goPluginApiRequest.extensionVersion(), is(version));
        assertThat(goPluginApiRequest.requestName(), is(requestName));
        assertThat(goPluginApiRequest.requestBody(), is(requestBody));
    }
}
| |
package com.tom.factory.tileentity;
import static com.tom.lib.api.energy.EnergyType.*;
import java.util.ArrayList;
import java.util.List;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.ISidedInventory;
import net.minecraft.inventory.InventoryBasic;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.NonNullList;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.TextComponentString;
import net.minecraft.world.IBlockAccess;
import com.tom.api.block.IItemTile;
import com.tom.api.tileentity.IConfigurable;
import com.tom.api.tileentity.TileEntityTomsMod;
import com.tom.config.Config;
import com.tom.config.ConfigurationOptionMachine;
import com.tom.defense.ForceDeviceControlType;
import com.tom.factory.FactoryInit;
import com.tom.factory.block.BlockMachineBase;
import com.tom.factory.block.SteamAlloySmelter;
import com.tom.lib.api.energy.EnergyStorage;
import com.tom.lib.api.energy.EnergyType;
import com.tom.lib.api.energy.IEnergyReceiver;
import com.tom.recipes.handler.MachineCraftingHandler.ItemStackChecker;
import com.tom.util.TomsModUtils;
public abstract class TileEntityMachineBase extends TileEntityTomsMod implements ISidedInventory, IEnergyReceiver, IConfigurable, IItemTile {
protected InventoryBasic inv = new InventoryBasic("", false, this.getSizeInventory());
protected EnergyType TYPE = HV;
public boolean active = false;
// private boolean lastActive = false;
protected static final float[] TYPE_MULTIPLIER_SPEED = new float[]{1.0F, 0.85F, 0.7F};
protected static final int[] MAX_SPEED_UPGRADE_COUNT = Config.max_speed_upgrades;
protected int maxProgress = 1;
protected int progress = -1;
public ForceDeviceControlType rs;
private boolean powersharing = false;
private byte outputSides;
private IConfigurationOption cfgOption;
private List<ItemStackChecker> output = new ArrayList<>();
public TileEntityMachineBase() {
rs = ForceDeviceControlType.IGNORE;
cfgOption = new ConfigurationOptionMachine(getFront(), new ResourceLocation("tomsmodfactory:textures/blocks/itemOutput.png"), getTop());
updateSlots();
}
@Override
public int getInventoryStackLimit() {
return 64;
}
@Override
public boolean isUsableByPlayer(EntityPlayer player) {
return TomsModUtils.isUsable(pos, player, world, this);
}
@Override
public void openInventory(EntityPlayer player) {
}
@Override
public void closeInventory(EntityPlayer player) {
}
@Override
public int getFieldCount() {
return 1;
}
@Override
public boolean hasCustomName() {
return false;
}
@Override
public ITextComponent getDisplayName() {
return new TextComponentString(getName());
}
@Override
public void readFromNBT(NBTTagCompound compound) {
super.readFromNBT(compound);
TomsModUtils.loadAllItems(compound.getTagList("inventory", 10), inv);
getEnergy().readFromNBT(compound);
TYPE = EnergyType.VALUES[compound.getInteger("energyType")];
rs = ForceDeviceControlType.get(compound.getInteger("rsMode"));
outputSides = compound.getByte("output");
powersharing = compound.getBoolean("powersharing");
updateSlots();
}
@Override
public NBTTagCompound writeToNBT(NBTTagCompound compound) {
super.writeToNBT(compound);
compound.setTag("inventory", TomsModUtils.saveAllItems(inv));
getEnergy().writeToNBT(compound);
compound.setInteger("energyType", TYPE.ordinal());
compound.setInteger("rsMode", rs.ordinal());
compound.setByte("output", outputSides);
compound.setBoolean("powersharing", powersharing);
compound.setTag("outputList", TomsModUtils.writeCollection(output, ItemStackChecker::writeToNew));
return compound;
}
@Override
public boolean canConnectEnergy(EnumFacing from, EnergyType type) {
return type == TYPE;
}
@Override
public List<EnergyType> getValidEnergyTypes() {
return TYPE.getList();
}
@Override
public double receiveEnergy(EnumFacing from, EnergyType type, double maxReceive, boolean simulate) {
return canConnectEnergy(from, type) ? TYPE.convertFrom(LV, getEnergy().receiveEnergy(LV.convertFrom(TYPE, maxReceive), simulate)) : 0;
}
@Override
public double getEnergyStored(EnumFacing from, EnergyType type) {
return getEnergy().getEnergyStored();
}
@Override
public long getMaxEnergyStored(EnumFacing from, EnergyType type) {
return getEnergy().getMaxEnergyStored();
}
public abstract EnergyStorage getEnergy();
public void setType(int meta) {
if (meta == 0) {
TYPE = HV;
} else if (meta == 1) {
TYPE = EnergyType.MV;
} else {
TYPE = EnergyType.LV;
}
}
public int getType() {
return getMetaFromEnergyType(TYPE);
}
public void writeToStackNBT(NBTTagCompound tag) {
getEnergy().writeToNBT(tag);
int i = getUpgradeSlot();
if (i > -1) {
NBTTagList list = new NBTTagList();
NBTTagCompound t = new NBTTagCompound();
inv.getStackInSlot(i).writeToNBT(t);
t.setByte("Slot", (byte) i);
list.appendTag(t);
tag.setTag("inventory", list);
}
tag.setInteger("rsMode", rs.ordinal());
tag.setByte("output", outputSides);
}
public abstract int getUpgradeSlot();
public abstract int[] getOutputSlots();
public abstract int[] getInputSlots();
public void pushOutput(EnumFacing side) {
}
public int getMaxProgress() {
return !world.isRemote ? MathHelper.floor(getMaxProcessTimeNormal() / TYPE_MULTIPLIER_SPEED[getType()]) : maxProgress;
}
@Override
public int getField(int id) {
return id == 0 ? progress : id == 1 ? maxProgress : 0;
}
@Override
public void setField(int id, int value) {
if (id == 0)
progress = value;
else if (id == 1)
maxProgress = value;
}
@Override
public final void preUpdate(IBlockState state) {
if (!world.isRemote) {
maxProgress = getMaxProgress();
if (rs == ForceDeviceControlType.HIGH_REDSTONE) {
this.active = world.isBlockIndirectlyGettingPowered(pos) > 0;
} else if (rs == ForceDeviceControlType.LOW_REDSTONE) {
this.active = world.isBlockIndirectlyGettingPowered(pos) == 0;
} else if (rs == ForceDeviceControlType.IGNORE) {
this.active = true;
}
// lastActive = active;
if (powersharing && getEnergy().getEnergyStoredPer() > 0.5F && world.getTotalWorldTime() % 10 == 0) {
EnumFacing facing = state.getValue(BlockMachineBase.FACING);
sharePower(facing.rotateY());
sharePower(facing.rotateYCCW());
}
}
}
private void sharePower(EnumFacing f) {
TileEntityMachineBase t = this;
int d = 0;
while (t != null && d < 8 && getEnergy().getEnergyStoredPer() > 0.5F) {
d++;
TileEntity te = world.getTileEntity(pos.offset(f, d));
if (te != null && te instanceof TileEntityMachineBase) {
t = (TileEntityMachineBase) te;
double r = t.receiveEnergy(f.getOpposite(), TYPE, 250, true);
if (r > 0) {
t.receiveEnergy(f, TYPE, getEnergy().extractEnergy(r, false), false);
r = t.receiveEnergy(f.getOpposite(), TYPE, 250, true);
if (r > 0) {
t.receiveEnergy(f, TYPE, getEnergy().extractEnergy(r, false), false);
r = t.receiveEnergy(f.getOpposite(), TYPE, 250, true);
if (r > 0) {
t.receiveEnergy(f, TYPE, getEnergy().extractEnergy(r, false), false);
}
}
}
} else {
t = null;
}
}
}
@Override
public final void postUpdate(IBlockState state) {
if (outputSides != 0) {
int[] out = getOutputSlots();
EnumFacing facing = state.getValue(BlockMachineBase.FACING);
for (int i = 0;i < EnumFacing.VALUES.length;i++) {
EnumFacing f = EnumFacing.VALUES[i];
EnumFacing f2 = f;
switch (f) {
case DOWN:
f2 = EnumFacing.DOWN;
break;
case EAST:
f2 = facing.rotateYCCW();
break;
case NORTH:
f2 = facing;
break;
case SOUTH:
f2 = facing.getOpposite();
break;
case UP:
f2 = EnumFacing.UP;
break;
case WEST:
f2 = facing.rotateY();
break;
default:
break;
}
if (contains(f)) {
pushOutput(f2);
if (out != null) {
for (int j = 0;j < out.length;j++) {
int s = out[j];
inv.setInventorySlotContents(s, TomsModUtils.pushStackToNeighbours(inv.getStackInSlot(s), world, pos, new EnumFacing[]{f2}));
}
}
}
}
}
}
public abstract int getMaxProcessTimeNormal();
public abstract ResourceLocation getFront();
public ResourceLocation getTop() {
return null;
}
public static int getMetaFromEnergyType(EnergyType type) {
return type == HV ? 0 : type == EnergyType.MV ? 1 : 2;
}
public int getSpeedUpgradeCount() {
int slot = getUpgradeSlot();
return Math.min(slot < 0 ? 0 : !inv.getStackInSlot(slot).isEmpty() && inv.getStackInSlot(slot).getItem() == FactoryInit.speedUpgrade ? inv.getStackInSlot(slot).getCount() : 0, MAX_SPEED_UPGRADE_COUNT[getType()]);
}
public int getMaxSpeedUpgradeCount() {
return MAX_SPEED_UPGRADE_COUNT[getType()];
}
@Override
public IConfigurationOption getOption() {
return cfgOption;
}
@Override
public boolean canConfigure(EntityPlayer player, ItemStack stack) {
return true;
}
@Override
public BlockPos getPos2() {
return pos;
}
@Override
public BlockPos getSecurityStationPos() {
return null;
}
@Override
public void setCardStack(ItemStack stack) {
}
@Override
public ItemStack getCardStack() {
return null;
}
private int[][] SLOTS = new int[6][0];
@Override
public void receiveNBTPacket(EntityPlayer pl, NBTTagCompound tag) {
outputSides = tag.getByte("s");
rs = ForceDeviceControlType.get(tag.getInteger("r"));
powersharing = tag.getBoolean("p");
updateSlots();
}
@Override
public void writeToNBTPacket(NBTTagCompound tag) {
tag.setByte("s", outputSides);
tag.setInteger("r", rs.ordinal());
tag.setBoolean("p", powersharing);
}
@Override
public int[] getSlotsForFace(EnumFacing side) {
return SLOTS[side.ordinal()];
}
protected boolean canRun() {
if (rs == ForceDeviceControlType.HIGH_REDSTONE) {
this.active = world.isBlockIndirectlyGettingPowered(pos) > 0;
} else if (rs == ForceDeviceControlType.LOW_REDSTONE) {
this.active = world.isBlockIndirectlyGettingPowered(pos) == 0;
} else if (rs == ForceDeviceControlType.IGNORE) {
this.active = true;
}
return active;
}
public boolean contains(EnumFacing side) {
return (outputSides & (1 << side.ordinal())) != 0;
}
protected void updateSlots() {
int[] in = getInputSlots();
int[] out = getOutputSlots();
SLOTS = new int[6][];
for (int i = 0;i < EnumFacing.VALUES.length;i++) {
int size = 0;
if (in != null) {
size += in.length;
}
if (out != null && contains(EnumFacing.VALUES[i])) {
size += out.length;
}
SLOTS[i] = new int[size];
if (in != null) {
for (int j = 0;j < in.length;j++) {
SLOTS[i][j] = in[j];
}
}
if (out != null && contains(EnumFacing.VALUES[i])) {
for (int j = 0;j < out.length;j++) {
SLOTS[i][j] = out[j];
}
}
}
}
@Override
public ItemStack getStackInSlot(int index) {
return inv.getStackInSlot(index);
}
@Override
public ItemStack decrStackSize(int index, int count) {
return inv.decrStackSize(index, count);
}
@Override
public ItemStack removeStackFromSlot(int index) {
return inv.removeStackFromSlot(index);
}
@Override
public void setInventorySlotContents(int index, ItemStack stack) {
this.inv.setInventorySlotContents(index, stack);
}
@Override
public boolean isEmpty() {
return inv.isEmpty();
}
@Override
public void clear() {
inv.clear();
}
@Override
public void updateEntity() {
if (!world.isRemote) {
if (getEnergy().extractEnergy(20D, true) == 20D && canRun()) {
if (progress > 0) {
updateProgress();
} else if (progress == 0) {
finish();
} else {
checkItems();
TomsModUtils.setBlockStateWithCondition(world, pos, SteamAlloySmelter.ACTIVE, progress > 0);
}
} else {
TomsModUtils.setBlockStateWithCondition(world, pos, SteamAlloySmelter.ACTIVE, false);
}
}
}
public abstract void checkItems();
public abstract void finish();
public abstract void updateProgress();
public void addItemsAndSetProgress(ItemStackChecker s, int outputSlot) {
addItemsAndSetProgress(s, outputSlot, 0, -1, null);
}
public void addItemsAndSetProgress(ItemStackChecker s, int outputSlot, int inSlot1, int inSlot2) {
addItemsAndSetProgress(s, outputSlot, inSlot1, inSlot2, null);
}
public void addItemsAndSetProgress(ItemStackChecker s, int outputSlot, int inSlot1, int inSlot2, Runnable doRun) {
if (s != null) {
if (!inv.getStackInSlot(outputSlot).isEmpty()) {
if (TomsModUtils.areItemStacksEqual(inv.getStackInSlot(outputSlot), s.getStack(), true, true, false) && inv.getStackInSlot(outputSlot).getCount() + s.getStack().getCount() <= s.getStack().getMaxStackSize()) {
inv.getStackInSlot(outputSlot).grow(s.getStack().getCount());
progress = -1;
if (doRun != null)
doRun.run();
}
} else {
progress = -1;
inv.setInventorySlotContents(outputSlot, s.getStack());
if (doRun != null)
doRun.run();
}
} else {
progress = -1;
}
}
public void checkItems(ItemStackChecker s, int outputSlot, int MAX_PROCESS_TIME, int inSlot1, int inSlot2) {
checkItems(s, outputSlot, MAX_PROCESS_TIME, inSlot1, inSlot2, null);
}
/**
 * Starts a recipe if its eventual output can be accepted: progress is set to
 * MAX_PROCESS_TIME, the input slots (inSlot1 with getExtra(), inSlot2 with getExtra2(),
 * each only when &gt; -1) are decremented, and {@code doRun} fires if given.
 * A non-empty output slot must hold a matching stack with room for the result.
 * NOTE(review): the non-empty branch also requires slot 0 to hold at least getExtra()
 * items — hard-coded 0 rather than inSlot1; confirm whether that is intentional.
 */
public void checkItems(ItemStackChecker s, int outputSlot, int MAX_PROCESS_TIME, int inSlot1, int inSlot2, Runnable doRun) {
    if (s == null)
        return;
    ItemStack out = inv.getStackInSlot(outputSlot);
    boolean canStart = out.isEmpty()
            || (TomsModUtils.areItemStacksEqual(out, s.getStack(), true, true, false)
                    && out.getCount() + s.getStack().getCount() <= s.getStack().getMaxStackSize()
                    && inv.getStackInSlot(0).getCount() >= s.getExtra());
    if (canStart) {
        progress = MAX_PROCESS_TIME;
        if (inSlot1 > -1)
            decrStackSize(inSlot1, s.getExtra());
        if (inSlot2 > -1)
            decrStackSize(inSlot2, s.getExtra2());
        if (doRun != null)
            doRun.run();
    }
}
/** Returns the registered output checker at index i, or null when none exists yet. */
public ItemStackChecker getOutput(int i) {
    if (i < output.size()) {
        return output.get(i);
    }
    return null;
}
/**
 * Stores checker s at index i of the output list; null checkers are ignored.
 * NOTE(review): when i is beyond the current size, add() appends at index
 * output.size(), not at i — callers appear to fill indices sequentially;
 * confirm before relying on sparse indices.
 */
public void setOut(int i, ItemStackChecker s) {
if (s == null)
return;
if (output.size() <= i) {
output.add(s);
} else
output.set(i, s);
}
@Override
public String getConfigName() {
    // Config key is derived from the owning block's unlocalized name.
    String base = getBlockType().getUnlocalizedName();
    return base + ".name";
}
@Override
public void getDrops(NonNullList<ItemStack> drops, IBlockAccess world, BlockPos pos, IBlockState state, int fortune) {
    // Drop a single item that carries this tile's serialized state, so the
    // machine's contents survive the block being broken.
    ItemStack dropped = new ItemStack(state.getBlock(), 1, getType());
    NBTTagCompound entityTag = new NBTTagCompound();
    writeToStackNBT(entityTag);
    NBTTagCompound rootTag = new NBTTagCompound();
    rootTag.setTag("BlockEntityTag", entityTag);
    rootTag.setBoolean("stored", true);
    dropped.setTagCompound(rootTag);
    drops.add(dropped);
}
}
| |
/*
* Copyright 2018 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.test.plugin.shared;
import com.navercorp.pinpoint.test.plugin.ForkedPinpointPluginTestRunner;
import com.navercorp.pinpoint.test.plugin.PluginClassLoading;
import com.navercorp.pinpoint.test.plugin.ReflectPluginTestVerifier;
import com.navercorp.pinpoint.test.plugin.util.ArrayUtils;
import com.navercorp.pinpoint.test.plugin.util.ChildFirstClassLoader;
import com.navercorp.pinpoint.test.plugin.util.ProfilerClass;
import com.navercorp.pinpoint.test.plugin.util.TestLogger;
import com.navercorp.pinpoint.test.plugin.util.ThreadContextCallable;
import com.navercorp.pinpoint.test.plugin.util.URLUtils;
import org.junit.runner.JUnitCore;
import org.junit.runner.Runner;
import org.junit.runners.model.InitializationError;
import org.tinylog.TaggedLogger;
import java.io.File;
import java.io.PrintStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* @author Taejin Koo
*/
/**
 * Entry point for executing Pinpoint plugin integration tests in a forked JVM.
 * Configuration (test class name, class location, Maven resolver classpath,
 * repository URLs) is read from system properties; each test parameter set is
 * run on its own daemon thread with a dedicated child-first classloader.
 *
 * @author Taejin Koo
 */
public class SharedPinpointPluginTest {
    private static final TaggedLogger logger = TestLogger.getLogger();

    /**
     * Reads the required system properties, parses the test parameters from
     * {@code args} and executes the tests. Logs and returns early (no exception)
     * when a mandatory property is missing.
     */
    public static void main(String[] args) throws Exception {
        final String mavenDependencyResolverClassPaths = System.getProperty(SharedPluginTestConstants.MAVEN_DEPENDENCY_RESOLVER_CLASS_PATHS);
        if (mavenDependencyResolverClassPaths == null) {
            logger.error("mavenDependencyResolverClassPaths must not be empty");
            return;
        }
        final String repositoryUrlString = System.getProperty(SharedPluginTestConstants.TEST_REPOSITORY_URLS);
        if (repositoryUrlString == null) {
            logger.error("repositoryUrls must not be empty");
            return;
        }
        logger.debug("-D{}={}", SharedPluginTestConstants.TEST_REPOSITORY_URLS, repositoryUrlString);
        final String testLocation = System.getProperty(SharedPluginTestConstants.TEST_LOCATION);
        if (testLocation == null) {
            logger.error("testLocation must not be empty");
            return;
        }
        logger.debug("-D{}={}", SharedPluginTestConstants.TEST_LOCATION, testLocation);
        final String testClazzName = System.getProperty(SharedPluginTestConstants.TEST_CLAZZ_NAME);
        if (testClazzName == null) {
            logger.error("testClazzName must not be empty");
            return;
        }
        logger.debug("-D{}={}", SharedPluginTestConstants.TEST_CLAZZ_NAME, testClazzName);
        // Logger forwarding defaults to enabled when the property is absent.
        String loggerEnable = System.getProperty(SharedPluginTestConstants.TEST_LOGGER);
        if (loggerEnable == null) {
            logger.debug("-D{} is not set", SharedPluginTestConstants.TEST_LOGGER);
            loggerEnable = Boolean.TRUE.toString();
        }
        final boolean testLogger = Boolean.parseBoolean(loggerEnable);
        logger.debug("-D{}={}", SharedPluginTestConstants.TEST_LOGGER, testLogger);
        if (ArrayUtils.isEmpty(args)) {
            logger.error("test must not be empty");
            return;
        }
        if (logger.isDebugEnabled()) {
            logger.debug("main args:{}", Arrays.toString(args));
        }
        String[] mavenDependencyResolverClassPathArray = mavenDependencyResolverClassPaths.split(File.pathSeparator);
        String[] repositoryUrls = repositoryUrlString.split(",");
        TestParameterParser parser = new TestParameterParser();
        List<TestParameter> testParameters = parser.parse(args);
        SharedPinpointPluginTest pluginTest = new SharedPinpointPluginTest(testClazzName, testLocation, testLogger,
                mavenDependencyResolverClassPathArray, repositoryUrls, testParameters, System.out);
        pluginTest.execute();
    }

    private final String testClazzName;
    private final String testLocation;
    private final boolean testLogger;
    private final String[] mavenDependencyResolverClassPaths;
    private final String[] repositoryUrls;
    private final List<TestParameter> testParameters;
    private final PrintStream out;

    public SharedPinpointPluginTest(String testClazzName, String testLocation, boolean testLogger,
                                    String[] mavenDependencyResolverClassPaths, String[] repositoryUrls,
                                    List<TestParameter> testParameters, PrintStream out) {
        this.testClazzName = testClazzName;
        this.testLocation = testLocation;
        this.testLogger = testLogger;
        this.mavenDependencyResolverClassPaths = mavenDependencyResolverClassPaths;
        this.repositoryUrls = repositoryUrls;
        this.testParameters = testParameters;
        this.out = out;
    }

    /**
     * Builds one {@link TestInfo} per test parameter. Each test's dependency list
     * is the test class location, the (optional) logger dependencies, and the
     * parameter's resolved Maven dependencies.
     */
    private List<TestInfo> newTestCaseInfo(List<TestParameter> testParameters, File testClazzLocation, String[] repositoryUrls, ClassLoader dependencyClassLoader) throws Exception {
        ReflectionDependencyResolver dependencyResolver = new ReflectionDependencyResolver(dependencyClassLoader, repositoryUrls);
        List<File> loggerDependencies = getLoggerDependencies(dependencyResolver, dependencyClassLoader);
        logger.debug("loggerDependency:{}", loggerDependencies);
        List<TestInfo> testInfos = new ArrayList<>();
        for (TestParameter testParameter : testParameters) {
            final List<File> testDependency = new ArrayList<>();
            testDependency.add(testClazzLocation);
            testDependency.addAll(loggerDependencies);
            List<File> testParameterDependency = getTestParameterDependency(dependencyClassLoader, dependencyResolver, testParameter);
            testDependency.addAll(testParameterDependency);
            final TestInfo testInfo = new TestInfo(testParameter.getTestId(), testDependency, Arrays.asList(repositoryUrls));
            testInfos.add(testInfo);
        }
        return testInfos;
    }

    /** Resolves the @Dependency artifacts declared by a single test parameter. */
    private List<File> getTestParameterDependency(ClassLoader mavenDependencyResolverClassLoader,
                                                  ReflectionDependencyResolver dependencyResolver,
                                                  TestParameter testParameter) throws Exception {
        final List<String> mavenDependencies = testParameter.getMavenDependencies();
        List<File> testDependencyFileList = lookup(dependencyResolver, mavenDependencies, mavenDependencyResolverClassLoader);
        if (logger.isDebugEnabled()) {
            logger.debug("@Dependency {}", mavenDependencies);
            for (File file : testDependencyFileList) {
                logger.debug("-> {}", file);
            }
        }
        return testDependencyFileList;
    }

    /** Resolves the shared logger artifacts, or an empty list when logging is disabled. */
    private List<File> getLoggerDependencies(ReflectionDependencyResolver dependencyResolver, ClassLoader mavenDependencyResolverClassLoader) throws Exception {
        if (!testLogger) {
            return Collections.emptyList();
        }
        List<String> dependencyLib = PluginClassLoading.LOGGER_DEPENDENCY;
        List<File> libFiles = lookup(dependencyResolver, dependencyLib, mavenDependencyResolverClassLoader);
        if (logger.isDebugEnabled()) {
            logger.debug("LoggerDependency {}", dependencyLib);
            for (File libFile : libFiles) {
                logger.debug("-> {}", libFile);
            }
        }
        return libFiles;
    }

    /**
     * Runs the resolver lookup with {@code cl} installed as the thread context
     * classloader (the Maven resolver loads its own classes through it).
     */
    private List<File> lookup(final ReflectionDependencyResolver dependencyResolver, final List<String> dependencyLib, ClassLoader cl) throws Exception {
        Callable<List<File>> callable = new ThreadContextCallable<>(new Callable<List<File>>() {
            @Override
            public List<File> call() throws Exception {
                return dependencyResolver.lookup(dependencyLib);
            }
        }, cl);
        return callable.call();
    }

    private void logTestInformation() {
        logger.info("[{}] {}", getClass().getSimpleName(), testClazzName);
        if (logger.isDebugEnabled()) {
            for (String mavenDependencyResolverClassPath : mavenDependencyResolverClassPaths) {
                logger.debug("{}: {}", SharedPluginTestConstants.MAVEN_DEPENDENCY_RESOLVER_CLASS_PATHS, mavenDependencyResolverClassPath);
            }
            for (TestParameter testParameter : testParameters) {
                logger.debug("{} {}", testClazzName, testParameter);
            }
            for (String repositoryUrl : repositoryUrls) {
                logger.debug("{}: {}", SharedPluginTestConstants.TEST_REPOSITORY_URLS, repositoryUrl);
            }
        }
    }

    /** Resolves dependencies once, then executes every configured test case in turn. */
    public void execute() throws Exception {
        logTestInformation();
        ClassLoader mavenDependencyResolverClassLoader = new ChildFirstClassLoader(URLUtils.fileToUrls(mavenDependencyResolverClassPaths));
        File testClazzLocation = new File(testLocation);
        List<TestInfo> testInfos = newTestCaseInfo(testParameters, testClazzLocation, repositoryUrls, mavenDependencyResolverClassLoader);
        for (TestInfo testInfo : testInfos) {
            execute(testInfo);
        }
    }

    /**
     * Runs one test case on a dedicated daemon thread with its own child-first
     * classloader, waiting at most 5 minutes for completion. Failures are logged,
     * not rethrown, so remaining test cases still run.
     */
    private void execute(final TestInfo testInfo) {
        try {
            List<File> dependencyFileList = testInfo.getDependencyFileList();
            if (logger.isDebugEnabled()) {
                for (File dependency : dependencyFileList) {
                    logger.debug("testcase cl lib :{}", dependency);
                }
            }
            URL[] urls = URLUtils.fileToUrls(dependencyFileList);
            final ClassLoader testClassLoader = new ChildFirstClassLoader(urls, ProfilerClass.PINPOINT_PROFILER_CLASS);
            Runnable runnable = new Runnable() {
                @Override
                public void run() {
                    final Class<?> testClazz = loadClass();
                    logger.debug("testClazz:{} cl:{}", testClazz.getName(), testClazz.getClassLoader());
                    try {
                        JUnitCore junit = new JUnitCore();
                        junit.addListener(new PrintListener());
                        Runner runner = new ForkedPinpointPluginTestRunner(testClazz, testInfo.getTestId());
                        junit.run(runner);
                    } catch (InitializationError error) {
                        logger.error(error, "test run fail testClazz:{} testId:{}", testClazzName, testInfo.getTestId());
                        List<Throwable> causes = error.getCauses();
                        for (Throwable cause : causes) {
                            logger.error(cause, "junit error Caused By:{}", cause.getMessage());
                        }
                    }
                }

                private Class<?> loadClass() {
                    try {
                        return testClassLoader.loadClass(testClazzName);
                    } catch (ClassNotFoundException e) {
                        // FIX: message previously had one {} placeholder but two arguments,
                        // silently dropping the testId from the log output.
                        logger.error(e, "testClazz:{} not found, testId:{}", testClazzName, testInfo.getTestId());
                        throw new RuntimeException(e);
                    }
                }
            };
            String threadName = testClazzName + " " + testInfo.getTestId() + " Thread";
            Thread testThread = newThread(runnable, threadName, testClassLoader);
            testThread.start();
            testThread.join(TimeUnit.MINUTES.toMillis(5));
            checkTerminatedState(testThread, testClazzName + " " + testInfo.getTestId());
        } catch (Exception e) {
            logger.error(e, "{}:{} execute failed:{}", testClazzName, testInfo.getTestId(), e.getMessage());
        } finally {
            ReflectPluginTestVerifier.getInstance().cleanUp(true);
        }
    }

    /** Throws when the test thread is still alive after the join timeout. */
    private void checkTerminatedState(Thread testThread, String testInfo) {
        if (testThread.isAlive()) {
            throw new IllegalStateException(testInfo + " not finished");
        }
    }

    /** Daemon thread so a hung test cannot keep this forked JVM alive forever. */
    private Thread newThread(Runnable runnable, String threadName, ClassLoader testClassLoader) {
        Thread testThread = new Thread(runnable);
        testThread.setName(threadName);
        testThread.setContextClassLoader(testClassLoader);
        testThread.setDaemon(true);
        return testThread;
    }
}
| |
// SMSLib for Java v3
// A Java API library for sending and receiving SMS via a GSM modem
// or other supported gateways.
// Web Site: http://www.smslib.org
//
// Copyright (C) 2002-2012, Thanasis Delenikas, Athens/GREECE.
// SMSLib is distributed under the terms of the Apache License version 2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.smslib.smpp.jsmpp;
import java.io.IOException;
import java.util.Calendar;
import java.util.Date;
import org.jsmpp.InvalidResponseException;
import org.jsmpp.PDUException;
import org.jsmpp.bean.AlertNotification;
import org.jsmpp.bean.Alphabet;
import org.jsmpp.bean.BindType;
import org.jsmpp.bean.DataSm;
import org.jsmpp.bean.DeliverSm;
import org.jsmpp.bean.DeliveryReceipt;
import org.jsmpp.bean.ESMClass;
import org.jsmpp.bean.GeneralDataCoding;
import org.jsmpp.bean.MessageClass;
import org.jsmpp.bean.MessageType;
import org.jsmpp.bean.NumberingPlanIndicator;
import org.jsmpp.bean.RegisteredDelivery;
import org.jsmpp.bean.SMSCDeliveryReceipt;
import org.jsmpp.bean.TypeOfNumber;
import org.jsmpp.extra.NegativeResponseException;
import org.jsmpp.extra.ProcessRequestException;
import org.jsmpp.extra.ResponseTimeoutException;
import org.jsmpp.extra.SessionState;
import org.jsmpp.session.BindParameter;
import org.jsmpp.session.DataSmResult;
import org.jsmpp.session.MessageReceiverListener;
import org.jsmpp.session.SMPPSession;
import org.jsmpp.session.Session;
import org.jsmpp.session.SessionStateListener;
import org.jsmpp.util.InvalidDeliveryReceiptException;
import org.smslib.AGateway;
import org.smslib.GatewayException;
import org.smslib.InboundMessage;
import org.smslib.OutboundMessage;
import org.smslib.Service;
import org.smslib.StatusReportMessage;
import org.smslib.TimeoutException;
import org.smslib.Message.MessageEncodings;
import org.smslib.Message.MessageTypes;
import org.smslib.OutboundMessage.FailureCauses;
import org.smslib.OutboundMessage.MessageStatuses;
import org.smslib.StatusReportMessage.DeliveryStatuses;
import org.smslib.helper.Logger;
import org.smslib.notify.InboundMessageNotification;
import org.smslib.smpp.AbstractSMPPGateway;
import org.smslib.smpp.BindAttributes;
/**
* A gateway that supports SMPP through JSMPP (http://code.google.com/p/jsmpp/).
*
* @author Bassam Al-Sarori
*/
public class JSMPPGateway extends AbstractSMPPGateway {
// Active jSMPP session; replaced with a fresh instance by initSession()
// whenever the previous session reaches the CLOSED state.
private SMPPSession session = null;
private MessageReceiver messageReceiver=new MessageReceiver();
private SessionStateListener stateListener=new JSMPPSessionStateListener();
// Bind parameters derived once from bindAttributes in init().
private BindType bindType;
private TypeOfNumber bindTypeOfNumber;
private NumberingPlanIndicator bindNumberingPlanIndicator;
/**
 * Creates a JSMPP-backed gateway and prepares (but does not open) its SMPP session.
 *
 * @param id gateway identifier
 * @param host SMSC host
 * @param port SMSC port
 * @param bindAttributes credentials, system type and bind direction
 */
public JSMPPGateway(String id, String host, int port,
BindAttributes bindAttributes) {
super(id, host, port, bindAttributes);
setAttributes(AGateway.GatewayAttributes.SEND | AGateway.GatewayAttributes.CUSTOMFROM | AGateway.GatewayAttributes.BIGMESSAGES | AGateway.GatewayAttributes.FLASHSMS | AGateway.GatewayAttributes.RECEIVE);
init();
}
// Maps the configured bind direction to a jSMPP BindType and sets the matching
// inbound/outbound capability flags, then creates the initial session.
private void init(){
switch (bindAttributes.getBindType()){
case RECEIVER:
bindType=BindType.BIND_RX;
setInbound(true);
setOutbound(false);
break;
case TRANSMITTER:
bindType=BindType.BIND_TX;
setInbound(false);
setOutbound(true);
break;
case TRANSCEIVER:
bindType=BindType.BIND_TRX;
setInbound(true);
setOutbound(true);
break;
default:
// Unknown bind type is a configuration error: log it, then fail fast.
IllegalArgumentException illegalArgumentException=new IllegalArgumentException("Unknown BindType "+bindAttributes.getBindType());
Logger.getInstance().logError(illegalArgumentException.getMessage(), illegalArgumentException, getGatewayId());
throw illegalArgumentException;
}
// Translate SMSLib address enums to their jSMPP equivalents by numeric value.
bindTypeOfNumber=TypeOfNumber.valueOf(bindAttributes.getBindAddress().getTypeOfNumber().value());
bindNumberingPlanIndicator=NumberingPlanIndicator.valueOf(bindAttributes.getBindAddress().getNumberingPlanIndicator().value());
initSession();
}
// Creates a fresh session wired to this gateway's state and message listeners.
// Also called by the state listener to rebuild the session after it closes.
private void initSession(){
session = new SMPPSession();
session.addSessionStateListener(stateListener);
session.setMessageReceiverListener(messageReceiver);
}
@Override
public void startGateway() throws TimeoutException, GatewayException,
IOException, InterruptedException {
// Connect and bind only if not already bound; the enquire-link timer is
// applied first so it takes effect for this session.
if(!session.getSessionState().isBound()){
if(enquireLink>0){
session.setEnquireLinkTimer(enquireLink);
}
session.connectAndBind(host, port, new BindParameter(bindType, bindAttributes.getSystemId(), bindAttributes.getPassword(), bindAttributes.getSystemType(), bindTypeOfNumber, bindNumberingPlanIndicator, null));
}else{
Logger.getInstance().logWarn("SMPP session already bound.", null, getGatewayId());
// throw new GatewayException("Session already bound");
}
}
@Override
public void stopGateway() throws TimeoutException, GatewayException,
IOException, InterruptedException {
// The listener is removed before unbinding so the CLOSED transition does not
// trigger the automatic session rebuild in JSMPPSessionStateListener.
if(session.getSessionState().isBound()){
session.removeSessionStateListener(stateListener);
session.unbindAndClose();
//super.stopGateway();
}else{
Logger.getInstance().logWarn("SMPP session not bound.", null, getGatewayId());
//throw new GatewayException("Session not bound");
}
super.stopGateway();
}
// Receives PDUs from the SMSC: delivery receipts become StatusReportMessages,
// everything else becomes an InboundMessage; both are pushed to the notify queue.
class MessageReceiver implements MessageReceiverListener{
public void onAcceptDeliverSm(DeliverSm deliverSm)
throws ProcessRequestException {
if (MessageType.SMSC_DEL_RECEIPT.containedIn(deliverSm.getEsmClass())) {
try {
DeliveryReceipt delReceipt = deliverSm.getShortMessageAsDeliveryReceipt();
StatusReportMessage statusReportMessage=new StatusReportMessage(delReceipt.getId(),deliverSm.getDestAddress(), deliverSm.getSourceAddr(), delReceipt.getText(), delReceipt.getSubmitDate(), delReceipt.getDoneDate());
// Map SMPP final states onto SMSLib delivery statuses.
switch(delReceipt.getFinalStatus()){
case DELIVRD:
statusReportMessage.setStatus(DeliveryStatuses.DELIVERED);
break;
case REJECTD:
case EXPIRED:
case UNDELIV:
statusReportMessage.setStatus(DeliveryStatuses.ABORTED);
break;
default:
statusReportMessage.setStatus(DeliveryStatuses.UNKNOWN);
}
statusReportMessage.setGatewayId(getGatewayId());
Service.getInstance().getNotifyQueueManager().getNotifyQueue().add(new InboundMessageNotification(getMyself(), MessageTypes.STATUSREPORT, statusReportMessage));
} catch (InvalidDeliveryReceiptException e) {
Logger.getInstance().logError("Failed getting delivery receipt.", e, getGatewayId());
}
} else {
// NOTE(review): new String(byte[]) decodes with the platform default charset;
// for non-default data codings this may mangle the text — confirm expected charset.
InboundMessage msg = new InboundMessage(new java.util.Date(), deliverSm.getSourceAddr(), new String(deliverSm.getShortMessage()), 0, null);
msg.setGatewayId(JSMPPGateway.this.getGatewayId());
// Translate the PDU's data coding into an SMSLib encoding constant.
if(Alphabet.ALPHA_DEFAULT.value()==deliverSm.getDataCoding()){
msg.setEncoding(MessageEncodings.ENC7BIT);
}else if(Alphabet.ALPHA_8_BIT.value()==deliverSm.getDataCoding()){
msg.setEncoding(MessageEncodings.ENC8BIT);
}else if(Alphabet.ALPHA_UCS2.value()==deliverSm.getDataCoding()){
msg.setEncoding(MessageEncodings.ENCUCS2);
}else{
msg.setEncoding(MessageEncodings.ENCCUSTOM);
}
incInboundMessageCount();
Service.getInstance().getNotifyQueueManager().getNotifyQueue().add(new InboundMessageNotification(getMyself(), MessageTypes.INBOUND, msg));
}
}
public DataSmResult onAcceptDataSm(DataSm dataSm, Session source)
throws ProcessRequestException {
// ignored
return null;
}
public void onAcceptAlertNotification(
AlertNotification alertNotification) {
// ignored
}
}
// Keeps the gateway status in sync with the SMPP session: a successful bind
// starts the gateway; an unexpected close requests a restart with a new session.
class JSMPPSessionStateListener implements SessionStateListener {
public void onStateChange(SessionState newState, SessionState oldState,
Object source) {
if(newState.isBound()){
if(!getStatus().equals(GatewayStatuses.STARTED)){
try {
JSMPPGateway.super.startGateway();
} catch (TimeoutException e) {
Logger.getInstance().logError("Failed starting Gateway.", e, getGatewayId());
} catch (GatewayException e) {
Logger.getInstance().logError("Failed starting Gateway.", e, getGatewayId());
} catch (IOException e) {
Logger.getInstance().logError("Failed starting Gateway.", e, getGatewayId());
} catch (InterruptedException e) {
Logger.getInstance().logError("Failed starting Gateway.", e, getGatewayId());
}
}
}else if(newState.equals(SessionState.CLOSED)){
if(getStatus().equals(GatewayStatuses.STARTED)){
JSMPPGateway.super.setStatus(GatewayStatuses.RESTART);
initSession();
}
}
//System.out.println("State Changed: from "+oldState+" To "+newState);
}
}
/**
 * Submits one outbound message over the bound session. Returns false (after
 * marking the message FAILED) on malformed PDUs; maps transport errors to
 * TimeoutException/IOException for the caller.
 */
@Override
public boolean sendMessage(OutboundMessage msg) throws TimeoutException,
GatewayException, IOException, InterruptedException {
// Map the SMSLib message encoding onto a jSMPP alphabet (default: GSM 7-bit).
Alphabet encoding=Alphabet.ALPHA_DEFAULT;
switch (msg.getEncoding()){
case ENC8BIT:
encoding=Alphabet.ALPHA_8_BIT;
break;
case ENCUCS2:
encoding=Alphabet.ALPHA_UCS2;
break;
case ENCCUSTOM:
encoding=Alphabet.ALPHA_RESERVED;
break;
}
// Build the data-coding byte from message class + alphabet (flash = class 0).
GeneralDataCoding dataCoding;
switch(msg.getDCSMessageClass()){
case MSGCLASS_FLASH:
dataCoding=new GeneralDataCoding(false, true, MessageClass.CLASS0, encoding);
break;
case MSGCLASS_ME:
dataCoding=new GeneralDataCoding(false, true, MessageClass.CLASS1, encoding);
break;
case MSGCLASS_SIM:
dataCoding=new GeneralDataCoding(false, true, MessageClass.CLASS2, encoding);
break;
case MSGCLASS_TE:
dataCoding=new GeneralDataCoding(false, true, MessageClass.CLASS3, encoding);
break;
default:
dataCoding=new GeneralDataCoding();
dataCoding.setAlphabet(encoding);
}
try {
// Request an SMSC delivery receipt only when the message asks for a status report.
final RegisteredDelivery registeredDelivery = new RegisteredDelivery();
registeredDelivery.setSMSCDeliveryReceipt((msg.getStatusReport())?SMSCDeliveryReceipt.SUCCESS_FAILURE:SMSCDeliveryReceipt.DEFAULT);
String msgId=session.submitShortMessage(bindAttributes.getSystemType(),
TypeOfNumber.valueOf(sourceAddress.getTypeOfNumber().value()),
NumberingPlanIndicator.valueOf(sourceAddress.getNumberingPlanIndicator().value()),
(msg.getFrom()!=null)?msg.getFrom():getFrom(),
TypeOfNumber.valueOf(destinationAddress.getTypeOfNumber().value()),
NumberingPlanIndicator.valueOf(destinationAddress.getNumberingPlanIndicator().value()),
msg.getRecipient(),
new ESMClass(),
(byte)0,
(byte)msg.getPriority(),
null,
formatTimeFromHours(msg.getValidityPeriod()),
registeredDelivery,
(byte)0,
dataCoding,
(byte)0,
msg.getText().getBytes());
// Record the SMSC-assigned id so later delivery receipts can be correlated.
msg.setRefNo(msgId);
msg.setDispatchDate(new Date());
msg.setGatewayId(getGatewayId());
msg.setMessageStatus(MessageStatuses.SENT);
incOutboundMessageCount();
}catch (PDUException e) {
// Malformed PDU: mark the message failed and report non-fatally.
msg.setGatewayId(getGatewayId());
msg.setMessageStatus(MessageStatuses.FAILED);
msg.setFailureCause(FailureCauses.BAD_FORMAT);
Logger.getInstance().logError("Message Format not accepted.", e, getGatewayId());
return false;
} catch (ResponseTimeoutException e) {
Logger.getInstance().logError("Message could not be sent.", e, getGatewayId());
throw new TimeoutException(e.getMessage());
} catch (InvalidResponseException e) {
Logger.getInstance().logError("Message could not be sent.", e, getGatewayId());
throw new IOException("InvalidResponseException: ", e);
} catch (NegativeResponseException e) {
Logger.getInstance().logError("Message could not be sent.", e, getGatewayId());
throw new IOException("NegativeResponseException: ", e);
}
return true;
}
/**
 * Converts a validity period in hours into an SMPP time string; returns null
 * (no validity period) for negative input. The trailing "R" marks the value as
 * a relative period (YYMMDDhh followed by zeroed minutes/seconds/tenths/offset)
 * — NOTE(review): presumed to follow the SMPP 3.4 time format; verify against
 * the spec if validity handling ever misbehaves.
 */
private String formatTimeFromHours(int timeInHours){
if(timeInHours<0){
return null;
}
// Use Calendar arithmetic to split the hour count into years/months/days/hours.
Calendar cDate=Calendar.getInstance();
cDate.clear();
cDate.set(Calendar.YEAR, 0);
cDate.add(Calendar.HOUR, timeInHours);
int years=cDate.get(Calendar.YEAR)-cDate.getMinimum(Calendar.YEAR);
int months=cDate.get(Calendar.MONTH);
int days=cDate.get(Calendar.DAY_OF_MONTH)-1;
int hours=cDate.get(Calendar.HOUR_OF_DAY);
// Zero-pad each component to two digits.
String yearsString=(years<10)?"0"+years:years+"";
String monthsString=(months<10)?"0"+months:months+"";
String daysString=(days<10)?"0"+days:days+"";
String hoursString=(hours<10)?"0"+hours:hours+"";
return yearsString+monthsString+daysString+hoursString+"0000000R";
}
@Override
public void setEnquireLink(int enquireLink) {
// Propagate the new enquire-link interval to the live session, if any.
super.setEnquireLink(enquireLink);
if(session!=null){
session.setEnquireLinkTimer(enquireLink);
}
}
}
| |
package com.redhat.ceylon.compiler.typechecker.analyzer;
import static com.redhat.ceylon.compiler.typechecker.analyzer.Util.getParameterTypeErrorNode;
import static com.redhat.ceylon.compiler.typechecker.analyzer.Util.getTypeErrorNode;
import static com.redhat.ceylon.model.typechecker.model.Util.isCompletelyVisible;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.redhat.ceylon.compiler.typechecker.tree.Node;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.ParameterList;
import com.redhat.ceylon.compiler.typechecker.tree.Visitor;
import com.redhat.ceylon.model.typechecker.model.Declaration;
import com.redhat.ceylon.model.typechecker.model.Element;
import com.redhat.ceylon.model.typechecker.model.IntersectionType;
import com.redhat.ceylon.model.typechecker.model.Method;
import com.redhat.ceylon.model.typechecker.model.Module;
import com.redhat.ceylon.model.typechecker.model.ModuleImport;
import com.redhat.ceylon.model.typechecker.model.Package;
import com.redhat.ceylon.model.typechecker.model.Parameter;
import com.redhat.ceylon.model.typechecker.model.ProducedType;
import com.redhat.ceylon.model.typechecker.model.TypeAlias;
import com.redhat.ceylon.model.typechecker.model.TypeDeclaration;
import com.redhat.ceylon.model.typechecker.model.TypeParameter;
import com.redhat.ceylon.model.typechecker.model.TypedDeclaration;
import com.redhat.ceylon.model.typechecker.model.UnionType;
public class VisibilityVisitor extends Visitor {
@Override public void visit(Tree.TypedDeclaration that) {
    // Validate this declaration's own type visibility before descending.
    TypedDeclaration model = that.getDeclarationModel();
    checkVisibility(that, model);
    super.visit(that);
}
@Override public void visit(Tree.TypedArgument that) {
    // Validate the argument's type visibility before descending.
    TypedDeclaration model = that.getDeclarationModel();
    checkVisibility(that, model);
    super.visit(that);
}
@Override
public void visit(Tree.AnyClass that) {
    // Descend first, then validate the class's initializer parameter types.
    super.visit(that);
    Declaration model = that.getDeclarationModel();
    checkParameterVisibility(model, that.getParameterList());
}
@Override
public void visit(Tree.AnyMethod that) {
    // Descend first, then validate every parameter list of the method.
    super.visit(that);
    Method model = that.getDeclarationModel();
    checkParameterVisibility(model, that.getParameterLists());
}
@Override
public void visit(Tree.Constructor that) {
    // Descend first, then validate the constructor's parameter types.
    super.visit(that);
    Declaration model = that.getDeclarationModel();
    checkParameterVisibility(model, that.getParameterList());
}
@Override
public void visit(Tree.MethodArgument that) {
    // Descend first, then validate the method argument's parameter lists.
    super.visit(that);
    Method model = that.getDeclarationModel();
    checkParameterVisibility(model, that.getParameterLists());
}
@Override
public void visit(Tree.FunctionArgument that) {
    // Descend first; an anonymous function has both a return type and
    // parameter types to validate.
    super.visit(that);
    Method model = that.getDeclarationModel();
    checkVisibility(that, model);
    checkParameterVisibility(model, that.getParameterLists());
}
// Validates each parameter list of a method in declaration order.
private void checkParameterVisibility(Method m,
        List<ParameterList> parameterLists) {
    for (int i = 0; i < parameterLists.size(); i++) {
        checkParameterVisibility(m, parameterLists.get(i));
    }
}
// Validates every parameter of one (possibly absent) parameter list, skipping
// null parameter nodes and parameters without a model.
private void checkParameterVisibility(Declaration m,
        Tree.ParameterList list) {
    if (list == null) {
        return;
    }
    for (Tree.Parameter tp : list.getParameters()) {
        if (tp == null) {
            continue;
        }
        Parameter p = tp.getParameterModel();
        if (p.getModel() != null) {
            checkParameterVisibility(tp, m, p);
        }
    }
}
@Override public void visit(Tree.TypeDeclaration that) {
    // Validate supertype visibility for the declared type, then descend.
    TypeDeclaration model = that.getDeclarationModel();
    validateSupertypes(that, model);
    super.visit(that);
}
@Override public void visit(Tree.ObjectDefinition that) {
    // An object definition's anonymous class also needs supertype validation.
    TypeDeclaration anon = that.getAnonymousClass();
    validateSupertypes(that, anon);
    super.visit(that);
}
@Override public void visit(Tree.ObjectArgument that) {
    // An object argument's anonymous class also needs supertype validation.
    TypeDeclaration anon = that.getAnonymousClass();
    validateSupertypes(that, anon);
    super.visit(that);
}
@Override public void visit(Tree.ObjectExpression that) {
    // An object expression's anonymous class also needs supertype validation.
    TypeDeclaration anon = that.getAnonymousClass();
    validateSupertypes(that, anon);
    super.visit(that);
}
/**
 * Checks that the aliased type (for a type alias) or every supertype (for any
 * other type) of td is at least as visible as td itself: error 713 when an
 * unshared declaration is involved, error 714 when a declaration comes from an
 * imported module that is not re-exported.
 */
private void validateSupertypes(Node that, TypeDeclaration td) {
if (td instanceof TypeAlias) {
ProducedType at = td.getExtendedType();
if (at!=null) {
if (!isCompletelyVisible(td, at)) {
that.addError("aliased type is not visible everywhere type alias '" +
td.getName() + "' is visible: '" +
at.getProducedTypeName(that.getUnit()) +
"' involves an unshared type declaration",
713);
}
if (!checkModuleVisibility(td, at)) {
that.addError("aliased type of type alias '" + td.getName() +
"' that is visible outside this module comes from an imported module that is not re-exported: '" +
at.getProducedTypeName(that.getUnit()) +
"' involves an unexported type declaration",
714);
}
}
}
else {
List<ProducedType> supertypes = td.getType().getSupertypes();
// Inconsistent types already carry errors; re-checking would add noise.
if (!td.isInconsistentType()) {
for (ProducedType st: supertypes) {
// don't do this check for ObjectArguments
if (that instanceof Tree.Declaration) {
if (!isCompletelyVisible(td, st)) {
that.addError("supertype is not visible everywhere type '" +
td.getName() + "' is visible: '" +
st.getProducedTypeName(that.getUnit()) +
"' involves an unshared type declaration",
713);
}
if (!checkModuleVisibility(td, st)) {
that.addError("supertype of type '" + td.getName() +
"' that is visible outside this module comes from an imported module that is not re-exported: '" +
st.getProducedTypeName(that.getUnit()) +
"' involves an unexported type declaration",
714);
}
}
}
}
}
// validateMemberRefinement(td, that, unit);
}
/**
 * True when pt is usable from other modules wherever member is visible.
 * Members not exported outside their module (or with no resolvable module)
 * trivially pass.
 */
private static boolean checkModuleVisibility(Declaration member, ProducedType pt) {
    if (!inExportedScope(member)) {
        return true;
    }
    Module declarationModule = getModule(member);
    if (declarationModule == null) {
        return true;
    }
    return isCompletelyVisibleFromOtherModules(member, pt, declarationModule);
}
/** True when decl is visible outside its module: no restricting visible scope, and a shared package. */
private static boolean inExportedScope(Declaration decl) {
    // A non-null visible scope confines the declaration within the module.
    if (decl.getVisibleScope() != null) {
        return false;
    }
    // An unshared (or missing) package also keeps it module-internal.
    Package p = decl.getUnit().getPackage();
    if (p == null) {
        return false;
    }
    return p.isShared();
}
/**
 * Recursively checks that every type declaration reachable from pt (union
 * cases, intersection members, the principal declaration, and all type
 * arguments) is visible to modules other than thisModule. Type arguments are
 * substituted into case/satisfied types before recursing.
 */
static boolean isCompletelyVisibleFromOtherModules(Declaration member,
ProducedType pt, Module thisModule) {
if (pt.getDeclaration() instanceof UnionType) {
// Every case of the union must itself be completely visible.
for (ProducedType ct: pt.getDeclaration().getCaseTypes()) {
if (!isCompletelyVisibleFromOtherModules(member,
ct.substitute(pt.getTypeArguments()), thisModule)) {
return false;
}
}
return true;
}
else if (pt.getDeclaration() instanceof IntersectionType) {
// Every member of the intersection must itself be completely visible.
for (ProducedType ct: pt.getDeclaration().getSatisfiedTypes()) {
if (!isCompletelyVisibleFromOtherModules(member,
ct.substitute(pt.getTypeArguments()), thisModule)) {
return false;
}
}
return true;
}
else {
// Plain type: check its declaration, then each type argument.
if (!isVisibleFromOtherModules(member, thisModule,
pt.getDeclaration())) {
return false;
}
for (ProducedType at: pt.getTypeArgumentList()) {
if ( at!=null &&
!isCompletelyVisibleFromOtherModules(member,at,thisModule) ) {
return false;
}
}
return true;
}
}
/** Human-readable name for error messages: quoted name, or "anonymous function". */
private static String getName(Declaration td) {
    return td.isAnonymous() ? "anonymous function" : "'" + td.getName() + "'";
}
/**
 * Checks that the declared type of td is as visible as td itself: error 711
 * when an unshared declaration is involved, error 712 when the type comes
 * from an imported module that is not re-exported. Errors are attached to the
 * type node of the declaration.
 */
private static void checkVisibility(Node that,
TypedDeclaration td) {
ProducedType type = td.getType();
if (type!=null) {
Node typeNode = getTypeErrorNode(that);
if (!isCompletelyVisible(td, type)) {
typeNode.addError("type of declaration " + getName(td) +
" is not visible everywhere declaration is visible: '" +
type.getProducedTypeName(that.getUnit()) +
"' involves an unshared type declaration", 711);
}
if (!checkModuleVisibility(td, type)) {
typeNode.addError("type of declaration " + getName(td) +
" that is visible outside this module comes from an imported module that is not re-exported: '" +
type.getProducedTypeName(that.getUnit()) +
"' involves an unexported type declaration", 712);
}
}
}
private static void checkParameterVisibility(Tree.Parameter tp,
Declaration td, Parameter p) {
ProducedType pt = p.getType();
if (pt!=null) {
if (!isCompletelyVisible(td, pt)) {
getParameterTypeErrorNode(tp)
.addError("type of parameter '" + p.getName() + "' of " + getName(td) +
" is not visible everywhere declaration is visible: '" +
pt.getProducedTypeName(tp.getUnit()) +
"' involves an unshared type declaration", 710);
}
if (!checkModuleVisibility(td, pt)) {
getParameterTypeErrorNode(tp)
.addError("type of parameter '" + p.getName() + "' of " + getName(td) +
" that is visible outside this module comes from an imported module that is not re-exported: '" +
pt.getProducedTypeName(tp.getUnit()) +
"' involves an unexported type declaration", 714);
}
}
}
private static boolean isVisibleFromOtherModules(Declaration member,
Module thisModule, TypeDeclaration type) {
// type parameters are OK
if (type instanceof TypeParameter) {
return true;
}
Module typeModule = getModule(type);
if (typeModule!=null && thisModule!=null &&
thisModule!=typeModule) {
// find the module import, but only in exported imports, otherwise it's an error anyways
// language module stuff is automagically exported
if (typeModule == thisModule.getLanguageModule()) {
return true;
}
// try to find a direct import first
for (ModuleImport imp: thisModule.getImports()) {
if (imp.isExport() &&
imp.getModule() == typeModule) {
// found it
return true;
}
}
// then try the more expensive implicit imports
Set<Module> visited = new HashSet<Module>();
visited.add(thisModule);
for (ModuleImport imp : thisModule.getImports()) {
// now try implicit dependencies
if (imp.isExport() &&
includedImplicitly(imp.getModule(),
typeModule, visited)) {
// found it
return true;
}
}
// couldn't find it
return false;
}
// no module or it does not belong to a module? more likely an error was already reported
return true;
}
private static boolean includedImplicitly(Module importedModule,
Module targetModule, Set<Module> visited) {
// don't visit them twice
if (visited.add(importedModule)) {
for (ModuleImport imp: importedModule.getImports()){
// only consider modules it exported back to us
if (imp.isExport()
&& (imp.getModule() == targetModule
|| includedImplicitly(imp.getModule(),
targetModule, visited))) {
return true;
}
}
}
return false;
}
private static Module getModule(Element element){
Package typePackage = element.getUnit().getPackage();
return typePackage != null ? typePackage.getModule() : null;
}
}
| |
/* Copyright (c) 2017, University of Oslo, Norway
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of the University of Oslo nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package vtk.web.api;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.web.HttpRequestHandler;
import vtk.repository.AuthorizationException;
import vtk.repository.ContentInputSources;
import vtk.repository.Namespace;
import vtk.repository.Path;
import vtk.repository.Property;
import vtk.repository.RepositoryAction;
import vtk.repository.Resource;
import vtk.repository.ResourceNotFoundException;
import vtk.repository.TypeInfo;
import vtk.repository.resourcetype.PropertyTypeDefinition;
import vtk.resourcemanagement.StructuredResource;
import vtk.resourcemanagement.StructuredResourceDescription;
import vtk.resourcemanagement.StructuredResourceManager;
import vtk.resourcemanagement.ValidationResult;
import vtk.util.Result;
import vtk.util.repository.ResourceMappers;
import vtk.util.repository.ResourceMappers.PropertySetMapper;
import vtk.util.text.Json;
import vtk.util.text.JsonStreamer;
import vtk.web.RequestContext;
/**
 * HTTP request handler exposing the properties of a single repository
 * resource as a small JSON API:
 * <ul>
 *   <li>{@code GET} returns the resource's properties as a JSON document.</li>
 *   <li>{@code PATCH} with content type {@code application/merge-patch+json}
 *       updates or deletes individual properties.</li>
 * </ul>
 * Properties in the structured resource name space are derived from the
 * resource's JSON content and are therefore updated by rewriting that
 * content; all other properties are stored directly on the resource.
 */
public class PropertiesApiHandler implements HttpRequestHandler {
    // Resolves structured resource descriptions when patching
    // content-derived properties. Assigned once; never reassigned.
    private final StructuredResourceManager structuredResourceManager;
    // Locale used when mapping property values to JSON output.
    private static final Locale LOCALE = Locale.getDefault();

    public PropertiesApiHandler(StructuredResourceManager structuredResourceManager) {
        this.structuredResourceManager = structuredResourceManager;
    }

    /**
     * Dispatches GET/PATCH requests for the resource identified by the
     * current request context and writes the resulting API response.
     * Known failures are mapped to 400/403; anything else becomes a 500.
     */
    @Override
    public void handleRequest(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        RequestContext requestContext = RequestContext.getRequestContext(request);
        Result<Optional<Resource>> res = retrieve(requestContext, requestContext.getResourceURI());
        Result<ApiResponseBuilder> bldr = res.flatMap(opt -> {
            if (!opt.isPresent()) {
                return Result.success(ApiResponseBuilder.notFound(
                        "404 Not Found: " + requestContext.getResourceURI()));
            }
            else {
                Resource resource = opt.get();
                switch (request.getMethod()) {
                case "GET":
                    return getProperties(request, resource);
                case "PATCH":
                    return updateProperties(request, resource);
                default:
                    return unknownMethod(request, resource);
                }
            }
        });
        // Translate well-known exception types to client-facing responses.
        bldr = bldr.recover(ex -> {
            if (ex instanceof InvalidRequestException) {
                return ApiResponseBuilder.badRequest(ex.getMessage());
            }
            if (ex instanceof AuthorizationException) {
                return ApiResponseBuilder.forbidden(ex.getMessage());
            }
            return ApiResponseBuilder.internalServerError(
                    "An unexpected error occurred: " + ex.getMessage());
        });
        bldr.forEach(resp -> {
            // Advertise the accepted PATCH format on every response
            resp.header("Accept-Patch", "application/merge-patch+json")
                .writeTo(response);
        });
    }

    /**
     * Builds the response for unsupported request methods.
     * NOTE(review): responds 400; 405 Method Not Allowed with an
     * {@code Allow} header would be more precise HTTP — confirm with API
     * consumers before changing.
     */
    private Result<ApiResponseBuilder> unknownMethod(HttpServletRequest request,
            Resource resource) {
        return Result.success(new ApiResponseBuilder(HttpServletResponse.SC_BAD_REQUEST)
                .header("Content-Type", "text/plain;charset=utf-8")
                .message("Request method " + request.getMethod()
                        + " not supported: on " + resource.getURI()));
    }

    /** A single pending change to one property of a resource. */
    private static abstract class PropertyOperation {
        public final PropertyTypeDefinition propDef;
        public PropertyOperation(PropertyTypeDefinition propDef) {
            this.propDef = propDef;
        }
    }

    /** Deletion of a property. */
    private static class Delete extends PropertyOperation {
        public Delete(PropertyTypeDefinition propDef) {
            super(propDef);
        }
        @Override
        public String toString() {
            return getClass().getSimpleName() + "(" + propDef.getName() + ")";
        }
    }

    /** Creation or replacement of a property with a new value. */
    private static class Update extends PropertyOperation {
        public final Object value;
        public Update(PropertyTypeDefinition propDef, Object value) {
            super(propDef);
            this.value = value;
        }
        @Override
        public String toString() {
            return getClass().getSimpleName() + "("
                    + propDef.getName() + ", " + value + ")";
        }
    }

    /**
     * Collects property operations, keeping content-derived (structured)
     * properties separate from regular repository properties, and applies
     * them to a resource.
     */
    private static class PropertyOperations {
        private final List<PropertyOperation> regularProps = new ArrayList<>();
        private final List<PropertyOperation> structuredProps = new ArrayList<>();

        /** Routes the operation to the proper bucket by its name space. */
        public PropertyOperations add(PropertyOperation operation) {
            List<PropertyOperation> list = operation.propDef.getNamespace() ==
                    Namespace.STRUCTURED_RESOURCE_NAMESPACE ? structuredProps : regularProps;
            list.add(operation);
            return this;
        }

        /** All operations, regular first, then structured. */
        public Stream<PropertyOperation> stream() {
            return Stream.concat(regularProps.stream(), structuredProps.stream());
        }

        /**
         * Applies all collected operations: structured properties are
         * folded into the resource's JSON content (validated, then stored),
         * regular properties are set or removed directly and the resource
         * is stored afterwards. Returns the stored resource on success.
         */
        public Result<Resource> apply(Resource resource, RequestContext requestContext,
                StructuredResourceManager manager) {
            Result<Resource> result = Result.success(resource);
            if (!structuredProps.isEmpty()) {
                // Properties in the structured resource name space
                // are always derived from the JSON content. The procedure
                // to update such properties is as follows:
                //
                // 1. fetch JSON content of resource from repository
                // 2. attach input fields to JSON (or remove)
                // 3. store content
                Result<StructuredResource> structured = structuredResource(
                        requestContext, manager, resource);
                structured.map(res -> {
                    for (PropertyOperation op: structuredProps) {
                        if (op instanceof Delete) {
                            res.removeProperty(op.propDef.getName());
                        }
                        else if (op instanceof Update) {
                            Update update = (Update) op;
                            res.addProperty(update.propDef.getName(), update.value);
                        }
                    }
                    return res;
                });
                // Validate the resulting JSON before storing; the first
                // validation error becomes the failure message.
                structured = structured.flatMap(r -> {
                    ValidationResult validation = r.validate(r.toJSON());
                    if (validation.isValid()) return Result.success(r);
                    return Result.failure(new RuntimeException(validation.getErrors().get(0).getMessage()));
                });
                result = structured.flatMap(res -> storeContent(requestContext, res));
            }
            result = result.flatMap(res -> {
                return Result.attempt(() -> {
                    // Regular properties: remove, then re-add on update
                    regularProps.forEach(op -> {
                        res.removeProperty(op.propDef);
                        if (op instanceof Update) {
                            res.addProperty(createProperty(((Update) op).value, op.propDef));
                        }
                    });
                    try {
                        return requestContext.getRepository()
                                .store(requestContext.getSecurityToken(), null, res);
                    }
                    catch (IOException e) {
                        throw new UncheckedIOException(e);
                    }
                });
            });
            return result;
        }
    }

    /**
     * Builds the GET response: streams the resource's properties as a
     * compact JSON document (no URIs, types or ACLs).
     */
    private Result<ApiResponseBuilder> getProperties(HttpServletRequest request,
            Resource resource) {
        PropertySetMapper<Consumer<JsonStreamer>> mapper =
                ResourceMappers.jsonStreamer(LOCALE)
                    .uris(false).types(false).acls(false).compact(true).build();
        ApiResponseBuilder builder = new ApiResponseBuilder(HttpServletResponse.SC_OK)
                .header("Content-Type", "application/json")
                .handler(response -> {
                    try (Writer writer = response.getWriter()) {
                        JsonStreamer streamer = new JsonStreamer(writer, 2, false);
                        mapper.apply(resource).accept(streamer);
                    }
                    catch (IOException e) {
                        throw new UncheckedIOException(e);
                    }
                });
        return Result.success(builder);
    }

    /**
     * Loads the resource's JSON content and builds a
     * {@link StructuredResource} from it; fails if the resource type has no
     * structured resource description.
     */
    private static Result<StructuredResource> structuredResource(RequestContext requestContext,
            StructuredResourceManager manager, Resource resource) {
        Result<StructuredResourceDescription> desc = Result.attempt(() -> Objects.requireNonNull(
                manager.get(resource.getResourceType()), "Resource " + resource.getURI()
                + " is not a structured resource"));
        return desc.flatMap(d -> {
            return getContent(requestContext, resource)
                    .flatMap(is -> Result.attempt(() -> d.buildResource(is)));
        });
    }

    /** Serializes the structured resource to JSON and stores it as content. */
    private static Result<Resource> storeContent(RequestContext requestContext,
            StructuredResource structuredResource) {
        return Result.attempt(() -> {
            try {
                byte[] buffer = JsonStreamer
                        .toJson(structuredResource.toJSON(), 3, false)
                        .getBytes(StandardCharsets.UTF_8);
                return requestContext.getRepository()
                        .storeContent(requestContext.getSecurityToken(),
                                null, requestContext.getResourceURI(),
                                ContentInputSources.fromBytes(buffer));
            }
            catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
    }

    /** Opens an input stream on the resource's content. */
    private static Result<InputStream> getContent(RequestContext requestContext, Resource resource) {
        return Result.attempt(() -> {
            try {
                return requestContext.getRepository()
                        .getInputStream(requestContext.getSecurityToken(), resource.getURI(), false);
            }
            catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
    }

    /**
     * Retrieves a resource by URI. Absence (404) is modeled as an empty
     * Optional, authorization failures propagate, everything else fails
     * the Result.
     */
    private Result<Optional<Resource>> retrieve(RequestContext requestContext, Path uri) {
        return Result.attempt(() -> {
            try {
                return Optional.of(requestContext.getRepository()
                        .retrieve(requestContext.getSecurityToken(), uri, false));
            }
            catch (ResourceNotFoundException e) {
                return Optional.empty();
            }
            catch (AuthorizationException e) {
                throw e;
            }
            catch (Exception e) {
                throw new RuntimeException(e);
            }
        });
    }

    /**
     * Handles PATCH: parses the merge-patch body, converts it to property
     * operations, applies them and reports the patched properties (or a
     * 400/403 on failure).
     */
    private Result<ApiResponseBuilder> updateProperties(HttpServletRequest request,
            Resource resource) {
        RequestContext requestContext = RequestContext
                .getRequestContext(request);
        if (!"application/merge-patch+json".equals(request.getContentType())) {
            return Result.success(ApiResponseBuilder.badRequest(
                    "Content-Type 'application/merge-patch+json' is required for PATCH method"));
        }
        Result<InputStream> stream = Result.attempt(() -> {
            try {
                return request.getInputStream();
            }
            catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        });
        Result<Json.MapContainer> json = stream.flatMap(is -> parseJson(is));
        Result<PropertyOperations> operations = json
                .flatMap(body -> propertyOperations(body, resource, requestContext));
        Result<ApiResponseBuilder> result = operations.flatMap(ops -> {
            Result<Resource> updated = ops.apply(resource, requestContext, structuredResourceManager);
            return updated.map(res -> {
                return ops.stream()
                        .map(Object::toString)
                        .collect(Collectors.joining("\n", "Properties patched:\n", ""));
            })
            .map(msg -> ApiResponseBuilder.ok(msg));
        })
        .recover(err -> {
            if (err instanceof AuthorizationException) {
                return ApiResponseBuilder.forbidden(err.getMessage());
            }
            return ApiResponseBuilder.badRequest(err.getMessage());
        });
        return result;
    }

    /**
     * Translates the JSON request body into property operations. Keys are
     * optionally name-space-prefixed ('prefix:name'); a null value means
     * delete. Fails on missing 'properties' entry, uneditable or mandatory
     * properties.
     */
    private Result<PropertyOperations> propertyOperations(Json.MapContainer body,
            Resource resource, RequestContext requestContext) {
        Result<PropertyOperations> result = Result.attempt(() -> {
            Map<String, Object> propsMap = body.optObjectValue("properties").orElse(null);
            if (propsMap == null) throw new IllegalArgumentException(
                    "Json body must contain a 'properties' entry");
            TypeInfo typeInfo = requestContext.getRepository().getTypeInfo(resource);
            PropertyOperations operations = new PropertyOperations();
            for (String key: propsMap.keySet()) {
                Namespace ns = Namespace.DEFAULT_NAMESPACE;
                String prefix = null;
                String name = key;
                int colonIdx = key.indexOf(':');
                if (colonIdx != -1) {
                    prefix = key.substring(0, colonIdx);
                    name = key.substring(colonIdx + 1);
                }
                if (prefix != null) {
                    ns = typeInfo.getNamespaceByPrefix(prefix);
                }
                PropertyTypeDefinition propDef = typeInfo
                        .getPropertyTypeDefinition(ns, name);
                if (propDef.getNamespace() !=
                        Namespace.STRUCTURED_RESOURCE_NAMESPACE) {
                    // Structured properties are rewritten via content and
                    // bypass the protection-level check
                    if (propDef.getProtectionLevel() == RepositoryAction.UNEDITABLE_ACTION) {
                        throw new IllegalArgumentException("Property '" + key + "' is not editable");
                    }
                }
                Object value = propsMap.get(key);
                if (value == null) {
                    // merge-patch semantics: null deletes the property
                    if (propDef.isMandatory()) {
                        throw new IllegalArgumentException("Property '"
                                + key + "' cannot be deleted");
                    }
                    operations.add(new Delete(propDef));
                }
                else {
                    operations.add(new Update(propDef, value));
                }
            }
            return operations;
        });
        return result;
    }

    /**
     * Builds a repository property from a JSON input value according to the
     * property type. Timestamps/dates are parsed as ISO-8601; JSON
     * properties require an object value; multi-value and binary
     * properties are rejected.
     */
    private static Property createProperty(Object input, PropertyTypeDefinition propDef) {
        if (propDef.isMultiple()) {
            throw new IllegalArgumentException("Multi-value properties are not supported");
        }
        switch (propDef.getType()) {
        case STRING:
        case HTML:
        case IMAGE_REF:
        case INT:
        case BOOLEAN:
        case LONG:
        case PRINCIPAL:
            return propDef.createProperty(input.toString());
        case TIMESTAMP:
            Property prop = propDef.createProperty();
            prop.setValue(propDef.getValueFormatter()
                    .stringToValue(input.toString(), "iso-8601", null));
            return prop;
        case DATE:
            prop = propDef.createProperty();
            prop.setValue(propDef.getValueFormatter()
                    .stringToValue(input.toString(), "iso-8601-short", null));
            return prop;
        case JSON:
            if (input instanceof Json.MapContainer) {
                prop = propDef.createProperty();
                prop.setJSONValue((Json.MapContainer) input);
                return prop;
            }
            throw new IllegalArgumentException(
                    "Only object types are supported for JSON properties");
        case BINARY:
            throw new IllegalArgumentException("Binary properties not supported");
        default:
            throw new IllegalArgumentException("Unknown property type " + propDef.getType());
        }
    }

    /** Parses the input stream as a JSON object. */
    private static Result<Json.MapContainer> parseJson(InputStream input) {
        return Result.attempt(() -> {
            try {
                return Json.parseToContainer(input).asObject();
            }
            catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        });
    }
}
| |
/*
* Lantern
*
* Copyright (c) LanternPowered <https://www.lanternpowered.org>
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* This work is licensed under the terms of the MIT License (MIT). For
* a copy, see 'LICENSE.txt' or <https://opensource.org/licenses/MIT>.
*/
/*
* Copyright (c) 2011-2014 Glowstone - Tad Hardesty
* Copyright (c) 2010-2011 Lightstone - Graham Edgecombe
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.lanternpowered.server.data.io.anvil;
import static org.lanternpowered.server.data.io.anvil.RegionFileCache.REGION_AREA;
import static org.lanternpowered.server.data.io.anvil.RegionFileCache.REGION_MASK;
import static org.lanternpowered.server.data.io.anvil.RegionFileCache.REGION_SIZE;
import static org.lanternpowered.server.util.UncheckedThrowables.throwUnchecked;
import static org.lanternpowered.server.world.chunk.LanternChunk.fixEntityYSection;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import it.unimi.dsi.fastutil.shorts.Short2ObjectMap;
import it.unimi.dsi.fastutil.shorts.Short2ObjectOpenHashMap;
import org.lanternpowered.server.block.entity.LanternBlockEntity;
import org.lanternpowered.server.data.DataQueries;
import org.lanternpowered.server.data.io.ChunkIOService;
import org.lanternpowered.server.data.io.store.ObjectSerializer;
import org.lanternpowered.server.data.io.store.ObjectSerializerRegistry;
import org.lanternpowered.server.data.persistence.nbt.NbtDataContainerInputStream;
import org.lanternpowered.server.data.persistence.nbt.NbtDataContainerOutputStream;
import org.lanternpowered.server.entity.LanternEntity;
import org.lanternpowered.server.game.DirectoryKeys;
import org.lanternpowered.server.util.UncheckedThrowables;
import org.lanternpowered.server.util.collect.array.NibbleArray;
import org.lanternpowered.server.world.chunk.ChunkBlockStateArray;
import org.lanternpowered.server.world.chunk.LanternChunk;
import org.lanternpowered.server.world.chunk.LanternChunk.ChunkSection;
import org.lanternpowered.server.world.chunk.LanternChunk.ChunkSectionSnapshot;
import org.slf4j.Logger;
import org.spongepowered.api.data.persistence.DataContainer;
import org.spongepowered.api.data.persistence.DataQuery;
import org.spongepowered.api.data.persistence.DataView;
import org.spongepowered.api.data.persistence.InvalidDataException;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.world.ServerLocation;
import org.spongepowered.api.world.server.ServerWorld;
import org.spongepowered.api.world.storage.ChunkDataStream;
import org.spongepowered.api.world.storage.WorldProperties;
import org.spongepowered.math.vector.Vector3i;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Matcher;
import org.checkerframework.checker.nullness.qual.Nullable;
@Singleton
public class AnvilChunkIOService implements ChunkIOService {
    // NBT keys for the Anvil chunk format ("Level" compound and children).
    // The trailing comment on each line notes the stored NBT value type.
    private static final DataQuery DATA_VERSION = DataQuery.of("DataVersion"); // int
    private static final DataQuery LEVEL = DataQuery.of("Level"); // compound
    private static final DataQuery SECTIONS = DataQuery.of("Sections"); // array
    private static final DataQuery X = DataQuery.of("xPos"); // int
    private static final DataQuery Z = DataQuery.of("zPos"); // int
    private static final DataQuery Y = DataQuery.of("Y"); // byte
    private static final DataQuery BLOCK_LIGHT = DataQuery.of("BlockLight"); // (nibble) byte array
    private static final DataQuery SKY_LIGHT = DataQuery.of("SkyLight"); // (nibble) byte array
    private static final DataQuery TERRAIN_POPULATED = DataQuery.of("TerrainPopulated"); // (boolean) byte
    private static final DataQuery LIGHT_POPULATED = DataQuery.of("LightPopulated"); // (boolean) byte
    private static final DataQuery BIOMES = DataQuery.of("Biomes"); // int array
    private static final DataQuery HEIGHT_MAP = DataQuery.of("HeightMap"); // int array
    private static final DataQuery LAST_UPDATE = DataQuery.of("LastUpdate"); // long
    // Sponge-specific block creator/notifier tracking data keys
    private static final DataQuery TRACKER_DATA_TABLE = DataQuery.of("BlockPosTable");
    private static final DataQuery TRACKER_BLOCK_POS = DataQuery.of("pos");
    private static final DataQuery TRACKER_ENTRY_CREATOR = DataQuery.of("owner");
    private static final DataQuery TRACKER_ENTRY_NOTIFIER = DataQuery.of("notifier");
    // Block entity ("tile entity") keys; coordinates are absolute world positions
    private static final DataQuery BLOCK_ENTITY_X = DataQuery.of("x");
    private static final DataQuery BLOCK_ENTITY_Y = DataQuery.of("y");
    private static final DataQuery BLOCK_ENTITY_Z = DataQuery.of("z");
    private static final DataQuery BLOCK_ENTITIES = DataQuery.of("TileEntities");
    private static final DataQuery INHABITED_TIME = DataQuery.of("InhabitedTime");
    private static final DataQuery ENTITIES = DataQuery.of("Entities");
    // Collaborators injected through the constructor
    private final ServerWorld world;
    private final Logger logger;
    private final LanternGame game;
    private final RegionFileCache cache;
    private final Path baseDir;
    // TODO: Consider the session.lock file
@Inject
public AnvilChunkIOService(@Named(DirectoryKeys.WORLD) Path baseDir, ServerWorld world, Logger logger, LanternGame game) {
this.cache = new RegionFileCache(baseDir);
this.baseDir = baseDir;
this.logger = logger;
this.world = world;
this.game = game;
}
@Override
public boolean exists(int x, int z) throws IOException {
final RegionFile region = this.cache.getRegionFileByChunk(x, z);
final int regionX = x & REGION_MASK;
final int regionZ = z & REGION_MASK;
return region.hasChunk(regionX, regionZ);
}
    /**
     * Loads chunk data from the owning region file into the given chunk.
     * Deserializes sections (blocks, block entities), tracker data, biomes,
     * height map, population/light flags and entities from the "Level"
     * compound.
     *
     * @param chunk the chunk to populate; its x/z select the region file
     * @return {@code true} if data was found and loaded, {@code false} if
     *         the region file has no data for this chunk
     * @throws IOException if the region file cannot be read
     */
    @Override
    public boolean read(LanternChunk chunk) throws IOException {
        final int x = chunk.getX();
        final int z = chunk.getZ();
        final RegionFile region = this.cache.getRegionFileByChunk(x, z);
        // chunk position relative to the region file
        final int regionX = x & REGION_MASK;
        final int regionZ = z & REGION_MASK;
        final DataInputStream is = region.getChunkDataInputStream(regionX, regionZ);
        if (is == null) {
            // no stored data for this chunk
            return false;
        }
        final DataView levelDataView;
        try (NbtDataContainerInputStream nbt = new NbtDataContainerInputStream(is)) {
            // NOTE(review): .get() assumes the "Level" compound is always
            // present; a corrupt chunk would throw here — confirm intended
            levelDataView = nbt.read().getView(LEVEL).get();
        }
        // read the vertical sections
        final List<DataView> sectionList = levelDataView.getViewList(SECTIONS).get();
        final ChunkSection[] sections = new ChunkSection[16];
        final ChunkBlockStateArray[] blockStateArray = new ChunkBlockStateArray[16];
        //noinspection unchecked
        final Short2ObjectOpenHashMap<LanternBlockEntity>[] tileEntitySections = new Short2ObjectOpenHashMap[sections.length];
        for (DataView sectionTag : sectionList) {
            // section index within the chunk column (0..15)
            final int y = sectionTag.getInt(Y).get();
            /* TODO
            final byte[] blockLight = (byte[]) sectionTag.get(BLOCK_LIGHT).get();
            final byte[] skyLight = (byte[]) sectionTag.get(SKY_LIGHT).get();
            final NibbleArray blockLightNibbleArray = new NibbleArray(blockLight.length * 2, blockLight, true);
            final NibbleArray skyLightNibbleArray = new NibbleArray(skyLight.length * 2, skyLight, true);*/
            // Light data is currently not restored from disk (see TODO
            // above); fresh empty nibble arrays are used instead.
            final NibbleArray blockLightNibbleArray = new NibbleArray(LanternChunk.CHUNK_SECTION_VOLUME);
            final NibbleArray skyLightNibbleArray = new NibbleArray(LanternChunk.CHUNK_SECTION_VOLUME);
            tileEntitySections[y] = new Short2ObjectOpenHashMap<>();
            blockStateArray[y] = ChunkBlockStateArray.deserializeFrom(sectionTag);
            sections[y] = new ChunkSection(blockStateArray[y], skyLightNibbleArray, blockLightNibbleArray, tileEntitySections[y]);
        }
        // Deserialize block entities and attach them to their section maps
        levelDataView.getViewList(BLOCK_ENTITIES).ifPresent(tileEntityViews -> {
            final ObjectSerializer<LanternBlockEntity> blockEntitySerializer = ObjectSerializerRegistry.get().get(LanternBlockEntity.class).get();
            for (DataView blockEntityView : tileEntityViews) {
                final int tileY = blockEntityView.getInt(BLOCK_ENTITY_Y).get();
                final int section = tileY >> 4;
                // skip block entities in sections that were not stored
                if (tileEntitySections[section] == null) {
                    continue;
                }
                final int tileZ = blockEntityView.getInt(BLOCK_ENTITY_Z).get();
                final int tileX = blockEntityView.getInt(BLOCK_ENTITY_X).get();
                try {
                    final LanternBlockEntity blockEntity = blockEntitySerializer.deserialize(blockEntityView);
                    blockEntity.setLocation(ServerLocation.of(this.world, new Vector3i(tileX, tileY, tileZ)));
                    // position packed within the section (low nibbles only)
                    final short index = (short) ChunkSection.index(tileX & 0xf, tileY & 0xf, tileZ & 0xf);
                    blockEntity.setBlock(blockStateArray[section].get(index));
                    blockEntity.setValid(true);
                    tileEntitySections[section].put(index, blockEntity);
                } catch (InvalidDataException e) {
                    // best effort: a broken block entity must not fail the chunk
                    this.logger.warn("Error loading block entity at ({};{};{}) in the chunk ({},{}) in the world {}",
                            tileX & 0xf, tileY & 0xf, tileZ & 0xf, x, z, getWorldProperties().getDirectoryName(), e);
                }
            }
        });
        // Sponge creator/notifier tracker data is optional
        final DataView spongeDataView = levelDataView.getView(DataQueries.SPONGE_DATA).orElse(null);
        final List<DataView> trackerDataViews = spongeDataView == null ? null : levelDataView.getViewList(TRACKER_DATA_TABLE).orElse(null);
        final Short2ObjectMap<LanternChunk.TrackerData>[] trackerData = chunk.getTrackerData().getObjects();
        if (trackerDataViews != null) {
            for (DataView dataView : trackerDataViews) {
                final Optional<Short> optIndex = dataView.getShort(TRACKER_BLOCK_POS);
                if (!optIndex.isPresent()) {
                    continue;
                }
                final int creatorId = dataView.getInt(TRACKER_ENTRY_CREATOR).orElse(-1);
                final int notifierId = dataView.getInt(TRACKER_ENTRY_NOTIFIER).orElse(-1);
                // index = z << 12 | y << 4 | x
                int index = optIndex.get() & 0xffff;
                // bits 8..11 of the column index are the section number
                final int section = (index >> 8) & 0xf;
                // Convert the index to the section based system
                // index = y << 8 | z << 4 | x
                index = ChunkSection.index(index & 0xf, (index >> 4) & 0xf, index >> 12);
                trackerData[section].put((short) index, new LanternChunk.TrackerData(creatorId, notifierId));
            }
        }
        // initialize the chunk
        chunk.initializeSections(sections);
        chunk.setPopulated(levelDataView.getInt(TERRAIN_POPULATED).orElse(0) > 0);
        if (levelDataView.contains(BIOMES)) {
            final int[] biomes = (int[]) levelDataView.get(BIOMES).get();
            chunk.initializeBiomes(biomes);
        }
        final Object heightMap;
        // only accept the height map when it deserialized as an int array
        if (levelDataView.contains(HEIGHT_MAP) && (heightMap = levelDataView.get(HEIGHT_MAP).get()) instanceof int[]) {
            chunk.initializeHeightMap((int[]) heightMap);
        } else {
            chunk.initializeHeightMap(null);
        }
        levelDataView.getLong(INHABITED_TIME).ifPresent(time -> chunk.setInhabitedTime(time.intValue()));
        chunk.setLightPopulated(levelDataView.getInt(LIGHT_POPULATED).orElse(0) > 0);
        chunk.initializeLight();
        // Deserialize entities and add them to their vertical section
        levelDataView.getViewList(ENTITIES).ifPresent(entityViews -> {
            final ObjectSerializer<LanternEntity> entitySerializer = ObjectSerializerRegistry.get().get(LanternEntity.class).get();
            for (DataView entityView : entityViews) {
                try {
                    final LanternEntity entity = entitySerializer.deserialize(entityView);
                    final int ySection = fixEntityYSection(entity.getPosition().getFloorY() >> 4);
                    chunk.addEntity(entity, ySection);
                } catch (InvalidDataException e) {
                    // best effort: a broken entity must not fail the chunk
                    this.logger.warn("Error loading entity in the chunk ({},{}) in the world {}",
                            x, z, getWorldProperties().getDirectoryName(), e);
                }
            }
        });
        return true;
    }
    /**
     * Serializes the given chunk into its region file: core flags,
     * sections (blocks + valid block entities), height map, tracker data,
     * biomes and non-player entities, written as an NBT "Level" compound.
     *
     * @param chunk the chunk to persist
     * @throws IOException if the region file cannot be written
     */
    @Override
    public void write(LanternChunk chunk) throws IOException {
        final int x = chunk.getX();
        final int z = chunk.getZ();
        final RegionFile region = this.cache.getRegionFileByChunk(x, z);
        // chunk position relative to the region file
        final int regionX = x & REGION_MASK;
        final int regionZ = z & REGION_MASK;
        final DataContainer rootView = DataContainer.createNew(DataView.SafetyMode.NO_DATA_CLONED);
        final DataView levelDataView = rootView.createView(LEVEL);
        // Core properties
        levelDataView.set(DATA_VERSION, 1474);
        levelDataView.set(X, chunk.getX());
        levelDataView.set(Z, chunk.getZ());
        levelDataView.set(TERRAIN_POPULATED, (byte) (chunk.isPopulated() ? 1 : 0));
        levelDataView.set(LIGHT_POPULATED, (byte) (chunk.isLightPopulated() ? 1 : 0));
        levelDataView.set(LAST_UPDATE, 0L);
        levelDataView.set(INHABITED_TIME, chunk.getLongInhabitedTime());
        // Chunk sections
        final ChunkSectionSnapshot[] sections = chunk.getSectionSnapshots();
        final List<DataView> sectionDataViews = new ArrayList<>();
        final List<DataView> blockEntityDataViews = new ArrayList<>();
        for (byte i = 0; i < sections.length; ++i) {
            final ChunkSectionSnapshot section = sections[i];
            // empty sections are not written
            if (section == null) {
                continue;
            }
            final DataContainer sectionDataView = DataContainer.createNew(DataView.SafetyMode.NO_DATA_CLONED);
            sectionDataView.set(Y, i);
            // Serialize blocks data
            ChunkBlockStateArray.serializeTo(sectionDataView, section.blockStates);
            // Serialize light nibble arrays
            /* TODO
            sectionDataView.set(BLOCK_LIGHT, section.lightFromBlock);
            final byte[] lightFromSky = section.lightFromSky;
            if (lightFromSky != null) {
                sectionDataView.set(SKY_LIGHT, lightFromSky);
            }*/
            sectionDataViews.add(sectionDataView);
            final ObjectSerializer<LanternBlockEntity> tileEntitySerializer = ObjectSerializerRegistry.get().get(LanternBlockEntity.class).get();
            // Serialize the block entities
            for (Short2ObjectMap.Entry<LanternBlockEntity> blockEntityEntry : section.blockEntities.short2ObjectEntrySet()) {
                // invalidated block entities are dropped on save
                if (!blockEntityEntry.getValue().isValid()) {
                    continue;
                }
                final DataView dataView = tileEntitySerializer.serialize(blockEntityEntry.getValue());
                final short pos = blockEntityEntry.getShortKey();
                // unpack the section-local index (y << 8 | z << 4 | x) into
                // absolute world coordinates
                dataView.set(BLOCK_ENTITY_X, x * 16 + (pos & 0xf));
                dataView.set(BLOCK_ENTITY_Y, (i << 4) | (pos >> 8));
                dataView.set(BLOCK_ENTITY_Z, z * 16 + ((pos >> 4) & 0xf));
                blockEntityDataViews.add(dataView);
            }
        }
        levelDataView.set(BLOCK_ENTITIES, blockEntityDataViews);
        levelDataView.set(SECTIONS, sectionDataViews);
        levelDataView.set(HEIGHT_MAP, chunk.getHeightMap());
        // Sponge creator/notifier tracker data, one entry per tracked block
        final Short2ObjectMap<LanternChunk.TrackerData>[] trackerData = chunk.getTrackerData().getObjects();
        final List<DataView> trackerDataViews = new ArrayList<>();
        for (int i = 0; i < trackerData.length; i++) {
            final Short2ObjectMap<LanternChunk.TrackerData> trackerDataSection = trackerData[i];
            for (Short2ObjectMap.Entry<LanternChunk.TrackerData> entry : trackerDataSection.short2ObjectEntrySet()) {
                // index = y << 8 | z << 4 | x
                int index = entry.getShortKey() & 0xffff;
                // Convert the index to the column based system
                // index = z << 12 | y << 4 | x
                // (y here is the full column y: section number i in the high
                // nibble, the section-local y in the low nibble)
                index = ((index >> 4) & 0xf) << 12 | i << 8 | (index >> 4) & 0xf0 | index & 0xf;
                final DataView trackerDataView = DataContainer.createNew(DataView.SafetyMode.NO_DATA_CLONED);
                trackerDataView.set(TRACKER_BLOCK_POS, (short) index);
                trackerDataView.set(TRACKER_ENTRY_NOTIFIER, entry.getValue().getNotifierId());
                trackerDataView.set(TRACKER_ENTRY_CREATOR, entry.getValue().getCreatorId());
                trackerDataViews.add(trackerDataView);
            }
        }
        if (!trackerDataViews.isEmpty()) {
            levelDataView.createView(DataQueries.SPONGE_DATA).set(TRACKER_DATA_TABLE, trackerDataViews);
        }
        levelDataView.set(BIOMES, chunk.getBiomes());
        // Players are persisted separately and excluded here
        //noinspection unchecked
        final List<LanternEntity> entities = new ArrayList(chunk.getEntities(entity -> !(entity instanceof Player)));
        final ObjectSerializer<LanternEntity> entitySerializer = ObjectSerializerRegistry.get().get(LanternEntity.class).get();
        final List<DataView> entityViews = new ArrayList<>();
        for (LanternEntity entity : entities) {
            // entities already removed from the world are not saved
            if (entity.getRemoveState() == LanternEntity.UnloadState.REMOVED) {
                continue;
            }
            final DataView entityView = entitySerializer.serialize(entity);
            entityViews.add(entityView);
        }
        levelDataView.set(ENTITIES, entityViews);
        try (NbtDataContainerOutputStream nbt = new NbtDataContainerOutputStream(region.getChunkDataOutputStream(regionX, regionZ))) {
            nbt.write(rootView);
            nbt.flush();
        }
    }
@Override
public void unload() throws IOException {
this.cache.clear();
}
@Override
public ChunkDataStream getGeneratedChunks() {
return new ChunkDataStream() {
// All the region files
private Path[] paths;
// The current region file that we opened
@Nullable private RegionFile region;
// The coordinates of the chunk inside the region
private int chunkX;
private int chunkZ;
// The next index of the chunk in the region file
private int regionChunkIndex;
// The next index that we are in the file array
private int regionFileIndex;
// Whether the current fields are cached
private boolean cached;
// Done, no new chunks can be found
private boolean done;
{
// Use the reset to initialize
this.reset();
}
@Override
public DataContainer next() {
if (!this.hasNext()) {
throw new NoSuchElementException();
}
try {
final DataInputStream is = this.region.getChunkDataInputStream(this.chunkX, this.chunkZ);
final DataContainer data;
try (NbtDataContainerInputStream nbt = new NbtDataContainerInputStream(is)) {
data = nbt.read();
}
this.cached = false;
return data;
} catch (IOException e) {
// This shouldn't happen
throw throwUnchecked(e);
}
}
@Override
public boolean hasNext() {
// Fast fail
if (this.done) {
return false;
}
// Use the cached index if set
if (this.cached) {
return true;
}
// Try first to search for more chunks in the current region
while (true) {
if (this.region != null) {
while (++this.regionChunkIndex < REGION_AREA) {
this.chunkX = this.regionChunkIndex / REGION_SIZE;
this.chunkZ = this.regionChunkIndex % REGION_SIZE;
if (this.region.hasChunk(this.chunkX, this.chunkZ)) {
this.cached = true;
return true;
}
}
}
// There no chunk available in the current region,
// reset the chunk index for the next one
this.regionChunkIndex = -1;
// There was no chunk present in the current region,
// try the next region
if (++this.regionFileIndex >= this.paths.length) {
this.region = null;
this.done = true;
return false;
}
final Path nextRegionFile = this.paths[this.regionFileIndex];
if (Files.exists(nextRegionFile)) {
Matcher matcher = cache.getFilePattern().matcher(nextRegionFile.getFileName().toString());
int regionX = Integer.parseInt(matcher.group(0));
int regionZ = Integer.parseInt(matcher.group(1));
try {
this.region = cache.getRegionFile(regionX, regionZ);
} catch (IOException e) {
logger.error("Failed to read the region file ({};{}) in the world folder {}",
regionX, regionZ, baseDir.getFileName().toString(), e);
this.region = null;
}
} else {
this.region = null;
}
}
}
@Override
public int available() {
// TODO: Not sure how we will be able to do this without opening all
// the region files
throw new UnsupportedOperationException();
}
@Override
public void reset() {
this.paths = cache.getRegionFiles();
this.regionFileIndex = -1;
this.regionChunkIndex = -1;
this.region = null;
this.cached = false;
this.done = false;
}
};
}
@Override
public CompletableFuture<Boolean> doesChunkExist(Vector3i chunkCoords) {
return this.game.getAsyncScheduler().submit(() -> {
try {
return exists(chunkCoords.getX(), chunkCoords.getZ());
} catch (IOException e) {
UncheckedThrowables.throwUnchecked(e);
}
});
}
@Override
public CompletableFuture<Optional<DataContainer>> getChunkData(Vector3i chunkCoords) {
return this.game.getAsyncScheduler().submit(() -> {
final int x = chunkCoords.getX();
final int z = chunkCoords.getZ();
final RegionFile region = cache.getRegionFileByChunk(x, z);
final int regionX = x & REGION_MASK;
final int regionZ = z & REGION_MASK;
final DataInputStream is = region.getChunkDataInputStream(regionX, regionZ);
if (is == null) {
return Optional.empty();
}
final DataContainer data;
try (NbtDataContainerInputStream nbt = new NbtDataContainerInputStream(is)) {
data = nbt.read();
}
return Optional.of(data);
});
}
@Override
public WorldProperties getWorldProperties() {
return this.world.getProperties();
}
}
| |
/*
* #%L
* Native ARchive plugin for Maven
* %%
* Copyright (C) 2002 - 2014 NAR Maven Plugin developers.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.github.maven_nar;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Objects;
import java.util.Set;
import java.util.Vector;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.shared.artifact.filter.collection.ScopeFilter;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.StringUtils;
import com.github.maven_nar.cpptasks.CCTask;
import com.github.maven_nar.cpptasks.CUtil;
import com.github.maven_nar.cpptasks.CompilerDef;
import com.github.maven_nar.cpptasks.LinkerDef;
import com.github.maven_nar.cpptasks.OutputTypeEnum;
import com.github.maven_nar.cpptasks.RuntimeType;
import com.github.maven_nar.cpptasks.SubsystemEnum;
import com.github.maven_nar.cpptasks.VersionInfo;
import com.github.maven_nar.cpptasks.types.LibrarySet;
import com.github.maven_nar.cpptasks.types.LinkerArgument;
/**
 * Compiles native source files.
 *
 * @requiresSession
 * @author Mark Donszelmann
 */
@Mojo(name = "nar-compile", defaultPhase = LifecyclePhase.COMPILE, requiresProject = true,
    requiresDependencyResolution = ResolutionScope.COMPILE)
public class NarCompileMojo extends AbstractCompileMojo {
  /**
   * Specify that the final manifest should be embedded in the output (default
   * true) or false for side by side.
   */
  @Parameter(property = "nar.embedManifest", defaultValue = "true")
  protected boolean embedManifest = true;
  /**
   * The current build session instance.
   */
  @Parameter(defaultValue = "${session}", readonly = true)
  protected MavenSession session;

  /**
   * Copies the include files of the given compiler (if configured) into this
   * artifact's include directory in the target layout. A null compiler is a no-op.
   */
  private void copyInclude(final Compiler c) throws IOException, MojoExecutionException, MojoFailureException {
    if (c == null) {
      return;
    }
    c.copyIncludeFiles(
        getMavenProject(),
        getLayout().getIncludeDirectory(getTargetDirectory(), getMavenProject().getArtifactId(),
            getMavenProject().getVersion()));
  }

  /**
   * Configures and runs one cpptasks CCTask for the given library artifact:
   * wires up compilers, include paths, dependencies, the linker, and (on
   * Windows/msvc) manifest embedding.
   *
   * @param antProject the Ant project the task runs in
   * @param library the library (shared/static/jni/executable/...) to build
   * @throws MojoExecutionException when the native build itself fails
   * @throws MojoFailureException when post-processing (mt.exe) fails
   */
  private void createLibrary(final Project antProject, final Library library)
      throws MojoExecutionException, MojoFailureException {
    getLog().debug("Creating Library " + library);
    // configure task
    final CCTask task = new CCTask();
    task.setCommandLogLevel(this.commandLogLevel);
    task.setProject(antProject);
    task.setDecorateLinkerOptions(this.decorateLinkerOptions);
    // subsystem
    final SubsystemEnum subSystem = new SubsystemEnum();
    subSystem.setValue(library.getSubSystem());
    task.setSubsystem(subSystem);
    // set max cores
    task.setMaxCores(getMaxCores(getAOL()));
    // outtype
    final OutputTypeEnum outTypeEnum = new OutputTypeEnum();
    final String type = library.getType();
    outTypeEnum.setValue(type);
    task.setOuttype(outTypeEnum);
    // stdc++
    task.setLinkCPP(library.linkCPP());
    // fortran
    task.setLinkFortran(library.linkFortran());
    task.setLinkFortranMain(library.linkFortranMain());
    // outDir: executables go to bin, everything else to lib
    File outDir;
    if (type.equals(Library.EXECUTABLE)) {
      outDir = getLayout().getBinDirectory(getTargetDirectory(), getMavenProject().getArtifactId(),
          getMavenProject().getVersion(), getAOL().toString());
    } else {
      outDir = getLayout().getLibDirectory(getTargetDirectory(), getMavenProject().getArtifactId(),
          getMavenProject().getVersion(), getAOL().toString(), type);
    }
    outDir.mkdirs();
    // outFile
    // FIXME NAR-90 we could get the final name from layout
    final File outFile = new File(outDir, getOutput(getAOL(), type));
    getLog().debug("NAR - output: '" + outFile + "'");
    task.setOutfile(outFile);
    // If we are building a shared library on AIX and the user has specified a sharedObjectName, use it.
    // FIX: the original used `sharedObjectName != ""`, a String reference
    // comparison which is true for any non-interned value (including null);
    // use a null-safe emptiness check instead.
    if (getOS().equals(OS.AIX) && type.equals(Library.SHARED)
        && sharedObjectName != null && !sharedObjectName.isEmpty()) {
      task.setSharedObjectName(sharedObjectName);
    }
    // object directory
    File objDir = new File(getTargetDirectory(), "obj");
    objDir = new File(objDir, getAOL().toString() + "-" + library.getType());
    objDir.mkdirs();
    task.setObjdir(objDir);
    // failOnError, libtool
    task.setFailonerror(failOnError(getAOL()));
    task.setLibtool(useLibtool(getAOL()));
    // runtime
    final RuntimeType runtimeType = new RuntimeType();
    runtimeType.setValue(getRuntime(getAOL()));
    task.setRuntime(runtimeType);
    // IDL, MC, RC compilations should probably be 'generate source' type
    // actions, seperate from main build.
    // Needs resolution of handling for generate sources.
    // Order is somewhat important here, IDL and MC generate outputs that are
    // (often) included in the RC compilation
    if (getIdl() != null) {
      final CompilerDef idl = getIdl().getCompiler(Compiler.MAIN, null);
      if (idl != null) {
        task.addConfiguredCompiler(idl);
        task.createIncludePath().setPath(objDir.getPath()); // generated
                                                            // 'sources'
      }
    }
    if (getMessage() != null) {
      final CompilerDef mc = getMessage().getCompiler(Compiler.MAIN, null);
      if (mc != null) {
        task.addConfiguredCompiler(mc);
        task.createIncludePath().setPath(objDir.getPath()); // generated
                                                            // 'sources'
      }
    }
    if (getResource() != null) {
      final CompilerDef res = getResource().getCompiler(Compiler.MAIN, null);
      if (res != null) {
        task.addConfiguredCompiler(res);
      }
    }
    // 64-bit Windows needs the MS assembler for any .asm sources
    if (getOS().equals(OS.WINDOWS) && getArchitecture().equals("amd64")) {
      final int noOfASMSources = getSourcesFor(getAssembler()).size();
      if (noOfASMSources > 0) { // Assembler files exist
        final CompilerDef assembler = getAssembler().getCompiler(Compiler.MAIN, null);
        task.addConfiguredCompiler(assembler);
      }
    }
    // Darren Sargent Feb 11 2010: Use Compiler.MAIN for "type"...appears the
    // wrong "type" variable was being used
    // since getCompiler() expects "main" or "test", whereas the "type" variable
    // here is "executable", "shared" etc.
    // add C++ compiler
    if (getCpp() != null) {
      final CompilerDef cpp = getCpp().getCompiler(Compiler.MAIN, null);
      if (cpp != null) {
        // Set FortifyID attribute
        cpp.setFortifyID(getfortifyID());
        cpp.setCommands(compileCommands);
        cpp.setDryRun(dryRun);
        task.addConfiguredCompiler(cpp);
      }
    }
    // add C compiler
    if (getC() != null) {
      final CompilerDef c = getC().getCompiler(Compiler.MAIN, null);
      if (c != null) {
        // Set FortifyID attribute
        c.setFortifyID(getfortifyID());
        c.setCommands(compileCommands);
        c.setDryRun(dryRun);
        task.addConfiguredCompiler(c);
      }
    }
    // add Fortran compiler
    if (getFortran() != null) {
      final CompilerDef fortran = getFortran().getCompiler(Compiler.MAIN, null);
      if (fortran != null) {
        fortran.setCommands(compileCommands);
        fortran.setDryRun(dryRun);
        task.addConfiguredCompiler(fortran);
      }
    }
    // Add VersionInfo for the Windows binaries
    if (getOS().equals(OS.WINDOWS) && getLinker().getName(null, null).equals("msvc")) {
      final NARVersionInfo narVersioninfo = getNARVersionInfo();
      if (narVersioninfo != null) {
        final VersionInfo versionInfo = narVersioninfo.getVersionInfo(getAntProject());
        if (versionInfo != null) {
          task.addConfiguredVersioninfo(versionInfo);
        }
      }
    }
    // end Darren
    // add javah include path
    final File jniDirectory = getJavah().getJniDirectory();
    if (jniDirectory.exists()) {
      task.createIncludePath().setPath(jniDirectory.getPath());
    }
    // add java include paths
    getJava().addIncludePaths(task, type);
    getMsvc().configureCCTask(task);
    final List<NarArtifact> dependencies = getNarArtifacts();
    final List<String> linkPaths = new ArrayList<>();
    // If we're restricting deps to direct deps ONLY then trim transitive deps
    if (directDepsOnly) {
      final HashSet<String> directDepsSet = getDirectDepsSet(getVerboseDependencyTree());
      final ListIterator<NarArtifact> depsIt = dependencies.listIterator();
      // Trim all deps from dependencies that are not in the directDepsSet, warn if they are found.
      while (depsIt.hasNext()) {
        final NarInfo dep = depsIt.next().getNarInfo();
        if (!directDepsSet.contains(dep.getGroupId() + ":" + dep.getArtifactId())) {
          this.getLog().debug("Stray dependency: " + dep + " found. This may cause build failures.");
          depsIt.remove();
          // If this transitive dependency was a shared object, add it to the linkPaths list.
          final String depType = dep.getBinding(null, null);
          if (Objects.equals(depType, Library.SHARED)) {
            final File soDir = getLayout().getLibDirectory(getTargetDirectory(), dep.getArtifactId(),
                dep.getVersion(), getAOL().toString(), depType);
            if (soDir.exists()) {
              linkPaths.add(soDir.getAbsolutePath());
            }
          }
        }
      }
    }
    // add dependency include paths
    for (final Object element : dependencies) {
      // FIXME, handle multiple includes from one NAR
      final NarArtifact narDependency = (NarArtifact) element;
      final String binding = getBinding(library, narDependency);
      getLog().debug("Looking for " + narDependency + " found binding " + binding);
      if (!binding.equals(Library.JNI)) {
        final File unpackDirectory = getUnpackDirectory();
        final File include = getLayout().getIncludeDirectory(unpackDirectory, narDependency.getArtifactId(),
            narDependency.getBaseVersion());
        getLog().debug("Looking for include directory: " + include);
        if (include.exists()) {
          final String includesType = narDependency.getNarInfo().getIncludesType(null);
          if (includesType.equals("system")) {
            task.createSysIncludePath().setPath(include.getPath());
          } else {
            task.createIncludePath().setPath(include.getPath());
          }
        } else {
          // Ideally includes are used from lib (static or shared)
          // however it's not required.
          // make a note in the log if something has gone wrong,
          // but don't block compilation
          getLog().warn(String.format("Unable to locate %1$s lib include path '%2$s'", binding, include));
        }
      }
    }
    // add linker
    final LinkerDef linkerDefinition =
        getLinker().getLinker(this, task, getOS(), getAOL().getKey() + ".linker.", type, linkPaths);
    linkerDefinition.setCommands(linkCommands);
    linkerDefinition.setDryRun(dryRun);
    task.addConfiguredLinker(linkerDefinition);
    final Set<SysLib> dependencySysLibs = new LinkedHashSet<>();
    // add dependency libraries
    // FIXME: what about PLUGIN and STATIC, depending on STATIC, should we
    // not add all libraries, see NARPLUGIN-96
    final boolean skipDepLink = linkerDefinition.isSkipDepLink();
    if ((type.equals(Library.SHARED) || type.equals(Library.JNI) || type.equals(Library.EXECUTABLE))
        && !skipDepLink) {
      final List depLibOrder = getDependencyLibOrder();
      List depLibs = dependencies;
      // reorder the libraries that come from the nar dependencies
      // to comply with the order specified by the user
      if (depLibOrder != null && !depLibOrder.isEmpty()) {
        final List tmp = new LinkedList();
        for (final Object aDepLibOrder : depLibOrder) {
          final String depToOrderName = (String) aDepLibOrder;
          for (final Iterator j = depLibs.iterator(); j.hasNext(); ) {
            final NarArtifact dep = (NarArtifact) j.next();
            final String depName = dep.getGroupId() + ":" + dep.getArtifactId();
            if (depName.equals(depToOrderName)) {
              tmp.add(dep);
              j.remove();
            }
          }
        }
        tmp.addAll(depLibs);
        depLibs = tmp;
      }
      for (final Object depLib : depLibs) {
        final NarArtifact dependency = (NarArtifact) depLib;
        // FIXME no handling of "local"
        String binding = getBinding(library, dependency);
        if (binding == null) {
          binding = dependency.getNarInfo().getBinding(getAOL(), Library.NONE);
        }
        getLog().debug("Using Binding: " + binding);
        // Resolve the dependency's AOL, falling back on ours.
        // FIX: the original first assigned getAOL() and immediately overwrote
        // it — the dead store has been removed.
        final AOL aol = dependency.getNarInfo().getAOL(getAOL());
        getLog().debug("Using Library AOL: " + aol.toString());
        if (!binding.equals(Library.JNI) && !binding.equals(Library.NONE) && !binding.equals(Library.EXECUTABLE)) {
          final File unpackDirectory = getUnpackDirectory();
          final File dir = getLayout()
              .getLibDirectory(unpackDirectory, dependency.getArtifactId(), dependency.getBaseVersion(),
                  aol.toString(), binding);
          getLog().debug("Looking for Library Directory: " + dir);
          if (dir.exists()) {
            // Load nar properties file from aol specific directory
            final File aolNarInfoFile = getLayout()
                .getNarInfoDirectory(unpackDirectory, dependency.getGroupId(), dependency.getArtifactId(),
                    dependency.getBaseVersion(), aol.toString(), binding);
            // Read nar properties file as narInfo
            final NarInfo aolNarInfo = new NarInfo(dependency.getGroupId(), dependency.getArtifactId(),
                dependency.getBaseVersion(), getLog(), aolNarInfoFile);
            // Write to log about custom nar properties found in aol directory.
            if (!aolNarInfo.getInfo().isEmpty()) {
              getLog().debug(String.format("Custom NAR properties identified: %s-%s-%s-%s-%s",
                  dependency.getGroupId(),
                  dependency.getArtifactId(),
                  dependency.getBaseVersion(),
                  aol.toString(),
                  binding));
            } else {
              getLog().debug(String.format("Custom NAR properties not identified: %s-%s-%s-%s-%s",
                  dependency.getGroupId(),
                  dependency.getArtifactId(),
                  dependency.getBaseVersion(),
                  aol.toString(),
                  binding));
            }
            // overlay aol nar properties file on top of the default one.
            aolNarInfo.mergeProperties(dependency.getNarInfo().getInfo());
            final LibrarySet libSet = new LibrarySet();
            libSet.setProject(antProject);
            // FIXME, no way to override
            final String libs = aolNarInfo.getLibs(getAOL());
            if (libs != null && !libs.equals("")) {
              getLog().debug("Using LIBS = " + libs);
              libSet.setLibs(new CUtil.StringArrayBuilder(libs));
              libSet.setDir(dir);
              task.addLibset(libSet);
            }
            dependencySysLibs.addAll(getDependecySysLib(aolNarInfo));
          } else {
            getLog().debug("Library Directory " + dir + " does NOT exist.");
          }
          // FIXME, look again at this, for multiple dependencies we may need to
          // remove duplicates
          final String options = dependency.getNarInfo().getOptions(getAOL());
          if (options != null && !options.equals("")) {
            getLog().debug("Using OPTIONS = " + options);
            final LinkerArgument arg = new LinkerArgument();
            arg.setValue(options);
            linkerDefinition.addConfiguredLinkerArg(arg);
          }
        }
      }
    }
    if (syslibsFromDependencies) {
      for (final SysLib s : dependencySysLibs) {
        task.addSyslibset(s.getSysLibSet(antProject));
      }
    }
    // Add JVM to linker
    getJava().addRuntime(task, getJavaHome(getAOL()), getOS(), getAOL().getKey() + ".java.");
    // execute
    try {
      task.execute();
    } catch (final BuildException e) {
      throw new MojoExecutionException("NAR: Compile failed", e);
    }
    // FIXME, this should be done in CPPTasks at some point
    // getRuntime(getAOL()).equals("dynamic") &&
    if ((isEmbedManifest() || getLinker().isGenerateManifest()) && getOS().equals(OS.WINDOWS)
        && getLinker().getName().equals("msvc") && !getLinker().getVersion(this).startsWith("6.")) {
      final String[] env = new String[] {
        "PATH=" + getMsvc().getPathVariable().getValue()
      };
      final String libType = library.getType();
      if (Library.JNI.equals(libType) || Library.SHARED.equals(libType) || Library.EXECUTABLE.equals(libType)) {
        final Vector<String> commandlineArgs = new Vector<>();
        commandlineArgs.add("/manifest");
        getManifests(outFile.getPath(), commandlineArgs);
        if (commandlineArgs.size() == 1) {
          if (isEmbedManifest()) {
            getLog().warn("Embed manifest requested, no source manifests to embed, no manifest generated");
          }
        } else {
          if (Library.JNI.equals(libType) || Library.SHARED.equals(libType)) {
            final String dll = outFile.getPath() + ".dll";
            if (isEmbedManifest()) {
              commandlineArgs.add("/outputresource:" + dll + ";#2");
            } else {
              commandlineArgs.add("/out:" + dll + ".manifest");
            }
          } else // if (Library.EXECUTABLE.equals( libType ))
          {
            final String exe = outFile.getPath() + ".exe";
            if (isEmbedManifest()) {
              commandlineArgs.add("/outputresource:" + exe + ";#1");
            } else {
              commandlineArgs.add("/out:" + exe + ".manifest");
            }
          }
          final String[] commandlineArgsArray = commandlineArgs.toArray(new String[0]);
          String mtexe = "mt.exe";
          // VS2005 VC8 only one that includes mt.exe
          if (getMsvc().compareVersion(getMsvc().getWindowsSdkVersion(), "7.0") < 0
              && getLinker().getVersion(this).startsWith("8.")) {
            final File mtexeFile = new File(getMsvc().getToolPath(), mtexe);
            if (mtexeFile.exists()) {
              mtexe = mtexeFile.getAbsolutePath();
            }
          } else {
            final File mtexeFile = new File(getMsvc().getSDKToolPath(), mtexe);
            if (mtexeFile.exists()) {
              mtexe = mtexeFile.getAbsolutePath();
            }
          }
          final int result = NarUtil.runCommand(mtexe, commandlineArgsArray, null, null, getLog());
          if (result != 0) {
            throw new MojoFailureException("MT.EXE failed with exit code: " + result);
          }
        }
      }
    }
    // option? should debug symbols always be provided.
    if (getOS().equals(OS.WINDOWS) && Library.STATIC.equals(library.getType())) {
      getLog().debug("Copy static pdbs from intermediat dir to " + task.getOutfile().getParentFile());
      try {
        NarUtil.copyDirectoryStructure(task.getObjdir(), task.getOutfile().getParentFile(), "**/*.pdb",
            NarUtil.DEFAULT_EXCLUDES);
      } catch (IOException e) {
        getLog().info("Failed to copy pdbs from " + task.getObjdir() + "\nexception" + e.getMessage());
      }
    }
  }

  /**
   * List the dependencies needed for compilation, those dependencies are used
   * to get the include paths needed for
   * compilation and to get the libraries paths and names needed for linking.
   */
  @Override
  protected ScopeFilter getArtifactScopeFilter() {
    return new ScopeFilter(Artifact.SCOPE_COMPILE, null);
  }

  /**
   * Collects all source files configured for the given compiler by scanning its
   * source directories with its include patterns. Returns an empty list when the
   * compiler is null or scanning fails.
   */
  private List getSourcesFor(final Compiler compiler) throws MojoFailureException, MojoExecutionException {
    if (compiler == null) {
      return Collections.emptyList();
    }
    try {
      final List files = new ArrayList();
      final List srcDirs = compiler.getSourceDirectories();
      for (final Object srcDir : srcDirs) {
        final File dir = (File) srcDir;
        if (dir.exists()) {
          files.addAll(FileUtils.getFiles(dir, StringUtils.join(compiler.getIncludes().iterator(), ","), null));
        }
      }
      return files;
    } catch (final IOException e) {
      // Keep the best-effort contract, but no longer swallow the failure silently.
      getLog().debug("Failed to scan source directories for " + compiler, e);
      return Collections.emptyList();
    }
  }

  /**
   * Entry point of the mojo: counts the native sources, builds every configured
   * library, copies include files, writes the nar properties and the optional
   * replay command files.
   */
  @Override
  public final void narExecute() throws MojoExecutionException, MojoFailureException {
    // make sure destination is there
    getTargetDirectory().mkdirs();
    // check for source files
    int noOfSources = 0;
    noOfSources += getSourcesFor(getCpp()).size();
    noOfSources += getSourcesFor(getC()).size();
    noOfSources += getSourcesFor(getFortran()).size();
    if (getOS().equals(OS.WINDOWS) && getArchitecture().equals("amd64")) {
      noOfSources += getSourcesFor(getAssembler()).size();
    }
    if (noOfSources > 0) {
      getLog().info("Compiling " + noOfSources + " native files");
      for (final Library library : getLibraries()) {
        createLibrary(getAntProject(), library);
      }
    } else {
      getLog().info("Nothing to compile");
    }
    try {
      // FIXME, should the include paths be defined at a higher level ?
      copyInclude(getCpp());
      copyInclude(getC());
      copyInclude(getFortran());
    } catch (final IOException e) {
      throw new MojoExecutionException("NAR: could not copy include files", e);
    }
    getNarInfo().writeToDirectory(this.classesDirectory);
    if (replay != null) {
      final File compileCommandFile = new File(replay.getOutputDirectory(), NarConstants.REPLAY_COMPILE_NAME);
      NarUtil.writeCommandFile(compileCommandFile, compileCommands);
      final File linkCommandFile = new File(replay.getOutputDirectory(), NarConstants.REPLAY_LINK_NAME);
      NarUtil.writeCommandFile(linkCommandFile, linkCommands);
    }
  }

  /** @return whether the manifest should be embedded into the binary (Windows/msvc). */
  public boolean isEmbedManifest() {
    return embedManifest;
  }

  /**
   * Appends the linker-generated manifest (when enabled) to the list passed to
   * mt.exe.
   */
  private void getManifests(String generated, Vector<String> manifests) {
    // TODO: /manifest should be followed by the list of manifest files
    // - the one generated by link, any others provided in source.
    // search the source for .manifest files.
    if (getLinker().isGenerateManifest()) {
      manifests.add(generated + ".manifest");
    }
  }
}
| |
package org.jgroups.protocols.tom;
import org.jgroups.*;
import org.jgroups.annotations.MBean;
import org.jgroups.annotations.ManagedAttribute;
import org.jgroups.annotations.ManagedOperation;
import org.jgroups.stack.Protocol;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
/**
* Total Order Anycast with three communication steps (based on Skeen's Algorithm). Establishes total order for a
* message sent to a subset of the cluster members (an anycast). Example: send a totally ordered message to {D,E}
* out of a membership of {A,B,C,D,E,F}.<p/>
* Skeen's algorithm uses consensus among the anycast target members to find the currently highest
* sequence number (seqno) and delivers the message according to the order established by the seqnos.
*
* @author Pedro Ruivo
* @since 3.1
*/
@MBean(description = "Implementation of Total Order Anycast based on Skeen's Algorithm")
public class TOA extends Protocol implements DeliveryProtocol {
//managers
private DeliveryManagerImpl deliverManager;
private SenderManager senderManager;
// threads
private final DeliveryThread deliverThread = new DeliveryThread(this);
//local address
private Address localAddress;
//sequence numbers, messages ids and lock
private final AtomicLong messageIdCounter = new AtomicLong(0);
//stats: profiling information
private final StatsCollector statsCollector = new StatsCollector();
private volatile View currentView;
    // No configuration at construction time; managers are created in start().
    public TOA() {
    }
    /**
     * Creates the delivery and sender managers and starts the delivery thread.
     * The thread is started only after the delivery manager exists, since it
     * consumes from it.
     */
    @Override
    public void start() throws Exception {
        deliverManager = new DeliveryManagerImpl();
        senderManager = new SenderManager();
        deliverThread.start(deliverManager);
        // Mirror the protocol's stats flag into the profiling collector.
        statsCollector.setStatsEnabled(statsEnabled());
    }
    /** Stops the protocol by interrupting the delivery thread. */
    @Override
    public void stop() {
        deliverThread.interrupt();
    }
@Override
public Object down(Event evt) {
switch (evt.getType()) {
case Event.SET_LOCAL_ADDRESS:
this.localAddress =evt.getArg();
this.deliverThread.setLocalAddress(localAddress.toString());
break;
case Event.VIEW_CHANGE:
handleViewChange(evt.getArg());
break;
default:
break;
}
return down_prot.down(evt);
}
public Object down(Message message) {
Address dest = message.getDest();
if (dest != null && dest instanceof AnycastAddress && !message.isFlagSet(Message.Flag.NO_TOTAL_ORDER)) {
// anycast message
sendTotalOrderAnycastMessage(extract((AnycastAddress) dest), message);
} else if (dest != null && dest instanceof AnycastAddress) {
//anycast address with NO_TOTAL_ORDER flag (should no be possible, but...)
send(extract((AnycastAddress) dest), message, true);
} else {
//normal message
down_prot.down(message);
}
return null;
}
@Override
public Object up(Event evt) {
switch (evt.getType()) {
case Event.VIEW_CHANGE:
handleViewChange(evt.getArg());
break;
case Event.SET_LOCAL_ADDRESS:
this.localAddress =evt.getArg();
this.deliverThread.setLocalAddress(localAddress.toString());
break;
default:
break;
}
return up_prot.up(evt);
}
    /**
     * Dispatches incoming TOA messages by header type. Messages without a TOA
     * header are not ours and are passed up unchanged; handled messages are
     * consumed (return null) — delivery happens later via the delivery manager.
     */
    public Object up(Message message) {
        ToaHeader header=message.getHeader(this.id);
        if (header == null)
            return up_prot.up(message);
        switch (header.getType()) {
            case ToaHeader.DATA_MESSAGE:
                // step 1: a new anycast arrived, propose a sequence number
                handleDataMessage(message, header);
                break;
            case ToaHeader.PROPOSE_MESSAGE:
                // step 2: a destination proposed its sequence number
                handleSequenceNumberPropose(message.getSrc(), header);
                break;
            case ToaHeader.FINAL_MESSAGE:
                // step 3: the sender fixed the final sequence number
                handleFinalSequenceNumber(header);
                break;
            case ToaHeader.SINGLE_DESTINATION_MESSAGE:
                // single destination: no consensus needed, deliver directly
                if (log.isTraceEnabled()) {
                    log.trace("Received message %s with SINGLE_DESTINATION header. delivering...", message);
                }
                deliverManager.deliverSingleDestinationMessage(message, header.getMessageID());
                break;
            default:
                throw new IllegalStateException("Unknown header type received " + header);
        }
        return null;
    }
    /**
     * Delivers a totally ordered message to the application: retargets it to the
     * local address, passes it up the stack and updates the delivery stats.
     */
    @Override
    public void deliver(Message message) {
        message.setDest(localAddress);
        if (log.isTraceEnabled()) {
            log.trace("Deliver message %s (%d) in total order", message, message.getHeader(id));
        }
        up_prot.up(message);
        statsCollector.incrementMessageDeliver();
    }
    /**
     * Reacts to a membership change: drops state belonging to members that left
     * and, for every pending sent message whose proposal set is now complete
     * (because leavers no longer need to answer), finalizes and broadcasts the
     * final sequence number.
     */
    private void handleViewChange(View view) {
        if (log.isTraceEnabled()) {
            log.trace("Handle view %s", view);
        }
        View oldView = currentView;
        currentView = view;
        //basis behavior: drop leavers message (as senders)
        List<Address> leavers = View.leftMembers(oldView, view);
        deliverManager.removeLeavers(leavers);
        //basis behavior: avoid waiting for the acks
        Collection<MessageID> pendingSentMessages = senderManager.getPendingMessageIDs();
        for (MessageID messageID : pendingSentMessages) {
            // Removing leavers may complete the proposal round for this message.
            long finalSequenceNumber = senderManager.removeLeavers(messageID, leavers);
            if (finalSequenceNumber != SenderManager.NOT_READY) {
                ToaHeader finalHeader = ToaHeader.newFinalMessageHeader(messageID, finalSequenceNumber);
                Message finalMessage = new Message().src(localAddress).putHeader(this.id, finalHeader)
                        .setFlag(Message.Flag.OOB, Message.Flag.INTERNAL, Message.Flag.DONT_BUNDLE);
                Set<Address> destinations = senderManager.getDestination(messageID);
                // Don't send the final header to ourselves over the wire.
                if (destinations.contains(localAddress)) {
                    destinations.remove(localAddress);
                }
                if (log.isTraceEnabled()) {
                    log.trace("Message %s is ready to be deliver. Final sequencer number is %d",
                            messageID, finalSequenceNumber);
                }
                send(destinations, finalMessage, false);
                //returns true if we are in destination set
                if (senderManager.markSent(messageID)) {
                    deliverManager.markReadyToDeliver(messageID, finalSequenceNumber);
                }
            }
        }
        // TODO: Future work: How to add fault tolerance? (simple and efficient)
    }
    /**
     * Starts the total order anycast for the given destinations. A single
     * destination needs no consensus and is delivered/sent directly; otherwise
     * the message is registered with the delivery and sender managers and
     * multicast to all destinations to collect sequence number proposals.
     */
    private void sendTotalOrderAnycastMessage(List<Address> destinations, Message message) {
        boolean trace = log.isTraceEnabled();
        long startTime = statsCollector.now();
        long duration = -1;
        final boolean deliverToMySelf = destinations.contains(localAddress);
        if (destinations.size() == 1) {
            // Fast path: total order over one destination is trivially satisfied.
            MessageID messageID = generateId();
            message.putHeader(id, ToaHeader.createSingleDestinationHeader(messageID));
            message.setDest(destinations.get(0));
            if (trace) {
                log.trace("Sending total order anycast message %s (%s) to single destination", message, message.getHeader(id));
            }
            if (deliverToMySelf) {
                deliverManager.deliverSingleDestinationMessage(message, messageID);
            } else {
                down_prot.down(message);
            }
            return;
        }
        try {
            final MessageID messageID = generateId();
            long sequenceNumber = -1;
            ToaHeader header = ToaHeader.newDataMessageHeader(messageID, destinations);
            message.putHeader(this.id, header);
            if (deliverToMySelf) {
                // Register locally first so our own proposal is part of the round.
                sequenceNumber = deliverManager.addLocalMessageToDeliver(messageID, message, header);
            }
            if (trace) {
                log.trace("Sending total order anycast message %s (%s) to %s", message, message.getHeader(id), destinations);
            }
            senderManager.addNewMessageToSend(messageID, destinations, sequenceNumber, deliverToMySelf);
            send(destinations, message, false);
            duration = statsCollector.now() - startTime;
        } catch (Exception e) {
            logException("Exception caught while sending anycast message. Error is " + e.getLocalizedMessage(),
                    e);
        } finally {
            // duration stays -1 on failure; the collector receives it either way.
            statsCollector.addAnycastSentDuration(duration, (destinations.size() - (deliverToMySelf ? 1 : 0)));
        }
    }
private MessageID generateId() {
return new MessageID(localAddress, messageIdCounter.getAndIncrement());
}
private void send(Collection<Address> destinations, Message msg, boolean sendToMyself) {
if (log.isTraceEnabled()) {
log.trace("sending anycast total order message %s to %s", msg, destinations);
}
for (Address address : destinations) {
if (!sendToMyself && address.equals(localAddress)) {
continue;
}
Message cpy = msg.copy();
cpy.setDest(address);
down_prot.down(cpy);
}
}
    /**
     * Handles a data message received from its originator: stores it in the
     * deliver manager, computes this node's sequence-number proposal and sends
     * the proposal back to the originator.
     *
     * @param message the received data message
     * @param header  the TOA header carried by the message
     */
    private void handleDataMessage(Message message, ToaHeader header) {
        long startTime = statsCollector.now();
        long duration = -1;
        try {
            final MessageID messageID = header.getMessageID();
            //create the sequence number and put it in deliver manager
            long myProposeSequenceNumber = deliverManager.addRemoteMessageToDeliver(messageID, message,
                    header.getSequencerNumber());
            if (log.isTraceEnabled()) {
                log.trace("Received the message with %s. The proposed sequence number is %d",
                        header, myProposeSequenceNumber);
            }
            //create a new message and send it back
            // Flags: OOB, INTERNAL, DONT_BUNDLE (see Message.Flag).
            ToaHeader newHeader = ToaHeader.newProposeMessageHeader(messageID, myProposeSequenceNumber);
            Message proposeMessage = new Message().src(localAddress).dest(messageID.getAddress())
                    .putHeader(this.id, newHeader).setFlag(Message.Flag.OOB, Message.Flag.INTERNAL, Message.Flag.DONT_BUNDLE);
            //multicastSenderThread.addUnicastMessage(proposeMessage);
            down_prot.down(proposeMessage);
            duration = statsCollector.now() - startTime;
        } catch (Exception e) {
            logException("Exception caught while processing the data message " + header.getMessageID(), e);
        } finally {
            statsCollector.addDataMessageDuration(duration);
        }
    }
private void handleSequenceNumberPropose(Address from, ToaHeader header) {
long startTime = statsCollector.now();
long duration = -1;
boolean lastProposeReceived = false;
boolean trace = log.isTraceEnabled();
try {
MessageID messageID = header.getMessageID();
if (trace) {
log.trace("Received the proposed sequence number message with %s from %s",
header, from);
}
deliverManager.updateSequenceNumber(header.getSequencerNumber());
long finalSequenceNumber = senderManager.addPropose(messageID, from,
header.getSequencerNumber());
if (finalSequenceNumber != SenderManager.NOT_READY) {
lastProposeReceived = true;
ToaHeader finalHeader = ToaHeader.newFinalMessageHeader(messageID, finalSequenceNumber);
Message finalMessage = new Message().src(localAddress).putHeader(this.id, finalHeader)
.setFlag(Message.Flag.OOB, Message.Flag.INTERNAL, Message.Flag.DONT_BUNDLE);
Set<Address> destinations = senderManager.getDestination(messageID);
if (destinations.contains(localAddress)) {
destinations.remove(localAddress);
}
if (trace) {
log.trace("Message %s is ready to be deliver. Final sequencer number is %d" +
messageID, finalSequenceNumber);
}
send(destinations, finalMessage, false);
//returns true if we are in destination set
if (senderManager.markSent(messageID)) {
deliverManager.markReadyToDeliver(messageID, finalSequenceNumber);
}
}
duration = statsCollector.now() - startTime;
} catch (Exception e) {
logException("Exception caught while processing the propose sequence number for " + header.getMessageID(), e);
} finally {
statsCollector.addProposeSequenceNumberDuration(duration, lastProposeReceived);
}
}
private void handleFinalSequenceNumber(ToaHeader header) {
long startTime = statsCollector.now();
long duration = -1;
try {
MessageID messageID = header.getMessageID();
if (log.isTraceEnabled()) {
log.trace("Received the final sequence number message with %s", header);
}
deliverManager.markReadyToDeliver(messageID, header.getSequencerNumber());
duration = statsCollector.now() - startTime;
} catch (Exception e) {
logException("Exception caught while processing the final sequence number for " + header.getMessageID(), e);
} finally {
statsCollector.addFinalSequenceNumberDuration(duration);
}
}
private void logException(String msg, Exception e) {
if (log.isDebugEnabled()) {
log.debug(msg, e);
} else if (log.isWarnEnabled()) {
log.warn("%s. Error is %s", msg, e.getLocalizedMessage());
}
}
private List<Address> extract(AnycastAddress anycastAddress) {
Collection<Address> addresses = anycastAddress.getAddresses();
if (addresses == null) {
return new ArrayList<>(currentView.getMembers());
} else {
return new ArrayList<>(addresses);
}
}
@ManagedOperation
public String getMessageList() {
return deliverManager.getMessageSet().toString();
}
@Override
public void enableStats(boolean flag) {
super.enableStats(flag);
statsCollector.setStatsEnabled(flag);
}
@Override
public void resetStats() {
super.resetStats();
statsCollector.clearStats();
}
@ManagedAttribute(description = "The average duration (in milliseconds) in processing and sending the anycast " +
"message to all the recipients")
public double getAvgToaSendDuration() {
return statsCollector.getAvgAnycastSentDuration();
}
@ManagedAttribute(description = "The average duration (in milliseconds) in processing a data message received")
public double getAvgDataMessageReceivedDuration() {
return statsCollector.getAvgDataMessageReceivedDuration();
}
@ManagedAttribute(description = "The average duration (in milliseconds) in processing a propose message received" +
"(not the last one")
public double getAvgProposeMessageReceivedDuration() {
return statsCollector.getAvgProposeMesageReceivedDuration();
}
@ManagedAttribute(description = "The average duration (in milliseconds) in processing the last propose message " +
"received. This last propose message will originate the sending of the final message")
public double getAvgLastProposeMessageReceivedDuration() {
return statsCollector.getAvgLastProposeMessageReceivedDuration();
}
@ManagedAttribute(description = "The average duration (in milliseconds) in processing a final message received")
public double getAvgFinalMessageReceivedDuration() {
return statsCollector.getAvgFinalMessageReceivedDuration();
}
@ManagedAttribute(description = "The number of anycast messages sent")
public int getNumberOfAnycastMessagesSent() {
return statsCollector.getNumberOfAnycastMessagesSent();
}
@ManagedAttribute(description = "The number of final anycast sent")
public int getNumberOfFinalAnycastSent() {
return statsCollector.getNumberOfFinalAnycastsSent();
}
@ManagedAttribute(description = "The number of anycast messages delivered")
public int getNumberOfAnycastMessagesDelivered() {
return statsCollector.getAnycastDelivered();
}
@ManagedAttribute(description = "The number of propose messages sent")
public int getNumberOfProposeMessageSent() {
return statsCollector.getNumberOfProposeMessagesSent();
}
@ManagedAttribute(description = "The number of final messages delivered")
public int getNumberOfFinalMessagesDelivered() {
return statsCollector.getNumberOfFinalMessagesDelivered();
}
    @ManagedAttribute(description = "The number of data messages delivered")
    public int getNumberOfDataMessagesDelivered() {
        // NOTE(review): this delegates to getNumberOfProposeMessagesSent(), which
        // does not match the attribute description — looks like a copy-paste
        // slip. Confirm whether the stats collector exposes a
        // data-messages-delivered counter and switch to it.
        return statsCollector.getNumberOfProposeMessagesSent();
    }
@ManagedAttribute(description = "The number of propose messages received")
public int getNumberOfProposeMessageReceived() {
return statsCollector.getNumberOfProposeMessagesReceived();
}
@ManagedAttribute(description = "The average number of unicasts messages created per anycast message")
public double getAvgNumberOfUnicastSentPerAnycast() {
return statsCollector.getAvgNumberOfUnicastSentPerAnycast();
}
}
| |
/*
* Copyright (c) 1995, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.awt;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serial;
/**
* A flow layout arranges components in a directional flow, much
* like lines of text in a paragraph. The flow direction is
* determined by the container's {@code componentOrientation}
* property and may be one of two values:
* <ul>
* <li>{@code ComponentOrientation.LEFT_TO_RIGHT}
* <li>{@code ComponentOrientation.RIGHT_TO_LEFT}
* </ul>
* Flow layouts are typically used
* to arrange buttons in a panel. It arranges buttons
* horizontally until no more buttons fit on the same line.
* The line alignment is determined by the {@code align}
* property. The possible values are:
* <ul>
* <li>{@link #LEFT LEFT}
* <li>{@link #RIGHT RIGHT}
* <li>{@link #CENTER CENTER}
* <li>{@link #LEADING LEADING}
* <li>{@link #TRAILING TRAILING}
* </ul>
* <p>
* For example, the following picture shows an applet using the flow
* layout manager (its default layout manager) to position three buttons:
* <p>
* <img src="doc-files/FlowLayout-1.gif"
* ALT="Graphic of Layout for Three Buttons"
* style="margin: 7px 10px;">
* <p>
* Here is the code for this applet:
*
* <hr><blockquote><pre>
* import java.awt.*;
* import java.applet.Applet;
*
* public class myButtons extends Applet {
* Button button1, button2, button3;
* public void init() {
* button1 = new Button("Ok");
* button2 = new Button("Open");
* button3 = new Button("Close");
* add(button1);
* add(button2);
* add(button3);
* }
* }
* </pre></blockquote><hr>
* <p>
* A flow layout lets each component assume its natural (preferred) size.
*
* @author Arthur van Hoff
* @author Sami Shaio
* @since 1.0
* @see ComponentOrientation
*/
public class FlowLayout implements LayoutManager, java.io.Serializable {
/**
* This value indicates that each row of components
* should be left-justified.
*/
public static final int LEFT = 0;
/**
* This value indicates that each row of components
* should be centered.
*/
public static final int CENTER = 1;
/**
* This value indicates that each row of components
* should be right-justified.
*/
public static final int RIGHT = 2;
/**
* This value indicates that each row of components
* should be justified to the leading edge of the container's
* orientation, for example, to the left in left-to-right orientations.
*
* @see java.awt.Component#getComponentOrientation
* @see java.awt.ComponentOrientation
* @since 1.2
*/
public static final int LEADING = 3;
/**
* This value indicates that each row of components
* should be justified to the trailing edge of the container's
* orientation, for example, to the right in left-to-right orientations.
*
* @see java.awt.Component#getComponentOrientation
* @see java.awt.ComponentOrientation
* @since 1.2
*/
public static final int TRAILING = 4;
/**
* {@code align} is the property that determines
* how each row distributes empty space.
* It can be one of the following values:
* <ul>
* <li>{@code LEFT}
* <li>{@code RIGHT}
* <li>{@code CENTER}
* </ul>
*
* @serial
* @see #getAlignment
* @see #setAlignment
*/
int align; // This is for 1.1 serialization compatibility
/**
* {@code newAlign} is the property that determines
* how each row distributes empty space for the Java 2 platform,
* v1.2 and greater.
     * It can be one of the following values:
* <ul>
* <li>{@code LEFT}
* <li>{@code RIGHT}
* <li>{@code CENTER}
* <li>{@code LEADING}
* <li>{@code TRAILING}
* </ul>
*
* @serial
* @since 1.2
* @see #getAlignment
* @see #setAlignment
*/
int newAlign; // This is the one we actually use
/**
* The flow layout manager allows a separation of
* components with gaps. The horizontal gap will
* specify the space between components and between
* the components and the borders of the
* {@code Container}.
*
* @serial
* @see #getHgap()
* @see #setHgap(int)
*/
int hgap;
    /**
     * The flow layout manager allows a separation of
     * components with gaps. The vertical gap will
     * specify the space between rows and between
     * the rows and the borders of the {@code Container}.
     *
     * @serial
     * @see #getVgap()
     * @see #setVgap(int)
     */
int vgap;
/**
* If true, components will be aligned on their baseline.
*/
private boolean alignOnBaseline;
/**
* Use serialVersionUID from JDK 1.1 for interoperability.
*/
@Serial
private static final long serialVersionUID = -7262534875583282631L;
/**
* Constructs a new {@code FlowLayout} with a centered alignment and a
* default 5-unit horizontal and vertical gap.
*/
public FlowLayout() {
this(CENTER, 5, 5);
}
/**
* Constructs a new {@code FlowLayout} with the specified
* alignment and a default 5-unit horizontal and vertical gap.
* The value of the alignment argument must be one of
* {@code FlowLayout.LEFT}, {@code FlowLayout.RIGHT},
* {@code FlowLayout.CENTER}, {@code FlowLayout.LEADING},
* or {@code FlowLayout.TRAILING}.
* @param align the alignment value
*/
public FlowLayout(int align) {
this(align, 5, 5);
}
/**
* Creates a new flow layout manager with the indicated alignment
* and the indicated horizontal and vertical gaps.
* <p>
* The value of the alignment argument must be one of
* {@code FlowLayout.LEFT}, {@code FlowLayout.RIGHT},
* {@code FlowLayout.CENTER}, {@code FlowLayout.LEADING},
* or {@code FlowLayout.TRAILING}.
* @param align the alignment value
* @param hgap the horizontal gap between components
* and between the components and the
* borders of the {@code Container}
* @param vgap the vertical gap between components
* and between the components and the
* borders of the {@code Container}
*/
public FlowLayout(int align, int hgap, int vgap) {
this.hgap = hgap;
this.vgap = vgap;
setAlignment(align);
}
/**
* Gets the alignment for this layout.
* Possible values are {@code FlowLayout.LEFT},
* {@code FlowLayout.RIGHT}, {@code FlowLayout.CENTER},
* {@code FlowLayout.LEADING},
* or {@code FlowLayout.TRAILING}.
* @return the alignment value for this layout
* @see java.awt.FlowLayout#setAlignment
* @since 1.1
*/
public int getAlignment() {
return newAlign;
}
/**
* Sets the alignment for this layout.
* Possible values are
* <ul>
* <li>{@code FlowLayout.LEFT}
* <li>{@code FlowLayout.RIGHT}
* <li>{@code FlowLayout.CENTER}
* <li>{@code FlowLayout.LEADING}
* <li>{@code FlowLayout.TRAILING}
* </ul>
* @param align one of the alignment values shown above
* @see #getAlignment()
* @since 1.1
*/
public void setAlignment(int align) {
this.newAlign = align;
// this.align is used only for serialization compatibility,
// so set it to a value compatible with the 1.1 version
// of the class
switch (align) {
case LEADING:
this.align = LEFT;
break;
case TRAILING:
this.align = RIGHT;
break;
default:
this.align = align;
break;
}
}
/**
* Gets the horizontal gap between components
* and between the components and the borders
* of the {@code Container}
*
* @return the horizontal gap between components
* and between the components and the borders
* of the {@code Container}
* @see java.awt.FlowLayout#setHgap
* @since 1.1
*/
public int getHgap() {
return hgap;
}
/**
* Sets the horizontal gap between components and
* between the components and the borders of the
* {@code Container}.
*
* @param hgap the horizontal gap between components
* and between the components and the borders
* of the {@code Container}
* @see java.awt.FlowLayout#getHgap
* @since 1.1
*/
public void setHgap(int hgap) {
this.hgap = hgap;
}
/**
* Gets the vertical gap between components and
* between the components and the borders of the
* {@code Container}.
*
* @return the vertical gap between components
* and between the components and the borders
* of the {@code Container}
* @see java.awt.FlowLayout#setVgap
* @since 1.1
*/
public int getVgap() {
return vgap;
}
/**
* Sets the vertical gap between components and between
* the components and the borders of the {@code Container}.
*
* @param vgap the vertical gap between components
* and between the components and the borders
* of the {@code Container}
* @see java.awt.FlowLayout#getVgap
* @since 1.1
*/
public void setVgap(int vgap) {
this.vgap = vgap;
}
/**
* Sets whether or not components should be vertically aligned along their
* baseline. Components that do not have a baseline will be centered.
* The default is false.
*
* @param alignOnBaseline whether or not components should be
* vertically aligned on their baseline
* @since 1.6
*/
public void setAlignOnBaseline(boolean alignOnBaseline) {
this.alignOnBaseline = alignOnBaseline;
}
/**
* Returns true if components are to be vertically aligned along
* their baseline. The default is false.
*
* @return true if components are to be vertically aligned along
* their baseline
* @since 1.6
*/
public boolean getAlignOnBaseline() {
return alignOnBaseline;
}
/**
* Adds the specified component to the layout.
* Not used by this class.
* @param name the name of the component
* @param comp the component to be added
*/
public void addLayoutComponent(String name, Component comp) {
}
/**
* Removes the specified component from the layout.
* Not used by this class.
* @param comp the component to remove
* @see java.awt.Container#removeAll
*/
public void removeLayoutComponent(Component comp) {
}
    /**
     * Returns the preferred dimensions for this layout given the
     * <i>visible</i> components in the specified target container.
     * All visible components are measured as one conceptual row: widths
     * (plus inter-component gaps) are summed, the height is the maximum
     * component height.
     *
     * @param target the container that needs to be laid out
     * @return the preferred dimensions to lay out the
     *         subcomponents of the specified container
     * @see Container
     * @see #minimumLayoutSize
     * @see java.awt.Container#getPreferredSize
     */
    public Dimension preferredLayoutSize(Container target) {
      synchronized (target.getTreeLock()) {
        Dimension dim = new Dimension(0, 0);
        int nmembers = target.getComponentCount();
        boolean firstVisibleComponent = true;
        boolean useBaseline = getAlignOnBaseline();
        int maxAscent = 0;
        int maxDescent = 0;
        for (int i = 0 ; i < nmembers ; i++) {
            Component m = target.getComponent(i);
            if (m.isVisible()) {
                Dimension d = m.getPreferredSize();
                dim.height = Math.max(dim.height, d.height);
                // hgap goes between components, not before the first one.
                if (firstVisibleComponent) {
                    firstVisibleComponent = false;
                } else {
                    dim.width += hgap;
                }
                dim.width += d.width;
                if (useBaseline) {
                    int baseline = m.getBaseline(d.width, d.height);
                    if (baseline >= 0) {
                        maxAscent = Math.max(maxAscent, baseline);
                        maxDescent = Math.max(maxDescent, d.height - baseline);
                    }
                }
            }
        }
        if (useBaseline) {
            // A baseline-aligned row must fit the tallest ascent plus the
            // deepest descent.
            dim.height = Math.max(maxAscent + maxDescent, dim.height);
        }
        Insets insets = target.getInsets();
        dim.width += insets.left + insets.right + hgap*2;
        dim.height += insets.top + insets.bottom + vgap*2;
        return dim;
      }
    }
/**
* Returns the minimum dimensions needed to layout the <i>visible</i>
* components contained in the specified target container.
* @param target the container that needs to be laid out
* @return the minimum dimensions to lay out the
* subcomponents of the specified container
* @see #preferredLayoutSize
* @see java.awt.Container
* @see java.awt.Container#doLayout
*/
public Dimension minimumLayoutSize(Container target) {
synchronized (target.getTreeLock()) {
boolean useBaseline = getAlignOnBaseline();
Dimension dim = new Dimension(0, 0);
int nmembers = target.getComponentCount();
int maxAscent = 0;
int maxDescent = 0;
boolean firstVisibleComponent = true;
for (int i = 0 ; i < nmembers ; i++) {
Component m = target.getComponent(i);
if (m.visible) {
Dimension d = m.getMinimumSize();
dim.height = Math.max(dim.height, d.height);
if (firstVisibleComponent) {
firstVisibleComponent = false;
} else {
dim.width += hgap;
}
dim.width += d.width;
if (useBaseline) {
int baseline = m.getBaseline(d.width, d.height);
if (baseline >= 0) {
maxAscent = Math.max(maxAscent, baseline);
maxDescent = Math.max(maxDescent,
dim.height - baseline);
}
}
}
}
if (useBaseline) {
dim.height = Math.max(maxAscent + maxDescent, dim.height);
}
Insets insets = target.getInsets();
dim.width += insets.left + insets.right + hgap*2;
dim.height += insets.top + insets.bottom + vgap*2;
return dim;
}
}
/**
* Centers the elements in the specified row, if there is any slack.
* @param target the component which needs to be moved
* @param x the x coordinate
* @param y the y coordinate
* @param width the width dimensions
* @param height the height dimensions
* @param rowStart the beginning of the row
* @param rowEnd the ending of the row
* @param useBaseline Whether or not to align on baseline.
* @param ascent Ascent for the components. This is only valid if
* useBaseline is true.
* @param descent Ascent for the components. This is only valid if
* useBaseline is true.
* @return actual row height
*/
private int moveComponents(Container target, int x, int y, int width, int height,
int rowStart, int rowEnd, boolean ltr,
boolean useBaseline, int[] ascent,
int[] descent) {
switch (newAlign) {
case LEFT:
x += ltr ? 0 : width;
break;
case CENTER:
x += width / 2;
break;
case RIGHT:
x += ltr ? width : 0;
break;
case LEADING:
break;
case TRAILING:
x += width;
break;
}
int maxAscent = 0;
int nonbaselineHeight = 0;
int baselineOffset = 0;
if (useBaseline) {
int maxDescent = 0;
for (int i = rowStart ; i < rowEnd ; i++) {
Component m = target.getComponent(i);
if (m.visible) {
if (ascent[i] >= 0) {
maxAscent = Math.max(maxAscent, ascent[i]);
maxDescent = Math.max(maxDescent, descent[i]);
}
else {
nonbaselineHeight = Math.max(m.getHeight(),
nonbaselineHeight);
}
}
}
height = Math.max(maxAscent + maxDescent, nonbaselineHeight);
baselineOffset = (height - maxAscent - maxDescent) / 2;
}
for (int i = rowStart ; i < rowEnd ; i++) {
Component m = target.getComponent(i);
if (m.isVisible()) {
int cy;
if (useBaseline && ascent[i] >= 0) {
cy = y + baselineOffset + maxAscent - ascent[i];
}
else {
cy = y + (height - m.height) / 2;
}
if (ltr) {
m.setLocation(x, cy);
} else {
m.setLocation(target.width - x - m.width, cy);
}
x += m.width + hgap;
}
}
return height;
}
    /**
     * Lays out the container. This method lets each
     * <i>visible</i> component take
     * its preferred size by reshaping the components in the
     * target container in order to satisfy the alignment of
     * this {@code FlowLayout} object.
     *
     * @param target the specified component being laid out
     * @see Container
     * @see java.awt.Container#doLayout
     */
    public void layoutContainer(Container target) {
      synchronized (target.getTreeLock()) {
        Insets insets = target.getInsets();
        // Horizontal space available for components on one row.
        int maxwidth = target.width - (insets.left + insets.right + hgap*2);
        int nmembers = target.getComponentCount();
        int x = 0, y = insets.top + vgap;
        int rowh = 0, start = 0;
        boolean ltr = target.getComponentOrientation().isLeftToRight();
        boolean useBaseline = getAlignOnBaseline();
        int[] ascent = null;
        int[] descent = null;
        if (useBaseline) {
            ascent = new int[nmembers];
            descent = new int[nmembers];
        }
        for (int i = 0 ; i < nmembers ; i++) {
            Component m = target.getComponent(i);
            if (m.isVisible()) {
                Dimension d = m.getPreferredSize();
                m.setSize(d.width, d.height);
                if (useBaseline) {
                    int baseline = m.getBaseline(d.width, d.height);
                    if (baseline >= 0) {
                        ascent[i] = baseline;
                        descent[i] = d.height - baseline;
                    }
                    else {
                        // -1 marks "no baseline" for moveComponents().
                        ascent[i] = -1;
                    }
                }
                // Does the component still fit on the current row?
                if ((x == 0) || ((x + d.width) <= maxwidth)) {
                    if (x > 0) {
                        x += hgap;
                    }
                    x += d.width;
                    rowh = Math.max(rowh, d.height);
                } else {
                    // Row is full: position it, then start a new row with m.
                    rowh = moveComponents(target, insets.left + hgap, y,
                                   maxwidth - x, rowh, start, i, ltr,
                                   useBaseline, ascent, descent);
                    x = d.width;
                    y += vgap + rowh;
                    rowh = d.height;
                    start = i;
                }
            }
        }
        // Position the final (possibly partial) row.
        moveComponents(target, insets.left + hgap, y, maxwidth - x, rowh,
                       start, nmembers, ltr, useBaseline, ascent, descent);
      }
    }
//
// the internal serial version which says which version was written
// - 0 (default) for versions before the Java 2 platform, v1.2
// - 1 for version >= Java 2 platform v1.2, which includes "newAlign" field
//
private static final int currentSerialVersion = 1;
/**
     * This represents the {@code currentSerialVersion}
     * which is being used. It will be one of two values:
* {@code 0} versions before Java 2 platform v1.2,
* {@code 1} versions after Java 2 platform v1.2.
*
* @serial
* @since 1.2
*/
private int serialVersionOnStream = currentSerialVersion;
    /**
     * Reads this object out of a serialization stream, handling
     * objects written by older versions of the class that didn't contain all
     * of the fields we use now.
     *
     * @param stream the {@code ObjectInputStream} to read
     * @throws ClassNotFoundException if the class of a serialized object could
     *         not be found
     * @throws IOException if an I/O error occurs
     */
    @Serial
    private void readObject(ObjectInputStream stream)
         throws IOException, ClassNotFoundException
    {
        stream.defaultReadObject();
        if (serialVersionOnStream < 1) {
            // "newAlign" field wasn't present, so use the old "align" field.
            setAlignment(this.align);
        }
        // Streams written back out are always in the current format.
        serialVersionOnStream = currentSerialVersion;
    }
/**
* Returns a string representation of this {@code FlowLayout}
* object and its values.
* @return a string representation of this layout
*/
public String toString() {
String str = "";
switch (align) {
case LEFT: str = ",align=left"; break;
case CENTER: str = ",align=center"; break;
case RIGHT: str = ",align=right"; break;
case LEADING: str = ",align=leading"; break;
case TRAILING: str = ",align=trailing"; break;
}
return getClass().getName() + "[hgap=" + hgap + ",vgap=" + vgap + str + "]";
}
}
| |
/**
* Copyright Notice
*
* This is a work of the U.S. Government and is not subject to copyright
* protection in the United States. Foreign copyrights may apply.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.va.isaac.gui.enhancedsearchview;
import gov.va.isaac.AppContext;
import gov.va.isaac.gui.dialog.BusyPopover;
import gov.va.isaac.gui.enhancedsearchview.model.EnhancedSavedSearch;
import gov.va.isaac.gui.enhancedsearchview.model.SearchModel;
import gov.va.isaac.gui.enhancedsearchview.resulthandler.ResultsToDrools;
import gov.va.isaac.gui.enhancedsearchview.resulthandler.ResultsToRefset;
import gov.va.isaac.gui.enhancedsearchview.resulthandler.ResultsToReport;
import gov.va.isaac.gui.enhancedsearchview.resulthandler.ResultsToTaxonomy;
import gov.va.isaac.gui.enhancedsearchview.resulthandler.ResultsToWorkflow;
import gov.va.isaac.interfaces.gui.constants.SharedServiceNames;
import gov.va.isaac.interfaces.gui.views.DockedViewI;
import gov.va.isaac.interfaces.gui.views.commonFunctionality.ListBatchViewI;
import gov.va.isaac.search.CompositeSearchResult;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.ListChangeListener;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuBar;
import javafx.scene.control.MenuItem;
import javafx.scene.control.SplitPane;
import javafx.scene.control.TableColumn;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.Pane;
import javafx.stage.Stage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* EnhancedSearchViewController
*
* @author <a href="mailto:joel.kniaz@gmail.com">Joel Kniaz</a>
*/
public class EnhancedSearchViewController {
private static final Logger LOG = LoggerFactory.getLogger(EnhancedSearchViewController.class);
@FXML private AnchorPane mainPane;
@FXML private MenuBar enhancedSearchMenuBar;
@FXML private Menu handleResultsMenu;
@FXML private BorderPane searchBorderPane;
@FXML private SplitPane searchAndTaxonomySplitPane;
//@FXML private ListView<DisplayableFilter> searchFilterListView;
private EnhancedSearchViewTopPane topPane;
private EnhancedSearchViewBottomPane bottomPane;
private SearchModel searchModel = new SearchModel();
private Menu handleSearchMenu = new Menu("Handle Results");
private Menu saveSearchMenu = new Menu("Saved Searches");
private MenuItem resultsToReportMenuItem = new MenuItem("Report");
private MenuItem resultsToListMenuItem = new MenuItem("List");
private MenuItem resultsToWorkflowMenuItem = new MenuItem("Workflow");
private MenuItem resultsToTaxonomyMenuItem = new MenuItem("Taxonomy");
private MenuItem resultsToSememeMenuItem = new MenuItem("Sememe");
private MenuItem resultsToDroolsMenuItem = new MenuItem("Drools");
private MenuItem saveSearchMenuItem = new MenuItem("Save Search");
private Menu loadSearchMenu = new Menu("Load Saved Search");
public static EnhancedSearchViewController init() throws IOException {
// Load FXML
URL resource = EnhancedSearchViewController.class.getResource("EnhancedSearchView.fxml");
LOG.debug("FXML for " + EnhancedSearchViewController.class + ": " + resource);
FXMLLoader loader = new FXMLLoader(resource);
loader.load();
return loader.getController();
}
    /**
     * FXML lifecycle hook: wires up the search view after the FXML fields have
     * been injected. Builds the top and bottom panes, binds the search-running
     * flag to the busy popover and the search button label, and refreshes the
     * bottom panel whenever the results change.
     */
    @FXML
    public void initialize() {
        //assert searchText != null : "fx:id=\"searchText\" was not injected: check your FXML file 'EnhancedSearchView.fxml'.";
        //assert addIsDescendantOfFilterButton != null : "fx:id=\"addIsDescendantOfFilterButton\" was not injected: check your FXML file 'EnhancedSearchView.fxml'.";
        // NOTE(review): the message below names fx:id "searchResultsAndTaxonomySplitPane"
        // but the injected field is "searchAndTaxonomySplitPane" — message looks stale.
        assert searchAndTaxonomySplitPane != null : "fx:id=\"searchResultsAndTaxonomySplitPane\" was not injected: check your FXML file 'EnhancedSearchView.fxml'.";
        assert enhancedSearchMenuBar != null : "fx:id=\"enhancedSearchMenuBar\" was not injected: check your FXML file 'EnhancedSearchView.fxml'.";
        Stage stage = AppContext.getMainApplicationWindow().getPrimaryStage();
        initializeSearchMenus(stage);
        // Attach the shared stylesheet exactly once.
        String styleSheet = EnhancedSearchViewController.class.getResource("/isaac-shared-styles.css").toString();
        if (! searchAndTaxonomySplitPane.getStylesheets().contains(styleSheet)) {
            searchAndTaxonomySplitPane.getStylesheets().add(styleSheet);
        }
        topPane = new EnhancedSearchViewTopPane();
        bottomPane = new EnhancedSearchViewBottomPane(stage);
        // Show/hide the "Searching..." busy popover as the search-running flag flips.
        SearchModel.getSearchRunning().addListener(new ChangeListener<Boolean>() {
            @Override
            public void changed(ObservableValue<? extends Boolean> observable,
                    Boolean oldValue, Boolean newValue) {
                if (newValue) {
                    searchModel.setSearchRunningPopover(BusyPopover.createBusyPopover("Searching...", topPane.getSearchButton()));
                } else {
                    if (searchModel.getSearchRunningPopover() != null) {
                        searchModel.getSearchRunningPopover().hide();
                    }
                }
            }
        });
        //TODO (artf231846) - things that hit the DB (BDB or the Workflow SQL DB) should NOT Be done in the JavaFX foreground thread. This causes large delays in displaying your GUI.
        //this sort of stuff need to be a in background thread, with an appropriate progress indicator
        // The search button doubles as a cancel button while a search is running.
        topPane.getSearchButton().setOnAction((action) -> {
            if (SearchModel.getSearchRunning().get() && SearchModel.getSsh() != null) {
                SearchModel.getSsh().cancel();
            } else {
                SearchModel.getSearchTypeSelector().getTypeSpecificModel().search(
                        searchModel.getResultsTypeComboBox().getSelectionModel().getSelectedItem(),
                        searchModel.getMaxResultsCustomTextField());
            }
        });
        // Keep the button label in sync with the running state.
        SearchModel.getSearchRunning().addListener((observable, oldValue, newValue) -> {
            if (SearchModel.getSearchRunning().get()) {
                topPane.getSearchButton().setText("Cancel");
            } else {
                topPane.getSearchButton().setText("Search");
            }
        });
        ResultsToTaxonomy.setSearchAndTaxonomySplitPane(searchAndTaxonomySplitPane);
        // Assemble the border pane: controls on top, summary on the bottom,
        // results table in the center.
        searchBorderPane.setTop(topPane.getTopPaneVBox());
        searchBorderPane.setBottom(bottomPane.getBottomPaneHBox());
        searchBorderPane.setCenter(SearchModel.getSearchResultsTable().getResults());
        searchModel.setPanes(bottomPane, searchAndTaxonomySplitPane, ResultsToTaxonomy.getTaxonomyPanelBorderPane());
        // Refresh the bottom panel and menu enablement whenever results change.
        SearchModel.getSearchResultsTable().getResults().getItems().addListener(new ListChangeListener<CompositeSearchResult>() {
            @Override
            public void onChanged(
                    javafx.collections.ListChangeListener.Change<? extends CompositeSearchResult> c) {
                bottomPane.refreshBottomPanel();
                setSearchMenusDisabled();
            }
        });
    }
/**
 * Returns the root pane of this view for embedding in the application window.
 */
public Pane getRoot() {
    // mainPane is the outermost container; the search/taxonomy split pane lives inside it.
    return mainPane;
}
/**
 * Strategy for extracting a string value from a search-results table column.
 */
interface ColumnValueExtractor {
    String extract(TableColumn<CompositeSearchResult, ?> col);
}
/**
 * Builds the "handle search" and "save search" menus, wires every menu item to
 * its action handler, and populates the saved-search sub-menu.
 *
 * @param stage parent stage, passed through to the sememe-creation dialog
 */
private void initializeSearchMenus(Stage stage) {
    ResultsToTaxonomy.initializeTaxonomyPanel();
    // Rebuild both menus from scratch so repeated initialization cannot duplicate items.
    handleSearchMenu.getItems().clear();
    saveSearchMenu.getItems().clear();
    handleSearchMenu.getItems().addAll(resultsToReportMenuItem,
        resultsToListMenuItem,
        resultsToWorkflowMenuItem,
        resultsToTaxonomyMenuItem,
        resultsToSememeMenuItem,
        resultsToDroolsMenuItem);
    saveSearchMenu.getItems().addAll(saveSearchMenuItem, loadSearchMenu);
    enhancedSearchMenuBar.getMenus().addAll(handleSearchMenu, saveSearchMenu);
    // Route each "results to ..." item to the corresponding exporter.
    resultsToListMenuItem.setOnAction((e) -> resultsToList());
    resultsToReportMenuItem.setOnAction((e) -> ResultsToReport.resultsToReport());
    resultsToWorkflowMenuItem.setOnAction((e) -> ResultsToWorkflow.multipleResultsToWorkflow());
    resultsToTaxonomyMenuItem.setOnAction((e) -> ResultsToTaxonomy.resultsToSearchTaxonomy());
    resultsToSememeMenuItem.setOnAction((e) -> createSememe(stage));
    resultsToDroolsMenuItem.setOnAction((e) -> ResultsToDrools.createDroolsOnClipboard(searchModel));
    // Saving a search immediately refreshes the load-search sub-menu so the new entry appears.
    saveSearchMenuItem.setOnAction((e) -> {
        topPane.getSearchSaver().saveSearch();
        EnhancedSavedSearch.refreshSavedSearchMenu(loadSearchMenu);
    });
    EnhancedSavedSearch.refreshSavedSearchMenu(loadSearchMenu);
    //setSearchMenusDisabled(true);
}
/**
 * Synchronises the enabled state of the search menus with the current
 * number of rows in the results table.
 */
public void setSearchMenusDisabled() {
    int count = SearchModel.getSearchResultsTable().getResults().getItems().size();
    // All search menus are off when there are no results at all.
    setSearchMenusDisabled(count == 0);
    // NOTE(review): the workflow item is disabled only for 1..5 results, i.e.
    // enabled for large result sets — this looks inverted; confirm the intent.
    resultsToWorkflowMenuItem.setDisable(count > 0 && count <= 5);
}
/**
 * Enables or disables the search menus as a group.
 *
 * @param isDisabled {@code true} to disable the "handle search" menu and the save-search item
 */
public void setSearchMenusDisabled(boolean isDisabled) {
    handleSearchMenu.setDisable(isDisabled);
    saveSearchMenuItem.setDisable(isDisabled);
}
/**
 * Creates a new sememe (refset) populated with every value in the search
 * results table, reporting success or failure to the user in a dialog.
 *
 * @param stage parent stage for the refset-creation dialog
 */
private void createSememe(Stage stage) {
    try {
        // A null name indicates the user cancelled; only report success for a real creation.
        String refexName = ResultsToRefset.resultsToRefset(stage, SearchModel.getSearchResultsTable().getResults());
        if (refexName != null) {
            AppContext.getCommonDialogs().showInformationDialog("Sememe Successfully Created", "Created and populated new Sememe (" + refexName + ") with all values in results table");
        }
    } catch (Exception e) {
        // NOTE(review): the exception is swallowed — nothing is logged and the
        // dialog carries no detail; consider logging 'e' before showing the dialog.
        AppContext.getCommonDialogs().showErrorDialog("Sememe Creation Failure", "Sememe Creation Failure", "Failed to create and populate Sememe with values in results table");
    }
}
/**
 * Sends every distinct concept from the current search results to the docked
 * list-batch view (de-duplicated by concept nid, results-table order preserved).
 */
private void resultsToList() {
    ListBatchViewI lv = AppContext.getService(ListBatchViewI.class, SharedServiceNames.DOCKED);
    AppContext.getMainApplicationWindow().ensureDockedViewIsVisble((DockedViewI) lv);
    // A LinkedHashSet de-duplicates in O(1) per element while keeping insertion
    // order (the previous List.contains scan made this loop O(n^2)).
    // Fully qualified to avoid changing the file's import block.
    java.util.Set<Integer> nids = new java.util.LinkedHashSet<>();
    for (CompositeSearchResult result : SearchModel.getSearchResultsTable().getResults().getItems()) {
        nids.add(result.getContainingConcept().getNid());
    }
    lv.addConcepts(new ArrayList<>(nids));
}
}
| |
package com.github.marcinseweryn.service;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.stereotype.Service;
import com.github.marcinseweryn.dao.UserDAO;
import com.github.marcinseweryn.model.User;
@Service
public class UserServiceImpl implements UserService {

    @Autowired
    private UserDAO userDAO;

    // NOTE(review): SimpleDateFormat is not thread-safe and this service is a
    // Spring singleton, so concurrent updateUsers/findUsers calls can corrupt
    // formatting. Kept for interface compatibility — consider migrating to
    // java.time.format.DateTimeFormatter. TODO confirm concurrency expectations.
    DateFormat df = new SimpleDateFormat("yyyy-MM-dd");

    /**
     * Hashes the user's password with BCrypt, marks the account enabled and
     * stores it via the DAO. The user is only persisted when a non-empty PESEL
     * is present.
     *
     * @param user user to persist; its password field is replaced by the hash
     */
    public void addUser(User user) {
        // Null guard added: the previous code dereferenced getPesel() directly
        // and threw a NullPointerException for a user without a PESEL.
        String pesel = user.getPesel();
        if (pesel != null && !pesel.equals("")) {
            BCryptPasswordEncoder passwordEncoder = new BCryptPasswordEncoder();
            String hashedPassword = passwordEncoder.encode(user.getPassword());
            user.setEnabled("1");
            user.setPassword(hashedPassword);
            userDAO.addUser(user);
        }
    }

    /** @return all users known to the DAO */
    public List<User> findAllUsers() {
        return userDAO.findAllUsers();
    }

    /**
     * Deletes the users with the given IDs; a no-op for an empty list.
     *
     * @param usersIDs IDs of the users to delete
     */
    public void deleteUsers(List<Integer> usersIDs) {
        if (!usersIDs.isEmpty()) {
            userDAO.deleteUsers(usersIDs);
        }
    }

    /**
     * Appends {@code column = 'value'<separator>} to {@code sb} when the value
     * is neither null nor empty. Shared by {@link #updateUsers} (separator
     * {@code " ,"}) and {@link #findUsers} (separator {@code " and "}).
     *
     * SECURITY NOTE(review): values are concatenated, unescaped, into a raw
     * SQL fragment — a quote in any field enables SQL injection. Fixing this
     * requires changing the UserDAO contract to parameterized queries
     * (PreparedStatement placeholders); flagged here rather than silently changed.
     */
    private static void appendField(StringBuilder sb, String column, String value, String separator) {
        if (value != null && !value.equals("")) {
            sb.append(column).append(" = '").append(value).append("'").append(separator);
        }
    }

    /**
     * Updates the given users with every non-empty field of {@code user},
     * building a {@code SET}-style fragment ({@code col = 'v' ,...}) for the DAO.
     * Passwords are BCrypt-hashed before inclusion. No-op when no field is set.
     */
    @Override
    public void updateUsers(List<Integer> usersIDs, User user) {
        StringBuilder sb = new StringBuilder();
        appendField(sb, "pesel", user.getPesel(), " ,");
        appendField(sb, "gender", user.getGender(), " ,");
        appendField(sb, "name", user.getName(), " ,");
        appendField(sb, "surname", user.getSurname(), " ,");
        if (user.getBirthDate() != null) {
            sb.append("birthDate = '").append(df.format(user.getBirthDate())).append("' ,");
        }
        if (user.getPassword() != null && !user.getPassword().equals("")) {
            // Never store the plain-text password; hash it first.
            String hashedPassword = new BCryptPasswordEncoder().encode(user.getPassword());
            sb.append("password = '").append(hashedPassword).append("' ,");
        }
        appendField(sb, "streetAddress", user.getStreetAddress(), " ,");
        appendField(sb, "city", user.getCity(), " ,");
        appendField(sb, "postalCode", user.getPostalCode(), " ,");
        appendField(sb, "phone", user.getPhone(), " ,");
        appendField(sb, "email", user.getEmail(), " ,");
        appendField(sb, "role", user.getRole(), " ,");
        String columns = sb.toString();
        if (columns.length() > 0) {
            // Drop the trailing comma (the trailing space is preserved, as before).
            columns = columns.substring(0, columns.length() - 1);
        }
        if (!columns.equals("")) {
            userDAO.updateUsers(usersIDs, columns);
        }
    }

    /**
     * Finds users matching every non-empty field of {@code user}, building a
     * {@code WHERE}-style fragment ({@code col = 'v' and ...}) for the DAO.
     *
     * @return matching users, or {@code null} when no criteria were supplied
     *         (kept for backward compatibility; callers must handle null)
     */
    @Override
    public List<User> findUsers(User user) {
        StringBuilder sb = new StringBuilder();
        if (user.getID() != null) {
            sb.append("ID = '").append(user.getID()).append("' and ");
        }
        appendField(sb, "pesel", user.getPesel(), " and ");
        appendField(sb, "gender", user.getGender(), " and ");
        appendField(sb, "name", user.getName(), " and ");
        appendField(sb, "surname", user.getSurname(), " and ");
        if (user.getBirthDate() != null) {
            sb.append("birthDate = '").append(df.format(user.getBirthDate())).append("' and ");
        }
        appendField(sb, "streetAddress", user.getStreetAddress(), " and ");
        appendField(sb, "city", user.getCity(), " and ");
        appendField(sb, "postalCode", user.getPostalCode(), " and ");
        appendField(sb, "phone", user.getPhone(), " and ");
        appendField(sb, "email", user.getEmail(), " and ");
        appendField(sb, "role", user.getRole(), " and ");
        String columns = sb.toString();
        if (columns.length() > 0) {
            // Drop the trailing "and " (the trailing space after the quote is preserved).
            columns = columns.substring(0, columns.length() - 4);
        }
        if (columns.equals("")) {
            return null;
        } else {
            return userDAO.findUsers(columns);
        }
    }

    /** @return the user with the given ID, as reported by the DAO */
    @Override
    public User findUserByID(Integer ID) {
        return userDAO.findUserByID(ID);
    }

    /**
     * @return users with the given IDs, or {@code null} for an empty ID list
     *         (kept for backward compatibility; callers must handle null)
     */
    @Override
    public List<User> findUsersByIDs(List<Integer> usersIDs) {
        if (!usersIDs.isEmpty()) {
            return userDAO.findUsersByIDs(usersIDs);
        }
        return null;
    }

    /** @return the user with the given email address, as reported by the DAO */
    @Override
    public User findUserByEmail(String email) {
        return userDAO.findUserByEmail(email);
    }
}
| |
/*
* Created on May 18, 2004
*
* Paros and its related class files.
*
* Paros is an HTTP/HTTPS proxy for assessing web application security.
* Copyright (C) 2003-2005 Chinotec Technologies Company
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the Clarified Artistic License
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Clarified Artistic License for more details.
*
* You should have received a copy of the Clarified Artistic License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
// ZAP: 2011/08/03 Revamped upgrade for 1.3.2
// ZAP: 2011/10/05 Write backup file to user dir
// ZAP: 2011/11/15 Changed to use ZapXmlConfiguration, to enforce the same
// character encoding when reading/writing configurations. Changed to use the
// correct file when an error occurs during the load of the configuration file.
// Removed the calls XMLConfiguration.load() as they are not needed, the
// XMLConfiguration constructor used already does that.
// ZAP: 2011/11/20 Support for extension factory
// ZAP: 2012/03/03 Added ZAP homepage
// ZAP: 2012/03/15 Removed a @SuppressWarnings annotation from the method
// copyAllProperties.
// ZAP: 2012/03/17 Issue 282 ZAP and PAROS team constants
// ZAP: 2012/05/02 Added method createInstance and changed the method
// getInstance to use it.
// ZAP: 2012/05/03 Changed the Patterns used to detect the O.S. to be final.
// ZAP: 2012/06/15 Issue 312 Increase the maximum number of scanning threads allowed
// ZAP: 2012/07/13 Added variable for maximum number of threads used in scan (MAX_THREADS_PER_SCAN)
// ZAP: 2012/10/15 Issue 397: Support weekly builds
// ZAP: 2012/10/17 Issue 393: Added more online links from menu
// ZAP: 2012/11/15 Issue 416: Normalise how multiple related options are managed
// throughout ZAP and enhance the usability of some options.
// ZAP: 2012/11/20 Issue 419: Restructure jar loading code
// ZAP: 2012/12/08 Issue 428: Changed to use I18N for messages, to support the marketplace
// ZAP: 2013/03/03 Issue 546: Remove all template Javadoc comments
// ZAP: 2013/04/14 Issue 610: Replace the use of the String class for available/default "Forced Browse" files
// ZAP: 2013/04/15 Issue 632: Manual Request Editor dialogue (HTTP) configurations not saved correctly
// ZAP: 2013/12/03 Issue 933: Automatically determine install dir
// ZAP: 2013/12/13 Issue 919: Support for multiple language vulnerability files.
// ZAP: 2014/04/11 Issue 1148: ZAP 2.3.0 does not launch after upgrading in some situations
// ZAP: 2014/07/15 Issue 1265: Context import and export
// ZAP: 2014/08/14 Issue 1300: Add-ons show incorrect language when English is selected on non English locale
// ZAP: 2014/11/11 Issue 1406: Move online menu items to an add-on
// ZAP: 2015/01/04 Issue 1388: Not all translated files are updated when "zaplang" package is imported
// ZAP: 2014/01/04 Issue 1394: Import vulnerabilities.xml files when updating the translated resources
// ZAP: 2014/01/04 Issue 1458: Change home/installation dir paths to be always absolute
// ZAP: 2015/03/10 Issue 653: Handle updates on Kali better
// ZAP: 2015/03/30 Issue 1582: Enablers for low memory option
// ZAP: 2015/04/12 Remove "installation" fuzzers dir, no longer in use
// ZAP: 2015/08/01 Remove code duplication in catch of exceptions, use installation directory in default config file
package org.parosproxy.paros;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.NoSuchElementException;
import java.util.Properties;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.ConversionException;
import org.apache.commons.configuration.XMLConfiguration;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.parosproxy.paros.extension.option.OptionsParamView;
import org.parosproxy.paros.model.FileCopier;
import org.parosproxy.paros.model.Model;
import org.zaproxy.zap.ZAP;
import org.zaproxy.zap.control.AddOnLoader;
import org.zaproxy.zap.extension.autoupdate.OptionsParamCheckForUpdates;
import org.zaproxy.zap.utils.I18N;
import org.zaproxy.zap.utils.ZapXmlConfiguration;
public final class Constant {
// ZAP: rebrand
public static final String PROGRAM_NAME = "OWASP ZAP";
public static final String PROGRAM_NAME_SHORT = "ZAP";
public static final String ZAP_HOMEPAGE = "http://www.owasp.org/index.php/ZAP";
public static final String ZAP_EXTENSIONS_PAGE = "https://github.com/zaproxy/zap-extensions";
public static final String ZAP_TEAM = "ZAP Dev Team";
public static final String PAROS_TEAM = "Chinotec Technologies";
// ************************************************************
// the config.xml MUST be set to be the same as the version_tag
// otherwise the config.xml will be overwritten everytime.
// ************************************************************
private static final String DEV_VERSION = "Dev Build";
public static final String ALPHA_VERSION = "alpha";
public static final String BETA_VERSION = "beta";
private static final long VERSION_TAG = 2004001;
// Old version numbers - for upgrade
private static final long V_2_3_1_TAG = 2003001;
private static final long V_2_2_0_TAG = 2002000;
private static final long V_2_1_0_TAG = 2001000;
private static final long V_2_0_0_TAG = 2000000;
private static final long V_1_4_1_TAG = 1004001;
private static final long V_1_3_1_TAG = 1003001;
private static final long V_1_3_0_TAG = 1003000;
private static final long V_1_2_1_TAG = 1002001;
private static final long V_1_2_0_TAG = 1002000;
private static final long V_1_1_0_TAG = 1001000;
private static final long V_1_0_0_TAG = 1000000;
private static final long V_PAROS_TAG = 30020013;
// ************************************************************
// note the above
// ************************************************************
// These are no longer final - version is now loaded from the manifest file
public static String PROGRAM_VERSION = DEV_VERSION;
public static String PROGRAM_TITLE = PROGRAM_NAME + " " + PROGRAM_VERSION;
public static final String SYSTEM_PAROS_USER_LOG = "zap.user.log";
// public static final String FILE_CONFIG = "xml/config.xml";
// public static final String FOLDER_PLUGIN = "plugin";
// public static final String FOLDER_FILTER = "filter";
// public static final String FOLDER_SESSION = "session";
// public static final String DBNAME_TEMPLATE = "db/parosdb";
// public static final String DBNAME_UNTITLED = FOLDER_SESSION + "/untitled";
public static final String FILE_SEPARATOR = System.getProperty("file.separator");
/**
* @deprecated (TODO add version) The path does not take into account the installation directory, use
* {@link #getPathDefaultConfigFile()} instead.
*/
@Deprecated
public static final String FILE_CONFIG_DEFAULT = "xml/config.xml";
public static final String FILE_CONFIG_NAME = "config.xml";
public static final String FOLDER_PLUGIN = "plugin";
public static final String FOLDER_FILTER = "filter";
public static final String FOLDER_SESSION_DEFAULT = "session";
public static final String DBNAME_TEMPLATE = "db" + System.getProperty("file.separator") + "zapdb";
/**
* Prefix (file name) of Messages.properties files.
*
* @see #MESSAGES_EXTENSION
*/
public static final String MESSAGES_PREFIX = "Messages";
/**
* Extension (with dot) of Messages.properties files.
*
* @see #MESSAGES_PREFIX
* @since 2.4.0
*/
public static final String MESSAGES_EXTENSION = ".properties";
public static final String DBNAME_UNTITLED_DEFAULT = FOLDER_SESSION_DEFAULT + System.getProperty("file.separator") + "untitled";
public String FILE_CONFIG = FILE_CONFIG_NAME;
public String FOLDER_SESSION = "session";
public String DBNAME_UNTITLED = FOLDER_SESSION + System.getProperty("file.separator") + "untitled";
public String ACCEPTED_LICENSE_DEFAULT = "AcceptedLicense";
public String ACCEPTED_LICENSE = ACCEPTED_LICENSE_DEFAULT;
public static final String FILE_PROGRAM_SPLASH = "resource/zap128x128.png";
// Accelerator keys - Default: Windows
public static String ACCELERATOR_UNDO = "control Z";
public static String ACCELERATOR_REDO = "control Y";
public static String ACCELERATOR_TRIGGER_KEY = "Control";
private static Constant instance = null;
public static final int MAX_HOST_CONNECTION = 15;
public static final int MAX_THREADS_PER_SCAN = 50;
// ZAP: Dont announce ourselves
//public static final String USER_AGENT = PROGRAM_NAME + "/" + PROGRAM_VERSION;
public static final String USER_AGENT = "";
private static String staticEyeCatcher = "0W45pz4p";
private static boolean staticSP = false;
private static final String USER_CONTEXTS_DIR = "contexts";
private static final String USER_POLICIES_DIR = "policies";
//
// Home dir for ZAP, ie where the config file is. Can be set on cmdline, otherwise will be set to default loc
private static String zapHome = null;
// Default home dir for 'full' releases - used for copying full conf file when dev/daily release run for the first time
// and also for the JVM options config file
private static String zapStd = null;
// Install dir for ZAP, but default will be cwd
private static String zapInstall = null;
private static Boolean onKali = null;
private static Boolean lowMemoryOption = null;
// ZAP: Added i18n
public static I18N messages = null;
/**
* The system's locale (as determined by the JVM at startup, {@code Locale#getDefault()}).
* <p>
* The locale is kept here because the default locale is later overridden with the user's chosen locale/language.
*
* @see Locale#getDefault()
*/
private static final Locale SYSTEMS_LOCALE = Locale.getDefault();
/**
* Name of directory that contains the (source and translated) resource files.
*
* @see #MESSAGES_PREFIX
* @see #VULNERABILITIES_PREFIX
*/
public static final String LANG_DIR = "lang";
/**
* Prefix (file name) of vulnerabilities.xml files.
*
* @see #VULNERABILITIES_EXTENSION
* @since 2.4.0
*/
public static final String VULNERABILITIES_PREFIX = "vulnerabilities";
/**
* @deprecated (2.4.0) Use {@link #VULNERABILITIES_PREFIX} instead. It will be removed in a following release.
*/
@Deprecated
public static String VULNS_BASE = VULNERABILITIES_PREFIX;
/**
* Extension (with dot) of vulnerabilities.xml files.
*
* @see #VULNERABILITIES_PREFIX
* @since 2.4.0
*/
public static final String VULNERABILITIES_EXTENSION = ".xml";
// ZAP: Added dirbuster dir
public String DIRBUSTER_DIR = "dirbuster";
public String DIRBUSTER_CUSTOM_DIR = DIRBUSTER_DIR;
public String FUZZER_DIR = "fuzzers";
public static String FOLDER_LOCAL_PLUGIN = FOLDER_PLUGIN;
public static final URL OK_FLAG_IMAGE_URL = Constant.class.getResource("/resource/icon/10/072.png"); // Green
public static final URL INFO_FLAG_IMAGE_URL = Constant.class.getResource("/resource/icon/10/073.png"); // Blue
public static final URL LOW_FLAG_IMAGE_URL = Constant.class.getResource("/resource/icon/10/074.png"); // Yellow
public static final URL MED_FLAG_IMAGE_URL = Constant.class.getResource("/resource/icon/10/076.png"); // Orange
public static final URL HIGH_FLAG_IMAGE_URL = Constant.class.getResource("/resource/icon/10/071.png"); // Red
public static final URL BLANK_IMAGE_URL = Constant.class.getResource("/resource/icon/10/blank.png");
public static final URL SPIDER_IMAGE_URL = Constant.class.getResource("/resource/icon/10/spider.png");
/** @return the current static eye-catcher marker string */
public static String getEyeCatcher() {
    return staticEyeCatcher;
}
/** Overrides the static eye-catcher marker string. */
public static void setEyeCatcher(String eyeCatcher) {
    staticEyeCatcher = eyeCatcher;
}
/** Sets the static SP flag (semantics not visible here; see callers). */
public static void setSP(boolean isSP) {
    staticSP = isSP;
}
/** @return the current value of the static SP flag */
public static boolean isSP() {
    return staticSP;
}
/**
 * Initialises all home/install paths, performs any pending config-file
 * upgrades and i18n setup, then configures the accelerator keys.
 */
public Constant() {
    initializeFilesAndDirectories();
    setAcceleratorKeys();
}
/**
 * Returns the default ZAP home directory for this platform, computing and
 * caching it on first use.
 *
 * @param incDevOption when true, dev/daily builds get a "_D"-suffixed
 *        directory so they cannot corrupt a full release's configuration
 * @return the absolute-ish default home directory path
 */
public static String getDefaultHomeDirectory(boolean incDevOption) {
    if (zapStd == null) {
        String base = System.getProperty("user.home");
        if (base == null) {
            base = ".";
        }
        if (isLinux()) {
            // Linux: hidden ".ZAP" directory under the user's home directory
            base = base + FILE_SEPARATOR + "." + PROGRAM_NAME_SHORT;
        } else if (isMacOsX()) {
            // Mac OS X: configuration lives under ~/Library/Application Support
            base = base + FILE_SEPARATOR + "Library" + FILE_SEPARATOR
                    + "Application Support" + FILE_SEPARATOR + PROGRAM_NAME_SHORT;
        } else {
            // Windows (and anything else): program-named directory under the home directory
            base = base + FILE_SEPARATOR + PROGRAM_NAME;
        }
        zapStd = base;
    }
    if (incDevOption && (isDevBuild() || isDailyBuild())) {
        // Default to a different home dir to prevent messing up full releases
        return zapStd + "_D";
    }
    return zapStd;
}
/**
 * Resolves the ZAP home directory and everything under it (config file,
 * session/dirbuster/fuzzer/plugin folders), configures log4j, upgrades older
 * configuration files to the current {@code VERSION_TAG}, and initialises
 * i18n from the configured locale. Exits the JVM with status 1 if the home
 * directory cannot be set up or the config upgrade fails unrecoverably.
 */
private void initializeFilesAndDirectories() {
    FileCopier copier = new FileCopier();
    File f = null;
    Logger log = null;
    // default to use application directory 'log'
    System.setProperty(SYSTEM_PAROS_USER_LOG, "log");
    // Set up the version from the manifest
    PROGRAM_VERSION = getVersionFromManifest();
    PROGRAM_TITLE = PROGRAM_NAME + " " + PROGRAM_VERSION;
    if (zapHome == null) {
        zapHome = getDefaultHomeDirectory(true);
    }
    // Rebase every relative path constant onto the (absolute) home directory.
    zapHome = getAbsolutePath(zapHome);
    f = new File(zapHome);
    FILE_CONFIG = zapHome + FILE_CONFIG;
    FOLDER_SESSION = zapHome + FOLDER_SESSION;
    DBNAME_UNTITLED = zapHome + DBNAME_UNTITLED;
    ACCEPTED_LICENSE = zapHome + ACCEPTED_LICENSE;
    DIRBUSTER_CUSTOM_DIR = zapHome + DIRBUSTER_DIR;
    FUZZER_DIR = zapHome + FUZZER_DIR;
    FOLDER_LOCAL_PLUGIN = zapHome + FOLDER_LOCAL_PLUGIN;
    try {
        System.setProperty(SYSTEM_PAROS_USER_LOG, zapHome);
        if (!f.isDirectory()) {
            if (! f.mkdir() ) {
                // ZAP: report failure to create directory
                System.out.println("Failed to create directory " + f.getAbsolutePath());
            }
        }
        // Setup the logging: copy the default log4j.properties from the install
        // dir on first run, then point log4j at it.
        File logFile = new File(zapHome + "/log4j.properties");
        if (!logFile.exists()) {
            copier.copy(new File(zapInstall, "xml/log4j.properties"),logFile);
        }
        System.setProperty("log4j.configuration", logFile.getAbsolutePath());
        PropertyConfigurator.configure(logFile.getAbsolutePath());
        log = Logger.getLogger(Constant.class);
        f = new File(FILE_CONFIG);
        if (!f.isFile()) {
            // No config file yet: seed it from an older install location or the bundled default.
            File oldf;
            if (isDevBuild() || isDailyBuild()) {
                // try standard location
                oldf = new File (getDefaultHomeDirectory(false) + FILE_SEPARATOR + FILE_CONFIG_NAME);
            } else {
                // try old location
                oldf = new File (zapHome + FILE_SEPARATOR + "zap" + FILE_SEPARATOR + FILE_CONFIG_NAME);
            }
            if (oldf.exists()) {
                log.info("Copying defaults from " + oldf.getAbsolutePath() + " to " + FILE_CONFIG);
                copier.copy(oldf,f);
            } else {
                log.info("Copying defaults from " + getPathDefaultConfigFile() + " to " + FILE_CONFIG);
                copier.copy(getPathDefaultConfigFile().toFile(),f);
            }
        }
        // Ensure each working directory exists, creating missing ones.
        f = new File(FOLDER_SESSION);
        if (!f.isDirectory()) {
            log.info("Creating directory " + FOLDER_SESSION);
            if (! f.mkdir() ) {
                // ZAP: report failure to create directory
                System.out.println("Failed to create directory " + f.getAbsolutePath());
            }
        }
        f = new File(DIRBUSTER_CUSTOM_DIR);
        if (!f.isDirectory()) {
            log.info("Creating directory " + DIRBUSTER_CUSTOM_DIR);
            if (! f.mkdir() ) {
                // ZAP: report failure to create directory
                System.out.println("Failed to create directory " + f.getAbsolutePath());
            }
        }
        f = new File(FUZZER_DIR);
        if (!f.isDirectory()) {
            log.info("Creating directory " + FUZZER_DIR);
            if (! f.mkdir() ) {
                // ZAP: report failure to create directory
                System.out.println("Failed to create directory " + f.getAbsolutePath());
            }
        }
        f = new File(FOLDER_LOCAL_PLUGIN);
        if (!f.isDirectory()) {
            log.info("Creating directory " + FOLDER_LOCAL_PLUGIN);
            if (! f.mkdir() ) {
                // ZAP: report failure to create directory
                System.out.println("Failed to create directory " + f.getAbsolutePath());
            }
        }
    } catch (Exception e) {
        System.err.println("Unable to initialize home directory! " + e.getMessage());
        e.printStackTrace(System.err);
        System.exit(1);
    }
    // Upgrade actions: bring an older config file up to VERSION_TAG step by step.
    try {
        try {
            // ZAP: Changed to use ZapXmlConfiguration, to enforce the same character encoding when reading/writing configurations.
            XMLConfiguration config = new ZapXmlConfiguration(FILE_CONFIG);
            config.setAutoSave(false);
            long ver = config.getLong("version");
            if (ver == VERSION_TAG) {
                // Nothing to do
            } else if (isDevBuild() || isDailyBuild()) {
                // Nothing to do
            } else {
                // Backup the old one before touching it.
                log.info("Backing up config file to " + FILE_CONFIG + ".bak");
                f = new File(FILE_CONFIG);
                try {
                    copier.copy(f, new File(FILE_CONFIG + ".bak"));
                } catch (IOException e) {
                    String msg = "Failed to backup config file " +
                        FILE_CONFIG + " to " + FILE_CONFIG + ".bak " + e.getMessage();
                    System.err.println(msg);
                    log.error(msg, e);
                }
                // Apply every applicable upgrade step in order, oldest first.
                if (ver == V_PAROS_TAG) {
                    upgradeFrom1_1_0(config);
                    upgradeFrom1_2_0(config);
                }
                if (ver <= V_1_0_0_TAG) {
                    // Nothing to do
                }
                if (ver <= V_1_1_0_TAG) {
                    upgradeFrom1_1_0(config);
                }
                if (ver <= V_1_2_0_TAG) {
                    upgradeFrom1_2_0(config);
                }
                if (ver <= V_1_2_1_TAG) {
                    // Nothing to do
                }
                if (ver <= V_1_3_0_TAG) {
                    // Nothing to do
                }
                if (ver <= V_1_3_1_TAG) {
                    // Nothing to do
                }
                if (ver <= V_1_4_1_TAG) {
                    upgradeFrom1_4_1(config);
                }
                if (ver <= V_2_0_0_TAG) {
                    upgradeFrom2_0_0(config);
                }
                if (ver <= V_2_1_0_TAG) {
                    // Nothing to do
                }
                if (ver <= V_2_2_0_TAG) {
                    upgradeFrom2_2_0(config);
                }
                if (ver <= V_2_3_1_TAG) {
                    upgradeFrom2_3_1(config);
                }
                log.info("Upgraded from " + ver);
                // Update the version
                config.setProperty("version", VERSION_TAG);
                config.save();
            }
        } catch (ConfigurationException | ConversionException | NoSuchElementException e) {
            // if there is any error in config file (eg config file not exist, corrupted),
            // overwrite previous configuration file
            // ZAP: changed to use the correct file
            copier.copy(getPathDefaultConfigFile().toFile(), new File(FILE_CONFIG));
        }
    } catch (Exception e) {
        System.err.println("Unable to upgrade config file " + FILE_CONFIG + " " + e.getMessage());
        e.printStackTrace(System.err);
        System.exit(1);
    }
    // ZAP: Init i18n — any failure falls back to English.
    String lang;
    Locale locale = Locale.ENGLISH;
    try {
        // Select the correct locale
        // ZAP: Changed to use ZapXmlConfiguration, to enforce the same character encoding when reading/writing configurations.
        XMLConfiguration config = new ZapXmlConfiguration(FILE_CONFIG);
        config.setAutoSave(false);
        lang = config.getString(OptionsParamView.LOCALE, OptionsParamView.DEFAULT_LOCALE);
        if (lang.length() == 0) {
            lang = OptionsParamView.DEFAULT_LOCALE;
        }
        // NOTE(review): assumes the locale string always contains an underscore
        // (e.g. "en_GB"); a bare language code would throw here and fall back to English.
        String[] langArray = lang.split("_");
        locale = new Locale(langArray[0], langArray[1]);
    } catch (Exception e) {
        System.out.println("Failed to initialise locale " + e);
    }
    Locale.setDefault(locale);
    messages = new I18N(locale);
}
/** Copies the value of a single key from one XML configuration to another. */
private void copyProperty(XMLConfiguration fromConfig, XMLConfiguration toConfig, String key) {
    toConfig.setProperty(key, fromConfig.getProperty(key));
}
/**
 * Copies every key under the given prefix from one XML configuration to another.
 */
private void copyAllProperties(XMLConfiguration fromConfig, XMLConfiguration toConfig, String prefix) {
    for (Iterator<String> keys = fromConfig.getKeys(prefix); keys.hasNext(); ) {
        copyProperty(fromConfig, toConfig, keys.next());
    }
}
/**
 * Upgrade step for pre-1.1.0 configs: replaces the "pscans" (passive scan
 * regex) subtree with the values from the bundled default configuration file.
 *
 * @throws ConfigurationException if the default configuration file cannot be read
 */
private void upgradeFrom1_1_0(XMLConfiguration config) throws ConfigurationException {
    // Upgrade the regexs
    // ZAP: Changed to use ZapXmlConfiguration, to enforce the same character encoding when reading/writing configurations.
    XMLConfiguration newConfig = new ZapXmlConfiguration(getPathDefaultConfigFile().toFile());
    newConfig.setAutoSave(false);
    copyAllProperties(newConfig, config, "pscans");
}
/**
 * Upgrade step for pre-1.2.0 configs: resets three view settings to the values
 * shipped in the bundled default configuration file.
 *
 * @throws ConfigurationException if the default configuration file cannot be read
 */
private void upgradeFrom1_2_0(XMLConfiguration config) throws ConfigurationException {
    // Upgrade the regexs
    // ZAP: Changed to use ZapXmlConfiguration, to enforce the same character encoding when reading/writing configurations.
    XMLConfiguration newConfig = new ZapXmlConfiguration(getPathDefaultConfigFile().toFile());
    newConfig.setAutoSave(false);
    copyProperty(newConfig, config, "view.editorView");
    copyProperty(newConfig, config, "view.brkPanelView");
    copyProperty(newConfig, config, "view.showMainToolbar");
}
/**
 * Upgrade step for 1.4.1 (and older) configs: converts the spider POST-form
 * option from int to boolean and migrates session tokens, anti-CSRF tokens,
 * invoke-application entries, authentication entries and passive auto-tag
 * scanners into their new element hierarchies.
 */
private void upgradeFrom1_4_1(XMLConfiguration config) {
    // As the POST_FORM option for the spider has been updated from int to boolean, keep
    // compatibility for old versions
    // NOTE(review): getProperty() returns null for an absent key, which would throw
    // a NullPointerException here — confirm "spider.postform" always exists in 1.4.1 configs.
    if (!config.getProperty("spider.postform").toString().equals("0")) {
        config.setProperty("spider.postform", "true");
        config.setProperty("spider.processform", "true");
    } else {
        config.setProperty("spider.postform", "false");
        config.setProperty("spider.processform", "false");
    }
    // Move the old session tokens to the new "httpsessions" hierarchy and
    // delete the old "session" hierarchy as it's no longer used/needed.
    String[] tokens = config.getStringArray("session.tokens");
    for (int i = 0; i < tokens.length; ++i) {
        String elementBaseKey = "httpsessions.tokens.token(" + i + ").";
        config.setProperty(elementBaseKey + "name", tokens[i]);
        config.setProperty(elementBaseKey + "enabled", Boolean.TRUE);
    }
    config.clearTree("session");
    // Update the anti CSRF tokens elements/hierarchy.
    tokens = config.getStringArray("anticsrf.tokens");
    config.clearTree("anticsrf.tokens");
    for (int i = 0; i < tokens.length; ++i) {
        String elementBaseKey = "anticsrf.tokens.token(" + i + ").";
        config.setProperty(elementBaseKey + "name", tokens[i]);
        config.setProperty(elementBaseKey + "enabled", Boolean.TRUE);
    }
    // Update the invoke applications elements/hierarchy.
    // Read the legacy "invoke.A<i>" entries sequentially until the first missing/empty name.
    List<Object[]> oldData = new ArrayList<>();
    for (int i = 0; ; i++) {
        String baseKey = "invoke.A" + i + ".";
        String host = config.getString(baseKey + "name");
        if (host == null || "".equals(host)) {
            break;
        }
        Object[] data = new Object[6];
        data[0] = host;
        data[1] = config.getString(baseKey + "directory", "");
        data[2] = config.getString(baseKey + "command");
        data[3] = config.getString(baseKey + "parameters");
        data[4] = Boolean.valueOf(config.getBoolean(baseKey + "output", true));
        data[5] = Boolean.valueOf(config.getBoolean(baseKey + "note", false));
        oldData.add(data);
    }
    config.clearTree("invoke.A");
    for (int i = 0, size = oldData.size(); i < size; ++i) {
        String elementBaseKey = "invoke.apps.app(" + i + ").";
        Object[] data = oldData.get(i);
        config.setProperty(elementBaseKey + "name", data[0]);
        config.setProperty(elementBaseKey + "directory", data[1]);
        config.setProperty(elementBaseKey + "command", data[2]);
        config.setProperty(elementBaseKey + "parameters", data[3]);
        config.setProperty(elementBaseKey + "output", data[4]);
        config.setProperty(elementBaseKey + "note", data[5]);
        config.setProperty(elementBaseKey + "enabled", Boolean.TRUE);
    }
    // Update the authentication elements/hierarchy.
    oldData = new ArrayList<>();
    for (int i = 0; ; i++) {
        String baseKey = "connection.auth.A" + i + ".";
        String host = config.getString(baseKey + "hostName");
        if (host == null || "".equals(host)) {
            break;
        }
        Object[] data = new Object[5];
        data[0] = host;
        data[1] = Integer.valueOf(config.getString(baseKey + "port", "80"));
        data[2] = config.getString(baseKey + "userName");
        data[3] = config.getString(baseKey + "password");
        data[4] = config.getString(baseKey + "realm");
        oldData.add(data);
    }
    config.clearTree("connection.auth.A");
    for (int i = 0, size = oldData.size(); i < size; ++i) {
        String elementBaseKey = "connection.auths.auth(" + i + ").";
        Object[] data = oldData.get(i);
        config.setProperty(elementBaseKey + "name", "Auth " + i);
        config.setProperty(elementBaseKey + "hostName", data[0]);
        config.setProperty(elementBaseKey + "port", data[1]);
        config.setProperty(elementBaseKey + "userName", data[2]);
        config.setProperty(elementBaseKey + "password", data[3]);
        config.setProperty(elementBaseKey + "realm", data[4]);
        config.setProperty(elementBaseKey + "enabled", Boolean.TRUE);
    }
    // Update the passive scan elements/hierarchy.
    String[] names = config.getStringArray("pscans.names");
    oldData = new ArrayList<>();
    for (String pscanName : names) {
        String baseKey = "pscans." + pscanName + ".";
        Object[] data = new Object[8];
        data[0] = pscanName;
        data[1] = config.getString(baseKey + "type");
        data[2] = config.getString(baseKey + "config");
        data[3] = config.getString(baseKey + "reqUrlRegex");
        data[4] = config.getString(baseKey + "reqHeadRegex");
        data[5] = config.getString(baseKey + "resHeadRegex");
        data[6] = config.getString(baseKey + "resBodyRegex");
        data[7] = Boolean.valueOf(config.getBoolean(baseKey + "enabled"));
        oldData.add(data);
    }
    // Remove the legacy trees only after everything has been read.
    config.clearTree("pscans.names");
    for (String pscanName : names) {
        config.clearTree("pscans." + pscanName);
    }
    for (int i = 0, size = oldData.size(); i < size; ++i) {
        String elementBaseKey = "pscans.autoTagScanners.scanner(" + i + ").";
        Object[] data = oldData.get(i);
        config.setProperty(elementBaseKey + "name", data[0]);
        config.setProperty(elementBaseKey + "type", data[1]);
        config.setProperty(elementBaseKey + "config", data[2]);
        config.setProperty(elementBaseKey + "reqUrlRegex", data[3]);
        config.setProperty(elementBaseKey + "reqHeadRegex", data[4]);
        config.setProperty(elementBaseKey + "resHeadRegex", data[5]);
        config.setProperty(elementBaseKey + "resBodyRegex", data[6]);
        config.setProperty(elementBaseKey + "enabled", data[7]);
    }
}
/**
 * Upgrades the configuration from version 2.0.0: resolves the forced browse
 * file name into an absolute path and removes manual request editor
 * configurations that were incorrectly created.
 *
 * @param config the configuration being upgraded
 */
private void upgradeFrom2_0_0(XMLConfiguration config) {
    String forcedBrowseFile = config.getString("bruteforce.defaultFile", "");
    if (!"".equals(forcedBrowseFile)) {
        // Prefer the 'custom' directory, fall back to the bundled one.
        File candidate = new File(DIRBUSTER_CUSTOM_DIR + File.separator + forcedBrowseFile);
        if (!candidate.exists()) {
            candidate = new File(DIRBUSTER_DIR + File.separator + forcedBrowseFile);
        }
        // Store the absolute path if the file was found, empty string otherwise.
        config.setProperty("bruteforce.defaultFile",
                candidate.exists() ? candidate.getAbsolutePath() : "");
    }

    // Remove the manual request editor configurations that were incorrectly created.
    config.clearTree("nullrequest");
    config.clearTree("nullresponse");
}
/**
 * Upgrades the configuration from version 2.2.0: re-enables the
 * check-for-updates prompt where it could have been unset incorrectly, and
 * clears the add-on block list that failed updates polluted.
 *
 * @param config the configuration being upgraded
 */
private void upgradeFrom2_2_0(XMLConfiguration config) {
    try {
        boolean checkOnStart = config.getBoolean(OptionsParamCheckForUpdates.CHECK_ON_START, false);
        if (!checkOnStart) {
            /*
             * Check-for-updates on start set to false - force another prompt to ask the user,
             * as this option can have been unset incorrectly before.
             * And we want to encourage users to use this ;)
             */
            config.setProperty(OptionsParamCheckForUpdates.DAY_LAST_CHECKED, "");
        }
    } catch (Exception e) {
        // At one stage this was an integer, which will cause an exception to be thrown
        config.setProperty(OptionsParamCheckForUpdates.DAY_LAST_CHECKED, "");
    }
    // Clear the block list - addons were incorrectly added to this if an update failed
    config.setProperty(AddOnLoader.ADDONS_BLOCK_LIST, "");
}
/**
 * Upgrades the configuration from version 2.3.1: removes authentication
 * options that are no longer used.
 *
 * @param config the configuration being upgraded
 */
private void upgradeFrom2_3_1(XMLConfiguration config) {
    // Remove old authentication options no longer used
    config.clearProperty("connection.confirmRemoveAuth");
    config.clearTree("options.auth");
}
/**
 * Sets the JVM default {@code Locale} and (re)initialises ZAP's message
 * bundle to the given locale.
 *
 * @param loc the locale string, either a language only (e.g. {@code "en"})
 *            or language and country separated by an underscore (e.g.
 *            {@code "en_GB"})
 */
public static void setLocale (String loc) {
    String[] langArray = loc.split("_");
    // Guard against locale strings without a country part (e.g. "en"):
    // indexing langArray[1] unconditionally threw ArrayIndexOutOfBoundsException.
    Locale locale = langArray.length >= 2
            ? new Locale(langArray[0], langArray[1])
            : new Locale(langArray[0]);
    Locale.setDefault(locale);
    if (messages == null) {
        messages = new I18N(locale);
    } else {
        messages.setLocale(locale);
    }
}
/**
 * Returns the locale currently used for ZAP's messages.
 * <p>
 * NOTE(review): delegates to {@code messages.getLocal()} without a null
 * check - throws {@code NullPointerException} if called before the messages
 * bundle is initialised; confirm callers guarantee initialisation order.
 */
public static Locale getLocale () {
    return messages.getLocal();
}

/**
 * Returns the system's {@code Locale} (as determined by the JVM at startup, {@code Locale#getDefault()}). Should be used to
 * show locale dependent information in the system's locale.
 * <p>
 * <strong>Note:</strong> The default locale is overridden with the ZAP's user defined locale/language.
 *
 * @return the system's {@code Locale}
 * @see Locale#getDefault()
 * @see Locale#setDefault(Locale)
 */
public static Locale getSystemsLocale() {
    return SYSTEMS_LOCALE;
}
/**
 * Returns the singleton {@code Constant} instance, creating it on first use.
 * <p>
 * NOTE(review): the null check here is unsynchronized and the field is read
 * again under the lock in {@link #createInstance()}; without a volatile
 * field this is not strictly safe publication - confirm this is only
 * reached after single-threaded startup or that the race is benign here.
 */
public static Constant getInstance() {
    if (instance==null) {
        // ZAP: Changed to use the method createInstance().
        createInstance();
    }
    return instance;
}

// ZAP: Added method.
// Synchronized, with a re-check, so concurrent callers create at most one instance.
private static synchronized void createInstance() {
    if (instance == null) {
        instance = new Constant();
    }
}
/**
 * Initialises the platform dependent keyboard accelerators: the Command
 * (meta) key on Mac OS X, the Control key everywhere else.
 */
private void setAcceleratorKeys() {
    // Undo/Redo
    boolean mac = Constant.isMacOsX();
    ACCELERATOR_UNDO = mac ? "meta Z" : "control Z";
    ACCELERATOR_REDO = mac ? "meta shift Z" : "control Y";
    ACCELERATOR_TRIGGER_KEY = mac ? "Meta" : "Control";
}
// Pattern used to detect Windows from the "os.name" system property.
// ZAP: Changed to final.
private static final Pattern patternWindows = Pattern.compile("window", Pattern.CASE_INSENSITIVE);

/** Returns true when the JVM reports a Windows operating system. */
public static boolean isWindows() {
    return patternWindows.matcher(System.getProperty("os.name")).find();
}
// Pattern used to detect Linux from the "os.name" system property.
// ZAP: Changed to final.
private static final Pattern patternLinux = Pattern.compile("linux", Pattern.CASE_INSENSITIVE);

/** Returns true when the JVM reports a Linux operating system. */
public static boolean isLinux() {
    return patternLinux.matcher(System.getProperty("os.name")).find();
}
// Determine Mac OS X Operating System (previous comment wrongly said "Windows")
// ZAP: Changed to final.
private static final Pattern patternMacOsX = Pattern.compile("mac", Pattern.CASE_INSENSITIVE);

/** Returns true when the JVM reports a Mac OS X operating system. */
public static boolean isMacOsX() {
    String os_name = System.getProperty("os.name");
    Matcher matcher = patternMacOsX.matcher(os_name);
    return matcher.find();
}
/**
 * Sets ZAP's home directory, normalised to an absolute, separator-terminated
 * path (see {@code getAbsolutePath(String)}).
 *
 * @param dir the directory to use as ZAP's home
 */
public static void setZapHome (String dir) {
    zapHome = getAbsolutePath(dir);
}
/**
 * Returns the absolute path for the given {@code directory}, always
 * terminated with the platform's path separator.
 *
 * @param directory the directory whose path will be made absolute
 * @return the absolute path for the given {@code directory}, terminated with a separator
 * @since 2.4.0
 */
private static String getAbsolutePath(String directory) {
    String separator = FileSystems.getDefault().getSeparator();
    String absolute = Paths.get(directory).toAbsolutePath().toString();
    return absolute.endsWith(separator) ? absolute : absolute + separator;
}
/**
 * Returns ZAP's home directory as set by {@code setZapHome(String)}.
 * May be {@code null} if it has not been set yet.
 */
public static String getZapHome () {
    return zapHome;
}

/**
 * Returns the path to default configuration file, located in installation directory.
 *
 * @return the {@code Path} to default configuration file.
 * @since TODO add version
 */
public static Path getPathDefaultConfigFile() {
    return Paths.get(getZapInstall(), "xml", FILE_CONFIG_NAME);
}
/**
 * Returns the directory used to persist user contexts, creating it if
 * needed. Falls back to the user directory from the options when the
 * contexts directory cannot be created or written to.
 */
public static File getContextsDir () {
    File dir = new File(Constant.getZapHome(), USER_CONTEXTS_DIR);
    if (!dir.exists()) {
        dir.mkdirs();
    }
    return dir.isDirectory() && dir.canWrite()
            ? dir
            : Model.getSingleton().getOptionsParam().getUserDirectory();
}
/**
 * Returns the directory used to persist scan policies, creating it if
 * needed. Falls back to the user directory from the options when the
 * policies directory cannot be created or written to.
 */
public static File getPoliciesDir () {
    File dir = new File(Constant.getZapHome(), USER_POLICIES_DIR);
    if (!dir.exists()) {
        dir.mkdirs();
    }
    return dir.isDirectory() && dir.canWrite()
            ? dir
            : Model.getSingleton().getOptionsParam().getUserDirectory();
}
/**
 * Sets ZAP's installation directory, normalised to an absolute,
 * separator-terminated path (see {@code getAbsolutePath(String)}).
 *
 * @param dir the installation directory
 */
public static void setZapInstall (String dir) {
    zapInstall = getAbsolutePath(dir);
}
/**
 * Returns the absolute path of ZAP's installation directory, lazily
 * resolving it on first use: the current working directory is used when it
 * contains the expected {@code db} and {@code lang} subdirectories,
 * otherwise the location of the ZAP code source is used.
 * <p>
 * NOTE(review): lazy initialisation here is unsynchronized - confirm this
 * is only reached during single-threaded startup or that a duplicate
 * resolution is benign.
 */
public static String getZapInstall () {
    if (zapInstall == null) {
        String path = ".";
        Path localDir = Paths.get(path);
        if ( ! Files.isDirectory(localDir.resolve("db")) || ! Files.isDirectory(localDir.resolve("lang"))) {
            path = ZAP.class.getProtectionDomain().getCodeSource().getLocation().getPath();
            // Loggers wont have been set up yet
            System.out.println("Defaulting ZAP install dir to " + path);
        }
        if (path.startsWith("/") && path.indexOf(":") > 0) {
            // This is likely to be a Windows path (e.g. "/C:/..."); remove the initial slash or it will fail
            path = path.substring(1);
        }
        zapInstall = getAbsolutePath(path);
    }
    return zapInstall;
}
/**
 * Reads the {@code Implementation-Version} attribute from the JAR manifest
 * that contains this class.
 *
 * @return the version from the manifest, or {@code DEV_VERSION} if the
 *         class is not loaded from a JAR or the manifest cannot be read
 */
private static String getVersionFromManifest() {
    String className = Constant.class.getSimpleName() + ".class";
    String classPath = Constant.class.getResource(className).toString();
    if (!classPath.startsWith("jar")) {
        // Class not from JAR
        return DEV_VERSION;
    }
    String manifestPath = classPath.substring(0, classPath.lastIndexOf("!") + 1) + "/META-INF/MANIFEST.MF";
    // try-with-resources: the manifest stream was previously never closed (resource leak).
    try (InputStream is = new URL(manifestPath).openStream()) {
        Manifest manifest = new Manifest(is);
        Attributes attr = manifest.getMainAttributes();
        return attr.getValue("Implementation-Version");
    } catch (Exception e) {
        // Best effort - treat an unreadable manifest as a dev build.
        return DEV_VERSION;
    }
}
/** Tells whether or not the running version is a development ("Dev Build") version. */
public static boolean isDevBuild() {
    return isDevBuild(PROGRAM_VERSION);
}

/**
 * Tells whether or not the given version string denotes a development build.
 *
 * @param version the version to check
 */
public static boolean isDevBuild(String version) {
    // Dev releases will be called "Dev Build"; date stamped builds will be of the format D-{yyyy}-{mm}-{dd}
    return DEV_VERSION.equals(version);
}

/**
 * Tells whether or not the given version string denotes a date-stamped daily
 * build (format {@code D-{yyyy}-{mm}-{dd}}).
 *
 * @param version the version to check
 */
public static boolean isDailyBuild(String version) {
    // Date stamped builds will be of the format D-{yyyy}-{mm}-{dd}
    return version.startsWith("D-");
}

/** Tells whether or not the running version is a daily build. */
public static boolean isDailyBuild() {
    return isDailyBuild(PROGRAM_VERSION);
}
/**
 * Sets the low memory option; the option may be set only once per run.
 *
 * @param lowMem whether the low memory option should be enabled
 * @throws InvalidParameterException if the option was already set
 */
public static void setLowMemoryOption(boolean lowMem) {
    if (lowMemoryOption != null) {
        throw new InvalidParameterException("Low memory option already set to " + lowMemoryOption);
    }
    lowMemoryOption = lowMem;
}

/** Returns true if the low memory option has been set and enabled. */
public static boolean isLowMemoryOptionSet() {
    return lowMemoryOption != null && lowMemoryOption.booleanValue();
}
/**
 * Returns true if running on Kali Linux and not a daily build.
 * <p>
 * The result is determined once (by reading the {@code ID} property of
 * {@code /etc/os-release}) and cached for subsequent calls.
 */
public static boolean isKali() {
    if (onKali == null) {
        onKali = false;
        File osReleaseFile = new File ("/etc/os-release");
        if (isLinux() && ! isDailyBuild() && osReleaseFile.exists()) {
            // Ignore the fact we're on Kali if this is a daily build - they will only have been installed manually
            // try-with-resources: the stream previously leaked if Properties.load threw.
            try (InputStream in = new FileInputStream(osReleaseFile)) {
                Properties osProps = new Properties();
                osProps.load(in);
                String osLikeValue = osProps.getProperty("ID");
                if (osLikeValue != null) {
                    for (String osLike : osLikeValue.split(" ")) {
                        // equalsIgnoreCase avoids the locale-sensitive toLowerCase().equals() comparison.
                        if (osLike.equalsIgnoreCase("kali")) {
                            onKali = true;
                            break;
                        }
                    }
                }
            } catch (Exception e) {
                // Best effort - an unreadable os-release file means "not Kali".
            }
        }
    }
    return onKali;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.DrainDispatcher;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter;
import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerImpl;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceLimits;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.apache.hadoop.yarn.util.resource.ResourceCalculator;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
public class TestReservations {
  private static final Log LOG = LogFactory.getLog(TestReservations.class);

  private final RecordFactory recordFactory = RecordFactoryProvider
      .getRecordFactory(null);

  // Shared scheduler fixtures, (re)built by setup(...) for each test.
  RMContext rmContext;
  RMContext spyRMContext;
  CapacityScheduler cs;
  // CapacitySchedulerConfiguration csConf;
  CapacitySchedulerContext csContext;
  private final ResourceCalculator resourceCalculator = new DefaultResourceCalculator();

  CSQueue root;
  Map<String, CSQueue> queues = new HashMap<String, CSQueue>();
  Map<String, CSQueue> oldQueues = new HashMap<String, CSQueue>();

  // Memory unit (MB) used by all resource sizes in these tests.
  final static int GB = 1024;
  final static String DEFAULT_RACK = "/default";

  @Before
  public void setUp() throws Exception {
    // Spy the scheduler so individual methods can be stubbed per test.
    CapacityScheduler spyCs = new CapacityScheduler();
    cs = spy(spyCs);
    rmContext = TestUtils.getMockRMContext();
  }
  /**
   * Builds a mocked {@code CapacitySchedulerContext} around the given
   * configuration, parses the queue hierarchy into {@link #root}/{@link #queues}
   * and starts the (spied) scheduler.
   */
  private void setup(CapacitySchedulerConfiguration csConf) throws Exception {
    csConf.setBoolean("yarn.scheduler.capacity.user-metrics.enable", true);
    // Unique root name per run so queue metrics from earlier tests don't clash.
    final String newRoot = "root" + System.currentTimeMillis();
    // final String newRoot = "root";
    setupQueueConfiguration(csConf, newRoot);

    YarnConfiguration conf = new YarnConfiguration();
    cs.setConf(conf);

    csContext = mock(CapacitySchedulerContext.class);
    when(csContext.getConfiguration()).thenReturn(csConf);
    when(csContext.getConf()).thenReturn(conf);
    when(csContext.getMinimumResourceCapability()).thenReturn(
        Resources.createResource(GB, 1));
    when(csContext.getMaximumResourceCapability()).thenReturn(
        Resources.createResource(16 * GB, 12));
    when(csContext.getClusterResource()).thenReturn(
        Resources.createResource(100 * 16 * GB, 100 * 12));
    when(csContext.getApplicationComparator()).thenReturn(
        CapacityScheduler.applicationComparator);
    when(csContext.getQueueComparator()).thenReturn(
        CapacityScheduler.queueComparator);
    when(csContext.getResourceCalculator()).thenReturn(resourceCalculator);
    when(csContext.getRMContext()).thenReturn(rmContext);
    // A real token secret manager is needed so container tokens can be minted.
    RMContainerTokenSecretManager containerTokenSecretManager = new RMContainerTokenSecretManager(
        conf);
    containerTokenSecretManager.rollMasterKey();
    when(csContext.getContainerTokenSecretManager()).thenReturn(
        containerTokenSecretManager);

    root = CapacityScheduler.parseQueue(csContext, csConf, null,
        CapacitySchedulerConfiguration.ROOT, queues, queues, TestUtils.spyHook);

    spyRMContext = spy(rmContext);
    when(spyRMContext.getScheduler()).thenReturn(cs);
    when(spyRMContext.getYarnConfiguration())
        .thenReturn(new YarnConfiguration());
    cs.setRMContext(spyRMContext);
    cs.init(csConf);
    cs.start();
  }
  // Name of the single leaf queue used by these tests.
  private static final String A = "a";

  /**
   * Configures a two-level hierarchy (root -> newRoot -> a) where the leaf
   * queue 'a' holds 100% of the capacity and is open for submissions.
   */
  private void setupQueueConfiguration(CapacitySchedulerConfiguration conf,
      final String newRoot) {

    // Define top-level queues
    conf.setQueues(CapacitySchedulerConfiguration.ROOT,
        new String[] { newRoot });
    conf.setMaximumCapacity(CapacitySchedulerConfiguration.ROOT, 100);
    conf.setAcl(CapacitySchedulerConfiguration.ROOT,
        QueueACL.SUBMIT_APPLICATIONS, " ");

    final String Q_newRoot = CapacitySchedulerConfiguration.ROOT + "."
        + newRoot;
    conf.setQueues(Q_newRoot, new String[] { A });
    conf.setCapacity(Q_newRoot, 100);
    conf.setMaximumCapacity(Q_newRoot, 100);
    conf.setAcl(Q_newRoot, QueueACL.SUBMIT_APPLICATIONS, " ");

    final String Q_A = Q_newRoot + "." + A;
    conf.setCapacity(Q_A, 100f);
    conf.setMaximumCapacity(Q_A, 100);
    conf.setAcl(Q_A, QueueACL.SUBMIT_APPLICATIONS, "*");
  }
  /**
   * Stubs the given (spied) {@code LeafQueue} so container creation returns
   * mock containers and completed-container notifications to the parent queue
   * are no-ops.
   *
   * @param queue a Mockito spy of the queue under test
   * @return the same queue, for call chaining
   */
  static LeafQueue stubLeafQueue(LeafQueue queue) {

    // Mock some methods for ease in these unit tests

    // 1. LeafQueue.createContainer to return dummy containers
    doAnswer(new Answer<Container>() {
      @Override
      public Container answer(InvocationOnMock invocation) throws Throwable {
        final FiCaSchedulerApp application = (FiCaSchedulerApp) (invocation
            .getArguments()[0]);
        final ContainerId containerId = TestUtils
            .getMockContainerId(application);

        Container container = TestUtils.getMockContainer(containerId,
            ((FiCaSchedulerNode) (invocation.getArguments()[1])).getNodeID(),
            (Resource) (invocation.getArguments()[2]),
            ((Priority) invocation.getArguments()[3]));
        return container;
      }
    }).when(queue).createContainer(any(FiCaSchedulerApp.class),
        any(FiCaSchedulerNode.class), any(Resource.class), any(Priority.class));

    // 2. Stub out LeafQueue.parent.completedContainer
    CSQueue parent = queue.getParent();
    doNothing().when(parent).completedContainer(any(Resource.class),
        any(FiCaSchedulerApp.class), any(FiCaSchedulerNode.class),
        any(RMContainer.class), any(ContainerStatus.class),
        any(RMContainerEventType.class), any(CSQueue.class), anyBoolean());

    return queue;
  }
  /**
   * Verifies that, with the default reservations-continue-look-all-nodes
   * behaviour, a 5GB reservation held on node_0 is unreserved when a later
   * node_1 heartbeat can satisfy the request there instead.
   */
  @Test
  public void testReservation() throws Exception {
    // Test that we now unreserve and use a node that has space

    CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration();
    setup(csConf);

    // Manipulate queue 'a'
    LeafQueue a = stubLeafQueue((LeafQueue) queues.get(A));

    // Users
    final String user_0 = "user_0";

    // Submit applications
    final ApplicationAttemptId appAttemptId_0 = TestUtils
        .getMockApplicationAttemptId(0, 0);
    FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_0, a,
        mock(ActiveUsersManager.class), spyRMContext);

    rmContext.getRMApps().put(app_0.getApplicationId(), mock(RMApp.class));

    a.submitApplicationAttempt(app_0, user_0);

    final ApplicationAttemptId appAttemptId_1 = TestUtils
        .getMockApplicationAttemptId(1, 0);
    FiCaSchedulerApp app_1 = new FiCaSchedulerApp(appAttemptId_1, user_0, a,
        mock(ActiveUsersManager.class), spyRMContext);
    a.submitApplicationAttempt(app_1, user_0);

    // Setup some nodes (three 8GB nodes)
    String host_0 = "host_0";
    FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
        8 * GB);
    String host_1 = "host_1";
    FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
        8 * GB);
    String host_2 = "host_2";
    FiCaSchedulerNode node_2 = TestUtils.getMockNode(host_2, DEFAULT_RACK, 0,
        8 * GB);

    when(csContext.getNode(node_0.getNodeID())).thenReturn(node_0);
    when(csContext.getNode(node_1.getNodeID())).thenReturn(node_1);
    when(csContext.getNode(node_2.getNodeID())).thenReturn(node_2);

    final int numNodes = 3;
    Resource clusterResource = Resources.createResource(numNodes * (8 * GB));
    when(csContext.getNumClusterNodes()).thenReturn(numNodes);

    // Setup resource-requests: one 2GB AM, two 3GB maps, two 5GB reduces
    Priority priorityAM = TestUtils.createMockPriority(1);
    Priority priorityMap = TestUtils.createMockPriority(5);
    Priority priorityReduce = TestUtils.createMockPriority(10);

    app_0.updateResourceRequests(Collections.singletonList(TestUtils
        .createResourceRequest(ResourceRequest.ANY, 2 * GB, 1, true,
            priorityAM, recordFactory)));
    app_0.updateResourceRequests(Collections.singletonList(TestUtils
        .createResourceRequest(ResourceRequest.ANY, 5 * GB, 2, true,
            priorityReduce, recordFactory)));
    app_0.updateResourceRequests(Collections.singletonList(TestUtils
        .createResourceRequest(ResourceRequest.ANY, 3 * GB, 2, true,
            priorityMap, recordFactory)));

    // Start testing...
    // Only AM
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(2 * GB, a.getUsedResources().getMemory());
    assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0 * GB, a.getMetrics().getReservedMB());
    assertEquals(2 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(22 * GB, a.getMetrics().getAvailableMB());
    assertEquals(2 * GB, node_0.getUsedResource().getMemory());
    assertEquals(0 * GB, node_1.getUsedResource().getMemory());
    assertEquals(0 * GB, node_2.getUsedResource().getMemory());

    // Only 1 map - simulating reduce
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(5 * GB, a.getUsedResources().getMemory());
    assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0 * GB, a.getMetrics().getReservedMB());
    assertEquals(5 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(19 * GB, a.getMetrics().getAvailableMB());
    assertEquals(5 * GB, node_0.getUsedResource().getMemory());
    assertEquals(0 * GB, node_1.getUsedResource().getMemory());
    assertEquals(0 * GB, node_2.getUsedResource().getMemory());

    // Only 1 map to other node - simulating reduce
    a.assignContainers(clusterResource, node_1,
        new ResourceLimits(clusterResource));
    assertEquals(8 * GB, a.getUsedResources().getMemory());
    assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0 * GB, a.getMetrics().getReservedMB());
    assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(16 * GB, a.getMetrics().getAvailableMB());
    assertEquals(16 * GB, app_0.getHeadroom().getMemory());
    assertEquals(null, node_0.getReservedContainer());
    assertEquals(5 * GB, node_0.getUsedResource().getMemory());
    assertEquals(3 * GB, node_1.getUsedResource().getMemory());
    assertEquals(0 * GB, node_2.getUsedResource().getMemory());
    assertEquals(2, app_0.getTotalRequiredResources(priorityReduce));

    // try to assign reducer (5G on node 0 and should reserve)
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(13 * GB, a.getUsedResources().getMemory());
    assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(5 * GB, a.getMetrics().getReservedMB());
    assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(11 * GB, a.getMetrics().getAvailableMB());
    assertEquals(11 * GB, app_0.getHeadroom().getMemory());
    assertEquals(5 * GB, node_0.getReservedContainer().getReservedResource()
        .getMemory());
    assertEquals(5 * GB, node_0.getUsedResource().getMemory());
    assertEquals(3 * GB, node_1.getUsedResource().getMemory());
    assertEquals(0 * GB, node_2.getUsedResource().getMemory());
    assertEquals(2, app_0.getTotalRequiredResources(priorityReduce));

    // assign reducer to node 2
    a.assignContainers(clusterResource, node_2,
        new ResourceLimits(clusterResource));
    assertEquals(18 * GB, a.getUsedResources().getMemory());
    assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(5 * GB, a.getMetrics().getReservedMB());
    assertEquals(13 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(6 * GB, a.getMetrics().getAvailableMB());
    assertEquals(6 * GB, app_0.getHeadroom().getMemory());
    assertEquals(5 * GB, node_0.getReservedContainer().getReservedResource()
        .getMemory());
    assertEquals(5 * GB, node_0.getUsedResource().getMemory());
    assertEquals(3 * GB, node_1.getUsedResource().getMemory());
    assertEquals(5 * GB, node_2.getUsedResource().getMemory());
    assertEquals(1, app_0.getTotalRequiredResources(priorityReduce));

    // node_1 heartbeat and unreserves from node_0 in order to allocate
    // on node_1
    a.assignContainers(clusterResource, node_1,
        new ResourceLimits(clusterResource));
    assertEquals(18 * GB, a.getUsedResources().getMemory());
    assertEquals(18 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0 * GB, a.getMetrics().getReservedMB());
    assertEquals(18 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(6 * GB, a.getMetrics().getAvailableMB());
    assertEquals(6 * GB, app_0.getHeadroom().getMemory());
    assertEquals(null, node_0.getReservedContainer());
    assertEquals(5 * GB, node_0.getUsedResource().getMemory());
    assertEquals(8 * GB, node_1.getUsedResource().getMemory());
    assertEquals(5 * GB, node_2.getUsedResource().getMemory());
    assertEquals(0, app_0.getTotalRequiredResources(priorityReduce));
  }
  /**
   * Verifies that with reservations-continue-look-all-nodes switched off the
   * 5GB reservation on node_0 is NOT released on a node_1 heartbeat, leaving
   * the application potentially stuck unless the AM intervenes.
   */
  @Test
  public void testReservationNoContinueLook() throws Exception {
    // Test that with reservations-continue-look-all-nodes feature off
    // we don't unreserve and show we could get stuck

    queues = new HashMap<String, CSQueue>();
    // test that the deadlock occurs when turned off
    CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration();
    csConf.setBoolean(
        "yarn.scheduler.capacity.reservations-continue-look-all-nodes", false);
    setup(csConf);

    // Manipulate queue 'a'
    LeafQueue a = stubLeafQueue((LeafQueue) queues.get(A));

    // Users
    final String user_0 = "user_0";

    // Submit applications
    final ApplicationAttemptId appAttemptId_0 = TestUtils
        .getMockApplicationAttemptId(0, 0);
    FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_0, a,
        mock(ActiveUsersManager.class), spyRMContext);

    rmContext.getRMApps().put(app_0.getApplicationId(), mock(RMApp.class));

    a.submitApplicationAttempt(app_0, user_0);

    final ApplicationAttemptId appAttemptId_1 = TestUtils
        .getMockApplicationAttemptId(1, 0);
    FiCaSchedulerApp app_1 = new FiCaSchedulerApp(appAttemptId_1, user_0, a,
        mock(ActiveUsersManager.class), spyRMContext);
    a.submitApplicationAttempt(app_1, user_0);

    // Setup some nodes (three 8GB nodes)
    String host_0 = "host_0";
    FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
        8 * GB);
    String host_1 = "host_1";
    FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
        8 * GB);
    String host_2 = "host_2";
    FiCaSchedulerNode node_2 = TestUtils.getMockNode(host_2, DEFAULT_RACK, 0,
        8 * GB);

    when(csContext.getNode(node_0.getNodeID())).thenReturn(node_0);
    when(csContext.getNode(node_1.getNodeID())).thenReturn(node_1);
    when(csContext.getNode(node_2.getNodeID())).thenReturn(node_2);

    final int numNodes = 3;
    Resource clusterResource = Resources.createResource(numNodes * (8 * GB));
    when(csContext.getNumClusterNodes()).thenReturn(numNodes);

    // Setup resource-requests: one 2GB AM, two 3GB maps, two 5GB reduces
    Priority priorityAM = TestUtils.createMockPriority(1);
    Priority priorityMap = TestUtils.createMockPriority(5);
    Priority priorityReduce = TestUtils.createMockPriority(10);

    app_0.updateResourceRequests(Collections.singletonList(TestUtils
        .createResourceRequest(ResourceRequest.ANY, 2 * GB, 1, true,
            priorityAM, recordFactory)));
    app_0.updateResourceRequests(Collections.singletonList(TestUtils
        .createResourceRequest(ResourceRequest.ANY, 5 * GB, 2, true,
            priorityReduce, recordFactory)));
    app_0.updateResourceRequests(Collections.singletonList(TestUtils
        .createResourceRequest(ResourceRequest.ANY, 3 * GB, 2, true,
            priorityMap, recordFactory)));

    // Start testing...
    // Only AM
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(2 * GB, a.getUsedResources().getMemory());
    assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0 * GB, a.getMetrics().getReservedMB());
    assertEquals(2 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(22 * GB, a.getMetrics().getAvailableMB());
    assertEquals(2 * GB, node_0.getUsedResource().getMemory());
    assertEquals(0 * GB, node_1.getUsedResource().getMemory());
    assertEquals(0 * GB, node_2.getUsedResource().getMemory());

    // Only 1 map - simulating reduce
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(5 * GB, a.getUsedResources().getMemory());
    assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0 * GB, a.getMetrics().getReservedMB());
    assertEquals(5 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(19 * GB, a.getMetrics().getAvailableMB());
    assertEquals(5 * GB, node_0.getUsedResource().getMemory());
    assertEquals(0 * GB, node_1.getUsedResource().getMemory());
    assertEquals(0 * GB, node_2.getUsedResource().getMemory());

    // Only 1 map to other node - simulating reduce
    a.assignContainers(clusterResource, node_1,
        new ResourceLimits(clusterResource));
    assertEquals(8 * GB, a.getUsedResources().getMemory());
    assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0 * GB, a.getMetrics().getReservedMB());
    assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(16 * GB, a.getMetrics().getAvailableMB());
    assertEquals(16 * GB, app_0.getHeadroom().getMemory());
    assertEquals(null, node_0.getReservedContainer());
    assertEquals(5 * GB, node_0.getUsedResource().getMemory());
    assertEquals(3 * GB, node_1.getUsedResource().getMemory());
    assertEquals(0 * GB, node_2.getUsedResource().getMemory());
    assertEquals(2, app_0.getTotalRequiredResources(priorityReduce));

    // try to assign reducer (5G on node 0 and should reserve)
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(13 * GB, a.getUsedResources().getMemory());
    assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(5 * GB, a.getMetrics().getReservedMB());
    assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(11 * GB, a.getMetrics().getAvailableMB());
    assertEquals(11 * GB, app_0.getHeadroom().getMemory());
    assertEquals(5 * GB, node_0.getReservedContainer().getReservedResource()
        .getMemory());
    assertEquals(5 * GB, node_0.getUsedResource().getMemory());
    assertEquals(3 * GB, node_1.getUsedResource().getMemory());
    assertEquals(0 * GB, node_2.getUsedResource().getMemory());
    assertEquals(2, app_0.getTotalRequiredResources(priorityReduce));

    // assign reducer to node 2
    a.assignContainers(clusterResource, node_2,
        new ResourceLimits(clusterResource));
    assertEquals(18 * GB, a.getUsedResources().getMemory());
    assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(5 * GB, a.getMetrics().getReservedMB());
    assertEquals(13 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(6 * GB, a.getMetrics().getAvailableMB());
    assertEquals(6 * GB, app_0.getHeadroom().getMemory());
    assertEquals(5 * GB, node_0.getReservedContainer().getReservedResource()
        .getMemory());
    assertEquals(5 * GB, node_0.getUsedResource().getMemory());
    assertEquals(3 * GB, node_1.getUsedResource().getMemory());
    assertEquals(5 * GB, node_2.getUsedResource().getMemory());
    assertEquals(1, app_0.getTotalRequiredResources(priorityReduce));

    // node_1 heartbeat and won't unreserve from node_0, potentially stuck
    // if AM doesn't handle
    a.assignContainers(clusterResource, node_1,
        new ResourceLimits(clusterResource));
    assertEquals(18 * GB, a.getUsedResources().getMemory());
    assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(5 * GB, a.getMetrics().getReservedMB());
    assertEquals(13 * GB, a.getMetrics().getAllocatedMB());
    assertEquals(6 * GB, a.getMetrics().getAvailableMB());
    assertEquals(6 * GB, app_0.getHeadroom().getMemory());
    assertEquals(5 * GB, node_0.getReservedContainer().getReservedResource()
        .getMemory());
    assertEquals(5 * GB, node_0.getUsedResource().getMemory());
    assertEquals(3 * GB, node_1.getUsedResource().getMemory());
    assertEquals(5 * GB, node_2.getUsedResource().getMemory());
    assertEquals(1, app_0.getTotalRequiredResources(priorityReduce));
  }
/**
 * Exercises the "must unreserve first" path: a 5GB reducer reservation made
 * on node_0 is released when the same request can instead be satisfied on
 * node_1, which has free space. Cluster: 2 nodes x 8GB = 16GB.
 */
@Test
public void testAssignContainersNeedToUnreserve() throws Exception {
// Test that we now unreserve and use a node that has space
CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration();
setup(csConf);
// Manipulate queue 'a'
LeafQueue a = stubLeafQueue((LeafQueue) queues.get(A));
// Users
final String user_0 = "user_0";
// Submit applications
final ApplicationAttemptId appAttemptId_0 = TestUtils
.getMockApplicationAttemptId(0, 0);
FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_0, a,
mock(ActiveUsersManager.class), spyRMContext);
rmContext.getRMApps().put(app_0.getApplicationId(), mock(RMApp.class));
a.submitApplicationAttempt(app_0, user_0);
final ApplicationAttemptId appAttemptId_1 = TestUtils
.getMockApplicationAttemptId(1, 0);
// app_1 is submitted but issues no requests in this test; it only adds a
// second attempt to the queue/user state.
FiCaSchedulerApp app_1 = new FiCaSchedulerApp(appAttemptId_1, user_0, a,
mock(ActiveUsersManager.class), spyRMContext);
a.submitApplicationAttempt(app_1, user_0);
// Setup some nodes
String host_0 = "host_0";
FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
8 * GB);
String host_1 = "host_1";
FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
8 * GB);
when(csContext.getNode(node_0.getNodeID())).thenReturn(node_0);
when(csContext.getNode(node_1.getNodeID())).thenReturn(node_1);
final int numNodes = 2;
Resource clusterResource = Resources.createResource(numNodes * (8 * GB));
when(csContext.getNumClusterNodes()).thenReturn(numNodes);
// Setup resource-requests
// AM (2GB x1), reducers (5GB x2), maps (3GB x2); higher priority value =
// lower scheduling priority, so AM goes first, then maps, then reducers.
Priority priorityAM = TestUtils.createMockPriority(1);
Priority priorityMap = TestUtils.createMockPriority(5);
Priority priorityReduce = TestUtils.createMockPriority(10);
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 2 * GB, 1, true,
priorityAM, recordFactory)));
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 5 * GB, 2, true,
priorityReduce, recordFactory)));
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 3 * GB, 2, true,
priorityMap, recordFactory)));
// Start testing...
// Only AM
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(2 * GB, a.getUsedResources().getMemory());
assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(2 * GB, a.getMetrics().getAllocatedMB());
assertEquals(14 * GB, a.getMetrics().getAvailableMB());
assertEquals(2 * GB, node_0.getUsedResource().getMemory());
assertEquals(0 * GB, node_1.getUsedResource().getMemory());
// Only 1 map - simulating reduce
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(5 * GB, a.getUsedResources().getMemory());
assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(5 * GB, a.getMetrics().getAllocatedMB());
assertEquals(11 * GB, a.getMetrics().getAvailableMB());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(0 * GB, node_1.getUsedResource().getMemory());
// Only 1 map to other node - simulating reduce
a.assignContainers(clusterResource, node_1,
new ResourceLimits(clusterResource));
assertEquals(8 * GB, a.getUsedResources().getMemory());
assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
assertEquals(8 * GB, a.getMetrics().getAvailableMB());
assertEquals(8 * GB, app_0.getHeadroom().getMemory());
assertEquals(null, node_0.getReservedContainer());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
assertEquals(2, app_0.getTotalRequiredResources(priorityReduce));
// try to assign reducer (5G on node 0 and should reserve)
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(13 * GB, a.getUsedResources().getMemory());
assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(5 * GB, a.getMetrics().getReservedMB());
assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
assertEquals(3 * GB, a.getMetrics().getAvailableMB());
assertEquals(3 * GB, app_0.getHeadroom().getMemory());
assertEquals(5 * GB, node_0.getReservedContainer().getReservedResource()
.getMemory());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
assertEquals(2, app_0.getTotalRequiredResources(priorityReduce));
// could allocate but told need to unreserve first
// The 5GB reservation on node_0 is dropped and the reducer is allocated
// on node_1 instead (reserved 5GB -> 0, node_1 used 3GB -> 8GB).
a.assignContainers(clusterResource, node_1,
new ResourceLimits(clusterResource));
assertEquals(13 * GB, a.getUsedResources().getMemory());
assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(13 * GB, a.getMetrics().getAllocatedMB());
assertEquals(3 * GB, a.getMetrics().getAvailableMB());
assertEquals(3 * GB, app_0.getHeadroom().getMemory());
assertEquals(null, node_0.getReservedContainer());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(8 * GB, node_1.getUsedResource().getMemory());
assertEquals(1, app_0.getTotalRequiredResources(priorityReduce));
}
/**
 * Unit-tests FiCaSchedulerApp#getNodeIdToUnreserve directly: it should
 * return null when nothing is reserved or when no reserved container is at
 * least as large as the requested capability (2GB), and return the node id
 * once a sufficiently large (2GB) container is reserved.
 */
@Test
public void testGetAppToUnreserve() throws Exception {
CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration();
setup(csConf);
final String user_0 = "user_0";
final ApplicationAttemptId appAttemptId_0 = TestUtils
.getMockApplicationAttemptId(0, 0);
LeafQueue a = stubLeafQueue((LeafQueue) queues.get(A));
FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_0, a,
mock(ActiveUsersManager.class), spyRMContext);
String host_0 = "host_0";
FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
8 * GB);
String host_1 = "host_1";
FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
8 * GB);
Resource clusterResource = Resources.createResource(2 * 8 * GB);
// Setup resource-requests
Priority priorityMap = TestUtils.createMockPriority(5);
Resource capability = Resources.createResource(2*GB, 0);
// A locally-mocked RMContext is needed because RMContainerImpl pulls the
// dispatcher, expirer, history writer and metrics publisher from it.
RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class);
SystemMetricsPublisher publisher = mock(SystemMetricsPublisher.class);
RMContext rmContext = mock(RMContext.class);
ContainerAllocationExpirer expirer =
mock(ContainerAllocationExpirer.class);
DrainDispatcher drainDispatcher = new DrainDispatcher();
when(rmContext.getContainerAllocationExpirer()).thenReturn(expirer);
when(rmContext.getDispatcher()).thenReturn(drainDispatcher);
when(rmContext.getRMApplicationHistoryWriter()).thenReturn(writer);
when(rmContext.getSystemMetricsPublisher()).thenReturn(publisher);
when(rmContext.getYarnConfiguration()).thenReturn(new YarnConfiguration());
ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
app_0.getApplicationId(), 1);
ContainerId containerId = BuilderUtils.newContainerId(appAttemptId, 1);
// rmContainer: 2GB on node_1 (large enough); rmContainer_1: 1GB on
// node_0 (too small for the 2GB capability).
Container container = TestUtils.getMockContainer(containerId,
node_1.getNodeID(), Resources.createResource(2*GB), priorityMap);
RMContainer rmContainer = new RMContainerImpl(container, appAttemptId,
node_1.getNodeID(), "user", rmContext);
Container container_1 = TestUtils.getMockContainer(containerId,
node_0.getNodeID(), Resources.createResource(1*GB), priorityMap);
RMContainer rmContainer_1 = new RMContainerImpl(container_1, appAttemptId,
node_0.getNodeID(), "user", rmContext);
// no reserved containers
NodeId unreserveId =
app_0.getNodeIdToUnreserve(priorityMap, capability,
cs.getResourceCalculator(), clusterResource);
assertEquals(null, unreserveId);
// no reserved containers - reserve then unreserve
app_0.reserve(node_0, priorityMap, rmContainer_1, container_1);
app_0.unreserve(node_0, priorityMap);
unreserveId = app_0.getNodeIdToUnreserve(priorityMap, capability,
cs.getResourceCalculator(), clusterResource);
assertEquals(null, unreserveId);
// no container large enough is reserved
app_0.reserve(node_0, priorityMap, rmContainer_1, container_1);
unreserveId = app_0.getNodeIdToUnreserve(priorityMap, capability,
cs.getResourceCalculator(), clusterResource);
assertEquals(null, unreserveId);
// reserve one that is now large enough
app_0.reserve(node_1, priorityMap, rmContainer, container);
unreserveId = app_0.getNodeIdToUnreserve(priorityMap, capability,
cs.getResourceCalculator(), clusterResource);
assertEquals(node_1.getNodeID(), unreserveId);
}
/**
 * Unit-tests LeafQueue#findNodeToUnreserve: it must return false when the
 * app has no reservation, and also when a reservation exists but the
 * scheduler (csContext) does not know the node the reservation is on.
 */
@Test
public void testFindNodeToUnreserve() throws Exception {
CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration();
setup(csConf);
final String user_0 = "user_0";
final ApplicationAttemptId appAttemptId_0 = TestUtils
.getMockApplicationAttemptId(0, 0);
LeafQueue a = stubLeafQueue((LeafQueue) queues.get(A));
FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_0, a,
mock(ActiveUsersManager.class), spyRMContext);
// Note: node_1 is deliberately NOT registered via
// when(csContext.getNode(...)), which drives the second assertion below.
String host_1 = "host_1";
FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
8 * GB);
// Setup resource-requests
Priority priorityMap = TestUtils.createMockPriority(5);
Resource capability = Resources.createResource(2 * GB, 0);
// Locally-mocked RMContext: RMContainerImpl needs dispatcher, expirer,
// history writer and metrics publisher.
RMApplicationHistoryWriter writer = mock(RMApplicationHistoryWriter.class);
SystemMetricsPublisher publisher = mock(SystemMetricsPublisher.class);
RMContext rmContext = mock(RMContext.class);
ContainerAllocationExpirer expirer =
mock(ContainerAllocationExpirer.class);
DrainDispatcher drainDispatcher = new DrainDispatcher();
when(rmContext.getContainerAllocationExpirer()).thenReturn(expirer);
when(rmContext.getDispatcher()).thenReturn(drainDispatcher);
when(rmContext.getRMApplicationHistoryWriter()).thenReturn(writer);
when(rmContext.getSystemMetricsPublisher()).thenReturn(publisher);
when(rmContext.getYarnConfiguration()).thenReturn(new YarnConfiguration());
ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
app_0.getApplicationId(), 1);
ContainerId containerId = BuilderUtils.newContainerId(appAttemptId, 1);
Container container = TestUtils.getMockContainer(containerId,
node_1.getNodeID(), Resources.createResource(2*GB), priorityMap);
RMContainer rmContainer = new RMContainerImpl(container, appAttemptId,
node_1.getNodeID(), "user", rmContext);
// nothing reserved
boolean res = a.findNodeToUnreserve(csContext.getClusterResource(),
node_1, app_0, priorityMap, capability);
assertFalse(res);
// reserved but scheduler doesn't know about that node.
app_0.reserve(node_1, priorityMap, rmContainer, container);
node_1.reserveResource(app_0, priorityMap, rmContainer);
res = a.findNodeToUnreserve(csContext.getClusterResource(), node_1, app_0,
priorityMap, capability);
assertFalse(res);
}
/**
 * Tests LeafQueue#canAssignToThisQueue with and without reservation
 * "continue looking": a request over absoluteMaxCapacity is rejected, a
 * request that fits once reservations are subtracted is accepted (and
 * limits.getAmountNeededUnreserve() reports how much must be unreserved),
 * and after the feature is turned off via queue refresh everything over
 * the limit is rejected again.
 */
@Test
public void testAssignToQueue() throws Exception {
CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration();
setup(csConf);
// Manipulate queue 'a'
LeafQueue a = stubLeafQueue((LeafQueue) queues.get(A));
// Users
final String user_0 = "user_0";
// Submit applications
final ApplicationAttemptId appAttemptId_0 = TestUtils
.getMockApplicationAttemptId(0, 0);
FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_0, a,
mock(ActiveUsersManager.class), spyRMContext);
rmContext.getRMApps().put(app_0.getApplicationId(), mock(RMApp.class));
a.submitApplicationAttempt(app_0, user_0);
final ApplicationAttemptId appAttemptId_1 = TestUtils
.getMockApplicationAttemptId(1, 0);
FiCaSchedulerApp app_1 = new FiCaSchedulerApp(appAttemptId_1, user_0, a,
mock(ActiveUsersManager.class), spyRMContext);
a.submitApplicationAttempt(app_1, user_0);
// Setup some nodes
String host_0 = "host_0";
FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
8 * GB);
String host_1 = "host_1";
FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
8 * GB);
String host_2 = "host_2";
FiCaSchedulerNode node_2 = TestUtils.getMockNode(host_2, DEFAULT_RACK, 0,
8 * GB);
when(csContext.getNode(node_0.getNodeID())).thenReturn(node_0);
when(csContext.getNode(node_1.getNodeID())).thenReturn(node_1);
when(csContext.getNode(node_2.getNodeID())).thenReturn(node_2);
// NOTE(review): three nodes are mocked but the cluster resource counts
// only 2 (16GB); the assertions below (e.g. 14GB available after a 2GB
// AM) are consistent with a 16GB cluster — confirm this is intentional.
final int numNodes = 2;
Resource clusterResource = Resources.createResource(numNodes * (8 * GB));
when(csContext.getNumClusterNodes()).thenReturn(numNodes);
// Setup resource-requests
Priority priorityAM = TestUtils.createMockPriority(1);
Priority priorityMap = TestUtils.createMockPriority(5);
Priority priorityReduce = TestUtils.createMockPriority(10);
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 2 * GB, 1, true,
priorityAM, recordFactory)));
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 5 * GB, 2, true,
priorityReduce, recordFactory)));
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 3 * GB, 2, true,
priorityMap, recordFactory)));
// Start testing...
// Only AM
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(2 * GB, a.getUsedResources().getMemory());
assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(2 * GB, a.getMetrics().getAllocatedMB());
assertEquals(14 * GB, a.getMetrics().getAvailableMB());
assertEquals(2 * GB, node_0.getUsedResource().getMemory());
assertEquals(0 * GB, node_1.getUsedResource().getMemory());
// Only 1 map - simulating reduce
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(5 * GB, a.getUsedResources().getMemory());
assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(5 * GB, a.getMetrics().getAllocatedMB());
assertEquals(11 * GB, a.getMetrics().getAvailableMB());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(0 * GB, node_1.getUsedResource().getMemory());
// Only 1 map to other node - simulating reduce
a.assignContainers(clusterResource, node_1,
new ResourceLimits(clusterResource));
assertEquals(8 * GB, a.getUsedResources().getMemory());
assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
assertEquals(8 * GB, a.getMetrics().getAvailableMB());
assertEquals(null, node_0.getReservedContainer());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
// allocate to queue so that the potential new capacity is greater then
// absoluteMaxCapacity
Resource capability = Resources.createResource(32 * GB, 0);
ResourceLimits limits = new ResourceLimits(clusterResource);
boolean res =
a.canAssignToThisQueue(clusterResource,
CommonNodeLabelsManager.EMPTY_STRING_SET, limits, capability, Resources.none());
assertFalse(res);
assertEquals(limits.getAmountNeededUnreserve(), Resources.none());
// now add in reservations and make sure it continues if config set
// allocate to queue so that the potential new capacity is greater then
// absoluteMaxCapacity
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(13 * GB, a.getUsedResources().getMemory());
assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(5 * GB, a.getMetrics().getReservedMB());
assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
assertEquals(3 * GB, a.getMetrics().getAvailableMB());
assertEquals(3 * GB, app_0.getHeadroom().getMemory());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
capability = Resources.createResource(5 * GB, 0);
limits = new ResourceLimits(clusterResource);
res =
a.canAssignToThisQueue(clusterResource,
CommonNodeLabelsManager.EMPTY_STRING_SET, limits, capability, Resources
.createResource(5 * GB));
assertTrue(res);
// 16GB total, 13GB consumed (8 allocated, 5 reserved). asking for 5GB so we would have to
// unreserve 2GB to get the total 5GB needed.
// also note vcore checks not enabled
assertEquals(Resources.createResource(2 * GB, 3), limits.getAmountNeededUnreserve());
// tell to not check reservations
limits = new ResourceLimits(clusterResource);
res =
a.canAssignToThisQueue(clusterResource,
CommonNodeLabelsManager.EMPTY_STRING_SET, limits, capability, Resources.none());
assertFalse(res);
assertEquals(Resources.none(), limits.getAmountNeededUnreserve());
refreshQueuesTurnOffReservationsContLook(a, csConf);
// should return false since reservations continue look is off.
limits = new ResourceLimits(clusterResource);
res =
a.canAssignToThisQueue(clusterResource,
CommonNodeLabelsManager.EMPTY_STRING_SET, limits, capability, Resources.none());
assertFalse(res);
assertEquals(limits.getAmountNeededUnreserve(), Resources.none());
limits = new ResourceLimits(clusterResource);
res =
a.canAssignToThisQueue(clusterResource,
CommonNodeLabelsManager.EMPTY_STRING_SET, limits, capability, Resources
.createResource(5 * GB));
assertFalse(res);
assertEquals(Resources.none(), limits.getAmountNeededUnreserve());
}
/**
 * Reinitializes the queue hierarchy with
 * {@code RESERVE_CONT_LOOK_ALL_NODES} disabled and verifies the flag flips
 * from {@code true} to {@code false} on both the leaf queue and its parent.
 * Shared helper for several tests in this class.
 *
 * @param a the leaf queue under test
 * @param csConf the scheduler configuration to mutate and re-apply
 * @throws Exception if queue reinitialization fails
 */
public void refreshQueuesTurnOffReservationsContLook(LeafQueue a,
    CapacitySchedulerConfiguration csConf) throws Exception {
  // before reinitialization: continue-looking is on for leaf and parent
  assertTrue(a.getReservationContinueLooking());
  assertTrue(((ParentQueue) a.getParent()).getReservationContinueLooking());
  csConf.setBoolean(
      CapacitySchedulerConfiguration.RESERVE_CONT_LOOK_ALL_NODES, false);
  // Rebuild the queue tree from the updated config and swap it in.
  Map<String, CSQueue> newQueues = new HashMap<String, CSQueue>();
  CSQueue newRoot = CapacityScheduler.parseQueue(csContext, csConf, null,
      CapacitySchedulerConfiguration.ROOT, newQueues, queues,
      TestUtils.spyHook);
  queues = newQueues;
  root.reinitialize(newRoot, cs.getClusterResource());
  // after reinitialization: the new setting must have taken effect
  assertFalse(a.getReservationContinueLooking());
  assertFalse(((ParentQueue) a.getParent()).getReservationContinueLooking());
}
/**
 * Verifies that the reservation "continue looking" flag can be turned off
 * through a queue refresh; the actual before/after checks live in
 * {@link #refreshQueuesTurnOffReservationsContLook}.
 */
@Test
public void testContinueLookingReservationsAfterQueueRefresh()
    throws Exception {
  final CapacitySchedulerConfiguration conf =
      new CapacitySchedulerConfiguration();
  setup(conf);
  // Work against leaf queue 'a'.
  final LeafQueue leaf = stubLeafQueue((LeafQueue) queues.get(A));
  refreshQueuesTurnOffReservationsContLook(leaf, conf);
}
/**
 * Tests LeafQueue#assignToUser: a user under the limit passes; a user over
 * the limit passes only while reservation continue-looking is enabled, in
 * which case userResourceLimits reports the amount that must be
 * unreserved; after the feature is disabled via refresh it fails.
 */
@Test
public void testAssignToUser() throws Exception {
CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration();
setup(csConf);
// Manipulate queue 'a'
LeafQueue a = stubLeafQueue((LeafQueue) queues.get(A));
// Users
final String user_0 = "user_0";
// Submit applications
final ApplicationAttemptId appAttemptId_0 = TestUtils
.getMockApplicationAttemptId(0, 0);
FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_0, a,
mock(ActiveUsersManager.class), spyRMContext);
rmContext.getRMApps().put(app_0.getApplicationId(), mock(RMApp.class));
a.submitApplicationAttempt(app_0, user_0);
final ApplicationAttemptId appAttemptId_1 = TestUtils
.getMockApplicationAttemptId(1, 0);
FiCaSchedulerApp app_1 = new FiCaSchedulerApp(appAttemptId_1, user_0, a,
mock(ActiveUsersManager.class), spyRMContext);
a.submitApplicationAttempt(app_1, user_0);
// Setup some nodes
String host_0 = "host_0";
FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
8 * GB);
String host_1 = "host_1";
FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
8 * GB);
String host_2 = "host_2";
FiCaSchedulerNode node_2 = TestUtils.getMockNode(host_2, DEFAULT_RACK, 0,
8 * GB);
when(csContext.getNode(node_0.getNodeID())).thenReturn(node_0);
when(csContext.getNode(node_1.getNodeID())).thenReturn(node_1);
when(csContext.getNode(node_2.getNodeID())).thenReturn(node_2);
// NOTE(review): three nodes mocked, cluster resource counts only 2
// (16GB) — consistent with the 14GB-available assertion below; confirm
// intentional.
final int numNodes = 2;
Resource clusterResource = Resources.createResource(numNodes * (8 * GB));
when(csContext.getNumClusterNodes()).thenReturn(numNodes);
// Setup resource-requests
Priority priorityAM = TestUtils.createMockPriority(1);
Priority priorityMap = TestUtils.createMockPriority(5);
Priority priorityReduce = TestUtils.createMockPriority(10);
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 2 * GB, 1, true,
priorityAM, recordFactory)));
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 3 * GB, 2, true,
priorityMap, recordFactory)));
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 5 * GB, 2, true,
priorityReduce, recordFactory)));
// Start testing...
// Only AM
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(2 * GB, a.getUsedResources().getMemory());
assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(2 * GB, a.getMetrics().getAllocatedMB());
assertEquals(14 * GB, a.getMetrics().getAvailableMB());
assertEquals(2 * GB, node_0.getUsedResource().getMemory());
assertEquals(0 * GB, node_1.getUsedResource().getMemory());
// Only 1 map - simulating reduce
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(5 * GB, a.getUsedResources().getMemory());
assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(5 * GB, a.getMetrics().getAllocatedMB());
assertEquals(11 * GB, a.getMetrics().getAvailableMB());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(0 * GB, node_1.getUsedResource().getMemory());
// Only 1 map to other node - simulating reduce
a.assignContainers(clusterResource, node_1,
new ResourceLimits(clusterResource));
assertEquals(8 * GB, a.getUsedResources().getMemory());
assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
assertEquals(8 * GB, a.getMetrics().getAvailableMB());
assertEquals(null, node_0.getReservedContainer());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
// now add in reservations and make sure it continues if config set
// allocate to queue so that the potential new capacity is greater then
// absoluteMaxCapacity
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(13 * GB, a.getUsedResources().getMemory());
assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(5 * GB, app_0.getCurrentReservation().getMemory());
assertEquals(5 * GB, a.getMetrics().getReservedMB());
assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
assertEquals(3 * GB, a.getMetrics().getAvailableMB());
assertEquals(3 * GB, app_0.getHeadroom().getMemory());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
// not over the limit
Resource limit = Resources.createResource(14 * GB, 0);
ResourceLimits userResourceLimits = new ResourceLimits(clusterResource);
boolean res = a.assignToUser(clusterResource, user_0, limit, app_0, null, userResourceLimits);
assertTrue(res);
assertEquals(Resources.none(), userResourceLimits.getAmountNeededUnreserve());
// set limit so it subtracts reservations and it can continue
limit = Resources.createResource(12 * GB, 0);
userResourceLimits = new ResourceLimits(clusterResource);
res = a.assignToUser(clusterResource, user_0, limit, app_0, null, userResourceLimits);
assertTrue(res);
// limit set to 12GB, we are using 13GB (8 allocated, 5 reserved), to get under limit
// we need to unreserve 1GB
// also note vcore checks not enabled
assertEquals(Resources.createResource(1 * GB, 4),
userResourceLimits.getAmountNeededUnreserve());
refreshQueuesTurnOffReservationsContLook(a, csConf);
userResourceLimits = new ResourceLimits(clusterResource);
// should now return false since feature off
res = a.assignToUser(clusterResource, user_0, limit, app_0, null, userResourceLimits);
assertFalse(res);
assertEquals(Resources.none(), userResourceLimits.getAmountNeededUnreserve());
}
/**
 * Covers the cases where continue-looking CANNOT help: tight resource
 * limits prevent reserving at all (nothing to unreserve, or unreserving is
 * not allowed just to create a reservation), and a later oversized (8GB)
 * request reserves on node_0 but cannot also reserve on node_2 because the
 * queue limit is hit. Cluster: 3 nodes x 8GB = 24GB.
 */
@Test
public void testReservationsNoneAvailable() throws Exception {
// Test that we now unreserve and use a node that has space
CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration();
setup(csConf);
// Manipulate queue 'a'
LeafQueue a = stubLeafQueue((LeafQueue) queues.get(A));
// Users
final String user_0 = "user_0";
// Submit applications
final ApplicationAttemptId appAttemptId_0 = TestUtils
.getMockApplicationAttemptId(0, 0);
FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_0, a,
mock(ActiveUsersManager.class), spyRMContext);
rmContext.getRMApps().put(app_0.getApplicationId(), mock(RMApp.class));
a.submitApplicationAttempt(app_0, user_0);
final ApplicationAttemptId appAttemptId_1 = TestUtils
.getMockApplicationAttemptId(1, 0);
FiCaSchedulerApp app_1 = new FiCaSchedulerApp(appAttemptId_1, user_0, a,
mock(ActiveUsersManager.class), spyRMContext);
a.submitApplicationAttempt(app_1, user_0);
// Setup some nodes
String host_0 = "host_0";
FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
8 * GB);
String host_1 = "host_1";
FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
8 * GB);
String host_2 = "host_2";
FiCaSchedulerNode node_2 = TestUtils.getMockNode(host_2, DEFAULT_RACK, 0,
8 * GB);
when(csContext.getNode(node_0.getNodeID())).thenReturn(node_0);
when(csContext.getNode(node_1.getNodeID())).thenReturn(node_1);
when(csContext.getNode(node_2.getNodeID())).thenReturn(node_2);
final int numNodes = 3;
Resource clusterResource = Resources.createResource(numNodes * (8 * GB));
when(csContext.getNumClusterNodes()).thenReturn(numNodes);
// Setup resource-requests
// AM (2GB x1), maps (3GB x2), one reducer (5GB), then two oversized
// 8GB requests at the lowest priority.
Priority priorityAM = TestUtils.createMockPriority(1);
Priority priorityMap = TestUtils.createMockPriority(5);
Priority priorityReduce = TestUtils.createMockPriority(10);
Priority priorityLast = TestUtils.createMockPriority(12);
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 2 * GB, 1, true,
priorityAM, recordFactory)));
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 3 * GB, 2, true,
priorityMap, recordFactory)));
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 5 * GB, 1, true,
priorityReduce, recordFactory)));
app_0.updateResourceRequests(Collections.singletonList(TestUtils
.createResourceRequest(ResourceRequest.ANY, 8 * GB, 2, true,
priorityLast, recordFactory)));
// Start testing...
// Only AM
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(2 * GB, a.getUsedResources().getMemory());
assertEquals(2 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(2 * GB, a.getMetrics().getAllocatedMB());
assertEquals(22 * GB, a.getMetrics().getAvailableMB());
assertEquals(2 * GB, node_0.getUsedResource().getMemory());
assertEquals(0 * GB, node_1.getUsedResource().getMemory());
assertEquals(0 * GB, node_2.getUsedResource().getMemory());
// Only 1 map - simulating reduce
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(5 * GB, a.getUsedResources().getMemory());
assertEquals(5 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(5 * GB, a.getMetrics().getAllocatedMB());
assertEquals(19 * GB, a.getMetrics().getAvailableMB());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(0 * GB, node_1.getUsedResource().getMemory());
assertEquals(0 * GB, node_2.getUsedResource().getMemory());
// Only 1 map to other node - simulating reduce
a.assignContainers(clusterResource, node_1,
new ResourceLimits(clusterResource));
assertEquals(8 * GB, a.getUsedResources().getMemory());
assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
assertEquals(16 * GB, a.getMetrics().getAvailableMB());
assertEquals(16 * GB, app_0.getHeadroom().getMemory());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
assertEquals(0 * GB, node_2.getUsedResource().getMemory());
// try to assign reducer (5G on node 0), but tell it's resource limits <
// used (8G) + required (5G). It will not reserved since it has to unreserve
// some resource. Even with continous reservation looking, we don't allow
// unreserve resource to reserve container.
a.assignContainers(clusterResource, node_0,
new ResourceLimits(Resources.createResource(10 * GB)));
assertEquals(8 * GB, a.getUsedResources().getMemory());
assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
assertEquals(16 * GB, a.getMetrics().getAvailableMB());
// app_0's headroom = limit (10G) - used (8G) = 2G
assertEquals(2 * GB, app_0.getHeadroom().getMemory());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
assertEquals(0 * GB, node_2.getUsedResource().getMemory());
// try to assign reducer (5G on node 0), but tell it's resource limits <
// used (8G) + required (5G). It will not reserved since it has to unreserve
// some resource. Unfortunately, there's nothing to unreserve.
a.assignContainers(clusterResource, node_2,
new ResourceLimits(Resources.createResource(10 * GB)));
assertEquals(8 * GB, a.getUsedResources().getMemory());
assertEquals(8 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(8 * GB, a.getMetrics().getAllocatedMB());
assertEquals(16 * GB, a.getMetrics().getAvailableMB());
// app_0's headroom = limit (10G) - used (8G) = 2G
assertEquals(2 * GB, app_0.getHeadroom().getMemory());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
assertEquals(0 * GB, node_2.getUsedResource().getMemory());
// let it assign 5G to node_2
a.assignContainers(clusterResource, node_2,
new ResourceLimits(clusterResource));
assertEquals(13 * GB, a.getUsedResources().getMemory());
assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0 * GB, a.getMetrics().getReservedMB());
assertEquals(13 * GB, a.getMetrics().getAllocatedMB());
assertEquals(11 * GB, a.getMetrics().getAvailableMB());
assertEquals(11 * GB, app_0.getHeadroom().getMemory());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
assertEquals(5 * GB, node_2.getUsedResource().getMemory());
// reserve 8G node_0
a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
assertEquals(21 * GB, a.getUsedResources().getMemory());
assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(8 * GB, a.getMetrics().getReservedMB());
assertEquals(13 * GB, a.getMetrics().getAllocatedMB());
assertEquals(3 * GB, a.getMetrics().getAvailableMB());
assertEquals(3 * GB, app_0.getHeadroom().getMemory());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
assertEquals(5 * GB, node_2.getUsedResource().getMemory());
// try to assign (8G on node 2). No room to allocate,
// continued to try due to having reservation above,
// but hits queue limits so can't reserve anymore.
a.assignContainers(clusterResource, node_2,
new ResourceLimits(clusterResource));
assertEquals(21 * GB, a.getUsedResources().getMemory());
assertEquals(13 * GB, app_0.getCurrentConsumption().getMemory());
assertEquals(8 * GB, a.getMetrics().getReservedMB());
assertEquals(13 * GB, a.getMetrics().getAllocatedMB());
assertEquals(3 * GB, a.getMetrics().getAvailableMB());
assertEquals(3 * GB, app_0.getHeadroom().getMemory());
assertEquals(5 * GB, node_0.getUsedResource().getMemory());
assertEquals(3 * GB, node_1.getUsedResource().getMemory());
assertEquals(5 * GB, node_2.getUsedResource().getMemory());
}
}
| |
package com.miscitems.MiscItemsAndBlocks.Gui.Electric;
import MiscUtils.Network.PacketHandler;
import com.miscitems.MiscItemsAndBlocks.Container.Electric.ContainerLensBench;
import com.miscitems.MiscItemsAndBlocks.GuiObjects.GuiLensBenchButton;
import com.miscitems.MiscItemsAndBlocks.Main.Main;
import com.miscitems.MiscItemsAndBlocks.Network.Server.ServerLensBenchPacketDone;
import com.miscitems.MiscItemsAndBlocks.TileEntity.Electric.TileEntityLensBench;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.GuiTextField;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.util.EnumChatFormatting;
import net.minecraft.util.ResourceLocation;
import org.lwjgl.input.Keyboard;
import org.lwjgl.opengl.GL11;
import java.util.Arrays;
/**
 * Client-side screen for the Lens Bench machine. Lets the player toggle
 * damage / redstone-transfer / power-transfer modes, tune a power level
 * (clamped to 0-10), a strength value (clamped to 1-100) and an RGB colour
 * (0-255 per channel), and pushes every change to the server as a
 * ServerLensBenchPacketDone packet addressed by the tile's coordinates.
 */
public class GuiLensBench extends GuiContainer{

    // Tile entity being configured; slot 0 holds the lens, and the tile's
    // coordinates are embedded in every sync packet.
    private TileEntityLensBench tile;

    // Background texture of the screen.
    private final ResourceLocation Texture = new ResourceLocation("miscitems" , "textures/gui/GuiLensBench.png");

    // Editable state mirrored to the server. Power defaults to 0, Strength to 1.
    int Power, Strength = 1;
    boolean Damage, TransferPower, Redstone, Color;
    int Red, Green, Blue;

    // Three toggle buttons; Mode == 1 corresponds to the flag being true
    // (see actionPerformed ids 0-2).
    GuiLensBenchButton DamageBTN, RedstoneBTN, PowerBTN;

    // Read-only value boxes; their text is refreshed every frame in drawScreen.
    GuiTextField PowerText, StrengthText, RedText, GreenText, BlueText;

    public GuiLensBench(InventoryPlayer InvPlayer, TileEntityLensBench tile) {
        super(new ContainerLensBench(InvPlayer, tile));
        this.xSize = 176;
        this.ySize = 256;
        this.tile = tile;
    }

    @Override
    protected void drawGuiContainerForegroundLayer(int param1, int param2) {
        // Title plus the five value boxes. Null checks are needed because the
        // text fields are created lazily in initGui().
        fontRendererObj.drawString("Lens Bench" , 32, 12, 4210752);
        if(PowerText != null)
            PowerText.drawTextBox();
        if(StrengthText != null)
            StrengthText.drawTextBox();
        if(RedText != null)
            RedText.drawTextBox();
        if(GreenText != null)
            GreenText.drawTextBox();
        if(BlueText != null)
            BlueText.drawTextBox();
    }

    @Override
    protected void drawGuiContainerBackgroundLayer(float f, int X, int Y)
    {
        GL11.glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
        Minecraft.getMinecraft().renderEngine.bindTexture(Texture);
        drawTexturedModalRect(guiLeft, guiTop, 0, 0, xSize, ySize);
        // NOTE(review): initGui() is re-run on every rendered frame, which
        // rebuilds the whole button list each time -- presumably so the
        // shift-dependent "+"/"-" captions stay current (see initGui); confirm
        // before changing.
        initGui();
    }

    @Override
    public void initGui(){
        super.initGui();
        buttonList.clear();
        int posX = (this.width - xSize) / 2;
        int posY = (this.height - ySize) / 2;
        // The three toggle buttons are created once and afterwards only
        // repositioned, so their Mode state survives re-initialisation.
        if(DamageBTN == null){
            DamageBTN = new GuiLensBenchButton(0, posX + 105, posY + 7, 1);
        }else{
            DamageBTN.xPosition = posX + 105;
            DamageBTN.yPosition = posY + 7;
        }
        if(RedstoneBTN == null){
            RedstoneBTN = new GuiLensBenchButton(1, posX + 125, posY + 7, 2);
        }else{
            RedstoneBTN.xPosition = posX + 125;
            RedstoneBTN.yPosition = posY + 7;
        }
        if(PowerBTN == null) {
            PowerBTN = new GuiLensBenchButton(2, posX + 145, posY + 7, 3);
        }else{
            PowerBTN.xPosition = posX + 145;
            PowerBTN.yPosition = posY + 7;
        }
        // Value boxes are created once.
        // NOTE(review): these use absolute coordinates (8/88, ...) rather than
        // posX/posY offsets like the buttons above -- verify this renders at
        // the intended position on all window sizes.
        if(PowerText == null)
            PowerText = new GuiTextField(fontRendererObj, 8, 39, 60, 15);
        if(StrengthText == null)
            StrengthText = new GuiTextField(fontRendererObj, 88, 39, 80, 15);
        if(RedText == null)
            RedText = new GuiTextField(fontRendererObj, 8,80, 60, 15);
        if(BlueText == null)
            BlueText = new GuiTextField(fontRendererObj, 8,120, 60, 15);
        if(GreenText == null)
            GreenText = new GuiTextField(fontRendererObj,88, 80, 80, 15);
        // Key code 42 is the left shift key in LWJGL; holding it doubles the
        // caption to "++"/"--" (and Num becomes 10, see drawScreen).
        String add = Keyboard.isKeyDown(42) ? "++" : "+";
        String subtract = Keyboard.isKeyDown(42) ? "--" : "-";
        //Power
        buttonList.add(new GuiButton(3, posX + 7, posY + 56, 30, 20, subtract));
        buttonList.add(new GuiButton(4, posX + 39, posY + 56, 30, 20, add));
        //Strength
        buttonList.add(new GuiButton(5, posX + 87, posY + 56, 40, 20, subtract));
        buttonList.add(new GuiButton(6, posX + 129, posY + 56, 40, 20, add));
        //Red
        buttonList.add(new GuiButton(7, posX + 7, posY + 97, 30, 20, subtract));
        buttonList.add(new GuiButton(8, posX + 39, posY + 97, 30, 20, add));
        //Green
        buttonList.add(new GuiButton(9, posX + 87, posY + 97, 40, 20, subtract));
        buttonList.add(new GuiButton(10, posX + 129, posY + 97, 40, 20, add));
        //Blue
        buttonList.add(new GuiButton(11, posX + 7, posY + 137, 30, 20, subtract));
        buttonList.add(new GuiButton(12, posX + 39, posY + 137, 30, 20, add));
        //Reset Color
        buttonList.add(new GuiButton(13, posX + 90, posY + 120, 70, 20, "Reset Color"));
        buttonList.add(new GuiButton(14, posX + 86, posY + 142, 80, 20, "Load from Lens"));
        buttonList.add(DamageBTN);
        buttonList.add(RedstoneBTN);
        buttonList.add(PowerBTN);
    }

    // Step size for all +/- buttons: 1 normally, 10 while left shift is held
    // (updated each frame in drawScreen).
    public int Num = 1;

    @Override
    protected void actionPerformed(GuiButton button) {
        // Sync the current settings to the server BEFORE applying the click
        // (only when a lens is present in slot 0).
        if(tile.getStackInSlot(0) != null){
            PacketHandler.sendToServer(new ServerLensBenchPacketDone(Color, Red, Green, Blue, Power, Strength, TransferPower, Damage, Redstone, tile.xCoord, tile.yCoord, tile.zCoord), Main.Utils.channels);
        }
        // Ids 0-2: flip the three toggle modes (Mode 1 <-> flag true).
        if(button.id == 0){
            if(DamageBTN.Mode == 1){
                DamageBTN.Mode = 2;
                Damage = false;
            }else if (DamageBTN.Mode == 2){
                DamageBTN.Mode = 1;
                Damage = true;
            }
        }else if (button.id == 1){
            if(RedstoneBTN.Mode == 1){
                RedstoneBTN.Mode = 2;
                Redstone = false;
            }else if (RedstoneBTN.Mode == 2){
                RedstoneBTN.Mode = 1;
                Redstone = true;
            }
        }else if (button.id == 2){
            if(PowerBTN.Mode == 1){
                PowerBTN.Mode = 2;
                TransferPower = false;
            }else if (PowerBTN.Mode == 2){
                PowerBTN.Mode = 1;
                TransferPower = true;
            }
        }
        // Ids 3/4: power down/up, clamped to [0, 10].
        if(button.id == 3){
            if(Power - Num >= 0)
                Power -= Num;
            else
                Power = 0;
        }else if (button.id == 4){
            if(Power + Num < 10)
                Power += Num;
            else
                Power = 10;
        // Ids 5/6: strength down/up, clamped to [1, 100].
        }else if (button.id == 5){
            if(Strength - Num > 1)
                Strength -= Num;
            else
                Strength = 1;
        }else if (button.id == 6){
            if(Strength + Num < 100)
                Strength += Num;
            else
                Strength = 100;
        }
        int id = button.id;
        // Ids 7-12: red/green/blue down/up, each clamped to [0, 255].
        if(id == 7){
            if(Red - Num >= 0)
                Red -= Num;
            else
                Red = 0;
        }
        if(id == 8){
            if(Red + Num < 255)
                Red += Num;
            else
                Red = 255;
        }
        if(id == 9){
            if(Green - Num >= 0)
                Green -= Num;
            else
                Green = 0;
        }
        if(id == 10)
            // unbraced on purpose or by accident -- either way the else binds
            // to the inner if, so the clamp behaves like the other channels
            if(Green + Num < 255)
                Green += Num;
            else
                Green = 255;
        if(id == 11){
            if(Blue - Num >= 0)
                Blue -= Num;
            else
                Blue = 0;
        }
        if(id == 12){
            if(Blue + Num < 255)
                Blue += Num;
            else
                Blue = 255;
        }
        // Id 13: reset the colour to black and sync immediately.
        if(id == 13){
            Color = false;
            Red = 0;
            Green = 0;
            Blue = 0;
            if(tile.getStackInSlot(0) != null){
                PacketHandler.sendToServer(new ServerLensBenchPacketDone(Color, Red, Green, Blue, Power, Strength, TransferPower, Damage, Redstone, tile.xCoord, tile.yCoord, tile.zCoord), Main.Utils.channels);
            }
        }
        // Id 14: load every setting from the NBT of the lens in slot 0,
        // updating both the boolean flags and the button modes.
        if(id == 14){
            if(this.tile.getStackInSlot(0) != null){
                if(this.tile.getStackInSlot(0).stackTagCompound != null){
                    Red = this.tile.getStackInSlot(0).stackTagCompound.getInteger("Red");
                    Green = this.tile.getStackInSlot(0).stackTagCompound.getInteger("Green");
                    Blue = this.tile.getStackInSlot(0).stackTagCompound.getInteger("Blue");
                    Power = this.tile.getStackInSlot(0).stackTagCompound.getInteger("Power");
                    Strength = this.tile.getStackInSlot(0).stackTagCompound.getInteger("Strength");
                    Color = this.tile.getStackInSlot(0).stackTagCompound.getBoolean("Color");
                    if(this.tile.getStackInSlot(0).stackTagCompound.getBoolean("TransferPower")){
                        TransferPower = true;
                        PowerBTN.Mode = 1;
                    }else{
                        TransferPower = false;
                        PowerBTN.Mode = 2;
                    }
                    if(this.tile.getStackInSlot(0).stackTagCompound.getBoolean("Redstone")){
                        Redstone = true;
                        RedstoneBTN.Mode = 1;
                    }else{
                        Redstone = false;
                        RedstoneBTN.Mode = 2;
                    }
                    // "Safe" in NBT is the inverse of the Damage flag.
                    if(this.tile.getStackInSlot(0).stackTagCompound.getBoolean("Safe")){
                        Damage = false;
                        DamageBTN.Mode = 2;
                    }else{
                        Damage = true;
                        DamageBTN.Mode = 1;
                    }
                }
            }
        }
        // Sync again AFTER the click has been applied.
        if(tile.getStackInSlot(0) != null){
            PacketHandler.sendToServer(new ServerLensBenchPacketDone(Color, Red, Green, Blue, Power, Strength, TransferPower, Damage, Redstone, tile.xCoord, tile.yCoord, tile.zCoord), Main.Utils.channels);
        }
    }

    @Override
    public void drawScreen(int x, int y, float f) {
        // Colour is considered active whenever any channel is non-zero.
        if(Red > 0 || Blue > 0 || Green > 0)
            Color = true;
        else
            Color = false;
        // Refresh the read-only value boxes.
        if(PowerText != null)
            PowerText.setText("Power: " + Power);
        if(StrengthText != null)
            StrengthText.setText("Strength: " + Strength);
        if(RedText != null)
            RedText.setText("Red: " + Red);
        if(BlueText != null)
            BlueText.setText("Blue: " + Blue);
        if(GreenText != null)
            GreenText.setText("Green: " + Green);
        // Left shift (LWJGL key 42) switches the +/- step between 1 and 10.
        if(Keyboard.isKeyDown(42))
            Num = 10;
        else
            Num = 1;
        super.drawScreen(x, y, f);
        drawForeground(x, y, f);
    }

    // Draws hover tooltips for the three toggle buttons, showing the current
    // enabled state derived from each button's Mode.
    public void drawForeground(int par1, int par2, float par3)
    {
        for(int i = 0; i < this.buttonList.size(); i++){
            GuiButton btn = (GuiButton)this.buttonList.get(i);
            if(par1 >= btn.xPosition && par1 <= btn.xPosition + btn.getButtonWidth() && par2 >= btn.yPosition && par2 <= btn.yPosition + 20){
                if(btn.id == 0){
                    drawHoveringText(Arrays.asList(new String[] {EnumChatFormatting.WHITE + "Toggle damage", EnumChatFormatting.WHITE + "Currently enabled: " + (DamageBTN.Mode == 1 ? EnumChatFormatting.GREEN + "true" : EnumChatFormatting.RED + "false")}), par1, par2, fontRendererObj);
                }else if (btn.id == 1){
                    drawHoveringText(Arrays.asList(new String[] {EnumChatFormatting.WHITE + "Toggle redstone transfer", EnumChatFormatting.WHITE + "Currently enabled: " + (RedstoneBTN.Mode == 1 ? EnumChatFormatting.GREEN + "true" : EnumChatFormatting.RED + "false")}), par1, par2, fontRendererObj);
                }else if (btn.id == 2){
                    drawHoveringText(Arrays.asList(new String[] {EnumChatFormatting.WHITE + "Toggle power transfer", EnumChatFormatting.WHITE + "Currently enabled: " + (PowerBTN.Mode == 1 ? EnumChatFormatting.GREEN + "true" : EnumChatFormatting.RED + "false")}), par1, par2, fontRendererObj);
                }
            }
        }
    }

    // Pressing R reloads the settings from the lens NBT, like button id 14.
    // NOTE(review): unlike id 14 this only updates the button Modes and NOT the
    // Damage/TransferPower/Redstone boolean fields -- possible oversight, confirm.
    protected void keyTyped(char par1, int par2)
    {
        super.keyTyped(par1, par2);
        if(par2 == Keyboard.KEY_R){
            if(this.tile.getStackInSlot(0) != null){
                if(this.tile.getStackInSlot(0).stackTagCompound != null){
                    Red = this.tile.getStackInSlot(0).stackTagCompound.getInteger("Red");
                    Green = this.tile.getStackInSlot(0).stackTagCompound.getInteger("Green");
                    Blue = this.tile.getStackInSlot(0).stackTagCompound.getInteger("Blue");
                    Power = this.tile.getStackInSlot(0).stackTagCompound.getInteger("Power");
                    Strength = this.tile.getStackInSlot(0).stackTagCompound.getInteger("Strength");
                    Color = this.tile.getStackInSlot(0).stackTagCompound.getBoolean("Color");
                    if(this.tile.getStackInSlot(0).stackTagCompound.getBoolean("TransferPower"))
                        PowerBTN.Mode = 1;
                    else
                        PowerBTN.Mode = 2;
                    if(this.tile.getStackInSlot(0).stackTagCompound.getBoolean("Redstone"))
                        RedstoneBTN.Mode = 1;
                    else
                        RedstoneBTN.Mode = 2;
                    if(this.tile.getStackInSlot(0).stackTagCompound.getBoolean("Safe"))
                        DamageBTN.Mode = 2;
                    else
                        DamageBTN.Mode = 1;
                }
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.daohib.bpel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.apache.ode.bpel.dao.CorrelationSetDAO;
import org.apache.ode.bpel.dao.PartnerLinkDAO;
import org.apache.ode.bpel.dao.ProcessInstanceDAO;
import org.apache.ode.bpel.dao.ScopeDAO;
import org.apache.ode.bpel.dao.ScopeStateEnum;
import org.apache.ode.bpel.dao.XmlDataDAO;
import org.apache.ode.bpel.evt.BpelEvent;
import org.apache.ode.daohib.SessionManager;
import org.apache.ode.daohib.bpel.hobj.HBpelEvent;
import org.apache.ode.daohib.bpel.hobj.HCorrelationSet;
import org.apache.ode.daohib.bpel.hobj.HPartnerLink;
import org.apache.ode.daohib.bpel.hobj.HScope;
import org.apache.ode.daohib.bpel.hobj.HXmlData;
import org.apache.ode.utils.SerializableUtils;
import org.apache.ode.utils.stl.CollectionsX;
import org.apache.ode.utils.stl.UnaryFunction;
import org.apache.ode.utils.stl.UnaryFunctionEx;
import org.hibernate.Criteria;
import org.hibernate.Query;
import org.hibernate.criterion.Restrictions;
/**
 * Hibernate-based {@link ScopeDAO} implementation.
 * <p>
 * Wraps an {@link HScope} entity and exposes its variables, correlation sets,
 * partner links, child scopes and events through the generic DAO interfaces.
 */
public class ScopeDaoImpl extends HibernateDao implements ScopeDAO {

    // HQL: look up a variable by name within a given scope.
    // NOTE(review): currently unreferenced -- getVariable() scans
    // _scope.getVariables() in memory instead; kept for parity with the
    // other query templates.
    private static final String QRY_VARIABLE = "from "
            + HXmlData.class.getName()
            + " as x where x.name = ? and x.scope.id = ?";

    // HQL: look up a correlation set by name within a given scope.
    private static final String QRY_CSET = "from "
            + HCorrelationSet.class.getName()
            + " as c where c.name = ? and c.scope.id = ?";

    // HQL: look up a partner link by model id within a given scope.
    private static final String QRY_SCOPE_EPR = "from "
            + HPartnerLink.class.getName()
            + " as e where e.modelId = ? and e.scope = ?";

    /** The underlying Hibernate scope entity. */
    private HScope _scope;

    /** Per-DAO cache of variable DAOs, keyed by variable name. */
    private HashMap<String, XmlDataDAO> _variables = new HashMap<String, XmlDataDAO>();

    /**
     * Creates a DAO wrapping the given scope entity.
     *
     * @param sm    session manager providing the Hibernate session
     * @param scope scope entity to wrap
     */
    public ScopeDaoImpl(SessionManager sm, HScope scope) {
        super(sm, scope);
        entering("ScopeDaoImpl.ScopeDaoImpl");
        _scope = scope;
    }

    /**
     * Returns the named correlation set of this scope, creating and persisting
     * an empty one if it does not exist yet.
     *
     * @see org.apache.ode.bpel.dao.ScopeDAO#getCorrelationSet(java.lang.String)
     */
    public CorrelationSetDAO getCorrelationSet(String corrSetName) {
        entering("ScopeDaoImpl.getCorrelationSet");
        Query qry = getSession().createQuery(QRY_CSET);
        qry.setString(0, corrSetName);
        qry.setLong(1, _scope.getId());
        HCorrelationSet cs;
        List res = qry.list();
        if (res.isEmpty()) {
            // if it doesn't exist, we make it
            cs = new HCorrelationSet(_scope, corrSetName);
            getSession().save(cs);
        } else {
            cs = (HCorrelationSet) res.get(0);
        }
        return new CorrelationSetDaoImpl(_sm, cs);
    }

    /**
     * @see org.apache.ode.bpel.dao.ScopeDAO#getParentScope()
     */
    public ScopeDAO getParentScope() {
        entering("ScopeDaoImpl.getParentScope");
        return _scope.getParentScope() != null ? new ScopeDaoImpl(_sm,
                _scope.getParentScope()) : null;
    }

    /**
     * @see org.apache.ode.bpel.dao.ScopeDAO#getProcessInstance()
     */
    public ProcessInstanceDAO getProcessInstance() {
        entering("ScopeDaoImpl.getProcessInstance");
        return new ProcessInstanceDaoImpl(_sm, _scope.getInstance());
    }

    /**
     * Persists the new scope state immediately via the session.
     *
     * @see org.apache.ode.bpel.dao.ScopeDAO#setState(org.apache.ode.bpel.dao.ScopeStateEnum)
     */
    public void setState(ScopeStateEnum state) {
        entering("ScopeDaoImpl.setState");
        _scope.setState(state.toString());
        getSession().update(_scope);
    }

    /**
     * @see org.apache.ode.bpel.dao.ScopeDAO#getState()
     */
    public ScopeStateEnum getState() {
        return ScopeStateEnum.valueOf(_scope.getState());
    }

    /**
     * @see org.apache.ode.bpel.dao.ScopeDAO#getName()
     */
    public String getName() {
        return _scope.getName();
    }

    /**
     * Returns the named variable of this scope, creating an empty placeholder
     * attached to the scope if it does not exist yet. Results are cached per
     * DAO instance.
     *
     * @see org.apache.ode.bpel.dao.ScopeDAO#getVariable(java.lang.String)
     */
    public XmlDataDAO getVariable(String varName) {
        entering("ScopeDaoImpl.getVariable");
        XmlDataDAO cached = _variables.get(varName);
        if (cached != null)
            return cached; // fixed: avoid a redundant second map lookup
        HXmlData data = null;
        for (HXmlData e : _scope.getVariables()) {
            if (e.getName().equals(varName)
                    && e.getScope().getId().equals(_scope.getId())) {
                data = e;
                break; // fixed: stop at the first match instead of scanning on
            }
        }
        if (data == null) {
            // Variable does not exist yet: create a placeholder on this scope.
            data = new HXmlData();
            data.setName(varName);
            data.setScope(_scope);
            _scope.getVariables().add(data);
        }
        XmlDataDaoImpl varDao = new XmlDataDaoImpl(_sm, data);
        _variables.put(varName, varDao);
        return varDao;
    }

    /**
     * Creates and persists a new partner link attached to this scope.
     */
    public PartnerLinkDAO createPartnerLink(int modelId, String pLinkName,
                                            String myRole, String partnerRole) {
        entering("ScopeDaoImpl.createPartnerLink");
        HPartnerLink epr = new HPartnerLink();
        epr.setModelId(modelId);
        epr.setLinkName(pLinkName);
        epr.setMyRole(myRole);
        epr.setPartnerRole(partnerRole);
        epr.setScope(_scope);
        getSession().save(epr);
        PartnerLinkDAOImpl eprDao = new PartnerLinkDAOImpl(_sm, epr);
        return eprDao;
    }

    /**
     * Looks up a partner link of this scope by model id, or {@code null} if
     * none exists.
     */
    public PartnerLinkDAO getPartnerLink(int plinkId) {
        entering("ScopeDaoImpl.getPartnerLink");
        Query qry = getSession().createQuery(QRY_SCOPE_EPR);
        qry.setInteger(0, plinkId);
        qry.setEntity(1, _scope);
        HPartnerLink hpl = (HPartnerLink) qry.uniqueResult();
        if (hpl == null)
            return null;
        return new PartnerLinkDAOImpl(_sm, hpl);
    }

    /**
     * Returns DAO wrappers for all partner links of this scope.
     */
    public Collection<PartnerLinkDAO> getPartnerLinks() {
        entering("ScopeDaoImpl.getPartnerLinks");
        ArrayList<PartnerLinkDAO> plinks = new ArrayList<PartnerLinkDAO>();
        for (HPartnerLink hPartnerLink : _scope.getPartnerLinks()) {
            plinks.add(new PartnerLinkDAOImpl(_sm, hPartnerLink));
        }
        return plinks;
    }

    /**
     * @see org.apache.ode.bpel.dao.ScopeDAO#getScopeInstanceId()
     */
    public Long getScopeInstanceId() {
        return _scope.getId();
    }

    /**
     * @see org.apache.ode.bpel.dao.ScopeDAO#getModelId()
     */
    public int getModelId() {
        return _scope.getScopeModelId();
    }

    /**
     * Returns DAO wrappers for all correlation sets of this scope.
     */
    public Set<CorrelationSetDAO> getCorrelationSets() {
        entering("ScopeDaoImpl.getCorrelationSets");
        Set<CorrelationSetDAO> results = new HashSet<CorrelationSetDAO>();
        for (HCorrelationSet hCorrelationSet : _scope.getCorrelationSets()) {
            results.add(new CorrelationSetDaoImpl(_sm, hCorrelationSet));
        }
        return results;
    }

    /**
     * Queries and wraps all scopes whose parent is this scope.
     */
    @SuppressWarnings("unchecked")
    public Collection<ScopeDAO> getChildScopes() {
        entering("ScopeDaoImpl.getChildScopes");
        Query q = getSession().createQuery(
                "from " + HScope.class.getName()
                        + " as x where x.parentScope=?");
        q.setEntity(0, _scope);
        Collection<HScope> hscopes = q.list();
        return CollectionsX.transform(new LinkedList<ScopeDAO>(), hscopes,
                new UnaryFunction<HScope, ScopeDAO>() {
                    public ScopeDAO apply(HScope x) {
                        return new ScopeDaoImpl(_sm, x);
                    }
                });
    }

    /**
     * Returns DAO wrappers for all variables belonging to this scope.
     */
    @SuppressWarnings("unchecked")
    public Collection<XmlDataDAO> getVariables() {
        entering("ScopeDaoImpl.getVariables");
        Query q = getSession().createFilter(_scope.getVariables(),
                "where this.scope=?");
        q.setEntity(0, _scope);
        return CollectionsX.transform(new LinkedList<XmlDataDAO>(),
                (Collection<HXmlData>) q.list(),
                new UnaryFunction<HXmlData, XmlDataDAO>() {
                    public XmlDataDAO apply(HXmlData x) {
                        return new XmlDataDaoImpl(_sm, x);
                    }
                });
    }

    /**
     * Loads and deserializes all BPEL events recorded for this scope.
     *
     * @throws RuntimeException if an event payload cannot be deserialized
     */
    @SuppressWarnings("unchecked")
    public List<BpelEvent> listEvents() {
        entering("ScopeDaoImpl.listEvents");
        Criteria crit = _sm.getSession().createCriteria(HBpelEvent.class);
        crit.add(Restrictions.eq("scopeId", _scope.getId()));
        List<HBpelEvent> hevents = crit.list();
        List<BpelEvent> ret = new ArrayList<BpelEvent>(hevents.size());
        try {
            CollectionsX.transformEx(ret, hevents,
                    new UnaryFunctionEx<HBpelEvent, BpelEvent>() {
                        public BpelEvent apply(HBpelEvent x) throws Exception {
                            return (BpelEvent) SerializableUtils.toObject(
                                    x.getData(),
                                    BpelEvent.class.getClassLoader());
                        }
                    });
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
        return ret;
    }
}
| |
package apple.phase;
import apple.NSObject;
import apple.avfaudio.AVAudioChannelLayout;
import apple.avfaudio.AVAudioFormat;
import apple.foundation.NSArray;
import apple.foundation.NSData;
import apple.foundation.NSDictionary;
import apple.foundation.NSError;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.NSURL;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.ReferenceInfo;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.Ptr;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
* [@interface] PHASEAssetRegistry
* <p>
* Asset registry
*/
// NOTE(review): machine-generated MOE/NatJ binding for the native
// PHASEAssetRegistry Objective-C class; selector strings and signatures must
// mirror the native API exactly, so do not hand-edit member declarations.
@Generated
@Library("PHASE")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class PHASEAssetRegistry extends NSObject {
    static {
        // Registers this binding class with the NatJ runtime on first load.
        NatJ.register();
    }

    @Generated
    protected PHASEAssetRegistry(Pointer peer) {
        // Pointer-based constructor used by the runtime to wrap native instances.
        super(peer);
    }

    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();

    @Generated
    @Owned
    @Selector("alloc")
    public static native PHASEAssetRegistry alloc();

    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native PHASEAssetRegistry allocWithZone(VoidPtr zone);

    /**
     * assetForIdentifier
     * <p>
     * Finds an asset in the asset registry, given an identifier.
     *
     * @param identifier The identifier of this asset
     * @return A PHASEAsset object, or nil if one could not be found.
     */
    @Generated
    @Selector("assetForIdentifier:")
    public native PHASEAsset assetForIdentifier(String identifier);

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    /**
     * [@property] globalMetaParameters
     * <p>
     * A dictionary of global metaparameters
     */
    @Generated
    @Selector("globalMetaParameters")
    public native NSDictionary<String, ? extends PHASEMetaParameter> globalMetaParameters();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("init")
    public native PHASEAssetRegistry init();

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    @Generated
    @Owned
    @Selector("new")
    public static native PHASEAssetRegistry new_objc();

    /**
     * registerGlobalMetaParameter:error
     * <p>
     * Register a global metaparameter with the asset registry.
     * [@note]
     * This function is synchronous and thread-safe.
     * Clients can safely run this function to register multiple global metaparameters from multiple threads, if required.
     *
     * @param metaParameterDefinition The metaparameter object to register.
     * @param error The error object in case of an error.
     * @return A PHASEGlobalMetaParameterAsset object.
     */
    @Generated
    @Selector("registerGlobalMetaParameter:error:")
    public native PHASEGlobalMetaParameterAsset registerGlobalMetaParameterError(
            PHASEMetaParameterDefinition metaParameterDefinition,
            @ReferenceInfo(type = NSError.class) Ptr<NSError> error);

    /**
     * registerSoundAssetAtURL:identifier:assetType:channelLayout:normalizationMode:error
     * <p>
     * Register an audio file as a sound asset in the system.
     * [@note]
     * This function is synchronous and thread-safe.
     * Clients can safely run this function to register multiple sound assets from multiple threads, if required.
     *
     * @param url The URL of the audio file.
     * @param identifier An identifier that uniquely represents this sound event asset. Nil generates an automatic identifier.
     * @param assetType The asset type for this sound asset.
     * @param channelLayout The audio channel layout for this sound asset.
     * If a valid channel layout definition is read from the file being registered, this will override it.
     * If nil is passed as a value for this property, the file must either be mono or stereo, or already contain a vaild channel layout definition.
     * This channel layout must have the same channel count as the audio file being loaded.
     * @param normalizationMode The normalization mode.
     * @param error The error object in case of an error
     * @return A PHASESoundAsset object
     */
    @Generated
    @Selector("registerSoundAssetAtURL:identifier:assetType:channelLayout:normalizationMode:error:")
    public native PHASESoundAsset registerSoundAssetAtURLIdentifierAssetTypeChannelLayoutNormalizationModeError(
            NSURL url, String identifier, @NInt long assetType, AVAudioChannelLayout channelLayout,
            @NInt long normalizationMode, @ReferenceInfo(type = NSError.class) Ptr<NSError> error);

    /**
     * registerSoundAssetWithData:identifier:format:normalizationMode:error
     * <p>
     * Register audio data as a sound asset in the system.
     * [@note]
     * This function is synchronous and thread-safe.
     * Clients can safely run this function to register multiple sound assets from multiple threads, if required.
     *
     * @param data A buffer containing the audio data to register as a sound asset.
     * Audio data must either be a single PCM buffer of interleaved channels or multiple deinterleaved PCM buffers per channel packed back to back.
     * @param identifier The identifier to assign to this sound asset. Nil generates an automatic identifier.
     * @param format The AVAudioFormat object that describes the audio data in the buffer.
     * @param normalizationMode The normalization mode.
     * @param error The error object in case of an error.
     * @return A PHASESoundAsset object.
     */
    @Generated
    @Selector("registerSoundAssetWithData:identifier:format:normalizationMode:error:")
    public native PHASESoundAsset registerSoundAssetWithDataIdentifierFormatNormalizationModeError(NSData data,
            String identifier, AVAudioFormat format, @NInt long normalizationMode,
            @ReferenceInfo(type = NSError.class) Ptr<NSError> error);

    /**
     * registerSoundEventAssetWithRootNode:identifier:error
     * <p>
     * Register a sound event asset with the asset registry.
     * [@note]
     * This function is synchronous and thread-safe.
     * Clients can safely run this function to register multiple sound event assets from multiple threads, if required.
     *
     * @param rootNode The root node of the sound event asset to register.
     * @param identifier An identifier that uniquely represents this sound event asset. Nil generates an automatic identifier.
     * @param error The error object in case of an error
     * @return A PHASESoundEventNodeAsset object
     */
    @Generated
    @Selector("registerSoundEventAssetWithRootNode:identifier:error:")
    public native PHASESoundEventNodeAsset registerSoundEventAssetWithRootNodeIdentifierError(
            PHASESoundEventNodeDefinition rootNode, String identifier,
            @ReferenceInfo(type = NSError.class) Ptr<NSError> error);

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    /**
     * unregisterAssetWithIdentifier:completion:
     * <p>
     * Unregister and unload an asset.
     *
     * @param identifier The identifier of the PHASEAsset object to unregister
     * @param handler An optional completion block that will be called when the asset has been unregistered.
     * Once you receive this callback, it's safe to deallocate external resources, if applicable.
     */
    @Generated
    @Selector("unregisterAssetWithIdentifier:completion:")
    public native void unregisterAssetWithIdentifierCompletion(String identifier,
            @ObjCBlock(name = "call_unregisterAssetWithIdentifierCompletion") Block_unregisterAssetWithIdentifierCompletion handler);

    // Java-side shape of the native completion block passed to
    // unregisterAssetWithIdentifierCompletion.
    @Runtime(ObjCRuntime.class)
    @Generated
    public interface Block_unregisterAssetWithIdentifierCompletion {
        @Generated
        void call_unregisterAssetWithIdentifierCompletion(boolean success);
    }

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2017 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.model.impl.sql.edit.struct;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.model.*;
import org.jkiss.dbeaver.model.edit.DBECommandContext;
import org.jkiss.dbeaver.model.edit.DBEPersistAction;
import org.jkiss.dbeaver.model.edit.prop.DBECommandComposite;
import org.jkiss.dbeaver.model.impl.DBObjectNameCaseTransformer;
import org.jkiss.dbeaver.model.impl.edit.DBECommandAbstract;
import org.jkiss.dbeaver.model.impl.edit.SQLDatabasePersistAction;
import org.jkiss.dbeaver.model.impl.jdbc.struct.JDBCTable;
import org.jkiss.dbeaver.model.impl.jdbc.struct.JDBCTableColumn;
import org.jkiss.dbeaver.model.impl.sql.edit.SQLObjectEditor;
import org.jkiss.dbeaver.model.messages.ModelMessages;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.sql.SQLUtils;
import org.jkiss.dbeaver.model.struct.DBSDataType;
import org.jkiss.utils.CommonUtils;
import java.util.List;
/**
* JDBC table column manager
*/
public abstract class SQLTableColumnManager<OBJECT_TYPE extends JDBCTableColumn<TABLE_TYPE>, TABLE_TYPE extends JDBCTable>
extends SQLObjectEditor<OBJECT_TYPE, TABLE_TYPE>
{
// DDL feature flag queried via getDDLFeatures()/hasDDLFeature(); per its name
// it suppresses the COLUMN keyword in DROP statements (usage is outside this
// chunk -- confirm against the full class).
public static final long DDL_FEATURE_OMIT_COLUMN_CLAUSE_IN_DROP = 1;
// Quote character wrapped around DEFAULT values by DefaultModifier.
public static final String QUOTE = "'";

// Strategy that appends one fragment (type, nullability, default, ...) of a
// column declaration to the SQL statement being built.
protected interface ColumnModifier<OBJECT_TYPE extends DBPObject> {
    void appendModifier(OBJECT_TYPE column, StringBuilder sql, DBECommandAbstract<OBJECT_TYPE> command);
}
// Appends the column's declared type name plus any modifier string supplied
// by SQLUtils.getColumnTypeModifiers.
protected final ColumnModifier<OBJECT_TYPE> DataTypeModifier = new ColumnModifier<OBJECT_TYPE>() {
    @Override
    public void appendModifier(OBJECT_TYPE column, StringBuilder sql, DBECommandAbstract<OBJECT_TYPE> command) {
        final String typeName = column.getTypeName();
        DBPDataKind dataKind = column.getDataKind();
        final DBSDataType dataType = findDataType(column.getDataSource(), typeName);
        sql.append(' ').append(typeName);
        if (dataType == null) {
            // Driver does not resolve this type name: keep the declared name
            // and fall back to the column's own data kind.
            log.debug("Type name '" + typeName + "' is not supported by driver"); //$NON-NLS-1$ //$NON-NLS-2$
        } else {
            // Prefer the kind reported by the resolved data type.
            dataKind = dataType.getDataKind();
        }
        String modifiers = SQLUtils.getColumnTypeModifiers(column.getDataSource(), column, typeName, dataKind);
        if (modifiers != null) {
            sql.append(modifiers);
        }
    }
};
// Appends " NOT NULL" for mandatory columns; nullable columns get no clause.
protected final ColumnModifier<OBJECT_TYPE> NotNullModifier = new ColumnModifier<OBJECT_TYPE>() {
    @Override
    public void appendModifier(OBJECT_TYPE column, StringBuilder sql, DBECommandAbstract<OBJECT_TYPE> command) {
        if (!column.isRequired()) {
            return; // nothing to emit for nullable columns
        }
        sql.append(" NOT NULL"); //$NON-NLS-1$
    }
};
// Always emits an explicit nullability clause: " NOT NULL" for mandatory
// columns, " NULL" otherwise.
protected final ColumnModifier<OBJECT_TYPE> NullNotNullModifier = new ColumnModifier<OBJECT_TYPE>() {
    @Override
    public void appendModifier(OBJECT_TYPE column, StringBuilder sql, DBECommandAbstract<OBJECT_TYPE> command) {
        if (column.isRequired()) {
            sql.append(" NOT NULL");
        } else {
            sql.append(" NULL");
        }
    }
};
// Like NullNotNullModifier, but for composite commands it only emits a clause
// when the "required" property was actually part of the command.
protected final ColumnModifier<OBJECT_TYPE> NullNotNullModifierConditional = new ColumnModifier<OBJECT_TYPE>() {
    @Override
    public void appendModifier(OBJECT_TYPE column, StringBuilder sql, DBECommandAbstract<OBJECT_TYPE> command) {
        if (command instanceof DBECommandComposite) {
            if (((DBECommandComposite) command).getProperty("required") == null) {
                // Do not set NULL/NOT NULL if it wasn't changed
                return;
            }
        }
        NullNotNullModifier.appendModifier(column, sql, command);
    }
};
protected final ColumnModifier<OBJECT_TYPE> DefaultModifier = new ColumnModifier<OBJECT_TYPE>() {
    /**
     * Appends a DEFAULT clause when the column declares a non-empty default
     * value. Only DATETIME defaults that look like plain literals (first
     * non-space char is not a letter, '(' or '[') are wrapped in quotes.
     */
    @Override
    public void appendModifier(OBJECT_TYPE column, StringBuilder sql, DBECommandAbstract<OBJECT_TYPE> command) {
        String defaultValue = CommonUtils.toString(column.getDefaultValue());
        if (!CommonUtils.isEmpty(defaultValue)) {
            DBPDataKind dataKind = column.getDataKind();
            // Quoting of STRING defaults is intentionally disabled (see commented-out check).
            boolean useQuotes = false;//dataKind == DBPDataKind.STRING;
            // Only consider quoting when the value is not already quoted on either end.
            if (!defaultValue.startsWith(QUOTE) && !defaultValue.endsWith(QUOTE)) {
                // NOTE(review): useQuotes is always false at this point, so this
                // branch is dead code left over from the disabled STRING check.
                if (useQuotes && defaultValue.trim().startsWith(QUOTE)) {
                    useQuotes = false;
                }
                if (dataKind == DBPDataKind.DATETIME) {
                    final char firstChar = defaultValue.trim().charAt(0);
                    // Do not quote function calls (e.g. CURRENT_TIMESTAMP) or
                    // parenthesized/bracketed expressions.
                    if (!Character.isLetter(firstChar) && firstChar != '(' && firstChar != '[') {
                        useQuotes = true;
                    }
                }
            }
            sql.append(" DEFAULT "); //$NON-NLS-1$
            if (useQuotes) sql.append(QUOTE);
            sql.append(defaultValue);
            if (useQuotes) sql.append(QUOTE);
        }
    }
};
/**
 * Returns the ordered clause generators used to build a column declaration:
 * type, NOT NULL, DEFAULT. Dialect subclasses override to add or reorder clauses.
 */
protected ColumnModifier[] getSupportedModifiers(OBJECT_TYPE column) {
    return new ColumnModifier[] {DataTypeModifier, NotNullModifier, DefaultModifier};
}
/** A column is editable only when it belongs to a real table, not a view. */
@Override
public boolean canEditObject(OBJECT_TYPE object) {
    final TABLE_TYPE parent = object.getParentObject();
    if (parent == null) {
        return false;
    }
    return !parent.isView();
}
/** Columns may be created only on real tables, never on views. */
@Override
public boolean canCreateObject(TABLE_TYPE parent) {
    return !(parent == null || parent.isView());
}
/** Deletion follows the same rule as editing: real tables only. */
@Override
public boolean canDeleteObject(OBJECT_TYPE object) {
    return canEditObject(object);
}
/** Newly created columns should immediately open in an editor. */
@Override
public long getMakerOptions() {
    return FEATURE_EDITOR_ON_CREATE;
}
/**
 * DDL feature bitmask for the given column; the base implementation reports
 * no special features. Dialect subclasses override as needed.
 */
protected long getDDLFeatures(OBJECT_TYPE object) {
    return 0;
}
/** True when the dialect's DDL feature bitmask contains {@code feature}. */
private boolean hasDDLFeature(OBJECT_TYPE object, long feature) {
    final long features = getDDLFeatures(object);
    return (features & feature) != 0;
}
/** Emits {@code ALTER TABLE ... ADD <column declaration>} for a new column. */
@Override
protected void addObjectCreateActions(List<DBEPersistAction> actions, ObjectCreateCommand command) {
    final TABLE_TYPE table = command.getObject().getTable();
    final String ddl = "ALTER TABLE " + table.getFullyQualifiedName(DBPEvaluationContext.DDL)
        + " ADD " + getNestedDeclaration(table, command);
    actions.add(new SQLDatabasePersistAction(ModelMessages.model_jdbc_create_new_table_column, ddl));
}
/**
 * Emits {@code ALTER TABLE ... DROP [COLUMN] <name>} for a deleted column.
 * Dialects with DDL_FEATURE_OMIT_COLUMN_CLAUSE_IN_DROP omit the COLUMN keyword.
 */
@Override
protected void addObjectDeleteActions(List<DBEPersistAction> actions, ObjectDeleteCommand command)
{
    actions.add(
        new SQLDatabasePersistAction(
            ModelMessages.model_jdbc_drop_table_column, "ALTER TABLE " + command.getObject().getTable().getFullyQualifiedName(DBPEvaluationContext.DDL) + //$NON-NLS-2$
            " DROP " + (hasDDLFeature(command.getObject(), DDL_FEATURE_OMIT_COLUMN_CLAUSE_IN_DROP) ? "" : "COLUMN ") + DBUtils.getQuotedIdentifier(command.getObject())) //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    );
}
/**
 * Generates a unique default name ("Column1", "Column2", ...) for a new
 * column, checking both the table's persisted attributes and not-yet-saved
 * columns registered in the command context.
 */
protected String getNewColumnName(DBRProgressMonitor monitor, DBECommandContext context, TABLE_TYPE table) {
    int suffix = 0;
    while (true) {
        suffix++;
        final String candidate = DBObjectNameCaseTransformer.transformName(table.getDataSource(), "Column" + suffix);
        try {
            if (table.getAttribute(monitor, candidate) == null
                && !isPendingColumnName(context, table, candidate)) {
                return candidate;
            }
        } catch (DBException e) {
            // Metadata read failed - give up on uniqueness and use the candidate as-is.
            log.warn(e);
            return candidate;
        }
    }
}

/** True when a column with this name is already being created on {@code table} in the same command context. */
private boolean isPendingColumnName(DBECommandContext context, TABLE_TYPE table, String name) {
    for (DBPObject contextObject : context.getEditedObjects()) {
        if (contextObject instanceof JDBCTableColumn
            && ((JDBCTableColumn) contextObject).getTable() == table
            && name.equalsIgnoreCase(((JDBCTableColumn) contextObject).getName())) {
            return true;
        }
    }
    return false;
}
/**
 * Builds the inline column declaration used in CREATE/ALTER statements:
 * the quoted column name followed by each supported modifier clause.
 */
@Override
protected StringBuilder getNestedDeclaration(TABLE_TYPE owner, DBECommandAbstract<OBJECT_TYPE> command) {
    final OBJECT_TYPE column = command.getObject();
    // A rename command declares the column under its new name.
    final String visibleName = command instanceof SQLObjectEditor.ObjectRenameCommand
        ? ((ObjectRenameCommand) command).getNewName()
        : column.getName();
    final StringBuilder declaration = new StringBuilder(40);
    declaration.append(DBUtils.getQuotedIdentifier(column.getDataSource(), visibleName));
    for (ColumnModifier<OBJECT_TYPE> modifier : getSupportedModifiers(column)) {
        modifier.appendModifier(column, declaration, command);
    }
    return declaration;
}
/**
 * Verifies that a column change carries a non-empty name and type name.
 *
 * @throws DBException when either value is missing
 */
@Override
protected void validateObjectProperties(ObjectChangeCommand command)
    throws DBException
{
    if (CommonUtils.isEmpty(command.getObject().getName())) {
        throw new DBException("Column name cannot be empty");
    }
    if (CommonUtils.isEmpty(command.getObject().getTypeName())) {
        throw new DBException("Column type name cannot be empty");
    }
}
/** Resolves {@code typeName} against the data source's local types; null when unsupported. */
private static DBSDataType findDataType(DBPDataSource dataSource, String typeName) {
    return dataSource instanceof DBPDataTypeProvider
        ? ((DBPDataTypeProvider) dataSource).getLocalDataType(typeName)
        : null;
}
/** Picks the best-matching local data type for any of the candidate names; null when none match. */
protected static DBSDataType findBestDataType(DBPDataSource dataSource, String... typeNames) {
    if (!(dataSource instanceof DBPDataTypeProvider)) {
        return null;
    }
    return DBUtils.findBestDataType(((DBPDataTypeProvider) dataSource).getLocalDataTypes(), typeNames);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mahout.utils;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.Charset;
import com.google.common.base.Charsets;
import com.google.common.io.Closeables;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.mahout.classifier.ClassifierData;
import org.apache.mahout.common.Pair;
import org.apache.mahout.common.iterator.sequencefile.SequenceFileIterable;
import org.apache.mahout.common.iterator.sequencefile.SequenceFileValueIterable;
import org.apache.mahout.math.SequentialAccessSparseVector;
import org.apache.mahout.math.Vector;
import org.apache.mahout.math.VectorWritable;
import org.apache.mahout.math.map.OpenObjectIntHashMap;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for {@code SplitInput}: splitting text input files and directories
 * into training/test sets, both in-process and via the map-reduce path.
 */
public final class SplitInputTest extends MahoutTestCase {

  // Lines written per label by writeMultipleInputFiles(); consulted by testSplitDirectory().
  private OpenObjectIntHashMap<String> countMap;
  private Charset charset;
  private FileSystem fs;
  private Path tempInputFile;
  private Path tempTrainingDirectory;
  private Path tempTestDirectory;
  private Path tempMapRedOutputDirectory;
  private Path tempInputDirectory;
  private Path tempSequenceDirectory;
  private SplitInput si;

  @Override
  @Before
  public void setUp() throws Exception {
    Configuration conf = new Configuration();
    fs = FileSystem.get(conf);
    super.setUp();
    countMap = new OpenObjectIntHashMap<String>();
    charset = Charsets.UTF_8;
    tempSequenceDirectory = getTestTempFilePath("tmpsequence");
    tempInputFile = getTestTempFilePath("bayesinputfile");
    tempTrainingDirectory = getTestTempDirPath("bayestrain");
    tempTestDirectory = getTestTempDirPath("bayestest");
    tempMapRedOutputDirectory = new Path(getTestTempDirPath(), "mapRedOutput");
    tempInputDirectory = getTestTempDirPath("bayesinputdir");
    si = new SplitInput();
    si.setTrainingOutputDirectory(tempTrainingDirectory);
    si.setTestOutputDirectory(tempTestDirectory);
    si.setInputDirectory(tempInputDirectory);
  }

  /**
   * Writes one file per label under tempInputDirectory (one line per datum)
   * and records per-label line counts in countMap.
   */
  private void writeMultipleInputFiles() throws IOException {
    Writer writer = null;
    String currentLabel = null;
    try {
      for (String[] entry : ClassifierData.DATA) {
        if (!entry[0].equals(currentLabel)) {
          // New label: close the previous file and open one named after the label.
          currentLabel = entry[0];
          Closeables.closeQuietly(writer);
          writer = new BufferedWriter(new OutputStreamWriter(fs.create(new Path(tempInputDirectory, currentLabel)),
              Charsets.UTF_8));
        }
        countMap.adjustOrPutValue(currentLabel, 1, 1);
        writer.write(currentLabel + '\t' + entry[1] + '\n');
      }
    } finally {
      // FIX: close in a finally block so the writer is not leaked when an
      // exception is thrown mid-loop (previously only closed on success).
      Closeables.closeQuietly(writer);
    }
  }

  /** Writes all test data, tab-separated, into a single input file. */
  private void writeSingleInputFile() throws IOException {
    Writer writer = new BufferedWriter(new OutputStreamWriter(fs.create(tempInputFile), Charsets.UTF_8));
    try {
      for (String[] entry : ClassifierData.DATA) {
        writer.write(entry[0] + '\t' + entry[1] + '\n');
      }
    } finally {
      Closeables.closeQuietly(writer);
    }
  }

  @Test
  public void testSplitDirectory() throws Exception {
    writeMultipleInputFiles();
    final int testSplitSize = 1;
    si.setTestSplitSize(testSplitSize);
    si.setCallback(new SplitInput.SplitCallback() {
      @Override
      public void splitComplete(Path inputFile, int lineCount, int trainCount, int testCount, int testSplitStart) {
        int trainingLines = countMap.get(inputFile.getName()) - testSplitSize;
        assertSplit(fs, inputFile, charset, testSplitSize, trainingLines, tempTrainingDirectory, tempTestDirectory);
      }
    });
    si.splitDirectory(tempInputDirectory);
  }

  @Test
  public void testSplitFile() throws Exception {
    writeSingleInputFile();
    si.setTestSplitSize(2);
    si.setCallback(new TestCallback(2, 10));
    si.splitFile(tempInputFile);
  }

  @Test
  public void testSplitFileLocation() throws Exception {
    writeSingleInputFile();
    si.setTestSplitSize(2);
    si.setSplitLocation(50);
    si.setCallback(new TestCallback(2, 10));
    si.splitFile(tempInputFile);
  }

  @Test
  public void testSplitFilePct() throws Exception {
    writeSingleInputFile();
    si.setTestSplitPct(25);
    si.setCallback(new TestCallback(3, 9));
    si.splitFile(tempInputFile);
  }

  @Test
  public void testSplitFilePctLocation() throws Exception {
    writeSingleInputFile();
    si.setTestSplitPct(25);
    si.setSplitLocation(50);
    si.setCallback(new TestCallback(3, 9));
    si.splitFile(tempInputFile);
  }

  @Test
  public void testSplitFileRandomSelectionSize() throws Exception {
    writeSingleInputFile();
    si.setTestRandomSelectionSize(5);
    si.setCallback(new TestCallback(5, 7));
    si.splitFile(tempInputFile);
  }

  @Test
  public void testSplitFileRandomSelectionPct() throws Exception {
    writeSingleInputFile();
    si.setTestRandomSelectionPct(25);
    si.setCallback(new TestCallback(3, 9));
    si.splitFile(tempInputFile);
  }

  /**
   * Create a SequenceFile for testing consisting of IntWritable
   * keys and VectorWritable values
   * @param path path for test SequenceFile
   * @param testPoints number of records in test SequenceFile
   */
  private void writeVectorSequenceFile(Path path, int testPoints)
      throws IOException {
    Path tempSequenceFile = new Path(path, "part-00000");
    Configuration conf = new Configuration();
    IntWritable key = new IntWritable();
    VectorWritable value = new VectorWritable();
    SequenceFile.Writer writer = null;
    try {
      writer =
          SequenceFile.createWriter(fs, conf, tempSequenceFile,
              IntWritable.class, VectorWritable.class);
      for (int i = 0; i < testPoints; i++) {
        key.set(i);
        Vector v = new SequentialAccessSparseVector(4);
        v.assign(i);
        value.set(v);
        writer.append(key, value);
      }
    } finally {
      IOUtils.closeStream(writer);
    }
  }

  /**
   * Create a SequenceFile for testing consisting of Text
   * keys and Text values
   * @param path path for test SequenceFile
   * @param testPoints number of records in test SequenceFile
   */
  private void writeTextSequenceFile(Path path, int testPoints)
      throws IOException {
    Path tempSequenceFile = new Path(path, "part-00000");
    Configuration conf = new Configuration();
    Text key = new Text();
    Text value = new Text();
    SequenceFile.Writer writer = null;
    try {
      writer =
          SequenceFile.createWriter(fs, conf, tempSequenceFile,
              Text.class, Text.class);
      for (int i = 0; i < testPoints; i++) {
        key.set(Integer.toString(i));
        value.set("Line " + i);
        writer.append(key, value);
      }
    } finally {
      IOUtils.closeStream(writer);
    }
  }

  /**
   * Display contents of a SequenceFile
   * @param sequenceFilePath path to SequenceFile
   */
  private static void displaySequenceFile(Path sequenceFilePath) {
    for (Pair<?,?> record : new SequenceFileIterable<Writable,Writable>(sequenceFilePath, true, new Configuration())) {
      System.out.println(record.getFirst() + "\t" + record.getSecond());
    }
  }

  /**
   * Determine number of records in a SequenceFile
   * @param sequenceFilePath path to SequenceFile
   * @return number of records
   */
  private static int getNumberRecords(Path sequenceFilePath) {
    int numberRecords = 0;
    // FIX: name the unused loop variable 'ignored' (was 'value', flagged as unused).
    for (Writable ignored : new SequenceFileValueIterable<Writable>(sequenceFilePath, true, new Configuration())) {
      numberRecords++;
    }
    return numberRecords;
  }

  /**
   * Test map reduce version of split input with Text, Text key value
   * pairs in input
   */
  @Test
  public void testSplitInputMapReduceText() throws Exception {
    writeTextSequenceFile(tempSequenceDirectory, 1000);
    testSplitInputMapReduce(1000);
  }

  /**
   * Test map reduce version of split input with Text, Text key value
   * pairs in input called from command line
   */
  @Test
  public void testSplitInputMapReduceTextCli() throws Exception {
    writeTextSequenceFile(tempSequenceDirectory, 1000);
    testSplitInputMapReduceCli(1000);
  }

  /**
   * Test map reduce version of split input with IntWritable, Vector key value
   * pairs in input
   */
  @Test
  public void testSplitInputMapReduceVector() throws Exception {
    writeVectorSequenceFile(tempSequenceDirectory, 1000);
    testSplitInputMapReduce(1000);
  }

  /**
   * Test map reduce version of split input with IntWritable, Vector key value
   * pairs in input called from command line
   */
  @Test
  public void testSplitInputMapReduceVectorCli() throws Exception {
    writeVectorSequenceFile(tempSequenceDirectory, 1000);
    testSplitInputMapReduceCli(1000);
  }

  /**
   * Test map reduce version of split input through CLI
   */
  private void testSplitInputMapReduceCli(int numPoints) throws Exception {
    int randomSelectionPct = 25;
    int keepPct = 10;
    String[] args =
        { "--method", "mapreduce", "--input", tempSequenceDirectory.toString(),
          "--mapRedOutputDir", tempMapRedOutputDirectory.toString(),
          "--randomSelectionPct", Integer.toString(randomSelectionPct),
          "--keepPct", Integer.toString(keepPct), "-ow" };
    SplitInput.main(args);
    validateSplitInputMapReduce(numPoints, randomSelectionPct, keepPct);
  }

  /**
   * Test map reduce version of split input through method call
   */
  private void testSplitInputMapReduce(int numPoints) throws Exception {
    int randomSelectionPct = 25;
    si.setTestRandomSelectionPct(randomSelectionPct);
    int keepPct = 10;
    si.setKeepPct(keepPct);
    si.setMapRedOutputDirectory(tempMapRedOutputDirectory);
    si.setUseMapRed(true);
    si.splitDirectory(tempSequenceDirectory);
    validateSplitInputMapReduce(numPoints, randomSelectionPct, keepPct);
  }

  /**
   * Validate that number of test records and number of training records
   * are consistent with keepPct and randomSelectionPct (within a delta of 2,
   * since the split is random).
   */
  private void validateSplitInputMapReduce(int numPoints, int randomSelectionPct, int keepPct) {
    Path testPath = new Path(tempMapRedOutputDirectory, "test-r-00000");
    Path trainingPath = new Path(tempMapRedOutputDirectory, "training-r-00000");
    int numberTestRecords = getNumberRecords(testPath);
    int numberTrainingRecords = getNumberRecords(trainingPath);
    System.out.printf("Test data: %d records\n", numberTestRecords);
    displaySequenceFile(testPath);
    System.out.printf("Training data: %d records\n", numberTrainingRecords);
    displaySequenceFile(trainingPath);
    assertEquals((randomSelectionPct / 100.0) * (keepPct / 100.0) * numPoints,
        numberTestRecords, 2);
    assertEquals(
        (1 - randomSelectionPct / 100.0) * (keepPct / 100.0) * numPoints,
        numberTrainingRecords, 2);
  }

  /** Exercises every invalid parameter combination SplitInput.validate() rejects. */
  @Test
  public void testValidate() throws Exception {
    SplitInput st = new SplitInput();
    assertValidateException(st);
    st.setTestSplitSize(100);
    assertValidateException(st);
    st.setTestOutputDirectory(tempTestDirectory);
    assertValidateException(st);
    st.setTrainingOutputDirectory(tempTrainingDirectory);
    st.validate();
    st.setTestSplitPct(50);
    assertValidateException(st);
    st = new SplitInput();
    st.setTestRandomSelectionPct(50);
    st.setTestOutputDirectory(tempTestDirectory);
    st.setTrainingOutputDirectory(tempTrainingDirectory);
    st.validate();
    st.setTestSplitPct(50);
    assertValidateException(st);
    st = new SplitInput();
    st.setTestRandomSelectionPct(50);
    st.setTestOutputDirectory(tempTestDirectory);
    st.setTrainingOutputDirectory(tempTrainingDirectory);
    st.validate();
    st.setTestSplitSize(100);
    assertValidateException(st);
  }

  /** Callback asserting the expected test/training line counts for a single-file split. */
  private class TestCallback implements SplitInput.SplitCallback {
    private final int testSplitSize;
    private final int trainingLines;

    private TestCallback(int testSplitSize, int trainingLines) {
      this.testSplitSize = testSplitSize;
      this.trainingLines = trainingLines;
    }

    @Override
    public void splitComplete(Path inputFile, int lineCount, int trainCount, int testCount, int testSplitStart) {
      assertSplit(fs, tempInputFile, charset, testSplitSize, trainingLines, tempTrainingDirectory, tempTestDirectory);
    }
  }

  /** Asserts that validate() throws IllegalArgumentException for the current configuration. */
  private static void assertValidateException(SplitInput st) throws IOException {
    try {
      st.validate();
      fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException iae) {
      // good
    }
  }

  /** Asserts the line counts of the test and training output files produced for an input file. */
  private static void assertSplit(FileSystem fs,
                                  Path tempInputFile,
                                  Charset charset,
                                  int testSplitSize,
                                  int trainingLines,
                                  Path tempTrainingDirectory,
                                  Path tempTestDirectory) {
    try {
      Path testFile = new Path(tempTestDirectory, tempInputFile.getName());
      //assertTrue("test file exists", testFile.isFile());
      assertEquals("test line count", testSplitSize, SplitInput.countLines(fs, testFile, charset));
      Path trainingFile = new Path(tempTrainingDirectory, tempInputFile.getName());
      //assertTrue("training file exists", trainingFile.isFile());
      assertEquals("training line count", trainingLines, SplitInput.countLines(fs, trainingFile, charset));
    } catch (IOException ioe) {
      fail(ioe.toString());
    }
  }
}
| |
package ij.gui;
import ij.*;
import ij.plugin.Colors;
import ij.io.RoiDecoder;
import ij.process.*;
import ij.measure.*;
import ij.util.Tools;
import ij.plugin.filter.Analyzer;
import ij.text.TextWindow;
import java.awt.*;
import java.util.*;
import java.awt.event.*;
/** Displays a dialog that allows the user to specify ROI properties such as color and line width. */
public class RoiProperties implements TextListener, WindowListener {
    private ImagePlus imp;
    private Roi roi;
    private Overlay overlay;
    private String title;
    private boolean showName = true;
    private boolean showListCoordinates;
    private boolean addToOverlay;
    private boolean overlayOptions;
    private boolean setPositions;
    private boolean listCoordinates;
    private boolean listProperties;
    private boolean showPointCounts;
    private static final String[] justNames = {"Left", "Center", "Right"};
    private int nProperties;
    private TextField groupField, colorField;
    private Label groupName;

    /** Constructs a RoiProperties dialog for the specified ROI. The title string
        selects the dialog variant ("Prop...", "Add to Overlay", "Overlay Options"). */
    public RoiProperties(String title, Roi roi) {
        if (roi==null)
            throw new IllegalArgumentException("ROI is null");
        this.title = title;
        showName = title.startsWith("Prop");
        showListCoordinates = showName && title.endsWith(" ");
        nProperties = showListCoordinates?roi.getPropertyCount():0;
        addToOverlay = title.equals("Add to Overlay");
        overlayOptions = title.equals("Overlay Options");
        if (overlayOptions) {
            imp = WindowManager.getCurrentImage();
            overlay = imp!=null?imp.getOverlay():null;
            setPositions = roi.getPosition()!=0;
        }
        this.roi = roi;
    }

    /** Displays the dialog box and returns 'false' if the user cancels it.
        NOTE: the gd.getNext*() calls below must stay in exactly the same order
        as the corresponding gd.add*() calls above them. */
    public boolean showDialog() {
        String name= roi.getName();
        boolean isRange = name!=null && name.startsWith("range:");
        String nameLabel = isRange?"Range:":"Name:";
        if (isRange) name = name.substring(7);
        if (name==null) name = "";
        if (!isRange && (roi instanceof ImageRoi) && !overlayOptions)
            return showImageDialog(name);
        Color strokeColor = roi.getStrokeColor();
        Color fillColor = roi.getFillColor();
        double strokeWidth = roi.getStrokeWidth();
        double strokeWidth2 = strokeWidth;
        boolean isText = roi instanceof TextRoi;
        boolean isLine = roi.isLine();
        boolean isPoint = roi instanceof PointRoi;
        int justification = TextRoi.LEFT;
        double angle = 0.0;
        boolean antialias = true;
        if (isText) {
            TextRoi troi = (TextRoi)roi;
            Font font = troi.getCurrentFont();
            // For text ROIs the "stroke width" field doubles as the font size.
            strokeWidth = font.getSize();
            angle = troi.getAngle();
            justification = troi.getJustification();
            antialias = troi.getAntiAlias();
        }
        String position = ""+roi.getPosition();
        if (roi.hasHyperStackPosition())
            position = roi.getCPosition() +","+roi.getZPosition()+","+ roi.getTPosition();
        if (position.equals("0"))
            position = "none";
        String group = ""+roi.getGroup();
        if (group.equals("0"))
            group = "none";
        String linec = Colors.colorToString(strokeColor);
        String fillc = Colors.colorToString(fillColor);
        if (IJ.isMacro()) {
            fillc = "none";
            setPositions = false;
        }
        int digits = (int)strokeWidth==strokeWidth?0:1;
        GenericDialog gd = new GenericDialog(title);
        if (showName) {
            gd.addStringField(nameLabel, name, 20);
            String label = "Position:";
            ImagePlus imp = WindowManager.getCurrentImage();
            if (position.contains(",") || (imp!=null&&imp.isHyperStack()))
                label = "Position (c,s,f):";
            gd.addStringField(label, position);
            gd.addStringField("Group:", group);
            gd.addToSameRow(); gd.addMessage("wwwwwwwwwwww");
        }
        if (isText) {
            gd.addStringField("Stroke color:", linec);
            gd.addNumericField("Font size:", strokeWidth, digits, 4, "points");
            digits = (int)angle==angle?0:1;
            gd.addNumericField("Angle:", angle, digits, 4, "degrees");
            gd.setInsets(0, 0, 0);
            gd.addChoice("Justification:", justNames, justNames[justification]);
        } else {
            if (isPoint)
                gd.addStringField("Stroke (point) color:", linec);
            else {
                gd.addStringField("Stroke color:", linec);
                gd.addNumericField("Width:", strokeWidth, digits);
            }
        }
        groupName = (Label)gd.getMessage();
        if (showName && !IJ.isMacro()) {
            // Hook the group text field so the group-name label updates live.
            Vector v = gd.getStringFields();
            groupField = (TextField)v.elementAt(v.size()-2);
            groupField.addTextListener(this);
            colorField = (TextField)v.elementAt(v.size()-1);
        }
        if (!isLine) {
            if (isPoint) {
                int index = ((PointRoi)roi).getPointType();
                gd.addChoice("Point type:", PointRoi.types, PointRoi.types[index]);
                index = ((PointRoi)roi).getSize();
                gd.addChoice("Size:", PointRoi.sizes, PointRoi.sizes[index]);
            } else {
                gd.addMessage("");
                gd.addStringField("Fill color:", fillc);
            }
        }
        if (addToOverlay)
            gd.addCheckbox("New overlay", false);
        if (overlayOptions) {
            gd.addCheckbox("Set stack positions", setPositions);
            if (overlay!=null) {
                int size = overlay.size();
                gd.setInsets(15,20,0);
                if (imp!=null && imp.getHideOverlay())
                    gd.addMessage("Current overlay is hidden", null, Color.darkGray);
                else
                    gd.addMessage("Current overlay has "+size+" element"+(size>1?"s":""), null, Color.darkGray);
                gd.setInsets(0,30,0);
                gd.addCheckbox("Apply", false);
                gd.setInsets(0,30,0);
                gd.addCheckbox("Show labels", overlay.getDrawLabels());
                gd.setInsets(0,30,0);
                gd.addCheckbox("Hide", imp!=null?imp.getHideOverlay():false);
            } else
                gd.addMessage("No overlay", null, Color.darkGray);
        }
        if (isText)
            gd.addCheckbox("Antialiased text", antialias);
        if (showListCoordinates) {
            if ((roi instanceof PointRoi) && Toolbar.getMultiPointMode())
                showPointCounts = true;
            if (showPointCounts)
                gd.addCheckbox("Show point counts (shortcut: alt+y)", listCoordinates);
            else
                gd.addCheckbox("List coordinates ("+roi.size()+")", listCoordinates);
            if (nProperties>0)
                gd.addCheckbox("List properties ("+nProperties+")", listProperties);
            else {
                gd.setInsets(5,20,0);
                gd.addMessage("No properties");
            }
        }
        if (isText && !isRange) {
            String text = ((TextRoi)roi).getText();
            int nLines = Tools.split(text, "\n").length + 1;
            gd.addTextAreas(text, null, Math.min(nLines+1, 5), 30);
        }
        if (showName && "".equals(name) && "none".equals(position) && "none".equals(group) && "none".equals(fillc))
            gd.setSmartRecording(true);
        gd.addWindowListener(this);
        gd.showDialog();
        if (gd.wasCanceled())
            return false;
        // Read back the fields in the same order they were added.
        String position2 = "";
        String group2 = "";
        if (showName) {
            name = gd.getNextString();
            if (!isRange) roi.setName(name.length()>0?name:null);
            position2 = gd.getNextString();
            group2 = gd.getNextString();
        }
        linec = gd.getNextString();
        if (!isPoint)
            strokeWidth2 = gd.getNextNumber();
        if (isText) {
            angle = gd.getNextNumber();
            justification = gd.getNextChoiceIndex();
        }
        if (!isLine) {
            if (isPoint) {
                int index = gd.getNextChoiceIndex();
                ((PointRoi)roi).setPointType(index);
                index = gd.getNextChoiceIndex();
                ((PointRoi)roi).setSize(index);
            } else
                fillc = gd.getNextString();
        }
        boolean applyToOverlay = false;
        boolean newOverlay = addToOverlay?gd.getNextBoolean():false;
        if (overlayOptions) {
            setPositions = gd.getNextBoolean();
            if (overlay!=null) {
                applyToOverlay = gd.getNextBoolean();
                boolean labels = gd.getNextBoolean();
                boolean hideOverlay = gd.getNextBoolean();
                if (hideOverlay && imp!=null) {
                    if (!imp.getHideOverlay())
                        imp.setHideOverlay(true);
                } else {
                    overlay.drawLabels(labels);
                    Analyzer.drawLabels(labels);
                    overlay.drawBackgrounds(true);
                    // FIX: guard against imp==null (the surrounding code already
                    // allows a null image here; previously this threw an NPE).
                    if (imp!=null && imp.getHideOverlay())
                        imp.setHideOverlay(false);
                    if (!applyToOverlay && imp!=null)
                        imp.draw();
                }
            }
            roi.setPosition(setPositions?1:0);
        }
        if (isText)
            antialias = gd.getNextBoolean();
        if (showListCoordinates) {
            listCoordinates = gd.getNextBoolean();
            if (nProperties>0)
                listProperties = gd.getNextBoolean();
        }
        strokeColor = Colors.decode(linec, null);
        fillColor = Colors.decode(fillc, null);
        if (isText) {
            TextRoi troi = (TextRoi)roi;
            Font font = troi.getCurrentFont();
            if (strokeWidth2!=strokeWidth) {
                font = new Font(font.getName(), font.getStyle(), (int)strokeWidth2);
                troi.setCurrentFont(font);
            }
            troi.setAngle(angle);
            if (justification!=troi.getJustification())
                troi.setJustification(justification);
            troi.setAntiAlias(antialias);
            if (!isRange) troi.setText(gd.getNextText());
        } else if (strokeWidth2!=strokeWidth)
            roi.setStrokeWidth((float)strokeWidth2);
        roi.setStrokeColor(strokeColor);
        roi.setFillColor(fillColor);
        if (showName) {
            setPosition(roi, position, position2);
            setGroup(roi, group, group2);
        }
        if (newOverlay) roi.setName("new-overlay");
        if (applyToOverlay) {
            if (imp==null || overlay==null)
                return true;
            Roi[] rois = overlay.toArray();
            for (int i=0; i<rois.length; i++) {
                rois[i].setStrokeColor(strokeColor);
                if (strokeWidth2!=strokeWidth)
                    rois[i].setStrokeWidth((float)strokeWidth2);
                rois[i].setFillColor(fillColor);
            }
            imp.draw();
            imp.getProcessor(); // needed for correct recordering
        }
        if (listCoordinates) {
            if (showPointCounts && (roi instanceof PointRoi))
                ((PointRoi)roi).displayCounts();
            else
                listCoordinates(roi);
        }
        if (listProperties && nProperties>0)
            listProperties(roi);
        return true;
    }

    /** Applies the edited position string; accepts "none"/"0", a single stack
        index, or a "c,z,t" hyperstack triple. Invalid input is ignored. */
    private void setPosition(Roi roi, String pos1, String pos2) {
        if (pos1.equals(pos2))
            return;
        if (pos2.equals("none") || pos2.equals("0")) {
            roi.setPosition(0);
            return;
        }
        String[] positions = Tools.split(pos2, " ,");
        if (positions.length==1) {
            double stackPos = Tools.parseDouble(positions[0]);
            if (!Double.isNaN(stackPos))
                roi.setPosition((int)stackPos);
            return;
        }
        if (positions.length==3) {
            int[] pos = new int[3];
            for (int i=0; i<3; i++) {
                double dpos = Tools.parseDouble(positions[i]);
                if (Double.isNaN(dpos))
                    return;
                else
                    pos[i] = (int)dpos;
            }
            roi.setPosition(pos[0], pos[1], pos[2]);
            return;
        }
    }

    /** Applies the edited group string; "none"/"0" clears the group, any other
        parseable number sets it. Invalid input is ignored. */
    private void setGroup(Roi roi, String group1, String group2) {
        if (group1.equals(group2))
            return;
        if (group2.equals("none") || group2.equals("0")) {
            roi.setGroup(0);
            return;
        }
        double group = Tools.parseDouble(group2);
        if (!Double.isNaN(group))
            roi.setGroup((int)group);
    }

    /** Shows the reduced properties dialog used for image ROIs (name, opacity,
        transparency); returns 'false' if the user cancels it. */
    public boolean showImageDialog(String name) {
        ImageRoi iRoi = (ImageRoi)roi;
        boolean zeroTransparent = iRoi.getZeroTransparent();
        GenericDialog gd = new GenericDialog("Image ROI Properties");
        gd.addStringField("Name:", name, 15);
        gd.addNumericField("Opacity (0-100%):", iRoi.getOpacity()*100.0, 0);
        gd.addCheckbox("Transparent background", zeroTransparent);
        if (addToOverlay)
            gd.addCheckbox("New Overlay", false);
        gd.showDialog();
        if (gd.wasCanceled())
            return false;
        name = gd.getNextString();
        roi.setName(name.length()>0?name:null);
        double opacity = gd.getNextNumber()/100.0;
        iRoi.setOpacity(opacity);
        boolean zeroTransparent2 = gd.getNextBoolean();
        if (zeroTransparent!=zeroTransparent2)
            iRoi.setZeroTransparent(zeroTransparent2);
        boolean newOverlay = addToOverlay?gd.getNextBoolean():false;
        if (newOverlay) roi.setName("new-overlay");
        return true;
    }

    /** Lists the ROI's (calibrated) outline coordinates in a results table. */
    void listCoordinates(Roi roi) {
        if (roi==null) return;
        boolean allIntegers = true;
        FloatPolygon fp = roi.getFloatPolygon();
        ImagePlus imp = roi.getImage();
        String title = "Coordinates";
        if (imp!=null) {
            Calibration cal = imp.getCalibration();
            int height = imp.getHeight();
            for (int i=0; i<fp.npoints; i++) {
                fp.xpoints[i] = (float)cal.getX(fp.xpoints[i]);
                fp.ypoints[i] = (float)cal.getY(fp.ypoints[i], height);
            }
            if (cal.pixelWidth!=1.0 || cal.pixelHeight!=1.0)
                allIntegers = false;
            title = imp.getTitle();
        }
        if (allIntegers) {
            // Integer-valued coordinates are shown without decimals.
            for (int i=0; i<fp.npoints; i++) {
                if ((int)fp.xpoints[i]!=fp.xpoints[i] || (int)fp.ypoints[i]!=fp.ypoints[i]) {
                    allIntegers = false;
                    break;
                }
            }
        }
        ResultsTable rt = new ResultsTable();
        rt.setPrecision(allIntegers?0:Analyzer.getPrecision());
        for (int i=0; i<fp.npoints; i++) {
            rt.incrementCounter();
            rt.addValue("X", fp.xpoints[i]);
            rt.addValue("Y", fp.ypoints[i]);
        }
        rt.show("XY_"+title);
    }

    /** Lists the ROI's key/value properties in a text window. */
    void listProperties(Roi roi) {
        String props = roi.getProperties();
        if (props==null) return;
        props = props.replaceAll(": ", "\t");
        new TextWindow("Properties", "Key\tValue", props, 300, 300);
    }

    /** Updates the group-name label (and stroke color field) as the user types a group number. */
    public void textValueChanged(TextEvent e) {
        if (groupName==null)
            return;
        TextField tf = (TextField) e.getSource();
        String str = tf.getText();
        double group = Tools.parseDouble(str, Double.NaN);
        if (!Double.isNaN(group) && group>=0 && group<=255) {
            roi.setGroup((int)group);
            String name = Roi.getGroupName((int)group);
            if (name==null)
                name="unnamed";
            if (group==0)
                name = "";
            groupName.setText(" "+name);
            Color strokeColor = roi.getStrokeColor();
            colorField.setText(Colors.colorToString(strokeColor));
        } else
            groupName.setText("");
    }

    public void windowActivated(WindowEvent e) {
        if (groupName!=null) {
            String gname = Roi.getGroupName(roi.getGroup());
            groupName.setText(gname!=null?" "+gname:""); // add space to separate label from field
        }
    }

    // Unused WindowListener methods.
    public void windowClosing(WindowEvent e) {}
    public void windowOpened(WindowEvent e) {}
    public void windowClosed(WindowEvent e) {}
    public void windowIconified(WindowEvent e) {}
    public void windowDeiconified(WindowEvent e) {}
    public void windowDeactivated(WindowEvent e) {}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.