gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package org.samples;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.StreamingOutput;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.transform.stream.StreamResult;
@Path("/export")
public class ExportFile {

    /**
     * Root payload shared by every request; created once in the static
     * initializer and re-marshalled on each GET (demo/benchmark code).
     */
    private static Root root;

    static {
        root = new Root();
    }

    /**
     * GET /export/ : returns the synthetic customer list as XML.
     *
     * @return a 200 response whose entity is {@code root}; the JAX-RS
     *         runtime performs the actual JAXB marshalling when the
     *         response is written out.
     * @throws Exception if the JAXB context cannot be created
     */
    @GET
    @Path("/")
    public Response service() throws Exception {
        MyDataList customers = new MyDataList();
        root.setCustomers(customers);
        System.out.println("loading done.....");

        // The marshaller built here is not used for the response below (the
        // JAX-RS provider marshals the entity itself), but creating the
        // context up front surfaces JAXB mapping problems as an immediate
        // error rather than mid-stream, matching the original behavior.
        final JAXBContext context = JAXBContext.newInstance(Root.class);
        final Marshaller marshaller = context.createMarshaller();
        marshaller.setProperty(Marshaller.JAXB_FRAGMENT, Boolean.TRUE);

        ResponseBuilder responseBuilder = Response.ok(root);
        responseBuilder.type(MediaType.APPLICATION_XML);
        responseBuilder.status(200);
        return responseBuilder.build();
    }

    /**
     * A virtual, read-only List that pretends to contain {@link #SIZE}
     * identical Customer instances so a huge document can be marshalled
     * without materializing the data in memory. Only {@code size()},
     * {@code get(int)} and {@code iterator()} are meaningfully implemented;
     * the remaining List methods are inert stubs kept for interface
     * compliance.
     */
    public class MyDataList implements java.util.List<Customer> {

        /** Number of elements the list claims to contain. */
        public static final int SIZE = 100000000;

        /** The single shared Customer instance returned for every index. */
        private final Customer c;

        public MyDataList() {
            c = new Customer();
            c.setId(1);
            c.setAge(18);
            c.setName("Customer_");
        }

        @Override
        public int size() {
            return SIZE;
        }

        @Override
        public boolean isEmpty() {
            // SIZE > 0, so the list is never empty.
            return false;
        }

        @Override
        public boolean contains(Object o) {
            // Stub: membership tests are not supported.
            return false;
        }

        @Override
        public Iterator<Customer> iterator() {
            return new Iterator<Customer>() {
                /** Number of elements already returned by next(). */
                private int served;

                // Fixed: the original incremented its counter inside
                // hasNext(), so hasNext() had side effects (calling it more
                // than once per element skipped data) and only SIZE-1
                // elements were ever produced. hasNext() is now pure.
                @Override
                public boolean hasNext() {
                    return served < SIZE;
                }

                @Override
                public Customer next() {
                    ++served;
                    if (served % 1000 == 0) {
                        // Progress trace for the demo (kept from original).
                        System.out.println("count=" + served);
                    }
                    return c;
                }

                @Override
                public void remove() {
                    // Read-only view: removal is intentionally a no-op.
                }
            };
        }

        // ------------------------------------------------------------------
        // The methods below are inert stubs: this list exists solely to be
        // iterated by the marshaller. They preserve the original return
        // values (false / null / 0) rather than throwing.
        // ------------------------------------------------------------------

        @Override
        public Object[] toArray() {
            return null;
        }

        @Override
        public <T> T[] toArray(T[] a) {
            return null;
        }

        @Override
        public boolean add(Customer e) {
            return false;
        }

        @Override
        public boolean remove(Object o) {
            return false;
        }

        @Override
        public boolean containsAll(Collection<?> c) {
            return false;
        }

        @Override
        public boolean addAll(Collection<? extends Customer> c) {
            return false;
        }

        @Override
        public boolean addAll(int index, Collection<? extends Customer> c) {
            return false;
        }

        @Override
        public boolean removeAll(Collection<?> c) {
            return false;
        }

        @Override
        public boolean retainAll(Collection<?> c) {
            return false;
        }

        @Override
        public void clear() {
        }

        @Override
        public Customer get(int index) {
            // Every position holds the same shared instance.
            return c;
        }

        @Override
        public Customer set(int index, Customer element) {
            return null;
        }

        @Override
        public void add(int index, Customer element) {
        }

        @Override
        public Customer remove(int index) {
            return null;
        }

        @Override
        public int indexOf(Object o) {
            return 0;
        }

        @Override
        public int lastIndexOf(Object o) {
            return 0;
        }

        @Override
        public ListIterator<Customer> listIterator() {
            return null;
        }

        @Override
        public ListIterator<Customer> listIterator(int index) {
            return null;
        }

        @Override
        public List<Customer> subList(int fromIndex, int toIndex) {
            return null;
        }
    }
}
| |
/**
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.insight.camel.audit;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.api.management.ManagedResource;
import org.apache.camel.management.event.AbstractExchangeEvent;
import org.apache.camel.management.event.ExchangeSendingEvent;
import org.apache.camel.spi.EventNotifier;
import org.fusesource.insight.camel.base.SwitchableContainerStrategy;
import org.fusesource.insight.storage.StorageService;
import org.mvel2.ParserContext;
import org.mvel2.templates.CompiledTemplate;
import org.mvel2.templates.TemplateCompiler;
import org.mvel2.templates.TemplateRuntime;
import org.osgi.service.cm.ConfigurationException;
import org.osgi.service.cm.ManagedService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URL;
import java.util.Date;
import java.util.Dictionary;
import java.util.EventObject;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
*
*/
@ManagedResource(description = "Auditor")
public class Auditor extends SwitchableContainerStrategy implements EventNotifier, ManagedService, AuditorMBean {

    private static final Logger LOGGER = LoggerFactory.getLogger(Auditor.class);

    /** Destination for rendered audit events. */
    private StorageService storage;

    /** Storage "type" (collection/index name) events are filed under. */
    private String type = "camel";

    // Flags backing the EventNotifier isIgnore* contract. Only the
    // exchange-level flags are configurable; context/route/service events
    // are always ignored (see the hard-coded getters below).
    private boolean ignoreExchangeCreatedEvent;
    private boolean ignoreExchangeCompletedEvent;
    private boolean ignoreExchangeFailedEvents;
    private boolean ignoreExchangeRedeliveryEvents;
    private boolean ignoreExchangeSendingEvents;
    private boolean ignoreExchangeSentEvents;

    // Last configuration received from ConfigAdmin. Stored but not read
    // anywhere in this class as visible here.
    private Dictionary<String, ?> properties;

    /** MVEL parser context shared by all template compilations. */
    private ParserContext context;

    // Caches keyed by template source text / template URL. Both are
    // ConcurrentHashMaps because notify() may run on multiple threads.
    private Map<String, CompiledTemplate> templates = new ConcurrentHashMap<String, CompiledTemplate>();
    private Map<URL, String> sources = new ConcurrentHashMap<URL, String>();

    /** Fallback template bundled next to this class. */
    private URL defaultTemplateUrl = getClass().getResource("default.mvel");

    public Auditor() {
        this(null);
    }

    /**
     * @param storage the storage backend; may be null if injected later via
     *                {@link #setStorage(StorageService)}
     */
    public Auditor(StorageService storage) {
        super(false);
        this.storage = storage;
        context = new ParserContext();
        try {
            // Expose ScriptUtils.toJson to the MVEL templates.
            context.addImport("toJson", ScriptUtils.class.getMethod("toJson", Object.class));
        } catch (NoSuchMethodException e) {
            throw new IllegalStateException("Unable to find method toJson", e);
        }
    }

    /** Registers this auditor as an event notifier on the given context. */
    @Override
    public void manage(CamelContext context) throws Exception {
        context.getManagementStrategy().addEventNotifier(this);
    }

    public StorageService getStorage() {
        return storage;
    }

    public void setStorage(StorageService storage) {
        this.storage = storage;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    @Override
    public void updated(Dictionary<String, ?> properties) throws ConfigurationException {
        this.properties = properties;
    }

    /**
     * Renders each enabled exchange event to JSON and persists it. Sending
     * events are tagged with a fresh "AuditCallId" header so a send and its
     * outcome can be correlated later.
     */
    @Override
    public void notify(EventObject eventObject) throws Exception {
        if (eventObject instanceof AbstractExchangeEvent) {
            AbstractExchangeEvent aee = (AbstractExchangeEvent) eventObject;
            if (isEnabled(aee.getExchange())) {
                if (aee instanceof ExchangeSendingEvent) {
                    aee.getExchange().getIn().setHeader("AuditCallId", aee.getExchange().getContext().getUuidGenerator().generateUuid());
                }
                String json = toJson(aee);
                storage.store(type, System.currentTimeMillis(), json);
            }
        }
    }

    /**
     * Renders the event through its MVEL template. The thread context class
     * loader is switched to this bundle's loader for the duration of the
     * evaluation (and always restored), because MVEL resolves classes via
     * the TCCL.
     */
    protected String toJson(AbstractExchangeEvent event) {
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        try {
            Thread.currentThread().setContextClassLoader(Auditor.class.getClassLoader());
            // "ExchangeCompletedEvent" -> "Completed", etc. Assumes the
            // Exchange...Event naming convention of AbstractExchangeEvent
            // subclasses.
            String eventType = event.getClass().getSimpleName();
            eventType = eventType.substring("Exchange".length());
            eventType = eventType.substring(0, eventType.length() - "Event".length());
            CompiledTemplate template = getTemplate(eventType, event.getExchange());
            Map<String, Object> vars = new HashMap<String, Object>();
            vars.put("event", eventType);
            vars.put("host", System.getProperty("karaf.name"));
            vars.put("timestamp", new Date());
            vars.put("exchange", event.getExchange());
            return TemplateRuntime.execute(template, context, vars).toString();
        } finally {
            Thread.currentThread().setContextClassLoader(cl);
        }
    }

    /**
     * Returns the compiled template for the given event, compiling and
     * caching it on first use.
     */
    private CompiledTemplate getTemplate(String event, Exchange exchange) {
        String source = getTemplateSource(event, exchange);
        CompiledTemplate template = templates.get(source);
        if (template == null) {
            // Fixed: use putIfAbsent instead of the earlier check-then-put,
            // so concurrent callers end up sharing a single compiled
            // instance instead of each using their own.
            CompiledTemplate compiled = TemplateCompiler.compileTemplate(source, context);
            CompiledTemplate existing = templates.putIfAbsent(source, compiled);
            template = existing != null ? existing : compiled;
        }
        return template;
    }

    /**
     * Resolves the template source text for the event: a per-event template
     * URL if one exists and loads, otherwise the bundled default template.
     *
     * @throws IllegalStateException if even the default template fails to load
     */
    private String getTemplateSource(String event, Exchange exchange) {
        String source = null;
        URL url = getTemplateUrl(event, exchange);
        if (url != null) {
            try {
                source = loadSource(url);
            } catch (IOException e) {
                LOGGER.warn("Unable to load mvel template " + url, e);
            }
        }
        if (source == null) {
            try {
                source = loadSource(defaultTemplateUrl);
            } catch (IOException e) {
                throw new IllegalStateException("Default template could not be loaded", e);
            }
        }
        return source;
    }

    /**
     * Extension point for per-event/per-exchange template selection.
     * Always null here, which makes getTemplateSource fall back to the
     * default template.
     */
    private URL getTemplateUrl(String event, Exchange exchange) {
        return null;
    }

    /** Loads (and caches) the template text behind the given URL. */
    private String loadSource(URL url) throws IOException {
        String source = sources.get(url);
        if (source == null) {
            // Same putIfAbsent idiom as getTemplate: concurrent loaders
            // agree on one cached value.
            String loaded = IoUtils.loadFully(url);
            String existing = sources.putIfAbsent(url, loaded);
            source = existing != null ? existing : loaded;
        }
        return source;
    }

    /** Per-event enablement is decided in notify() via the exchange. */
    @Override
    public boolean isEnabled(EventObject eventObject) {
        return true;
    }

    // CamelContext/route/service events are never audited.
    public boolean isIgnoreCamelContextEvents() {
        return true;
    }

    public void setIgnoreCamelContextEvents(boolean ignoreCamelContextEvents) {
    }

    public boolean isIgnoreRouteEvents() {
        return true;
    }

    public void setIgnoreRouteEvents(boolean ignoreRouteEvents) {
    }

    public boolean isIgnoreServiceEvents() {
        return true;
    }

    public void setIgnoreServiceEvents(boolean ignoreServiceEvents) {
    }

    // Exchange events are always delivered; finer-grained flags below.
    public boolean isIgnoreExchangeEvents() {
        return false;
    }

    public void setIgnoreExchangeEvents(boolean ignoreExchangeEvents) {
    }

    public boolean isIgnoreExchangeCreatedEvent() {
        return ignoreExchangeCreatedEvent;
    }

    public void setIgnoreExchangeCreatedEvent(boolean ignoreExchangeCreatedEvent) {
        this.ignoreExchangeCreatedEvent = ignoreExchangeCreatedEvent;
    }

    public boolean isIgnoreExchangeCompletedEvent() {
        return ignoreExchangeCompletedEvent;
    }

    public void setIgnoreExchangeCompletedEvent(boolean ignoreExchangeCompletedEvent) {
        this.ignoreExchangeCompletedEvent = ignoreExchangeCompletedEvent;
    }

    public boolean isIgnoreExchangeFailedEvents() {
        return ignoreExchangeFailedEvents;
    }

    public void setIgnoreExchangeFailedEvents(boolean ignoreExchangeFailedEvents) {
        this.ignoreExchangeFailedEvents = ignoreExchangeFailedEvents;
    }

    public boolean isIgnoreExchangeRedeliveryEvents() {
        return ignoreExchangeRedeliveryEvents;
    }

    public void setIgnoreExchangeRedeliveryEvents(boolean ignoreExchangeRedeliveryEvents) {
        this.ignoreExchangeRedeliveryEvents = ignoreExchangeRedeliveryEvents;
    }

    public boolean isIgnoreExchangeSendingEvents() {
        return ignoreExchangeSendingEvents;
    }

    public void setIgnoreExchangeSendingEvents(boolean ignoreExchangeSendingEvents) {
        this.ignoreExchangeSendingEvents = ignoreExchangeSendingEvents;
    }

    public boolean isIgnoreExchangeSentEvents() {
        return ignoreExchangeSentEvents;
    }

    public void setIgnoreExchangeSentEvents(boolean ignoreExchangeSentEvents) {
        this.ignoreExchangeSentEvents = ignoreExchangeSentEvents;
    }
}
| |
/*
* Copyright (c) 2008-2014 LabKey Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.labkey.remoteapi.query;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.labkey.remoteapi.CommandResponse;
import org.labkey.remoteapi.ResponseObject;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/*
* User: Dave
* Date: Oct 21, 2008
* Time: 3:06:00 PM
*/
/**
* Represents the response from a GetQueriesCommand.
*/
public class GetQueryDetailsResponse extends CommandResponse
{
    public GetQueryDetailsResponse(String text, int statusCode, String contentType, JSONObject json, GetQueryDetailsCommand sourceCommand)
    {
        super(text, statusCode, contentType, json, sourceCommand);
    }

    /**
     * Returns the schema name the query in this response belongs to.
     * @return The schema name.
     */
    public String getSchemaName()
    {
        return getProperty("schemaName");
    }

    /**
     * Returns the query name that is described by this response.
     * @return The query name.
     */
    public String getName()
    {
        return getProperty("name");
    }

    /**
     * Returns the description of the query described by this response.
     * @return The query description.
     */
    public String getDescription()
    {
        return getProperty("description");
    }

    /**
     * Returns the title of the query described by this response (often the same as the name, but not always).
     * @return The query title.
     */
    public String getTitle()
    {
        return getProperty("title");
    }

    /**
     * Returns the columns available in the given query name.
     * @return The list of columns available within that query; empty if the
     *         response contains no "columns" property.
     */
    @SuppressWarnings("unchecked")
    public List<Column> getColumns()
    {
        JSONArray columns = getProperty("columns");
        if (columns == null)
        {
            // Fixed: a response without a "columns" property now yields an
            // empty list instead of a NullPointerException.
            return new ArrayList<Column>();
        }
        List<Column> result = new ArrayList<Column>(columns.size());
        for (int i = 0; i < columns.size(); i++)
        {
            result.add(new Column((Map<String, Object>)columns.get(i)));
        }
        return result;
    }

    /**
     * One column of the described query. Boolean accessors are null-safe:
     * a property omitted from the server JSON reads as false rather than
     * throwing a NullPointerException on unboxing.
     */
    public static class Column extends ResponseObject
    {
        private Column(Map<String, Object> json)
        {
            super(json);
        }

        /** @return The name of the column */
        public String getName()
        {
            return (String) _allProperties.get("name");
        }

        /** @return An optional description of the column */
        public String getDescription()
        {
            return (String) _allProperties.get("description");
        }

        /** @return The column's data type */
        public String getType()
        {
            return (String) _allProperties.get("type");
        }

        /** @return The field key for the column. If this column comes from a foreign table, the key is a full path from the source query to this column. */
        public String getFieldKey()
        {
            return (String) _allProperties.get("fieldKey");
        }

        /** @return true if this column is auto-increment */
        public boolean isAutoIncrement()
        {
            return Boolean.TRUE.equals(_allProperties.get("isAutoIncrement"));
        }

        /** @return true if this column should be hidden */
        public boolean isHidden()
        {
            return Boolean.TRUE.equals(_allProperties.get("isHidden"));
        }

        /** @return true if this is part of the primary key */
        public boolean isKeyField()
        {
            return Boolean.TRUE.equals(_allProperties.get("isKeyField"));
        }

        /** @return true if this column is missing-value enabled */
        public boolean isMvEnabled()
        {
            return Boolean.TRUE.equals(_allProperties.get("isMvEnabled"));
        }

        /** @return true if this column can accept nulls */
        public boolean isNullable()
        {
            return Boolean.TRUE.equals(_allProperties.get("isNullable"));
        }

        /** @return true if this column is read-only */
        public boolean isReadOnly()
        {
            return Boolean.TRUE.equals(_allProperties.get("isReadOnly"));
        }

        /** @return true if this column may be edited by the current user */
        public boolean isUserEditable()
        {
            return Boolean.TRUE.equals(_allProperties.get("isUserEditable"));
        }

        /** @return true if this column is a version column */
        public boolean isVersionField()
        {
            return Boolean.TRUE.equals(_allProperties.get("isVersionField"));
        }

        /** @return true if this column may be selected */
        public boolean isSelectable()
        {
            return Boolean.TRUE.equals(_allProperties.get("isSelectable"));
        }

        /** @return The user-friendly caption for this column (may differ from name) */
        public String getCaption()
        {
            return (String) _allProperties.get("caption");
        }

        /** @return The lookup (foreign key) descriptor, or null if this column is not a lookup. */
        @SuppressWarnings("unchecked")
        public Lookup getLookup()
        {
            return _allProperties.get("lookup") == null ? null : new Lookup((Map<String, Object>)_allProperties.get("lookup"));
        }

        /** @return The raw property map backing this column. */
        public Map<String, Object> getProperties()
        {
            return _allProperties;
        }

        /** @return true if this column is calculated */
        public boolean isCalculated()
        {
            return Boolean.TRUE.equals(_allProperties.get("calculated"));
        }
    }

    /** Describes the foreign-key target of a lookup column. */
    public static class Lookup extends ResponseObject
    {
        public Lookup(Map<String, Object> json)
        {
            super(json);
        }

        /** @return The schema in which the lookup query exists */
        public String getSchemaName()
        {
            return (String)_allProperties.get("schemaName");
        }

        /** @return The name of the lookup query in that schema */
        public String getQueryName()
        {
            return (String)_allProperties.get("queryName");
        }

        /** @return The container path if the lookup is defined in a different container */
        public String getContainerPath()
        {
            return (String)_allProperties.get("containerPath");
        }

        /** @return The column that is normally displayed from the lookup table */
        public String getDisplayColumn()
        {
            return (String)_allProperties.get("displayColumn");
        }

        /** @return The primary key column of the lookup table */
        public String getKeyColumn()
        {
            return (String)_allProperties.get("keyColumn");
        }

        /** @return true if the lookup table is public (i.e., may be accessed via the API) */
        public boolean isPublic()
        {
            return Boolean.TRUE.equals(_allProperties.get("isPublic"));
        }
    }
}
| |
/*
* Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.xpointer;
import java.util.Hashtable;
import java.util.Vector;
import com.sun.org.apache.xerces.internal.impl.Constants;
import com.sun.org.apache.xerces.internal.impl.XMLErrorReporter;
import com.sun.org.apache.xerces.internal.util.SymbolTable;
import com.sun.org.apache.xerces.internal.util.XMLChar;
import com.sun.org.apache.xerces.internal.util.XMLSymbols;
import com.sun.org.apache.xerces.internal.xinclude.XIncludeHandler;
import com.sun.org.apache.xerces.internal.xinclude.XIncludeNamespaceSupport;
import com.sun.org.apache.xerces.internal.xni.Augmentations;
import com.sun.org.apache.xerces.internal.xni.QName;
import com.sun.org.apache.xerces.internal.xni.XMLAttributes;
import com.sun.org.apache.xerces.internal.xni.XMLString;
import com.sun.org.apache.xerces.internal.xni.XNIException;
import com.sun.org.apache.xerces.internal.xni.parser.XMLConfigurationException;
import com.sun.org.apache.xerces.internal.xni.parser.XMLErrorHandler;
/**
* <p>
* This is a pipeline component which extends the XIncludeHandler to perform
* XPointer specific processing specified in the W3C XPointerFramework and
* element() Scheme Recommendations.
* </p>
*
* <p>
* This component analyzes each event in the pipeline, looking for an element
* that matches a PointerPart in the parent XInclude element's xpointer attribute
* value. If the match succeeds, all children are passed by this component.
* </p>
*
* <p>
 * See the <a href="http://www.w3.org/TR/xptr-framework/">XPointer Framework Recommendation</a> for
* more information on the XPointer Framework and ShortHand Pointers.
* See the <a href="http://www.w3.org/TR/xptr-element/">XPointer element() Scheme Recommendation</a> for
* more information on the XPointer element() Scheme.
* </p>
*
* @xerces.internal
*
*/
public final class XPointerHandler extends XIncludeHandler implements
XPointerProcessor {
// Fields
// A Vector of XPointerParts
protected Vector fXPointerParts = null;
// The current XPointerPart
protected XPointerPart fXPointerPart = null;
// Has the fXPointerPart resolved successfully
protected boolean fFoundMatchingPtrPart = false;
// The XPointer Error reporter
protected XMLErrorReporter fXPointerErrorReporter;
// The XPointer Error Handler
protected XMLErrorHandler fErrorHandler;
// XPointerFramework symbol table
protected SymbolTable fSymbolTable = null;
// Supported schemes
private final String ELEMENT_SCHEME_NAME = "element";
// Has the XPointer resolved the subresource
protected boolean fIsXPointerResolved = false;
// Fixup xml:base and xml:lang attributes
protected boolean fFixupBase = false;
protected boolean fFixupLang = false;
// ************************************************************************
// Constructors
// ************************************************************************
/**
 * Creates a handler with its own symbol table and an initially empty
 * list of pointer parts.
 */
public XPointerHandler() {
    super();
    fSymbolTable = new SymbolTable();
    fXPointerParts = new Vector();
}
/**
 * Creates a handler that shares the caller's symbol table and error
 * reporting infrastructure instead of building its own.
 */
public XPointerHandler(SymbolTable symbolTable,
        XMLErrorHandler errorHandler, XMLErrorReporter errorReporter) {
    super();
    fXPointerParts = new Vector();
    fXPointerErrorReporter = errorReporter;
    fErrorHandler = errorHandler;
    fSymbolTable = symbolTable;
}
// ************************************************************************
// Implementation of the XPointerProcessor interface.
// ************************************************************************
/**
 * Parses the XPointer framework expression and delegates scheme specific
 * parsing (shorthand pointers and the element() scheme).
 *
 * Fixed: token-name comparisons previously used reference equality
 * ({@code ==}/{@code !=}) against string literals, which only worked
 * because the names happen to be interned literals; they now use
 * {@code equals}.
 *
 * @see com.sun.org.apache.xerces.internal.xpointer.XPointerProcessor#parseXPointer(java.lang.String)
 */
public void parseXPointer(String xpointer) throws XNIException {

    // Reset per-expression state and error reporting.
    init();

    // tokens
    final Tokens tokens = new Tokens(fSymbolTable);

    // Scanner that rejects any token outside the XPointer framework grammar.
    Scanner scanner = new Scanner(fSymbolTable) {
        protected void addToken(Tokens tokens, int token)
                throws XNIException {
            if (token == Tokens.XPTRTOKEN_OPEN_PAREN
                    || token == Tokens.XPTRTOKEN_CLOSE_PAREN
                    || token == Tokens.XPTRTOKEN_SCHEMENAME
                    || token == Tokens.XPTRTOKEN_SCHEMEDATA
                    || token == Tokens.XPTRTOKEN_SHORTHAND) {
                super.addToken(tokens, token);
                return;
            }
            reportError("InvalidXPointerToken", new Object[] { tokens
                    .getTokenString(token) });
        }
    };

    // scan the XPointer expression
    int length = xpointer.length();
    boolean success = scanner.scanExpr(fSymbolTable, tokens, xpointer, 0,
            length);
    if (!success)
        reportError("InvalidXPointerExpression", new Object[] { xpointer });

    while (tokens.hasMore()) {
        int token = tokens.nextToken();

        switch (token) {
        case Tokens.XPTRTOKEN_SHORTHAND: {
            // The shortHand name
            token = tokens.nextToken();
            String shortHandPointerName = tokens.getTokenString(token);

            if (shortHandPointerName == null) {
                reportError("InvalidXPointerExpression",
                        new Object[] { xpointer });
            }

            XPointerPart shortHandPointer = new ShortHandPointer(
                    fSymbolTable);
            shortHandPointer.setSchemeName(shortHandPointerName);
            fXPointerParts.add(shortHandPointer);
            break;
        }
        case Tokens.XPTRTOKEN_SCHEMENAME: {
            // Retrieve the local name and prefix to form the scheme name
            token = tokens.nextToken();
            String prefix = tokens.getTokenString(token);
            token = tokens.nextToken();
            String localName = tokens.getTokenString(token);

            String schemeName = prefix + localName;

            // The next character should be an open parenthesis
            int openParenCount = 0;
            int closeParenCount = 0;

            token = tokens.nextToken();
            String openParen = tokens.getTokenString(token);
            if (!"XPTRTOKEN_OPEN_PAREN".equals(openParen)) {

                // can not have more than one ShortHand Pointer
                if (token == Tokens.XPTRTOKEN_SHORTHAND) {
                    reportError("MultipleShortHandPointers",
                            new Object[] { xpointer });
                } else {
                    reportError("InvalidXPointerExpression",
                            new Object[] { xpointer });
                }
            }
            openParenCount++;

            // followed by zero or more ( and the schemeData
            String schemeData = null;
            while (tokens.hasMore()) {
                token = tokens.nextToken();
                schemeData = tokens.getTokenString(token);
                if (!"XPTRTOKEN_OPEN_PAREN".equals(schemeData)) {
                    break;
                }
                openParenCount++;
            }
            token = tokens.nextToken();
            schemeData = tokens.getTokenString(token);

            // followed by the same number of )
            token = tokens.nextToken();
            String closeParen = tokens.getTokenString(token);
            if (!"XPTRTOKEN_CLOSE_PAREN".equals(closeParen)) {
                reportError("SchemeDataNotFollowedByCloseParenthesis",
                        new Object[] { xpointer });
            }
            closeParenCount++;

            // NOTE(review): this loop peeks for XPTRTOKEN_OPEN_PAREN and
            // never consumes the peeked token -- it looks like it should
            // test for XPTRTOKEN_CLOSE_PAREN and advance. Behavior kept
            // as-is pending confirmation of peekToken() semantics.
            while (tokens.hasMore()) {
                if (!"XPTRTOKEN_OPEN_PAREN".equals(
                        tokens.getTokenString(tokens.peekToken()))) {
                    break;
                }
                closeParenCount++;
            }

            // check if the number of open parenthesis are equal to the number of close parenthesis
            if (openParenCount != closeParenCount) {
                reportError("UnbalancedParenthesisInXPointerExpression",
                        new Object[] { xpointer,
                                new Integer(openParenCount),
                                new Integer(closeParenCount) });
            }

            // Perform scheme specific parsing of the pointer part
            if (schemeName.equals(ELEMENT_SCHEME_NAME)) {
                XPointerPart elementSchemePointer = new ElementSchemePointer(
                        fSymbolTable, fErrorReporter);
                elementSchemePointer.setSchemeName(schemeName);
                elementSchemePointer.setSchemeData(schemeData);

                // If an exception occurs while parsing the element() scheme expression
                // ignore it and move on to the next pointer part
                try {
                    elementSchemePointer.parseXPointer(schemeData);
                    fXPointerParts.add(elementSchemePointer);
                } catch (XNIException e) {
                    // Re-throw the XPointer element() scheme syntax error.
                    throw new XNIException (e);
                }
            } else {
                // Unsupported scheme: warn and skip this pointer part.
                reportWarning("SchemeUnsupported",
                        new Object[] { schemeName });
            }

            break;
        }
        default:
            reportError("InvalidXPointerExpression",
                    new Object[] { xpointer });
        }
    }
}
/**
 * Attempts to resolve the current element event against the pointer parts
 * of the parsed XPointer expression.
 *
 * @see com.sun.org.apache.xerces.internal.xpointer.XPointerProcessor#resolveXPointer(com.sun.org.apache.xerces.internal.xni.QName, com.sun.org.apache.xerces.internal.xni.XMLAttributes, com.sun.org.apache.xerces.internal.xni.Augmentations, int event)
 */
public boolean resolveXPointer(QName element, XMLAttributes attributes,
        Augmentations augs, int event) throws XNIException {
    boolean resolved = false;

    // The result of the first pointer part whose evaluation identifies
    // one or more subresources is reported by the XPointer processor as the
    // result of the pointer as a whole, and evaluation stops.
    // In this implementation, typically the first xpointer scheme that
    // matches an element in the document is the one considered.
    // If no pointer part has resolved yet, try each part left-to-right;
    // afterwards, only the previously matched part is consulted.
    if (!fFoundMatchingPtrPart) {

        // NOTE(review): iteration continues after a match, so a later part
        // that also matches would overwrite fXPointerPart -- confirm this
        // is intended (first-match-wins would suggest breaking here).
        for (int i = 0; i < fXPointerParts.size(); i++) {

            fXPointerPart = (XPointerPart) fXPointerParts.get(i);

            if (fXPointerPart.resolveXPointer(element, attributes, augs,
                    event)) {
                fFoundMatchingPtrPart = true;
                resolved = true;
            }
        }
    } else {
        if (fXPointerPart.resolveXPointer(element, attributes, augs, event)) {
            resolved = true;
        }
    }

    // Latch: once the pointer has resolved, fIsXPointerResolved stays true.
    if (!fIsXPointerResolved) {
        fIsXPointerResolved = resolved;
    }

    return resolved;
}
/**
 * Returns true if the Node fragment is resolved by the current pointer
 * part; also latches the overall resolved flag.
 *
 * @see com.sun.org.apache.xerces.internal.xpointer.XPointerProcessor#isFragmentResolved()
 */
public boolean isFragmentResolved() throws XNIException {
    final boolean resolved =
            fXPointerPart != null && fXPointerPart.isFragmentResolved();
    if (!fIsXPointerResolved) {
        fIsXPointerResolved = resolved;
    }
    return resolved;
}
/**
 * Returns true if the XPointer expression resolves to a non-element child
 * of the current resource fragment.
 *
 * @see com.sun.org.apache.xerces.internal.xpointer.XPointerPart#isChildFragmentResolved()
 */
public boolean isChildFragmentResolved() throws XNIException {
    return fXPointerPart != null && fXPointerPart.isChildFragmentResolved();
}
/**
 * Returns true if the XPointer successfully found a sub-resource at some
 * point during processing (the flag is latched, never cleared except by
 * init()).
 *
 * @see com.sun.org.apache.xerces.internal.xpointer.XPointerProcessor#isFragmentResolved()
 */
public boolean isXPointerResolved() throws XNIException {
    return fIsXPointerResolved;
}
/**
 * Returns the pointer part used to resolve the document fragment.
 *
 * @return The pointer part last consulted by resolveXPointer, or null if
 *         no expression has been parsed yet.
 */
public XPointerPart getXPointerPart() {
    return fXPointerPart;
}
/**
* Reports XPointer Errors
*
*/
private void reportError(String key, Object[] arguments)
throws XNIException {
/*
fXPointerErrorReporter.reportError(
XPointerMessageFormatter.XPOINTER_DOMAIN, key, arguments,
XMLErrorReporter.SEVERITY_ERROR);
*/
throw new XNIException((fErrorReporter
.getMessageFormatter(XPointerMessageFormatter.XPOINTER_DOMAIN))
.formatMessage(fErrorReporter.getLocale(), key, arguments));
}
/**
 * Reports XPointer warnings. Unlike errors, warnings are routed through
 * the XPointer-specific reporter so processing can continue.
 */
private void reportWarning(String key, Object[] arguments)
        throws XNIException {
    fXPointerErrorReporter.reportError(
            XPointerMessageFormatter.XPOINTER_DOMAIN, key, arguments,
            XMLErrorReporter.SEVERITY_WARNING);
}
/**
 * Lazily creates the error reporter and handler if the caller did not
 * supply them, then registers the XPointer message formatter so errors in
 * the XPointer domain render with the proper messages.
 */
protected void initErrorReporter() {
    if (fXPointerErrorReporter == null) {
        fXPointerErrorReporter = new XMLErrorReporter();
    }
    if (fErrorHandler == null) {
        fErrorHandler = new XPointerErrorHandler();
    }
    fXPointerErrorReporter.putMessageFormatter(
            XPointerMessageFormatter.XPOINTER_DOMAIN,
            new XPointerMessageFormatter());
}
/**
 * Resets all per-expression state and (re)initializes error reporting,
 * preparing the processor for a fresh XPointer expression.
 */
protected void init() {
    fXPointerParts.clear();
    fXPointerPart = null;
    fFoundMatchingPtrPart = false;
    fIsXPointerResolved = false;
    initErrorReporter();
}
/**
 * Returns the Vector of XPointerPart objects produced by the most recent
 * parseXPointer call.
 *
 * @return A Vector of XPointerPart objects.
 */
public Vector getPointerParts() {
    return fXPointerParts;
}
/**
* List of XPointer Framework tokens.
*
* @xerces.internal
*
*/
private final class Tokens {
/**
* XPointer Framework tokens
* [1] Pointer ::= Shorthand | SchemeBased
* [2] Shorthand ::= NCName
* [3] SchemeBased ::= PointerPart (S? PointerPart)*
* [4] PointerPart ::= SchemeName '(' SchemeData ')'
* [5] SchemeName ::= QName
* [6] SchemeData ::= EscapedData*
* [7] EscapedData ::= NormalChar | '^(' | '^)' | '^^' | '(' SchemeData ')'
* [8] NormalChar ::= UnicodeChar - [()^]
* [9] UnicodeChar ::= [#x0-#x10FFFF]
*
*/
private static final int XPTRTOKEN_OPEN_PAREN = 0,
XPTRTOKEN_CLOSE_PAREN = 1, XPTRTOKEN_SHORTHAND = 2,
XPTRTOKEN_SCHEMENAME = 3, XPTRTOKEN_SCHEMEDATA = 4;
// Token names
private final String[] fgTokenNames = { "XPTRTOKEN_OPEN_PAREN",
"XPTRTOKEN_CLOSE_PAREN", "XPTRTOKEN_SHORTHAND",
"XPTRTOKEN_SCHEMENAME", "XPTRTOKEN_SCHEMEDATA" };
// Token count
private static final int INITIAL_TOKEN_COUNT = 1 << 8;
private int[] fTokens = new int[INITIAL_TOKEN_COUNT];
private int fTokenCount = 0;
// Current token position
private int fCurrentTokenIndex;
private SymbolTable fSymbolTable;
private Hashtable fTokenNames = new Hashtable();
/**
* Constructor
*
* @param symbolTable SymbolTable
*/
private Tokens(SymbolTable symbolTable) {
fSymbolTable = symbolTable;
fTokenNames.put(new Integer(XPTRTOKEN_OPEN_PAREN),
"XPTRTOKEN_OPEN_PAREN");
fTokenNames.put(new Integer(XPTRTOKEN_CLOSE_PAREN),
"XPTRTOKEN_CLOSE_PAREN");
fTokenNames.put(new Integer(XPTRTOKEN_SHORTHAND),
"XPTRTOKEN_SHORTHAND");
fTokenNames.put(new Integer(XPTRTOKEN_SCHEMENAME),
"XPTRTOKEN_SCHEMENAME");
fTokenNames.put(new Integer(XPTRTOKEN_SCHEMEDATA),
"XPTRTOKEN_SCHEMEDATA");
}
/**
* Returns the token String
* @param token The index of the token
* @return String The token string
*/
private String getTokenString(int token) {
return (String) fTokenNames.get(new Integer(token));
}
/**
* Add the specified string as a token
*
* @param token The token string
*/
private void addToken(String tokenStr) {
Integer tokenInt = (Integer) fTokenNames.get(tokenStr);
if (tokenInt == null) {
tokenInt = new Integer(fTokenNames.size());
fTokenNames.put(tokenInt, tokenStr);
}
addToken(tokenInt.intValue());
}
/**
* Add the specified int token
*
* @param token The int specifying the token
*/
private void addToken(int token) {
try {
fTokens[fTokenCount] = token;
} catch (ArrayIndexOutOfBoundsException ex) {
int[] oldList = fTokens;
fTokens = new int[fTokenCount << 1];
System.arraycopy(oldList, 0, fTokens, 0, fTokenCount);
fTokens[fTokenCount] = token;
}
fTokenCount++;
}
/**
* Resets the current position to the head of the token list.
*/
private void rewind() {
fCurrentTokenIndex = 0;
}
/**
* Returns true if the {@link #getNextToken()} method
* returns a valid token.
*/
private boolean hasMore() {
return fCurrentTokenIndex < fTokenCount;
}
/**
* Obtains the token at the current position, then advance
* the current position by one.
*
* throws If there's no such next token, this method throws
* <tt>new XNIException("XPointerProcessingError");</tt>.
*/
private int nextToken() throws XNIException {
if (fCurrentTokenIndex == fTokenCount) {
reportError("XPointerProcessingError", null);
}
return fTokens[fCurrentTokenIndex++];
}
/**
* Obtains the token at the current position, without advancing
* the current position.
*
* If there's no such next token, this method throws
* <tt>new XNIException("XPointerProcessingError");</tt>.
*/
private int peekToken() throws XNIException {
if (fCurrentTokenIndex == fTokenCount) {
reportError("XPointerProcessingError", null);
}
return fTokens[fCurrentTokenIndex];
}
/**
* Obtains the token at the current position as a String.
*
* If there's no current token or if the current token
* is not a string token, this method throws
* If there's no such next token, this method throws
* <tt>new XNIException("XPointerProcessingError");</tt>.
*/
private String nextTokenAsString() throws XNIException {
String tokenStrint = getTokenString(nextToken());
if (tokenStrint == null) {
reportError("XPointerProcessingError", null);
}
return tokenStrint;
}
}
/**
*
* The XPointer expression scanner. Scans the XPointer framework expression.
*
* @xerces.internal
*
*/
private class Scanner {
/**
* 7-bit ASCII subset
*
* 0 1 2 3 4 5 6 7 8 9 A B C D E F
* 0, 0, 0, 0, 0, 0, 0, 0, 0, HT, LF, 0, 0, CR, 0, 0, // 0
* 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 1
* SP, !, ", #, $, %, &, ', (, ), *, +, ,, -, ., /, // 2
* 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, :, ;, <, =, >, ?, // 3
* @, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, // 4
* P, Q, R, S, T, U, V, W, X, Y, Z, [, \, ], ^, _, // 5
* `, a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, // 6
* p, q, r, s, t, u, v, w, x, y, z, {, |, }, ~, DEL // 7
*/
private static final byte CHARTYPE_INVALID = 0, // invalid XML character
CHARTYPE_OTHER = 1, // not special - one of "#%&;?\`{}~" or DEL
CHARTYPE_WHITESPACE = 2, // one of "\t\n\r " (0x09, 0x0A, 0x0D, 0x20)
CHARTYPE_CARRET = 3, // ^
CHARTYPE_OPEN_PAREN = 4, // '(' (0x28)
CHARTYPE_CLOSE_PAREN = 5, // ')' (0x29)
CHARTYPE_MINUS = 6, // '-' (0x2D)
CHARTYPE_PERIOD = 7, // '.' (0x2E)
CHARTYPE_SLASH = 8, // '/' (0x2F)
CHARTYPE_DIGIT = 9, // '0'-'9' (0x30 to 0x39)
CHARTYPE_COLON = 10, // ':' (0x3A)
CHARTYPE_EQUAL = 11, // '=' (0x3D)
CHARTYPE_LETTER = 12, // 'A'-'Z' or 'a'-'z' (0x41 to 0x5A and 0x61 to 0x7A)
CHARTYPE_UNDERSCORE = 13, // '_' (0x5F)
CHARTYPE_NONASCII = 14; // Non-ASCII Unicode codepoint (>= 0x80)
private final byte[] fASCIICharMap = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2,
0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
2, 1, 1, 1, 1, 1, 1, 1, 4, 5, 1, 1, 1, 6, 7, 8, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 10, 1, 1, 11, 1, 1, 1, 12, 12, 12, 12, 12, 12,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12, 12, 12, 1, 1, 1, 3, 13, 1, 12, 12, 12, 12, 12, 12, 12,
12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
12, 12, 12, 1, 1, 1, 1, 1 };
//
// Data
//
/** Symbol table. */
private SymbolTable fSymbolTable;
/**
* Constructs an XPointer Framework expression scanner.
*
* @param symbolTable SymbolTable
*/
private Scanner(SymbolTable symbolTable) {
// save pool and tokens
fSymbolTable = symbolTable;
} // <init>(SymbolTable)
/**
* Scans the XPointer Expression
*
*/
private boolean scanExpr(SymbolTable symbolTable, Tokens tokens,
String data, int currentOffset, int endOffset)
throws XNIException {
int ch;
int openParen = 0;
int closeParen = 0;
int nameOffset, dataOffset;
boolean isQName = false;
String name = null;
String prefix = null;
String schemeData = null;
StringBuffer schemeDataBuff = new StringBuffer();
while (true) {
if (currentOffset == endOffset) {
break;
}
ch = data.charAt(currentOffset);
//
while (ch == ' ' || ch == 0x0A || ch == 0x09 || ch == 0x0D) {
if (++currentOffset == endOffset) {
break;
}
ch = data.charAt(currentOffset);
}
if (currentOffset == endOffset) {
break;
}
//
// [1] Pointer ::= Shorthand | SchemeBased
// [2] Shorthand ::= NCName
// [3] SchemeBased ::= PointerPart (S? PointerPart)*
// [4] PointerPart ::= SchemeName '(' SchemeData ')'
// [5] SchemeName ::= QName
// [6] SchemeData ::= EscapedData*
// [7] EscapedData ::= NormalChar | '^(' | '^)' | '^^' | '(' SchemeData ')'
// [8] NormalChar ::= UnicodeChar - [()^]
// [9] UnicodeChar ::= [#x0-#x10FFFF]
// [?] QName ::= (NCName ':')? NCName
// [?] NCName ::= (Letter | '_') (NCNameChar)*
// [?] NCNameChar ::= Letter | Digit | '.' | '-' | '_' (ascii subset of 'NCNameChar')
// [?] Letter ::= [A-Za-z] (ascii subset of 'Letter')
// [?] Digit ::= [0-9] (ascii subset of 'Digit')
//
byte chartype = (ch >= 0x80) ? CHARTYPE_NONASCII
: fASCIICharMap[ch];
switch (chartype) {
case CHARTYPE_OPEN_PAREN: // '('
addToken(tokens, Tokens.XPTRTOKEN_OPEN_PAREN);
openParen++;
++currentOffset;
break;
case CHARTYPE_CLOSE_PAREN: // ')'
addToken(tokens, Tokens.XPTRTOKEN_CLOSE_PAREN);
closeParen++;
++currentOffset;
break;
case CHARTYPE_CARRET:
case CHARTYPE_COLON:
case CHARTYPE_DIGIT:
case CHARTYPE_EQUAL:
case CHARTYPE_LETTER:
case CHARTYPE_MINUS:
case CHARTYPE_NONASCII:
case CHARTYPE_OTHER:
case CHARTYPE_PERIOD:
case CHARTYPE_SLASH:
case CHARTYPE_UNDERSCORE:
case CHARTYPE_WHITESPACE:
// Scanning SchemeName | Shorthand
if (openParen == 0) {
nameOffset = currentOffset;
currentOffset = scanNCName(data, endOffset,
currentOffset);
if (currentOffset == nameOffset) {
reportError("InvalidShortHandPointer",
new Object[] { data });
return false;
}
if (currentOffset < endOffset) {
ch = data.charAt(currentOffset);
} else {
ch = -1;
}
name = symbolTable.addSymbol(data.substring(nameOffset,
currentOffset));
prefix = XMLSymbols.EMPTY_STRING;
// The name is a QName => a SchemeName
if (ch == ':') {
if (++currentOffset == endOffset) {
return false;
}
ch = data.charAt(currentOffset);
prefix = name;
nameOffset = currentOffset;
currentOffset = scanNCName(data, endOffset,
currentOffset);
if (currentOffset == nameOffset) {
return false;
}
if (currentOffset < endOffset) {
ch = data.charAt(currentOffset);
} else {
ch = -1;
}
isQName = true;
name = symbolTable.addSymbol(data.substring(
nameOffset, currentOffset));
}
// REVISIT:
if (currentOffset != endOffset) {
addToken(tokens, Tokens.XPTRTOKEN_SCHEMENAME);
tokens.addToken(prefix);
tokens.addToken(name);
isQName = false;
} else if (currentOffset == endOffset) {
// NCName => Shorthand
addToken(tokens, Tokens.XPTRTOKEN_SHORTHAND);
tokens.addToken(name);
isQName = false;
}
// reset open/close paren for the next pointer part
closeParen = 0;
break;
} else if (openParen > 0 && closeParen == 0 && name != null) {
// Scanning SchemeData
dataOffset = currentOffset;
currentOffset = scanData(data, schemeDataBuff,
endOffset, currentOffset);
if (currentOffset == dataOffset) {
reportError("InvalidSchemeDataInXPointer",
new Object[] { data });
return false;
}
if (currentOffset < endOffset) {
ch = data.charAt(currentOffset);
} else {
ch = -1;
}
schemeData = symbolTable.addSymbol(schemeDataBuff
.toString());
addToken(tokens, Tokens.XPTRTOKEN_SCHEMEDATA);
tokens.addToken(schemeData);
// reset open/close paren for the next pointer part
openParen = 0;
schemeDataBuff.delete(0, schemeDataBuff.length());
} else {
// ex. schemeName()
// Should we throw an exception with a more suitable message instead??
return false;
}
}
} // end while
return true;
}
/**
* Scans a NCName.
* From Namespaces in XML
* [5] NCName ::= (Letter | '_') (NCNameChar)*
* [6] NCNameChar ::= Letter | Digit | '.' | '-' | '_' | CombiningChar | Extender
*
* @param data A String containing the XPointer expression
* @param endOffset The int XPointer expression length
* @param currentOffset An int representing the current position of the XPointer expression pointer
*/
private int scanNCName(String data, int endOffset, int currentOffset) {
int ch = data.charAt(currentOffset);
if (ch >= 0x80) {
if (!XMLChar.isNameStart(ch)) {
return currentOffset;
}
} else {
byte chartype = fASCIICharMap[ch];
if (chartype != CHARTYPE_LETTER
&& chartype != CHARTYPE_UNDERSCORE) {
return currentOffset;
}
}
//while (currentOffset++ < endOffset) {
while (++currentOffset < endOffset) {
ch = data.charAt(currentOffset);
if (ch >= 0x80) {
if (!XMLChar.isName(ch)) {
break;
}
} else {
byte chartype = fASCIICharMap[ch];
if (chartype != CHARTYPE_LETTER
&& chartype != CHARTYPE_DIGIT
&& chartype != CHARTYPE_PERIOD
&& chartype != CHARTYPE_MINUS
&& chartype != CHARTYPE_UNDERSCORE) {
break;
}
}
}
return currentOffset;
}
/**
* Scans the SchemeData.
* [6] SchemeData ::= EscapedData*
* [7] EscapedData ::= NormalChar | '^(' | '^)' | '^^' | '(' SchemeData ')'
* [8] NormalChar ::= UnicodeChar - [()^]
* [9] UnicodeChar ::= [#x0-#x10FFFF]
*
*/
private int scanData(String data, StringBuffer schemeData,
int endOffset, int currentOffset) {
while (true) {
if (currentOffset == endOffset) {
break;
}
int ch = data.charAt(currentOffset);
byte chartype = (ch >= 0x80) ? CHARTYPE_NONASCII
: fASCIICharMap[ch];
if (chartype == CHARTYPE_OPEN_PAREN) {
schemeData.append(ch);
//schemeData.append(Tokens.XPTRTOKEN_OPEN_PAREN);
currentOffset = scanData(data, schemeData, endOffset,
++currentOffset);
if (currentOffset == endOffset) {
return currentOffset;
}
ch = data.charAt(currentOffset);
chartype = (ch >= 0x80) ? CHARTYPE_NONASCII
: fASCIICharMap[ch];
if (chartype != CHARTYPE_CLOSE_PAREN) {
return endOffset;
}
schemeData.append((char) ch);
++currentOffset;//
} else if (chartype == CHARTYPE_CLOSE_PAREN) {
return currentOffset;
} else if (chartype == CHARTYPE_CARRET) {
ch = data.charAt(++currentOffset);
chartype = (ch >= 0x80) ? CHARTYPE_NONASCII
: fASCIICharMap[ch];
if (chartype != CHARTYPE_CARRET
&& chartype != CHARTYPE_OPEN_PAREN
&& chartype != CHARTYPE_CLOSE_PAREN) {
break;
}
schemeData.append((char) ch);
++currentOffset;
} else {
schemeData.append((char) ch);
++currentOffset;//
}
}
return currentOffset;
}
//
// Protected methods
//
/**
* This method adds the specified token to the token list. By
* default, this method allows all tokens. However, subclasses
* of the XPathExprScanner can override this method in order
* to disallow certain tokens from being used in the scanned
* XPath expression. This is a convenient way of allowing only
* a subset of XPath.
*/
protected void addToken(Tokens tokens, int token) throws XNIException {
tokens.addToken(token);
} // addToken(int)
} // class Scanner
// ************************************************************************
// Overridden XMLDocumentHandler methods
// ************************************************************************
/**
* If the comment is a child of a matched element, then pass else return.
*
* @param text The text in the comment.
* @param augs Additional information that may include infoset augmentations
*
* @exception XNIException
* Thrown by application to signal an error.
*/
public void comment(XMLString text, Augmentations augs) throws XNIException {
if (!isChildFragmentResolved()) {
return;
}
super.comment(text, augs);
}
/**
* A processing instruction. Processing instructions consist of a
* target name and, optionally, text data. The data is only meaningful
* to the application.
* <p>
* Typically, a processing instruction's data will contain a series
* of pseudo-attributes. These pseudo-attributes follow the form of
* element attributes but are <strong>not</strong> parsed or presented
* to the application as anything other than text. The application is
* responsible for parsing the data.
*
* @param target The target.
* @param data The data or null if none specified.
* @param augs Additional information that may include infoset augmentations
*
* @exception XNIException
* Thrown by handler to signal an error.
*/
public void processingInstruction(String target, XMLString data,
Augmentations augs) throws XNIException {
if (!isChildFragmentResolved()) {
return;
}
super.processingInstruction(target, data, augs);
}
/**
* The start of an element.
*
* @param element The name of the element.
* @param attributes The element attributes.
* @param augs Additional information that may include infoset augmentations
*
* @exception XNIException
* Thrown by handler to signal an error.
*/
public void startElement(QName element, XMLAttributes attributes,
Augmentations augs) throws XNIException {
if (!resolveXPointer(element, attributes, augs,
XPointerPart.EVENT_ELEMENT_START)) {
// xml:base and xml:lang processing
if (fFixupBase) {
processXMLBaseAttributes(attributes);
}
if (fFixupLang) {
processXMLLangAttributes(attributes);
}
// set the context invalid if the element till an element from the result infoset is included
fNamespaceContext.setContextInvalid();
return;
}
super.startElement(element, attributes, augs);
}
/**
* An empty element.
*
* @param element The name of the element.
* @param attributes The element attributes.
* @param augs Additional information that may include infoset augmentations
*
* @exception XNIException
* Thrown by handler to signal an error.
*/
public void emptyElement(QName element, XMLAttributes attributes,
Augmentations augs) throws XNIException {
if (!resolveXPointer(element, attributes, augs,
XPointerPart.EVENT_ELEMENT_EMPTY)) {
// xml:base and xml:lang processing
if (fFixupBase) {
processXMLBaseAttributes(attributes);
}
if (fFixupLang) {
processXMLLangAttributes(attributes);
}
// no need to restore restoreBaseURI() for xml:base and xml:lang processing
// set the context invalid if the element till an element from the result infoset is included
fNamespaceContext.setContextInvalid();
return;
}
super.emptyElement(element, attributes, augs);
}
/**
* Character content.
*
* @param text The content.
* @param augs Additional information that may include infoset augmentations
*
* @exception XNIException
* Thrown by handler to signal an error.
*/
public void characters(XMLString text, Augmentations augs)
throws XNIException {
if (!isChildFragmentResolved()) {
return;
}
super.characters(text, augs);
}
/**
* Ignorable whitespace. For this method to be called, the document
* source must have some way of determining that the text containing
* only whitespace characters should be considered ignorable. For
* example, the validator can determine if a length of whitespace
* characters in the document are ignorable based on the element
* content model.
*
* @param text The ignorable whitespace.
* @param augs Additional information that may include infoset augmentations
*
* @exception XNIException
* Thrown by handler to signal an error.
*/
public void ignorableWhitespace(XMLString text, Augmentations augs)
throws XNIException {
if (!isChildFragmentResolved()) {
return;
}
super.ignorableWhitespace(text, augs);
}
/**
* The end of an element.
*
* @param element The name of the element.
* @param augs Additional information that may include infoset augmentations
*
* @exception XNIException
* Thrown by handler to signal an error.
*/
public void endElement(QName element, Augmentations augs)
throws XNIException {
if (!resolveXPointer(element, null, augs,
XPointerPart.EVENT_ELEMENT_END)) {
// no need to restore restoreBaseURI() for xml:base and xml:lang processing
return;
}
super.endElement(element, augs);
}
/**
* The start of a CDATA section.
*
* @param augs Additional information that may include infoset augmentations
*
* @exception XNIException
* Thrown by handler to signal an error.
*/
public void startCDATA(Augmentations augs) throws XNIException {
if (!isChildFragmentResolved()) {
return;
}
super.startCDATA(augs);
}
/**
* The end of a CDATA section.
*
* @param augs Additional information that may include infoset augmentations
*
* @exception XNIException
* Thrown by handler to signal an error.
*/
public void endCDATA(Augmentations augs) throws XNIException {
if (!isChildFragmentResolved()) {
return;
}
super.endCDATA(augs);
}
// ************************************************************************
// Overridden XMLComponent methods
// ************************************************************************
/**
* <p>
* Sets the value of a property. This method is called by the component
* manager any time after reset when a property changes value.
* </p>
* <strong>Note:</strong> Components should silently ignore properties
* that do not affect the operation of the component.
*
* @param propertyId The property identifier.
* @param value The value of the property.
*
* @throws XMLConfigurationException Thrown for configuration error.
* In general, components should
* only throw this exception if
* it is <strong>really</strong>
* a critical error.
*/
public void setProperty(String propertyId, Object value)
throws XMLConfigurationException {
// Error reporter
if (propertyId == Constants.XERCES_PROPERTY_PREFIX
+ Constants.ERROR_REPORTER_PROPERTY) {
if (value != null) {
fXPointerErrorReporter = (XMLErrorReporter) value;
} else {
fXPointerErrorReporter = null;
}
}
// Error handler
if (propertyId == Constants.XERCES_PROPERTY_PREFIX
+ Constants.ERROR_HANDLER_PROPERTY) {
if (value != null) {
fErrorHandler = (XMLErrorHandler) value;
} else {
fErrorHandler = null;
}
}
// xml:lang
if (propertyId == Constants.XERCES_FEATURE_PREFIX
+ Constants.XINCLUDE_FIXUP_LANGUAGE_FEATURE) {
if (value != null) {
fFixupLang = ((Boolean)value).booleanValue();
} else {
fFixupLang = false;
}
}
// xml:base
if (propertyId == Constants.XERCES_FEATURE_PREFIX
+ Constants.XINCLUDE_FIXUP_BASE_URIS_FEATURE) {
if (value != null) {
fFixupBase = ((Boolean)value).booleanValue();
} else {
fFixupBase = false;
}
}
//
if (propertyId == Constants.XERCES_PROPERTY_PREFIX
+ Constants.NAMESPACE_CONTEXT_PROPERTY) {
fNamespaceContext = (XIncludeNamespaceSupport) value;
}
super.setProperty(propertyId, value);
}
}
| |
/** See the file "LICENSE" for the full license governing this code. */
package au.org.ands.vocabs.toolkit.test.arquillian;
import java.io.File;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.net.URL;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status.Family;
import org.apache.commons.io.FileUtils;
import org.dbunit.DatabaseUnitException;
import org.hibernate.HibernateException;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Test;
import au.org.ands.vocabs.toolkit.db.TaskUtils;
import au.org.ands.vocabs.toolkit.db.model.Task;
import au.org.ands.vocabs.toolkit.tasks.TaskInfo;
import au.org.ands.vocabs.toolkit.tasks.TaskRunner;
import au.org.ands.vocabs.toolkit.test.utils.NetClientUtils;
import au.org.ands.vocabs.toolkit.utils.ApplicationContextListener;
import au.org.ands.vocabs.toolkit.utils.ToolkitConfig;
import au.org.ands.vocabs.toolkit.utils.ToolkitFileUtils;
/** All Arquillian tests of the Toolkit.
* Very unfortunately, there is no way to share Arquillian deployments
* across multiple classes. Each separate test class causes a fresh
* deployment. So for now, put all tests here. When Suite support
* is implemented, refactor. See
* <a href="https://issues.jboss.org/browse/ARQ-197">JBoss JIRA ARQ-197</a>.
 * At least we can put the deployment definition in a parent class.
* @see ArquillianBaseTest
*/
@Test(groups = "arquillian")
public class AllArquillianTests extends ArquillianBaseTest {

    /** Logger for this class. */
    private static final Logger logger = LoggerFactory.getLogger(
            MethodHandles.lookup().lookupClass());

    // Test setup/shutdown

    /** Set up the suite. This means:
     * clear out the contents of the repository (deleting
     * the directory pointed to by property {@code Toolkit.storagePath}).
     * Note: Arquillian invokes this method first on the client side, and
     * then on the server side after deployment.
     * @throws IOException If unable to remove the repository directory
     *      {@code Toolkit.storagePath}.
     */
    @BeforeSuite(groups = "arquillian")
    public final void setupSuite() throws IOException {
        if (ApplicationContextListener.getServletContext() == null) {
            logger.info("In AllArquillianTests.setupSuite() on client side");
        } else {
            logger.info("In AllArquillianTests.setupSuite() on server side");
            FileUtils.deleteDirectory(new File(
                    ToolkitConfig.ROOT_FILES_PATH));
        }
    }

    // Server-side tests go here. Client-side tests later on.

    // Tests of class au.org.ands.vocabs.toolkit.db.TasksUtils.

    /** Server-side test of {@code TasksUtils.getAllTasks()}
     * when there are no tasks. */
    @Test
    public final void testGetAllTasks() {
        logger.info("In testGetAllTasks()");
        List<Task> taskList = TaskUtils.getAllTasks();
        Assert.assertNotNull(taskList,
                "getAllTasks() with no tasks");
        Assert.assertEquals(taskList.size(), 0,
                "getAllTasks() with no tasks");
    }

    // Tests of class
    // au.org.ands.vocabs.toolkit.provider.transform.JsonTreeTransformProvider.

    // Task numbers 3 and 4 generate magic number warnings.
    //CHECKSTYLE:OFF: MagicNumber

    /** Run one transform task and assert that it completed successfully.
     * Extracted helper: the original test repeated this
     * load-run-check sequence once per task.
     * @param taskId The task id within the loaded DBUnit test data.
     * @return The results map produced by the task runner.
     */
    private HashMap<String, String> runTransformTask(final int taskId) {
        TaskInfo taskInfo = ToolkitFileUtils.getTaskInfo(taskId);
        Assert.assertNotNull(taskInfo, "Test data not loaded, task "
                + taskId);
        TaskRunner runner = new TaskRunner(taskInfo);
        runner.runTask();
        HashMap<String, String> results = runner.getResults();
        Assert.assertNotNull(results);
        Assert.assertEquals(results.get("status"), "success",
                "JsonTreeTransformProvider failed on task " + taskId);
        return results;
    }

    /** Server-side test of {@code JsonTreeTransformProvider}.
     * @throws DatabaseUnitException If a problem with DBUnit.
     * @throws HibernateException If a problem getting the underlying
     *      JDBC connection.
     * @throws IOException If a problem getting test data for DBUnit,
     *      or reading JSON from the correct and test output files.
     * @throws SQLException If DBUnit has a problem performing
     *      JDBC operations.
     */
    @Test
    public final void testJsonTreeTransformProvider1() throws
        DatabaseUnitException, HibernateException, IOException, SQLException {
        logger.info("In testJsonTreeTransformProvider1()");
        ArquillianTestUtils.loadDbUnitTestFile(
                "testJsonTreeTransformProvider1");
        List<Task> taskList = TaskUtils.getAllTasks();
        logger.info("testJsonTreeTransformProvider1: task list length = "
                + taskList.size());
        // The expected concepts tree, shared by tasks 1 and 2.
        String correctTree = "src/test/resources/input/"
                + "au.org.ands.vocabs.toolkit.test.arquillian."
                + "AllArquillianTests.testJsonTreeTransformProvider1/"
                + "test-data1-concepts_tree.json";

        HashMap<String, String> results = runTransformTask(1);
        ArquillianTestUtils.compareJson(results.get("concepts_tree"),
                correctTree);

        results = runTransformTask(2);
        // Note the use of the same correct output as the previous task.
        ArquillianTestUtils.compareJson(results.get("concepts_tree"),
                correctTree);

        // Polyhierarchy detection: no concepts tree may be produced.
        results = runTransformTask(3);
        Assert.assertFalse(results.containsKey("concepts_tree"),
                "JsonTreeTransformProvider task 3 returned a concepts_tree "
                + "value");
        Assert.assertEquals(results.get("concepts_tree_not_provided"),
                "No concepts tree provided, because there is a forward "
                + "or cross edge.",
                "JsonTreeTransformProvider task 3 returned wrong value for "
                + "concepts_tree_not_provided");

        // Cycle detection: no concepts tree may be produced.
        results = runTransformTask(4);
        Assert.assertFalse(results.containsKey("concepts_tree"),
                "JsonTreeTransformProvider task 4 returned a concepts_tree "
                + "value");
        Assert.assertEquals(results.get("concepts_tree_not_provided"),
                "No concepts tree provided, because there is a cycle.",
                "JsonTreeTransformProvider task 4 returned wrong value for "
                + "concepts_tree_not_provided");
    }
    //CHECKSTYLE:ON: MagicNumber

    // Client-side tests go here. Server-side tests are above this line.

    // Tests of restlets defined in
    //   au.org.ands.vocabs.toolkit.restlet.GetInfo.

    /** The base URL of the deployed webapp under test.
     * Injected by Arquillian.
     * For future ref: if instead of being specified as a private
     * field, this was to be
     * injected as a parameter into a method annotated as {@code @Test},
     * TestNG has to be made happy by saying
     * {@code @Test(dataProvider = Arquillian.ARQUILLIAN_DATA_PROVIDER)}.
     */
    private @ArquillianResource URL baseURL;

    /** Client-side test of the system health check function.
     * @throws DatabaseUnitException If a problem with DBUnit.
     * @throws IOException If a problem getting test data for DBUnit.
     * @throws SQLException If DBUnit has a problem performing
     *      JDBC operations.
     */
    @Test
    @RunAsClient
    public final void testSystemHealthCheck() throws
        DatabaseUnitException, IOException, SQLException {
        logger.info("In testSystemHealthCheck()");
        ArquillianTestUtils.clientClearDatabase(baseURL);
        Response response = NetClientUtils.doGet(baseURL,
                "getInfo/systemHealthCheck", MediaType.APPLICATION_JSON_TYPE);
        Assert.assertEquals(response.getStatusInfo().getFamily(),
                Family.SUCCESSFUL,
                "systemHealthCheck response status");
        String body = response.readEntity(String.class);
        response.close();
        Assert.assertEquals(body, "[]",
                "systemHealthCheck return value");
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.raid.protocol;
import java.io.IOException;
import java.io.DataInput;
import java.io.DataOutput;
import java.util.Properties;
import java.util.Enumeration;
import java.lang.Math;
import java.text.SimpleDateFormat;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
import org.apache.hadoop.io.WritableFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileStatus;
/**
* Maintains information about one policy
*/
public class PolicyInfo implements Writable {
  public static final Log LOG = LogFactory.getLog(
    "org.apache.hadoop.raid.protocol.PolicyInfo");
  // NOTE(review): SimpleDateFormat is not thread-safe; this shared static
  // instance must be externally synchronized if used from multiple threads.
  protected static final SimpleDateFormat dateFormat =
    new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

  private Path srcPath;            // the specified src path (fully qualified)
  private String policyName;       // name of policy
  private String destinationPath;  // A destination path for this policy
  private String description;      // A verbose description of this policy
  private Configuration conf;      // Hadoop configuration
  private Properties properties;   // Policy-dependent properties
  private ReentrantReadWriteLock plock; // protects policy operations.
/**
* Create the empty object
*/
public PolicyInfo() {
this.conf = null;
this.policyName = "";
this.description = "";
this.srcPath = null;
this.properties = new Properties();
this.plock = new ReentrantReadWriteLock();
}
  /**
   * Create the metadata that describes a policy.
   *
   * @param policyName the name of this policy.
   * @param conf the Hadoop configuration, used later to resolve file systems.
   */
  public PolicyInfo(String policyName, Configuration conf) {
    this.conf = conf;
    this.policyName = policyName;
    this.description = "";
    this.srcPath = null;
    this.properties = new Properties();
    this.plock = new ReentrantReadWriteLock();
  }
  /**
   * Sets the input path on which this policy has to be applied.
   *
   * @param in the source path string.
   * @throws IOException if the file system for the path cannot be obtained.
   */
  public void setSrcPath(String in) throws IOException {
    srcPath = new Path(in);
    // Qualify with scheme and authority so later comparisons are absolute.
    srcPath = srcPath.makeQualified(srcPath.getFileSystem(conf));
  }
  /**
   * Set the destination path of this policy.
   *
   * @param des the destination path string; stored as-is, not qualified.
   */
  public void setDestinationPath(String des) {
    this.destinationPath = des;
  }
  /**
   * Set the description of this policy.
   *
   * @param des a verbose, human-readable description.
   */
  public void setDescription(String des) {
    this.description = des;
  }
  /**
   * Sets an internal (policy-dependent) property.
   * @param name property name.
   * @param value property value.
   */
  public void setProperty(String name, String value) {
    properties.setProperty(name, value);
  }
  /**
   * Returns the value of an internal property.
   * @param name property name.
   * @return the property value, or null if the property is not set.
   */
  public String getProperty(String name) {
    return properties.getProperty(name);
  }
  /**
   * Get the name of this policy.
   * @return the policy name; never null (defaults to the empty string).
   */
  public String getName() {
    return this.policyName;
  }
  /**
   * Get the destination path of this policy.
   * @return the destination path string, or null if never set.
   */
  public String getDestinationPath() {
    return this.destinationPath;
  }
  /**
   * Get the srcPath.
   * @return the qualified source path, or null if never set.
   * @throws IOException declared for API stability; not thrown here.
   */
  public Path getSrcPath() throws IOException {
    return srcPath;
  }
/**
 * Expands (unglobs) the source path into the concrete paths it matches.
 *
 * @return qualified matching paths, or {@code null} if the glob matched
 *         nothing
 * @throws IOException if the filesystem cannot be reached
 */
public Path[] getSrcPathExpanded() throws IOException {
    FileSystem fs = srcPath.getFileSystem(conf);
    // Expand any glob pattern in srcPath against the filesystem.
    FileStatus[] matches = fs.globStatus(srcPath);
    if (matches == null) {
        return null;
    }
    Path[] expanded = new Path[matches.length];
    int i = 0;
    for (FileStatus stat : matches) {
        expanded[i++] = stat.getPath().makeQualified(fs);
    }
    return expanded;
}
/**
 * Converts this policy into a printable, multi-line form: policy name,
 * source and destination paths, every internal property, and (if present)
 * the first 80 characters of the description collapsed onto one line.
 *
 * @return human-readable summary of this policy
 */
@Override
public String toString() {
    // StringBuilder instead of StringBuffer: this is a local, single-threaded
    // buffer, so the synchronized StringBuffer buys nothing. Chained appends
    // also avoid building intermediate concatenated strings.
    StringBuilder buff = new StringBuilder();
    buff.append("Policy Name:\t").append(policyName).append(" --------------------\n");
    buff.append("Source Path:\t").append(srcPath).append("\n");
    buff.append("Dest Path:\t").append(destinationPath).append("\n");
    for (Enumeration<?> e = properties.propertyNames(); e.hasMoreElements();) {
        String name = (String) e.nextElement();
        buff.append(name).append(":\t").append(properties.getProperty(name)).append("\n");
    }
    if (description.length() > 0) {
        // Show at most the first 80 characters, newlines flattened to spaces.
        int len = Math.min(description.length(), 80);
        String sub = description.substring(0, len).trim();
        sub = sub.replaceAll("\n", " ");
        buff.append("Description:\t").append(sub).append("...\n");
    }
    return buff.toString();
}
//////////////////////////////////////////////////
// Writable
//////////////////////////////////////////////////
// Register a factory so the Writable framework can create PolicyInfo
// instances (via the no-arg constructor) when deserializing.
static { // register a ctor
WritableFactories.setFactory
(PolicyInfo.class,
new WritableFactory() {
public Writable newInstance() { return new PolicyInfo(); }
});
}
/**
 * Serializes this policy: source path, policy name, destination path,
 * description, then a counted list of name/value property pairs. The field
 * order must stay in sync with {@link #readFields(DataInput)}. The
 * LOGDISPLAY1-guarded lines are debug tracing only.
 */
public void write(DataOutput out) throws IOException {
if(LOGDISPLAY1) LOG.info("srcPath.toString() == " + srcPath.toString());
if(LOGDISPLAY1) LOG.info("policyName == " + policyName);
if(LOGDISPLAY1) LOG.info("destinationPath == " + destinationPath);
if(LOGDISPLAY1) LOG.info("description == " + description);
if(LOGDISPLAY1) LOG.info("properties.size() == " + properties.size());
Text.writeString(out, srcPath.toString());
Text.writeString(out, policyName);
Text.writeString(out, destinationPath);
Text.writeString(out, description);
// Property count first, then each name/value pair as length-prefixed strings.
out.writeInt(properties.size());
for (Enumeration<?> e = properties.propertyNames(); e.hasMoreElements();) {
String name = (String) e.nextElement();
Text.writeString(out, name);
Text.writeString(out, properties.getProperty(name));
if(LOGDISPLAY1) LOG.info("name == " + name);
if(LOGDISPLAY1) LOG.info("properties.getProperty(name) == " + properties.getProperty(name));
}
}
/**
 * Restores a policy previously serialized by {@link #write(DataOutput)}:
 * source path, policy name, destination path, description, then a counted
 * list of name/value property pairs.
 *
 * @param in stream positioned at a record written by {@code write}
 * @throws IOException on a malformed or truncated record
 */
public void readFields(DataInput in) throws IOException {
    this.srcPath = new Path(Text.readString(in));
    this.policyName = Text.readString(in);
    this.destinationPath = Text.readString(in);
    this.description = Text.readString(in);
    // Writable instances may be reused across records; readFields must fully
    // overwrite previous state. Without this clear(), properties from an
    // earlier record (or from construction) would survive and merge into the
    // newly deserialized policy.
    properties.clear();
    for (int n = in.readInt(); n > 0; n--) {
        String name = Text.readString(in);
        String value = Text.readString(in);
        properties.setProperty(name, value);
    }
}
// Debug/trace toggles, one per instrumented code region. All default to
// false and are overwritten at class-load time from same-named JVM system
// properties (see the static initializer below).
public static boolean LOGDISPLAY=false;
public static boolean LOGDISPLAY1=false;
public static boolean LOGDISPLAY2=false;
public static boolean LOGDISPLAY3=false;
public static boolean LOGDISPLAY4=false;
public static boolean LOGDISPLAY5=false;
public static boolean LOGDISPLAY6=false;
public static boolean LOGDISPLAY7=false;
public static boolean LOGDISPLAY8=false;
public static boolean LOGDISPLAY9=false;
public static boolean LOGDISPLAY10=false;
public static boolean LOGDISPLAY0=false;
public static boolean LOGDEBUG=false;
// Initialize every debug toggle from its same-named JVM system property.
// Boolean.getBoolean(name) returns true iff the property exists and equals
// "true" (case-insensitive) -- exactly the long-hand
// "getProperty(x) != null && getProperty(x).equalsIgnoreCase(\"true\")"
// ternary it replaces, while reading each property once instead of twice.
static {
    LOGDISPLAY = Boolean.getBoolean("LOGDISPLAY");
    LOGDISPLAY1 = Boolean.getBoolean("LOGDISPLAY1");
    LOGDISPLAY2 = Boolean.getBoolean("LOGDISPLAY2");
    LOGDISPLAY3 = Boolean.getBoolean("LOGDISPLAY3");
    LOGDISPLAY4 = Boolean.getBoolean("LOGDISPLAY4");
    LOGDISPLAY5 = Boolean.getBoolean("LOGDISPLAY5");
    LOGDISPLAY6 = Boolean.getBoolean("LOGDISPLAY6");
    LOGDISPLAY7 = Boolean.getBoolean("LOGDISPLAY7");
    LOGDISPLAY8 = Boolean.getBoolean("LOGDISPLAY8");
    LOGDISPLAY9 = Boolean.getBoolean("LOGDISPLAY9");
    LOGDISPLAY10 = Boolean.getBoolean("LOGDISPLAY10");
    LOGDISPLAY0 = Boolean.getBoolean("LOGDISPLAY0");
    LOGDEBUG = Boolean.getBoolean("LOGDEBUG");
}
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.crosswalk.engine;
import android.net.Uri;
import android.webkit.WebResourceResponse;
import org.apache.cordova.CordovaResourceApi;
import org.apache.cordova.CordovaResourceApi.OpenForReadResult;
import org.apache.cordova.LOG;
import org.chromium.net.NetError;
import org.xwalk.core.XWalkResourceClient;
import org.xwalk.core.XWalkView;
import java.io.FileNotFoundException;
import java.io.IOException;
/**
 * XWalk resource client that routes Crosswalk resource/navigation callbacks
 * into Cordova: load errors are translated from chromium NetError codes to
 * Android WebViewClient-style ERROR_* constants, resource requests are checked
 * against the Cordova whitelist and optionally remapped through
 * {@link CordovaResourceApi}, and navigation attempts are delegated to the
 * parent engine's client.
 */
public class XWalkCordovaResourceClient extends XWalkResourceClient {
private static final String TAG = "XWalkCordovaResourceClient";
// Engine this client reports back to; supplies webView, pluginManager and resourceApi.
protected XWalkWebViewEngine parentEngine;
// Android WebViewClient-compatible error codes (targets of convertErrorCode).
// Success
public static final int ERROR_OK = 0;
// Generic error
public static final int ERROR_UNKNOWN = -1;
// Server or proxy hostname lookup failed
public static final int ERROR_HOST_LOOKUP = -2;
// Unsupported authentication scheme (not basic or digest)
public static final int ERROR_UNSUPPORTED_AUTH_SCHEME = -3;
// User authentication failed on server
public static final int ERROR_AUTHENTICATION = -4;
// User authentication failed on proxy
public static final int ERROR_PROXY_AUTHENTICATION = -5;
// Failed to connect to the server
public static final int ERROR_CONNECT = -6;
// Failed to read or write to the server
public static final int ERROR_IO = -7;
// Connection timed out
public static final int ERROR_TIMEOUT = -8;
// Too many redirects
public static final int ERROR_REDIRECT_LOOP = -9;
// Unsupported URI scheme
public static final int ERROR_UNSUPPORTED_SCHEME = -10;
// Failed to perform SSL handshake
public static final int ERROR_FAILED_SSL_HANDSHAKE = -11;
// Malformed URL
public static final int ERROR_BAD_URL = -12;
// Generic file error
public static final int ERROR_FILE = -13;
// File not found
public static final int ERROR_FILE_NOT_FOUND = -14;
// Too many requests during this load
public static final int ERROR_TOO_MANY_REQUESTS = -15;
/**
 * @param parentEngine engine whose XWalkView this client attaches to
 */
public XWalkCordovaResourceClient(XWalkWebViewEngine parentEngine) {
super(parentEngine.webView);
this.parentEngine = parentEngine;
}
// Map XWalk error code about loading a page to Android specific ones.
// XWalk shares the error code with chromium currently.
static int convertErrorCode(int netError) {
// Note: many NetError.Error constants don't have an obvious mapping.
// These will be handled by the default case, ERROR_UNKNOWN.
switch (netError) {
case NetError.ERR_UNSUPPORTED_AUTH_SCHEME:
return ERROR_UNSUPPORTED_AUTH_SCHEME;
case NetError.ERR_INVALID_AUTH_CREDENTIALS:
case NetError.ERR_MISSING_AUTH_CREDENTIALS:
case NetError.ERR_MISCONFIGURED_AUTH_ENVIRONMENT:
return ERROR_AUTHENTICATION;
case NetError.ERR_TOO_MANY_REDIRECTS:
return ERROR_REDIRECT_LOOP;
case NetError.ERR_UPLOAD_FILE_CHANGED:
return ERROR_FILE_NOT_FOUND;
case NetError.ERR_INVALID_URL:
return ERROR_BAD_URL;
case NetError.ERR_DISALLOWED_URL_SCHEME:
case NetError.ERR_UNKNOWN_URL_SCHEME:
return ERROR_UNSUPPORTED_SCHEME;
case NetError.ERR_IO_PENDING:
case NetError.ERR_NETWORK_IO_SUSPENDED:
return ERROR_IO;
case NetError.ERR_CONNECTION_TIMED_OUT:
case NetError.ERR_TIMED_OUT:
return ERROR_TIMEOUT;
case NetError.ERR_FILE_TOO_BIG:
return ERROR_FILE;
case NetError.ERR_HOST_RESOLVER_QUEUE_TOO_LARGE:
case NetError.ERR_INSUFFICIENT_RESOURCES:
case NetError.ERR_OUT_OF_MEMORY:
return ERROR_TOO_MANY_REQUESTS;
case NetError.ERR_CONNECTION_CLOSED:
case NetError.ERR_CONNECTION_RESET:
case NetError.ERR_CONNECTION_REFUSED:
case NetError.ERR_CONNECTION_ABORTED:
case NetError.ERR_CONNECTION_FAILED:
case NetError.ERR_SOCKET_NOT_CONNECTED:
return ERROR_CONNECT;
case NetError.ERR_INTERNET_DISCONNECTED:
case NetError.ERR_ADDRESS_INVALID:
case NetError.ERR_ADDRESS_UNREACHABLE:
case NetError.ERR_NAME_NOT_RESOLVED:
case NetError.ERR_NAME_RESOLUTION_FAILED:
return ERROR_HOST_LOOKUP;
case NetError.ERR_SSL_PROTOCOL_ERROR:
case NetError.ERR_SSL_CLIENT_AUTH_CERT_NEEDED:
case NetError.ERR_TUNNEL_CONNECTION_FAILED:
case NetError.ERR_NO_SSL_VERSIONS_ENABLED:
case NetError.ERR_SSL_VERSION_OR_CIPHER_MISMATCH:
case NetError.ERR_SSL_RENEGOTIATION_REQUESTED:
case NetError.ERR_CERT_ERROR_IN_SSL_RENEGOTIATION:
case NetError.ERR_BAD_SSL_CLIENT_AUTH_CERT:
case NetError.ERR_SSL_NO_RENEGOTIATION:
case NetError.ERR_SSL_DECOMPRESSION_FAILURE_ALERT:
case NetError.ERR_SSL_BAD_RECORD_MAC_ALERT:
case NetError.ERR_SSL_UNSAFE_NEGOTIATION:
case NetError.ERR_SSL_WEAK_SERVER_EPHEMERAL_DH_KEY:
case NetError.ERR_SSL_CLIENT_AUTH_PRIVATE_KEY_ACCESS_DENIED:
case NetError.ERR_SSL_CLIENT_AUTH_CERT_NO_PRIVATE_KEY:
return ERROR_FAILED_SSL_HANDSHAKE;
case NetError.ERR_PROXY_AUTH_UNSUPPORTED:
case NetError.ERR_PROXY_AUTH_REQUESTED:
case NetError.ERR_PROXY_CONNECTION_FAILED:
case NetError.ERR_UNEXPECTED_PROXY_AUTH:
return ERROR_PROXY_AUTHENTICATION;
// The certificate errors are handled by onReceivedSslError
// and don't need to be reported here.
case NetError.ERR_CERT_COMMON_NAME_INVALID:
case NetError.ERR_CERT_DATE_INVALID:
case NetError.ERR_CERT_AUTHORITY_INVALID:
case NetError.ERR_CERT_CONTAINS_ERRORS:
case NetError.ERR_CERT_NO_REVOCATION_MECHANISM:
case NetError.ERR_CERT_UNABLE_TO_CHECK_REVOCATION:
case NetError.ERR_CERT_REVOKED:
case NetError.ERR_CERT_INVALID:
case NetError.ERR_CERT_WEAK_SIGNATURE_ALGORITHM:
case NetError.ERR_CERT_NON_UNIQUE_NAME:
return ERROR_OK;
default:
return ERROR_UNKNOWN;
}
}
/**
 * Report an error to the host application. These errors are unrecoverable (i.e. the main resource is unavailable).
 * The errorCode parameter corresponds to one of the ERROR_* constants.
 *
 * @param view The WebView that is initiating the callback.
 * @param errorCode The error code corresponding to an ERROR_* value.
 * @param description A String describing the error.
 * @param failingUrl The url that failed to load.
 */
@Override
public void onReceivedLoadError(XWalkView view, int errorCode, String description,
String failingUrl) {
LOG.d(TAG, "CordovaWebViewClient.onReceivedError: Error code=%s Description=%s URL=%s", errorCode, description, failingUrl);
// Convert the XWalk error code to Cordova error code, which follows the Android spec,
// http://developer.android.com/reference/android/webkit/WebViewClient.html.
errorCode = XWalkCordovaResourceClient.convertErrorCode(errorCode);
parentEngine.client.onReceivedError(errorCode, description, failingUrl);
}
/**
 * Intercepts resource loads: blocks URLs rejected by the whitelist (empty
 * response, rendered by the WebView as a 404) and serves plugin-remapped
 * URIs through the Cordova resource API. Returns {@code null} to let the
 * browser handle the request itself.
 *
 * @param view the view issuing the request
 * @param url the URL being requested
 * @return a substitute response, or {@code null} to load normally
 */
@Override
public WebResourceResponse shouldInterceptLoadRequest(XWalkView view, String url) {
try {
// Check the against the white-list.
if (!parentEngine.pluginManager.shouldAllowRequest(url)) {
LOG.w(TAG, "URL blocked by whitelist: " + url);
// Results in a 404.
return new WebResourceResponse("text/plain", "UTF-8", null);
}
CordovaResourceApi resourceApi = parentEngine.resourceApi;
Uri origUri = Uri.parse(url);
// Allow plugins to intercept WebView requests.
Uri remappedUri = resourceApi.remapUri(origUri);
if (!origUri.equals(remappedUri)) {
OpenForReadResult result = resourceApi.openForRead(remappedUri, true);
return new WebResourceResponse(result.mimeType, "UTF-8", result.inputStream);
}
// If we don't need to special-case the request, let the browser load it.
return null;
} catch (IOException e) {
// FileNotFound is an expected miss; anything else is worth logging.
if (!(e instanceof FileNotFoundException)) {
LOG.e(TAG, "Error occurred while loading a file (returning a 404).", e);
}
// Results in a 404.
return new WebResourceResponse("text/plain", "UTF-8", null);
}
}
/**
 * Delegates navigation decisions to the engine's client.
 *
 * @return true if the navigation was consumed and should not proceed
 */
@Override
public boolean shouldOverrideUrlLoading(XWalkView view, String url) {
return parentEngine.client.onNavigationAttempt(url);
}
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.designer.application.dbdiagram;
import com.eas.client.dbstructure.gui.DbSchemeEditorView;
import com.eas.client.model.Relation;
import com.eas.client.model.dbscheme.FieldsEntity;
import com.eas.client.model.gui.view.model.SelectedField;
import com.eas.client.model.gui.view.ModelSelectionListener;
import com.eas.client.model.gui.view.entities.EntityView;
import com.eas.client.dbstructure.gui.view.DbSchemeModelView;
import com.eas.client.metadata.Field;
import com.eas.designer.application.project.PlatypusProject;
import com.eas.designer.datamodel.nodes.EntityNode;
import com.eas.designer.datamodel.nodes.FieldNode;
import com.eas.designer.explorer.model.windows.ModelInspector;
import com.eas.designer.explorer.selectors.TablesSelector;
import com.eas.util.ListenerRegistration;
import java.awt.BorderLayout;
import java.awt.EventQueue;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.swing.Action;
import javax.swing.event.UndoableEditEvent;
import javax.swing.undo.UndoManager;
import javax.swing.undo.UndoableEdit;
import org.openide.ErrorManager;
import org.openide.awt.UndoRedo;
import org.openide.explorer.ExplorerManager;
import org.openide.nodes.Node;
import org.openide.util.Exceptions;
import org.openide.util.ImageUtilities;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.openide.util.Utilities;
import org.openide.util.lookup.Lookups;
import org.openide.util.lookup.ProxyLookup;
import org.openide.windows.CloneableTopComponent;
import org.openide.windows.TopComponent;
import org.openide.windows.TopComponentGroup;
import org.openide.windows.WindowManager;
/**
 * Cloneable top component hosting the database scheme diagram editor for one
 * {@link PlatypusDbDiagramDataObject}. It wires the diagram to the
 * ModelInspector window, keeps the tab title in sync with the data object's
 * modified state, and rebuilds the editor view whenever the underlying
 * datasource connects, disconnects or the model is revalidated.
 *
 * @author mg
 */
public class PlatypusDbDiagramView extends CloneableTopComponent {
/**
 * Refreshes the tab title whenever the data object's "modified" property
 * changes, so unsaved changes are reflected (bold title).
 */
protected class DataObjectListener implements PropertyChangeListener {
@Override
public void propertyChange(PropertyChangeEvent evt) {
if ("modified".equals(evt.getPropertyName())) {
updateTitle();
}
}
}
/**
 * Mirrors node selection made in the ModelInspector explorer back onto the
 * diagram: entity nodes select their entity views, field nodes select the
 * individual fields. The {@code processing} flag guards against re-entrant
 * selection events fired while this listener itself changes the selection.
 */
protected class NodeSelectionListener implements PropertyChangeListener {
// true while this listener is applying a selection, to break feedback loops
protected boolean processing;
@Override
public void propertyChange(PropertyChangeEvent evt) {
if (ExplorerManager.PROP_SELECTED_NODES.equals(evt.getPropertyName()) /*
* if you want to uncomment the following line, you should ensure that ccp
* operations on the nodes will not become slow
|| ExplorerManager.PROP_NODE_CHANGE.equals(evt.getPropertyName())*/) {
if (!processing) {
processing = true;
try {
Node[] nodes = ModelInspector.getInstance().getExplorerManager().getSelectedNodes();
// Start from a clean diagram selection, then re-select from the explorer nodes.
getModelView().silentClearSelection();
getModelView().clearEntitiesFieldsSelection();
// Batch selected fields per entity view so each view receives a
// single addSelectedFields() call below.
Map<EntityView<FieldsEntity>, Set<Field>> toSelectFields = new HashMap<>();
for (Node node : nodes) {
EntityView<FieldsEntity> ev;
if (node instanceof EntityNode) {
ev = getModelView().getEntityView(((EntityNode<FieldsEntity>) node).getEntity());
getModelView().silentSelectView(ev);
editor.checkActions();
} else if (node instanceof FieldNode) {
// A field node's parent is the entity node that owns the field.
ev = getModelView().getEntityView(((EntityNode<FieldsEntity>) node.getParentNode()).getEntity());
FieldNode fieldNode = (FieldNode) node;
if (!toSelectFields.containsKey(ev)) {
toSelectFields.put(ev, new HashSet<>());
}
toSelectFields.get(ev).add(fieldNode.getField());
}
}
toSelectFields.entrySet().stream().forEach((fEntry) -> {
EntityView<FieldsEntity> ev = fEntry.getKey();
ev.addSelectedFields(fEntry.getValue());
});
setActivatedNodes(nodes);
} finally {
processing = false;
}
}
}
}
}
// Window-system group opened/closed together with diagram views.
public static final String PLATYPUS_DIAGRAMS_GROUP_NAME = "PlatypusModel";
static final long serialVersionUID = 1141132023402024L;
/**
 * path to the icon used by the component and its open action
 */
static final String ICON_PATH = "com/eas/designer/application/dbdiagram/dbScheme.png";
private static final String PREFERRED_ID = "PlatypusDbDiagramTopComponent";
// The edited data object; restored by readExternal on window-system deserialization.
protected PlatypusDbDiagramDataObject dataObject;
// The actual diagram editor component; (re)built by initDbRelatedViews().
protected transient DbSchemeEditorView editor;
protected transient DataObjectListener dataObjectListener;
// NOTE(review): field name has a typo ("exlorer"); left as-is since the field is protected.
protected transient NodeSelectionListener exlorerSelectionListener = new NodeSelectionListener();
// Registrations for data-object callbacks, removed in componentClosed().
protected transient ListenerRegistration clientChangeListener;
protected transient ListenerRegistration modelValidChangeListener;
public PlatypusDbDiagramView() throws Exception {
super();
setIcon(ImageUtilities.loadImage(ICON_PATH, true));
}
/**
 * Exposes the data object through this component's lookup in addition to
 * the inherited lookup contents.
 */
@Override
public Lookup getLookup() {
return new ProxyLookup(super.getLookup(), Lookups.fixed(getDataObject()));
}
public PlatypusDbDiagramDataObject getDataObject() {
return dataObject;
}
/**
 * Binds this view to its data object: sets name/tooltip, builds the
 * db-related views, and subscribes to model-validity and datasource
 * connect/disconnect events so the editor is rebuilt when they change.
 */
public void setDataObject(PlatypusDbDiagramDataObject aDataObject) throws Exception {
dataObject = aDataObject;
dataObjectListener = new DataObjectListener();
setName(dataObject.getPrimaryFile().getName());
setToolTipText(NbBundle.getMessage(PlatypusDbDiagramView.class, "HINT_PlatypusDbDiagramTopComponent", dataObject.getPrimaryFile().getPath()));
initDbRelatedViews();
modelValidChangeListener = dataObject.addModelValidChangeListener(() -> {
try {
initDbRelatedViews();
} catch (Exception ex) {
ErrorManager.getDefault().notify(ex);
}
});
clientChangeListener = dataObject.addClientChangeListener(new PlatypusProject.ClientChangeListener() {
@Override
public void connected(String aDatasourceName) {
try {
// Rebuild only if the (dis)connected datasource is the one this model uses
// (falling back to the project's default datasource name).
String dsName = dataObject.getModel().getDatasourceName();
if (dsName == null) {
dsName = dataObject.getProject().getSettings().getDefaultDataSourceName();
}
if (dsName == null ? aDatasourceName == null : dsName.equals(aDatasourceName)) {
initDbRelatedViews();
}
} catch (Exception ex) {
Exceptions.printStackTrace(ex);
}
}
@Override
public void disconnected(String aDatasourceName) {
try {
String dsName = dataObject.getModel().getDatasourceName();
if (dsName == null) {
dsName = dataObject.getProject().getSettings().getDefaultDataSourceName();
}
if (dsName == null ? aDatasourceName == null : dsName.equals(aDatasourceName)) {
initDbRelatedViews();
}
} catch (Exception ex) {
Exceptions.printStackTrace(ex);
}
}
@Override
public void defaultDatasourceNameChanged(String aOldDatasourceName, String aNewDatasourceName) {
try {
// Models without an explicit datasource follow the project default,
// so invalidate and revalidate them when the default changes.
if (dataObject.getModel().getDatasourceName()== null && dataObject.isModelValid()) {
dataObject.setModelValid(false);
dataObject.startModelValidating();
}
} catch (Exception ex) {
Exceptions.printStackTrace(ex);
}
}
});
}
/**
 * Restores this component from window-system persistence; the serialized
 * data object is read back and re-bound via setDataObject().
 */
@Override
public void readExternal(ObjectInput oi) throws IOException, ClassNotFoundException {
try {
super.readExternal(oi);
setDataObject((PlatypusDbDiagramDataObject) oi.readObject());
} catch (Exception ex) {
throw new IOException(ex);
}
}
@Override
public void writeExternal(ObjectOutput oo) throws IOException {
super.writeExternal(oo);
oo.writeObject(dataObject);
}
/**
 * Recomputes the HTML tab title (bold when the data object is modified),
 * dispatching to the EDT if called from another thread.
 */
public void updateTitle() {
String boldTitleMask = "<html><b>%s [%s.%s]</b>";
String plainTitleMask = "<html>%s [%s.%s]";
String titleMask = plainTitleMask;
if (dataObject.isModified()) {
titleMask = boldTitleMask;
}
final String newTitle = String.format(titleMask, getName(), dataObject.getResolvedDatasourceName(), dataObject.getResolvedSchemaName());
if (EventQueue.isDispatchThread()) {
setHtmlDisplayName(newTitle);
} else {
EventQueue.invokeLater(() -> {
setHtmlDisplayName(newTitle);
});
}
}
/**
 * (Re)builds this component's content depending on connection state:
 * the full diagram editor when the datasource is connected and the model is
 * valid, otherwise a validation or connection placeholder.
 */
private void initDbRelatedViews() throws Exception {
removeAll();
setLayout(new BorderLayout());
if (dataObject.getProject().isDbConnected(dataObject.getModel().getDatasourceName())) {
if (dataObject.isModelValid()) {
// Detach any previous editor from the model before replacing it.
if (editor != null) {
editor.setModel(null);
}
editor = new DbSchemeEditorView(dataObject.getModel(),
new TablesSelector(dataObject.getProject(),
NbBundle.getMessage(PlatypusDbDiagramView.class, "HINT_PlatypusDbDiagramTopComponent", dataObject.getPrimaryFile().getName()), PlatypusDbDiagramView.this),
new UndoManager() {
// Route edits into this component's UndoRedo manager instead of
// accumulating them in the editor's own UndoManager.
@Override
public synchronized boolean addEdit(UndoableEdit anEdit) {
((UndoRedo.Manager) getUndoRedo()).undoableEditHappened(new UndoableEditEvent(this, anEdit));
return true;
}
});
add(editor, BorderLayout.CENTER);
updateTitle();
dataObject.addPropertyChangeListener(dataObjectListener);
getModelView().resolveTables();
getModelView().resolveRelations();
// Reflect diagram selection back into the activated nodes so
// NetBeans context-sensitive actions and the inspector follow it.
getModelView().addModelSelectionListener(new ModelSelectionListener<FieldsEntity>() {
@Override
public void selectionChanged(Set<FieldsEntity> oldSelected, Set<FieldsEntity> newSelected) {
try {
Node[] oldNodes = getActivatedNodes();
Node[] newNodes = ModelInspector.convertSelectedToNodes(dataObject.getModelNode(), oldNodes, oldSelected, newSelected);
setActivatedNodes(newNodes);
} catch (Exception ex) {
ErrorManager.getDefault().notify(ex);
}
}
@Override
public void selectionChanged(List<SelectedField<FieldsEntity>> aParameters, List<SelectedField<FieldsEntity>> aFields) {
try {
Node[] oldNodes = getActivatedNodes();
Node[] newNodes = ModelInspector.convertSelectedToNodes(dataObject.getModelNode(), oldNodes, aParameters, aFields);
setActivatedNodes(newNodes);
} catch (Exception ex) {
ErrorManager.getDefault().notify(ex);
}
}
@Override
public void selectionChanged(Collection<Relation<FieldsEntity>> clctn, Collection<Relation<FieldsEntity>> clctn1) {
}
});
UndoRedo ur = getUndoRedo();
if (ur instanceof UndoRedo.Manager) {
((UndoRedo.Manager) ur).discardAllEdits();
}
dataObject.getModel().fireAllQueriesChanged();
componentActivated();
} else {
add(dataObject.getProject().generateDbValidatePlaceholder(), BorderLayout.CENTER);
}
} else {
add(dataObject.getProject().generateDbPlaceholder(dataObject.getModel().getDatasourceName()), BorderLayout.CENTER);
}
revalidate();
repaint();
}
/**
 * The undo/redo manager is owned by the diagram support in the data
 * object's lookup, so all clones of this view share one undo history.
 */
@Override
public UndoRedo getUndoRedo() {
PlatypusDbDiagramSupport support = dataObject.getLookup().lookup(PlatypusDbDiagramSupport.class);
return support.getModelUndo();
}
@Override
public int getPersistenceType() {
return TopComponent.PERSISTENCE_ONLY_OPENED;
}
/**
 * Asks the support for confirmation only when this is the last remaining
 * view of the diagram; other clones may close freely.
 */
@Override
public boolean canClose() {
PlatypusDbDiagramSupport support = dataObject.getLookup().lookup(PlatypusDbDiagramSupport.class);
List<CloneableTopComponent> views = support.getAllViews();
if (views != null && views.size() == 1) {
return support.canClose();
}
return super.canClose();
}
/**
 * Detaches this view from the data object and the model and removes the
 * listener registrations made in setDataObject().
 */
@Override
public void componentClosed() {
try {
super.componentClosed();
// unsubscribe view from the dataObject
dataObject.removePropertyChangeListener(dataObjectListener);
// unsubscribe view from the model
if (editor != null) {
editor.setModel(null);
}
PlatypusDbDiagramSupport support = dataObject.getLookup().lookup(PlatypusDbDiagramSupport.class);
support.shrink();
if (clientChangeListener != null) {
clientChangeListener.remove();
}
if (modelValidChangeListener != null) {
modelValidChangeListener.remove();
}
} catch (Exception ex) {
ErrorManager.getDefault().notify(ex);
}
}
// Returns the diagram's entities view, or null while no editor is built.
protected DbSchemeModelView getModelView() {
return editor != null ? (DbSchemeModelView) editor.getEntitiesView() : null;
}
/**
 * Publishes this view's data to the ModelInspector and opens the diagram
 * window group when the model is usable.
 */
@Override
protected void componentActivated() {
try {
if (dataObject.isValid() && dataObject.getBasesProxy()!= null && dataObject.isModelValid()) {
ModelInspector.getInstance().setNodesReflector(exlorerSelectionListener);
ModelInspector.getInstance().setViewData(new ModelInspector.ViewData<>(getModelView(), getUndoRedo(), dataObject.getModelNode()));
WindowManager wm = WindowManager.getDefault();
final TopComponentGroup group = wm.findTopComponentGroup(PLATYPUS_DIAGRAMS_GROUP_NAME);
if (group != null) {
group.open();
}
}
} catch (Exception ex) {
ErrorManager.getDefault().notify(ex);
}
super.componentActivated();
}
@Override
public Action[] getActions() {
List<Action> actions = new ArrayList<>(Arrays.asList(super.getActions()));
// XXX nicer to use MimeLookup for type-specific actions, but not easy; see org.netbeans.modules.editor.impl.EditorActionsProvider
actions.add(null);
actions.addAll(Utilities.actionsForPath("Editors/TabActions")); //NOI18N
return actions.toArray(new Action[actions.size()]);
}
/**
 * Withdraws this view's data from the ModelInspector and closes the
 * diagram window group (mirror of componentActivated).
 */
@Override
protected void componentHidden() {
ModelInspector.getInstance().setNodesReflector(null);
ModelInspector.getInstance().setViewData(null);
WindowManager wm = WindowManager.getDefault();
final TopComponentGroup group = wm.findTopComponentGroup(PLATYPUS_DIAGRAMS_GROUP_NAME);
if (group != null) {
group.close();
}
super.componentHidden();
}
@Override
protected String preferredID() {
return PREFERRED_ID;
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lakeformation.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result of the GetQueryStatistics call: execution statistics, planning
 * statistics, and the time the query was submitted.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lakeformation-2017-03-31/GetQueryStatistics" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetQueryStatisticsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** An {@code ExecutionStatistics} structure containing execution statistics. */
    private ExecutionStatistics executionStatistics;

    /** A {@code PlanningStatistics} structure containing query planning statistics. */
    private PlanningStatistics planningStatistics;

    /** The time that the query was submitted. */
    private java.util.Date querySubmissionTime;

    /**
     * @param executionStatistics an {@code ExecutionStatistics} structure containing execution statistics
     */
    public void setExecutionStatistics(ExecutionStatistics executionStatistics) {
        this.executionStatistics = executionStatistics;
    }

    /**
     * @return the {@code ExecutionStatistics} structure containing execution statistics
     */
    public ExecutionStatistics getExecutionStatistics() {
        return executionStatistics;
    }

    /**
     * Fluent variant of {@link #setExecutionStatistics(ExecutionStatistics)}.
     *
     * @param executionStatistics an {@code ExecutionStatistics} structure containing execution statistics
     * @return this object, so that method calls can be chained together
     */
    public GetQueryStatisticsResult withExecutionStatistics(ExecutionStatistics executionStatistics) {
        setExecutionStatistics(executionStatistics);
        return this;
    }

    /**
     * @param planningStatistics a {@code PlanningStatistics} structure containing query planning statistics
     */
    public void setPlanningStatistics(PlanningStatistics planningStatistics) {
        this.planningStatistics = planningStatistics;
    }

    /**
     * @return the {@code PlanningStatistics} structure containing query planning statistics
     */
    public PlanningStatistics getPlanningStatistics() {
        return planningStatistics;
    }

    /**
     * Fluent variant of {@link #setPlanningStatistics(PlanningStatistics)}.
     *
     * @param planningStatistics a {@code PlanningStatistics} structure containing query planning statistics
     * @return this object, so that method calls can be chained together
     */
    public GetQueryStatisticsResult withPlanningStatistics(PlanningStatistics planningStatistics) {
        setPlanningStatistics(planningStatistics);
        return this;
    }

    /**
     * @param querySubmissionTime the time that the query was submitted
     */
    public void setQuerySubmissionTime(java.util.Date querySubmissionTime) {
        this.querySubmissionTime = querySubmissionTime;
    }

    /**
     * @return the time that the query was submitted
     */
    public java.util.Date getQuerySubmissionTime() {
        return querySubmissionTime;
    }

    /**
     * Fluent variant of {@link #setQuerySubmissionTime(java.util.Date)}.
     *
     * @param querySubmissionTime the time that the query was submitted
     * @return this object, so that method calls can be chained together
     */
    public GetQueryStatisticsResult withQuerySubmissionTime(java.util.Date querySubmissionTime) {
        setQuerySubmissionTime(querySubmissionTime);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and
     * debugging. Sensitive fields would be redacted with a placeholder value.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getExecutionStatistics() != null) {
            sb.append("ExecutionStatistics: ").append(getExecutionStatistics()).append(",");
        }
        if (getPlanningStatistics() != null) {
            sb.append("PlanningStatistics: ").append(getPlanningStatistics()).append(",");
        }
        if (getQuerySubmissionTime() != null) {
            sb.append("QuerySubmissionTime: ").append(getQuerySubmissionTime());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Also covers obj == null: null is never an instance of anything.
        if (!(obj instanceof GetQueryStatisticsResult)) {
            return false;
        }
        GetQueryStatisticsResult that = (GetQueryStatisticsResult) obj;
        // Objects.equals matches the original's null-xor + equals checks.
        return java.util.Objects.equals(getExecutionStatistics(), that.getExecutionStatistics())
                && java.util.Objects.equals(getPlanningStatistics(), that.getPlanningStatistics())
                && java.util.Objects.equals(getQuerySubmissionTime(), that.getQuerySubmissionTime());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0)
        // as the long-hand prime-multiply form, so the value is unchanged.
        return java.util.Objects.hash(getExecutionStatistics(), getPlanningStatistics(), getQuerySubmissionTime());
    }

    /**
     * @return a shallow copy of this result object
     */
    @Override
    public GetQueryStatisticsResult clone() {
        try {
            return (GetQueryStatisticsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright (c) 1998, 2006, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.java.swing.plaf.windows;
import javax.swing.*;
import javax.swing.plaf.ButtonUI;
import javax.swing.plaf.UIResource;
import java.awt.*;
import java.io.Serializable;
import static com.sun.java.swing.plaf.windows.TMSchema.*;
import static com.sun.java.swing.plaf.windows.XPStyle.Skin;
import sun.swing.MenuItemCheckIconFactory;
/**
* Factory object that can vend Icons appropriate for the Windows L & F.
* <p>
* <strong>Warning:</strong>
* Serialized objects of this class will not be compatible with
* future Swing releases. The current serialization support is appropriate
* for short term storage or RMI between applications running the same
* version of Swing. A future release of Swing will provide support for
* long term persistence.
*
* @author David Kloba
* @author Georges Saab
* @author Rich Schiavi
*/
public class WindowsIconFactory implements Serializable
{
// Lazily-created shared icon instances, one per visual role.  Each is
// created on first request by the corresponding static getter below and
// cached for the lifetime of the class.
private static Icon frame_closeIcon;
private static Icon frame_iconifyIcon;
private static Icon frame_maxIcon;
private static Icon frame_minIcon;
private static Icon frame_resizeIcon;
private static Icon checkBoxIcon;
private static Icon radioButtonIcon;
private static Icon checkBoxMenuItemIcon;
private static Icon radioButtonMenuItemIcon;
private static Icon menuItemCheckIcon;
private static Icon menuItemArrowIcon;
private static Icon menuArrowIcon;
// Vista-specific factory for menu-item check icons (see nested class below).
private static VistaMenuItemCheckIconFactory menuItemCheckIconFactory;
/**
 * Returns the shared menu-item check icon, creating it on first use.
 * NOTE(review): like the other getters below (except the synchronized
 * factory getter), the lazy initialization is unsynchronized —
 * presumably these are only ever called on the EDT; confirm before
 * using from other threads.
 */
public static Icon getMenuItemCheckIcon() {
    if (menuItemCheckIcon == null) {
        menuItemCheckIcon = new MenuItemCheckIcon();
    }
    return menuItemCheckIcon;
}

/** Returns the shared submenu-arrow icon for menu items, lazily created. */
public static Icon getMenuItemArrowIcon() {
    if (menuItemArrowIcon == null) {
        menuItemArrowIcon = new MenuItemArrowIcon();
    }
    return menuItemArrowIcon;
}

/** Returns the shared submenu-arrow icon for menus, lazily created. */
public static Icon getMenuArrowIcon() {
    if (menuArrowIcon == null) {
        menuArrowIcon = new MenuArrowIcon();
    }
    return menuArrowIcon;
}

/** Returns the shared check box icon, lazily created. */
public static Icon getCheckBoxIcon() {
    if (checkBoxIcon == null) {
        checkBoxIcon = new CheckBoxIcon();
    }
    return checkBoxIcon;
}

/** Returns the shared radio button icon, lazily created. */
public static Icon getRadioButtonIcon() {
    if (radioButtonIcon == null) {
        radioButtonIcon = new RadioButtonIcon();
    }
    return radioButtonIcon;
}

/** Returns the shared check box menu item icon, lazily created. */
public static Icon getCheckBoxMenuItemIcon() {
    if (checkBoxMenuItemIcon == null) {
        checkBoxMenuItemIcon = new CheckBoxMenuItemIcon();
    }
    return checkBoxMenuItemIcon;
}

/** Returns the shared radio button menu item icon, lazily created. */
public static Icon getRadioButtonMenuItemIcon() {
    if (radioButtonMenuItemIcon == null) {
        radioButtonMenuItemIcon = new RadioButtonMenuItemIcon();
    }
    return radioButtonMenuItemIcon;
}

/**
 * Returns the shared Vista check-icon factory, lazily created.
 * This getter is synchronized (unlike the icon getters above).
 */
static
synchronized VistaMenuItemCheckIconFactory getMenuItemCheckIconFactory() {
    if (menuItemCheckIconFactory == null) {
        menuItemCheckIconFactory =
            new VistaMenuItemCheckIconFactory();
    }
    return menuItemCheckIconFactory;
}
/** Returns the internal-frame close ("X") button icon, lazily created. */
public static Icon createFrameCloseIcon() {
    if (frame_closeIcon == null) {
        frame_closeIcon = new FrameButtonIcon(Part.WP_CLOSEBUTTON);
    }
    return frame_closeIcon;
}

/** Returns the internal-frame iconify (minimize bar) button icon. */
public static Icon createFrameIconifyIcon() {
    if (frame_iconifyIcon == null) {
        frame_iconifyIcon = new FrameButtonIcon(Part.WP_MINBUTTON);
    }
    return frame_iconifyIcon;
}

/** Returns the internal-frame maximize button icon. */
public static Icon createFrameMaximizeIcon() {
    if (frame_maxIcon == null) {
        frame_maxIcon = new FrameButtonIcon(Part.WP_MAXBUTTON);
    }
    return frame_maxIcon;
}

/**
 * Returns the internal-frame "minimize" button icon.
 * NOTE(review): this deliberately uses WP_RESTOREBUTTON (the
 * restore-down glyph) — in this L&F "minimize" means restore from
 * maximized, while iconify above is the taskbar-minimize bar; confirm
 * before "fixing" the apparent mismatch.
 */
public static Icon createFrameMinimizeIcon() {
    if (frame_minIcon == null) {
        frame_minIcon = new FrameButtonIcon(Part.WP_RESTOREBUTTON);
    }
    return frame_minIcon;
}

/** Returns the internal-frame corner resize-grip icon, lazily created. */
public static Icon createFrameResizeIcon() {
    if(frame_resizeIcon == null)
        frame_resizeIcon = new ResizeIcon();
    return frame_resizeIcon;
}
/**
 * Icon for an internal frame's title-pane buttons (close, iconify,
 * maximize, restore).  When an XP visual style is active, the themed
 * skin for {@code part} is painted in a state derived from the button
 * model; otherwise the glyph is drawn by hand with line/rect primitives.
 */
private static class FrameButtonIcon implements Icon, Serializable {
    // Which caption button this icon renders (WP_CLOSEBUTTON, WP_MINBUTTON,
    // WP_MAXBUTTON or WP_RESTOREBUTTON).
    private Part part;

    private FrameButtonIcon(Part part) {
        this.part = part;
    }

    public void paintIcon(Component c, Graphics g, int x0, int y0) {
        int width = getIconWidth();
        int height = getIconHeight();

        XPStyle xp = XPStyle.getXP();
        if (xp != null) {
            // Themed path: choose the skin state from the button model
            // and from whether the enclosing internal frame is selected.
            Skin skin = xp.getSkin(c, part);
            JButton b = (JButton)c;
            ButtonModel model = b.getModel();

            // Find out if frame is inactive
            JInternalFrame jif = (JInternalFrame)SwingUtilities.
                getAncestorOfClass(JInternalFrame.class, b);
            boolean jifSelected = (jif != null && jif.isSelected());

            // Precedence: disabled > pressed > rollover > normal,
            // with an INACTIVE* variant when the frame is not selected.
            State state;
            if (jifSelected) {
                if (!model.isEnabled()) {
                    state = State.DISABLED;
                } else if (model.isArmed() && model.isPressed()) {
                    state = State.PUSHED;
                } else if (model.isRollover()) {
                    state = State.HOT;
                } else {
                    state = State.NORMAL;
                }
            } else {
                if (!model.isEnabled()) {
                    state = State.INACTIVEDISABLED;
                } else if (model.isArmed() && model.isPressed()) {
                    state = State.INACTIVEPUSHED;
                } else if (model.isRollover()) {
                    state = State.INACTIVEHOT;
                } else {
                    state = State.INACTIVENORMAL;
                }
            }
            skin.paintSkin(g, 0, 0, width, height, state);
        } else {
            // Classic (non-themed) path: draw the glyph with primitives,
            // scaling line thickness with the overall button size.
            g.setColor(Color.black);
            int x = width / 12 + 2;
            int y = height / 5;
            int h = height - y * 2 - 1;
            int w = width * 3/4 -3;
            int thickness2 = Math.max(height / 8, 2);
            int thickness = Math.max(width / 15, 1);

            if (part == Part.WP_CLOSEBUTTON) {
                // The "X": stroke width stepped by the button width.
                int lineWidth;
                if (width > 47) lineWidth = 6;
                else if (width > 37) lineWidth = 5;
                else if (width > 26) lineWidth = 4;
                else if (width > 16) lineWidth = 3;
                else if (width > 12) lineWidth = 2;
                else lineWidth = 1;

                y = height / 12 + 2;

                if (lineWidth == 1) {
                    if (w % 2 == 1) { x++; w++; }
                    g.drawLine(x, y, x+w-2, y+w-2);
                    g.drawLine(x+w-2, y, x, y+w-2);
                } else if (lineWidth == 2) {
                    if (w > 6) { x++; w--; }
                    g.drawLine(x, y, x+w-2, y+w-2);
                    g.drawLine(x+w-2, y, x, y+w-2);
                    g.drawLine(x+1, y, x+w-1, y+w-2);
                    g.drawLine(x+w-1, y, x+1, y+w-2);
                } else {
                    // Thick strokes: two base diagonals plus extra
                    // parallel lines per unit of line width.
                    x += 2; y++; w -= 2;
                    g.drawLine(x, y, x+w-1, y+w-1);
                    g.drawLine(x+w-1, y, x, y+w-1);
                    g.drawLine(x+1, y, x+w-1, y+w-2);
                    g.drawLine(x+w-2, y, x, y+w-2);
                    g.drawLine(x, y+1, x+w-2, y+w-1);
                    g.drawLine(x+w-1, y+1, x+1, y+w-1);
                    for (int i = 4; i <= lineWidth; i++) {
                        g.drawLine(x+i-2, y, x+w-1, y+w-i+1);
                        g.drawLine(x, y+i-2, x+w-i+1, y+w-1);
                        g.drawLine(x+w-i+1, y, x, y+w-i+1);
                        g.drawLine(x+w-1, y+i-2, x+i-2, y+w-1);
                    }
                }
            } else if (part == Part.WP_MINBUTTON) {
                // Iconify: a short bar along the bottom edge.
                g.fillRect(x, y+h-thickness2, w-w/3, thickness2);
            } else if (part == Part.WP_MAXBUTTON) {
                // Maximize: a single hollow rectangle.
                g.fillRect(x, y, w, thickness2);
                g.fillRect(x, y, thickness, h);
                g.fillRect(x+w-thickness, y, thickness, h);
                g.fillRect(x, y+h-thickness, w, thickness);
            } else if (part == Part.WP_RESTOREBUTTON) {
                // Restore: two overlapping hollow rectangles.
                g.fillRect(x+w/3, y, w-w/3, thickness2);
                g.fillRect(x+w/3, y, thickness, h/3);
                g.fillRect(x+w-thickness, y, thickness, h-h/3);
                g.fillRect(x+w-w/3, y+h-h/3-thickness, w/3, thickness);
                g.fillRect(x, y+h/3, w-w/3, thickness2);
                g.fillRect(x, y+h/3, thickness, h-h/3);
                g.fillRect(x+w-w/3-thickness, y+h/3, thickness, h-h/3);
                g.fillRect(x, y+h-thickness, w-w/3, thickness);
            }
        }
    }

    public int getIconWidth() {
        int width;
        if (XPStyle.getXP() != null) {
            // Fix for XP bug where sometimes these sizes aren't updated properly
            // Assume for now that height is correct and derive width using the
            // ratio from the uxtheme part
            width = UIManager.getInt("InternalFrame.titleButtonHeight") -2;
            Dimension d = XPStyle.getPartSize(Part.WP_CLOSEBUTTON, State.NORMAL);
            if (d != null && d.width != 0 && d.height != 0) {
                width = (int) ((float) width * d.width / d.height);
            }
        } else {
            width = UIManager.getInt("InternalFrame.titleButtonWidth") -2;
        }
        // NOTE(review): under XP this subtracts another 2 on top of the -2
        // applied above; it looks deliberate (scaled width needs a second
        // inset) but confirm against the original bug fix before touching.
        if (XPStyle.getXP() != null) {
            width -= 2;
        }
        return width;
    }

    public int getIconHeight() {
        int height = UIManager.getInt("InternalFrame.titleButtonHeight")-4;
        return height;
    }
}
/**
 * The diagonal "grip" pattern painted in the bottom-right corner of a
 * resizable internal frame: three highlight diagonals interleaved with
 * pairs of shadow diagonals, all anti-diagonal lines of the form
 * (i, 11) -&gt; (11, i) inside a fixed 13x13 area.
 */
private static class ResizeIcon implements Icon, Serializable {
    public void paintIcon(Component c, Graphics g, int x, int y) {
        g.setColor(UIManager.getColor("InternalFrame.resizeIconHighlight"));
        for (int i : new int[] { 0, 4, 8 }) {
            g.drawLine(i, 11, 11, i);
        }
        g.setColor(UIManager.getColor("InternalFrame.resizeIconShadow"));
        for (int i : new int[] { 1, 2, 5, 6, 9, 10 }) {
            g.drawLine(i, 11, 11, i);
        }
    }

    public int getIconWidth() {
        return 13;
    }

    public int getIconHeight() {
        return 13;
    }
}
/**
 * Check box icon.  Themed (XP) rendering maps the button model to a skin
 * state; classic rendering hand-draws the bevelled 13x13 box and, when
 * selected, the check mark.
 */
private static class CheckBoxIcon implements Icon, Serializable
{
    // Classic (non-themed) icon size in pixels.
    final static int csize = 13;

    public void paintIcon(Component c, Graphics g, int x, int y) {
        JCheckBox cb = (JCheckBox) c;
        ButtonModel model = cb.getModel();
        XPStyle xp = XPStyle.getXP();

        if (xp != null) {
            // Precedence: disabled wins over pressed, which wins over
            // rollover; selected picks the CHECKED* family.
            State state;
            if (model.isSelected()) {
                state = State.CHECKEDNORMAL;
                if (!model.isEnabled()) {
                    state = State.CHECKEDDISABLED;
                } else if (model.isPressed() && model.isArmed()) {
                    state = State.CHECKEDPRESSED;
                } else if (model.isRollover()) {
                    state = State.CHECKEDHOT;
                }
            } else {
                state = State.UNCHECKEDNORMAL;
                if (!model.isEnabled()) {
                    state = State.UNCHECKEDDISABLED;
                } else if (model.isPressed() && model.isArmed()) {
                    state = State.UNCHECKEDPRESSED;
                } else if (model.isRollover()) {
                    state = State.UNCHECKEDHOT;
                }
            }
            Part part = Part.BP_CHECKBOX;
            xp.getSkin(c, part).paintSkin(g, x, y, state);
        } else {
            // outer bevel
            if(!cb.isBorderPaintedFlat()) {
                // Outer top/left
                g.setColor(UIManager.getColor("CheckBox.shadow"));
                g.drawLine(x, y, x+11, y);
                g.drawLine(x, y+1, x, y+11);

                // Outer bottom/right
                g.setColor(UIManager.getColor("CheckBox.highlight"));
                g.drawLine(x+12, y, x+12, y+12);
                g.drawLine(x, y+12, x+11, y+12);

                // Inner top.left
                g.setColor(UIManager.getColor("CheckBox.darkShadow"));
                g.drawLine(x+1, y+1, x+10, y+1);
                g.drawLine(x+1, y+2, x+1, y+10);

                // Inner bottom/right
                g.setColor(UIManager.getColor("CheckBox.light"));
                g.drawLine(x+1, y+11, x+11, y+11);
                g.drawLine(x+11, y+1, x+11, y+10);

                // inside box: dimmed fill when pressed or disabled
                if((model.isPressed() && model.isArmed()) || !model.isEnabled()) {
                    g.setColor(UIManager.getColor("CheckBox.background"));
                } else {
                    g.setColor(UIManager.getColor("CheckBox.interiorBackground"));
                }
                g.fillRect(x+2, y+2, csize-4, csize-4);
            } else {
                // Flat border: a plain rectangle replaces the bevel.
                g.setColor(UIManager.getColor("CheckBox.shadow"));
                g.drawRect(x+1, y+1, csize-3, csize-3);

                if((model.isPressed() && model.isArmed()) || !model.isEnabled()) {
                    g.setColor(UIManager.getColor("CheckBox.background"));
                } else {
                    g.setColor(UIManager.getColor("CheckBox.interiorBackground"));
                }
                g.fillRect(x+2, y+2, csize-4, csize-4);
            }

            if(model.isEnabled()) {
                g.setColor(UIManager.getColor("CheckBox.foreground"));
            } else {
                g.setColor(UIManager.getColor("CheckBox.shadow"));
            }

            // paint check
            if (model.isSelected()) {
                g.drawLine(x+9, y+3, x+9, y+3);
                g.drawLine(x+8, y+4, x+9, y+4);
                g.drawLine(x+7, y+5, x+9, y+5);
                g.drawLine(x+6, y+6, x+8, y+6);
                g.drawLine(x+3, y+7, x+7, y+7);
                g.drawLine(x+4, y+8, x+6, y+8);
                g.drawLine(x+5, y+9, x+5, y+9);
                g.drawLine(x+3, y+5, x+3, y+5);
                g.drawLine(x+3, y+6, x+4, y+6);
            }
        }
    }

    public int getIconWidth() {
        XPStyle xp = XPStyle.getXP();
        if (xp != null) {
            return xp.getSkin(null, Part.BP_CHECKBOX).getWidth();
        } else {
            return csize;
        }
    }

    public int getIconHeight() {
        XPStyle xp = XPStyle.getXP();
        if (xp != null) {
            return xp.getSkin(null, Part.BP_CHECKBOX).getHeight();
        } else {
            return csize;
        }
    }
}
/**
 * Radio button icon.  Themed (XP) rendering maps the button model to a
 * skin state; classic rendering hand-draws a 13x13 circle out of bevel
 * arcs and, when selected, a centered dot.
 *
 * Fix: removed an unused local variable ({@code int index = 0;}) that
 * was declared in paintIcon and never read.
 */
private static class RadioButtonIcon implements Icon, UIResource, Serializable
{
    public void paintIcon(Component c, Graphics g, int x, int y) {
        AbstractButton b = (AbstractButton) c;
        ButtonModel model = b.getModel();
        XPStyle xp = XPStyle.getXP();

        if (xp != null) {
            Part part = Part.BP_RADIOBUTTON;
            Skin skin = xp.getSkin(b, part);
            // Precedence: disabled wins over pressed, which wins over
            // rollover; selected picks the CHECKED* family.
            State state;
            if (model.isSelected()) {
                state = State.CHECKEDNORMAL;
                if (!model.isEnabled()) {
                    state = State.CHECKEDDISABLED;
                } else if (model.isPressed() && model.isArmed()) {
                    state = State.CHECKEDPRESSED;
                } else if (model.isRollover()) {
                    state = State.CHECKEDHOT;
                }
            } else {
                state = State.UNCHECKEDNORMAL;
                if (!model.isEnabled()) {
                    state = State.UNCHECKEDDISABLED;
                } else if (model.isPressed() && model.isArmed()) {
                    state = State.UNCHECKEDPRESSED;
                } else if (model.isRollover()) {
                    state = State.UNCHECKEDHOT;
                }
            }
            skin.paintSkin(g, x, y, state);
        } else {
            // fill interior: dimmed when pressed or disabled
            if((model.isPressed() && model.isArmed()) || !model.isEnabled()) {
                g.setColor(UIManager.getColor("RadioButton.background"));
            } else {
                g.setColor(UIManager.getColor("RadioButton.interiorBackground"));
            }
            g.fillRect(x+2, y+2, 8, 8);

            // outer left arc
            g.setColor(UIManager.getColor("RadioButton.shadow"));
            g.drawLine(x+4, y+0, x+7, y+0);
            g.drawLine(x+2, y+1, x+3, y+1);
            g.drawLine(x+8, y+1, x+9, y+1);
            g.drawLine(x+1, y+2, x+1, y+3);
            g.drawLine(x+0, y+4, x+0, y+7);
            g.drawLine(x+1, y+8, x+1, y+9);

            // outer right arc
            g.setColor(UIManager.getColor("RadioButton.highlight"));
            g.drawLine(x+2, y+10, x+3, y+10);
            g.drawLine(x+4, y+11, x+7, y+11);
            g.drawLine(x+8, y+10, x+9, y+10);
            g.drawLine(x+10, y+9, x+10, y+8);
            g.drawLine(x+11, y+7, x+11, y+4);
            g.drawLine(x+10, y+3, x+10, y+2);

            // inner left arc
            g.setColor(UIManager.getColor("RadioButton.darkShadow"));
            g.drawLine(x+4, y+1, x+7, y+1);
            g.drawLine(x+2, y+2, x+3, y+2);
            g.drawLine(x+8, y+2, x+9, y+2);
            g.drawLine(x+2, y+3, x+2, y+3);
            g.drawLine(x+1, y+4, x+1, y+7);
            g.drawLine(x+2, y+8, x+2, y+8);

            // inner right arc
            g.setColor(UIManager.getColor("RadioButton.light"));
            g.drawLine(x+2, y+9, x+3, y+9);
            g.drawLine(x+4, y+10, x+7, y+10);
            g.drawLine(x+8, y+9, x+9, y+9);
            g.drawLine(x+9, y+8, x+9, y+8);
            g.drawLine(x+10, y+7, x+10, y+4);
            g.drawLine(x+9, y+3, x+9, y+3);

            // indicate whether selected or not
            if (model.isSelected()) {
                if (model.isEnabled()) {
                    g.setColor(UIManager.getColor("RadioButton.foreground"));
                } else {
                    g.setColor(UIManager.getColor("RadioButton.shadow"));
                }
                g.fillRect(x+4, y+5, 4, 2);
                g.fillRect(x+5, y+4, 2, 4);
            }
        }
    }

    public int getIconWidth() {
        XPStyle xp = XPStyle.getXP();
        if (xp != null) {
            return xp.getSkin(null, Part.BP_RADIOBUTTON).getWidth();
        } else {
            return 13;
        }
    }

    public int getIconHeight() {
        XPStyle xp = XPStyle.getXP();
        if (xp != null) {
            return xp.getSkin(null, Part.BP_RADIOBUTTON).getHeight();
        } else {
            return 13;
        }
    }
} // end class RadioButtonIcon
/**
 * Classic check mark for a JCheckBoxMenuItem: painted only when the
 * item is selected, using the current graphics color, shifted up by
 * half the icon height relative to the requested y.
 */
private static class CheckBoxMenuItemIcon implements Icon, UIResource, Serializable
{
    public void paintIcon(Component c, Graphics g, int x, int y) {
        AbstractButton button = (AbstractButton) c;
        ButtonModel model = button.getModel();
        if (!model.isSelected()) {
            return;
        }
        int top = y - getIconHeight() / 2;
        g.drawLine(x+9, top+3, x+9, top+3);
        g.drawLine(x+8, top+4, x+9, top+4);
        g.drawLine(x+7, top+5, x+9, top+5);
        g.drawLine(x+6, top+6, x+8, top+6);
        g.drawLine(x+3, top+7, x+7, top+7);
        g.drawLine(x+4, top+8, x+6, top+8);
        g.drawLine(x+5, top+9, x+5, top+9);
        g.drawLine(x+3, top+5, x+3, top+5);
        g.drawLine(x+3, top+6, x+4, top+6);
    }

    public int getIconWidth() {
        return 9;
    }

    public int getIconHeight() {
        return 9;
    }
} // End class CheckBoxMenuItemIcon
/**
 * Classic bullet for a JRadioButtonMenuItem: a filled rounded rectangle
 * painted in the current graphics color when the item is selected.
 *
 * Fixes: removed an unused local ({@code ButtonModel model}) and the
 * redundant {@code == true} comparison.
 */
private static class RadioButtonMenuItemIcon implements Icon, UIResource, Serializable
{
    public void paintIcon(Component c, Graphics g, int x, int y) {
        AbstractButton b = (AbstractButton) c;
        if (b.isSelected()) {
            g.fillRoundRect(x+3, y+3, getIconWidth()-6, getIconHeight()-6,
                            4, 4);
        }
    }

    public int getIconWidth() { return 12; }
    public int getIconHeight() { return 12; }
} // End class RadioButtonMenuItemIcon
/**
 * Placeholder check icon for plain menu items: paints nothing but
 * reserves a 9x9 area so item text aligns with checkable siblings.
 */
private static class MenuItemCheckIcon implements Icon, UIResource, Serializable {
    public void paintIcon(Component c, Graphics g, int x, int y) {
        // Intentionally empty — this icon only reserves layout space.
    }

    public int getIconWidth() {
        return 9;
    }

    public int getIconHeight() {
        return 9;
    }
} // End class MenuItemCheckIcon
/**
 * Placeholder submenu-arrow icon for menu items: paints nothing but
 * reserves a 4x8 area for layout purposes.
 */
private static class MenuItemArrowIcon implements Icon, UIResource, Serializable {
    public void paintIcon(Component c, Graphics g, int x, int y) {
        // Intentionally empty — this icon only reserves layout space.
    }

    public int getIconWidth() {
        return 4;
    }

    public int getIconHeight() {
        return 8;
    }
} // End class MenuItemArrowIcon
/**
 * Arrow shown on a menu that opens a submenu.  Under Vista theming the
 * MP_POPUPSUBMENU skin is painted (horizontally mirrored for RTL);
 * classic rendering draws a small solid triangle with drawLine calls.
 */
private static class MenuArrowIcon implements Icon, UIResource, Serializable {
    public void paintIcon(Component c, Graphics g, int x, int y) {
        if (WindowsMenuItemUI.isVistaPainting()) {
            // NOTE(review): assumes XPStyle.getXP() is non-null whenever
            // isVistaPainting() returns true — confirm that invariant.
            XPStyle xp = XPStyle.getXP();
            State state = State.NORMAL;
            if (c instanceof JMenuItem) {
                state = ((JMenuItem) c).getModel().isEnabled()
                    ? State.NORMAL : State.DISABLED;
            }
            Skin skin = xp.getSkin(c, Part.MP_POPUPSUBMENU);
            if (WindowsGraphicsUtils.isLeftToRight(c)) {
                skin.paintSkin(g, x, y, state);
            } else {
                // RTL: flip the skin horizontally on a scratch Graphics.
                Graphics2D g2d = (Graphics2D)g.create();
                g2d.translate(x + skin.getWidth(), y);
                g2d.scale(-1, 1);
                skin.paintSkin(g2d, 0, 0, state);
                g2d.dispose();
            }
        } else {
            // Classic: 4x8 triangle pointing right (LTR) or left (RTL).
            g.translate(x,y);
            if( WindowsGraphicsUtils.isLeftToRight(c) ) {
                g.drawLine( 0, 0, 0, 7 );
                g.drawLine( 1, 1, 1, 6 );
                g.drawLine( 2, 2, 2, 5 );
                g.drawLine( 3, 3, 3, 4 );
            } else {
                g.drawLine( 4, 0, 4, 7 );
                g.drawLine( 3, 1, 3, 6 );
                g.drawLine( 2, 2, 2, 5 );
                g.drawLine( 1, 3, 1, 4 );
            }
            g.translate(-x,-y);
        }
    }

    public int getIconWidth() {
        if (WindowsMenuItemUI.isVistaPainting()) {
            Skin skin = XPStyle.getXP().getSkin(null, Part.MP_POPUPSUBMENU);
            return skin.getWidth();
        } else {
            return 4;
        }
    }

    public int getIconHeight() {
        if (WindowsMenuItemUI.isVistaPainting()) {
            Skin skin = XPStyle.getXP().getSkin(null, Part.MP_POPUPSUBMENU);
            return skin.getHeight();
        } else {
            return 8;
        }
    }
} // End class MenuArrowIcon
static class VistaMenuItemCheckIconFactory
implements MenuItemCheckIconFactory {
private static final int OFFSET = 3;
/** Returns a check icon bound to the given menu item. */
public Icon getIcon(JMenuItem component) {
    return new VistaMenuItemCheckIcon(component);
}

/**
 * Returns true if {@code icon} was produced by this factory for the
 * same menu-item type that the UI-default {@code prefix} names.
 */
public boolean isCompatible(Object icon, String prefix) {
    return icon instanceof VistaMenuItemCheckIcon
        && ((VistaMenuItemCheckIcon) icon).type == getType(prefix);
}

/** Returns a check icon for a UI-default type name (no item bound). */
public Icon getIcon(String type) {
    return new VistaMenuItemCheckIcon(type);
}

/** Width of the themed check bitmap plus the horizontal offsets. */
static int getIconWidth() {
    return XPStyle.getXP().getSkin(null, Part.MP_POPUPCHECK).getWidth()
        + 2 * OFFSET;
}
/**
 * Returns the most specific menu-item class for the component
 * (JCheckBoxMenuItem, JRadioButtonMenuItem, JMenu or JMenuItem), or
 * null when the component is none of these.  Subtypes are tested
 * before JMenuItem so the most specific match wins.
 */
private static Class<? extends JMenuItem> getType(Component c) {
    if (c instanceof JCheckBoxMenuItem) {
        return JCheckBoxMenuItem.class;
    }
    if (c instanceof JRadioButtonMenuItem) {
        return JRadioButtonMenuItem.class;
    }
    if (c instanceof JMenu) {
        return JMenu.class;
    }
    if (c instanceof JMenuItem) {
        return JMenuItem.class;
    }
    return null;
}
/**
 * Maps a UI-default type name ("CheckBoxMenuItem", "RadioButtonMenuItem",
 * "Menu", "MenuItem") to the corresponding menu-item class; unknown
 * names fall back to JMenuItem.
 *
 * Fix: the original compared the strings with {@code ==}, which relies
 * on both sides being interned literals; {@code equals()} matches any
 * equal String (e.g. one built at runtime) and still matches literals.
 */
private static Class<? extends JMenuItem> getType(String type) {
    Class<? extends JMenuItem> rv;
    if ("CheckBoxMenuItem".equals(type)) {
        rv = JCheckBoxMenuItem.class;
    } else if ("RadioButtonMenuItem".equals(type)) {
        rv = JRadioButtonMenuItem.class;
    } else if ("Menu".equals(type)) {
        rv = JMenu.class;
    } else if ("MenuItem".equals(type)) {
        rv = JMenuItem.class;
    } else {
        // this should never happen
        rv = JMenuItem.class;
    }
    return rv;
}
/**
* CheckIcon for JMenuItem, JMenu, JCheckBoxMenuItem and
* JRadioButtonMenuItem.
* Note: to be used on Vista only.
*/
/**
 * CheckIcon for JMenuItem, JMenu, JCheckBoxMenuItem and
 * JRadioButtonMenuItem.
 * Note: to be used on Vista only.
 */
private static class VistaMenuItemCheckIcon
        implements Icon, UIResource, Serializable {

    // Bound menu item, or null when constructed from a type name only.
    private final JMenuItem menuItem;
    // Menu-item class this icon was created for; used for compatibility
    // checks against UI-table entries.
    private final Class<? extends JMenuItem> type;

    VistaMenuItemCheckIcon(JMenuItem menuItem) {
        this.type = getType(menuItem);
        this.menuItem = menuItem;
    }

    VistaMenuItemCheckIcon(String type) {
        this.type = getType(type);
        this.menuItem = null;
    }

    public int getIconHeight() {
        // A developer-installed icon from the UI table wins outright.
        Icon lafIcon = getLaFIcon();
        if (lafIcon != null) {
            return lafIcon.getIconHeight();
        }
        Icon icon = getIcon();
        int height = 0;
        if (icon != null) {
            height = icon.getIconHeight() + 2 * OFFSET;
        } else {
            Skin skin =
                XPStyle.getXP().getSkin(null, Part.MP_POPUPCHECK);
            height = skin.getHeight() + 2 * OFFSET;
        }
        return height;
    }

    public int getIconWidth() {
        // A developer-installed icon from the UI table wins outright.
        Icon lafIcon = getLaFIcon();
        if (lafIcon != null) {
            return lafIcon.getIconWidth();
        }
        Icon icon = getIcon();
        int width = 0;
        if (icon != null) {
            width = icon.getIconWidth() + 2 * OFFSET;
        } else {
            width = VistaMenuItemCheckIconFactory.getIconWidth();
        }
        return width;
    }

    public void paintIcon(Component c, Graphics g, int x, int y) {
        Icon lafIcon = getLaFIcon();
        if (lafIcon != null) {
            lafIcon.paintIcon(c, g, x, y);
            return;
        }
        assert menuItem == null || c == menuItem;
        Icon icon = getIcon();
        // For toggle menu items paint the themed check/bullet (plus its
        // background) only while the item is selected.
        if (type == JCheckBoxMenuItem.class
                || type == JRadioButtonMenuItem.class) {
            AbstractButton b = (AbstractButton) c;
            if (b.isSelected()) {
                Part backgroundPart = Part.MP_POPUPCHECKBACKGROUND;
                Part part = Part.MP_POPUPCHECK;
                State backgroundState;
                State state;
                if (isEnabled(c, null)) {
                    // BITMAP background when a custom item icon replaces
                    // the standard check mark.
                    backgroundState =
                        (icon != null) ? State.BITMAP : State.NORMAL;
                    state = (type == JRadioButtonMenuItem.class)
                        ? State.BULLETNORMAL
                        : State.CHECKMARKNORMAL;
                } else {
                    backgroundState = State.DISABLEDPUSHED;
                    state =
                        (type == JRadioButtonMenuItem.class)
                            ? State.BULLETDISABLED
                            : State.CHECKMARKDISABLED;
                }
                Skin skin;
                XPStyle xp = XPStyle.getXP();
                skin = xp.getSkin(c, backgroundPart);
                skin.paintSkin(g, x, y,
                    getIconWidth(), getIconHeight(), backgroundState);
                if (icon == null) {
                    skin = xp.getSkin(c, part);
                    skin.paintSkin(g, x + OFFSET, y + OFFSET, state);
                }
            }
        }
        if (icon != null) {
            icon.paintIcon(c, g, x + OFFSET, y + OFFSET);
        }
    }

    /** Returns the Windows accessor behind the item's UI, or null. */
    private static WindowsMenuItemUIAccessor getAccessor(
            JMenuItem menuItem) {
        WindowsMenuItemUIAccessor rv = null;
        ButtonUI uiObject = (menuItem != null) ? menuItem.getUI()
                                               : null;
        if (uiObject instanceof WindowsMenuItemUI) {
            rv = ((WindowsMenuItemUI) uiObject).accessor;
        } else if (uiObject instanceof WindowsMenuUI) {
            rv = ((WindowsMenuUI) uiObject).accessor;
        } else if (uiObject instanceof WindowsCheckBoxMenuItemUI) {
            rv = ((WindowsCheckBoxMenuItemUI) uiObject).accessor;
        } else if (uiObject instanceof WindowsRadioButtonMenuItemUI) {
            rv = ((WindowsRadioButtonMenuItemUI) uiObject).accessor;
        }
        return rv;
    }

    /**
     * Enabled check that prefers the UI accessor's State when one is
     * available; with no state it falls back to Component.isEnabled()
     * and treats a null component as enabled.
     */
    private static boolean isEnabled(Component c, State state) {
        if (state == null && c instanceof JMenuItem) {
            WindowsMenuItemUIAccessor accessor =
                getAccessor((JMenuItem) c);
            if (accessor != null) {
                state = accessor.getState((JMenuItem) c);
            }
        }
        if (state == null) {
            if (c != null) {
                return c.isEnabled();
            } else {
                return true;
            }
        } else {
            return (state != State.DISABLED)
                && (state != State.DISABLEDHOT)
                && (state != State.DISABLEDPUSHED);
        }
    }

    /**
     * Returns the icon the bound menu item wants shown for its current
     * state (pressed / normal / disabled), or null when this icon was
     * created from a type name only.
     */
    private Icon getIcon() {
        Icon rv = null;
        if (menuItem == null) {
            return rv;
        }
        WindowsMenuItemUIAccessor accessor =
            getAccessor(menuItem);
        State state = (accessor != null) ? accessor.getState(menuItem)
                                         : null;
        if (isEnabled(menuItem, null)) {
            if (state == State.PUSHED) {
                rv = menuItem.getPressedIcon();
            } else {
                rv = menuItem.getIcon();
            }
        } else {
            rv = menuItem.getDisabledIcon();
        }
        return rv;
    }

    /**
     * Check if developer changed icon in the UI table.
     *
     * @return the icon to use or {@code null} if the current one is to
     * be used
     */
    private Icon getLaFIcon() {
        // use icon from the UI table if it does not match this one.
        Icon rv = (Icon) UIManager.getDefaults().get(typeToString(type));
        if (rv instanceof VistaMenuItemCheckIcon
                && ((VistaMenuItemCheckIcon) rv).type == type) {
            rv = null;
        }
        return rv;
    }

    /** Maps e.g. JCheckBoxMenuItem.class to "CheckBoxMenuItem.checkIcon". */
    private static String typeToString(
            Class<? extends JMenuItem> type) {
        assert type == JMenuItem.class
            || type == JMenu.class
            || type == JCheckBoxMenuItem.class
            || type == JRadioButtonMenuItem.class;
        StringBuilder sb = new StringBuilder(type.getName());
        // remove package name, dot and the first character
        sb.delete(0, sb.lastIndexOf("J") + 1);
        sb.append(".checkIcon");
        return sb.toString();
    }
}
} // End class VistaMenuItemCheckIconFactory
}
| |
// ************************************************************************
// (c) 2016 GOBii Project
// Initial Version: Phil Glaser
// Create Date: 2016-03-25
// ************************************************************************
package org.gobiiproject.gobiiclient.gobii.dbops.crud;
import org.apache.commons.lang.StringUtils;
import org.gobiiproject.gobiiapimodel.hateos.Link;
import org.gobiiproject.gobiiapimodel.hateos.LinkCollection;
import org.gobiiproject.gobiiapimodel.payload.PayloadEnvelope;
import org.gobiiproject.gobiiapimodel.restresources.common.RestUri;
import org.gobiiproject.gobiiapimodel.types.GobiiServiceRequestId;
import org.gobiiproject.gobiiclient.core.gobii.GobiiClientContext;
import org.gobiiproject.gobiiclient.core.gobii.GobiiClientContextAuth;
import org.gobiiproject.gobiiclient.core.gobii.GobiiEnvelopeRestResource;
import org.gobiiproject.gobiiclient.gobii.Helpers.DtoRestRequestUtils;
import org.gobiiproject.gobiiclient.gobii.Helpers.EntityParamValues;
import org.gobiiproject.gobiiclient.gobii.Helpers.GlobalPkColl;
import org.gobiiproject.gobiiclient.gobii.Helpers.GlobalPkValues;
import org.gobiiproject.gobiiclient.gobii.Helpers.TestDtoFactory;
import org.gobiiproject.gobiiclient.gobii.Helpers.TestUtils;
import org.gobiiproject.gobiimodel.headerlesscontainer.AnalysisDTO;
import org.gobiiproject.gobiimodel.headerlesscontainer.DataSetDTO;
import org.gobiiproject.gobiimodel.headerlesscontainer.NameIdDTO;
import org.gobiiproject.gobiimodel.types.GobiiEntityNameType;
import org.gobiiproject.gobiimodel.types.GobiiFilterType;
import org.gobiiproject.gobiimodel.types.GobiiProcessType;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
public class DtoCrudRequestDataSetTest implements DtoCrudRequestTest {
/**
 * Authenticates against the GOBii test context once for the whole
 * fixture; every test in the class assumes a logged-in client.
 */
@BeforeClass
public static void setUpClass() throws Exception {
    Assert.assertTrue(GobiiClientContextAuth.authenticate());
}
/**
 * Logs the client out after the last test in the class.
 * NOTE(review): the name looks like a typo ("tearDownUpClass"), but it
 * is invoked via the @AfterClass annotation, so renaming is cosmetic.
 */
@AfterClass
public static void tearDownUpClass() throws Exception {
    Assert.assertTrue(GobiiClientContextAuth.deAuthenticate());
}
/**
 * GETs an existing dataset by id and verifies the essential fields of
 * the returned DTO (data file, calling analysis id, analysis ids).
 *
 * Fix: the null-element check used {@code filter(a -> a.equals(null))},
 * which is always false for non-null elements and throws NPE for null
 * ones, so it could never detect a null id.  Compare with {@code ==}.
 */
@Test
@Override
public void get() throws Exception {

    // Pick a dataset PK that is known to exist from the shared pool.
    Integer dataSetId = (new GlobalPkColl<DtoCrudRequestDataSetTest>()
            .getAPkVal(DtoCrudRequestDataSetTest.class, GobiiEntityNameType.DATASETS));

    RestUri projectsUri = GobiiClientContext.getInstance(null, false)
            .getUriFactory()
            .resourceByUriIdParam(GobiiServiceRequestId.URL_DATASETS);
    projectsUri.setParamValue("id", dataSetId.toString());

    GobiiEnvelopeRestResource<DataSetDTO> gobiiEnvelopeRestResourceForProjects =
            new GobiiEnvelopeRestResource<>(projectsUri);
    PayloadEnvelope<DataSetDTO> resultEnvelope = gobiiEnvelopeRestResourceForProjects
            .get(DataSetDTO.class);

    Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
    DataSetDTO dataSetDTOResponse = resultEnvelope.getPayload().getData().get(0);

    Assert.assertNotEquals(null, dataSetDTOResponse);
    Assert.assertNotEquals(null, dataSetDTOResponse.getDataFile());
    Assert.assertNotNull(dataSetDTOResponse.getCallingAnalysisId());
    Assert.assertTrue(dataSetDTOResponse.getCallingAnalysisId() > 0);

    // No analysis id in the list may be null.
    Assert.assertTrue(dataSetDTOResponse
            .getAnalysesIds()
            .stream()
            .filter(a -> a == null)
            .toArray().length == 0);
} //
/**
 * Requesting an id one past the current maximum primary key must
 * succeed (no header errors) and return an empty payload rather than
 * failing.
 */
@Test
public void testEmptyResult() throws Exception {
    DtoRestRequestUtils<DataSetDTO> requestUtils =
            new DtoRestRequestUtils<>(DataSetDTO.class, GobiiServiceRequestId.URL_DATASETS);

    Integer maxId = requestUtils.getMaxPkVal();
    Integer nonExistentId = maxId + 1;

    PayloadEnvelope<DataSetDTO> resultEnvelope =
            requestUtils.getResponseEnvelopeForEntityId(nonExistentId.toString());

    Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
    Assert.assertNotNull(resultEnvelope.getPayload());
    Assert.assertNotNull(resultEnvelope.getPayload().getData());
    Assert.assertTrue(resultEnvelope.getPayload().getData().size() == 0);
}
/**
 * Creates a full dataset: looks up analysis-type CV terms, creates one
 * calling analysis plus three member analyses, POSTs a dataset that
 * references them, then re-GETs it and verifies the round trip.
 *
 * Fix: the final null-element check used
 * {@code filter(a -> a.equals(null))}, which is always false for
 * non-null elements and throws NPE for null ones, so it could never
 * detect a null analysis id.  Compare with {@code ==} instead.
 */
@Test
@Override
public void create() throws Exception {

    // Look up the CV terms usable as analysis types.
    RestUri namesUri = GobiiClientContext.getInstance(null, false).getUriFactory().nameIdListByQueryParams();
    namesUri.setParamValue("entity", GobiiEntityNameType.CVTERMS.toString().toLowerCase());
    namesUri.setParamValue("filterType", StringUtils.capitalize(GobiiFilterType.BYTYPENAME.toString()));
    namesUri.setParamValue("filterValue", "analysis_type");

    GobiiEnvelopeRestResource<NameIdDTO> gobiiEnvelopeRestResourceForAnalysisTerms = new GobiiEnvelopeRestResource<>(namesUri);
    PayloadEnvelope<NameIdDTO> resultEnvelopeAnalysis = gobiiEnvelopeRestResourceForAnalysisTerms
            .get(NameIdDTO.class);
    Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeAnalysis.getHeader()));

    List<NameIdDTO> analysisTypes = resultEnvelopeAnalysis.getPayload().getData();
    List<NameIdDTO> analysisProperTerms = new ArrayList<>(analysisTypes);
    EntityParamValues entityParamValues = TestDtoFactory
            .makeConstrainedEntityParams(analysisProperTerms, 1);

    // ******** make analyses we'll need for the new data set
    AnalysisDTO analysisDTORequest = TestDtoFactory
            .makePopulatedAnalysisDTO(GobiiProcessType.CREATE, 1, entityParamValues);

    PayloadEnvelope<AnalysisDTO> payloadEnvelopeAnalysis = new PayloadEnvelope<>(analysisDTORequest, GobiiProcessType.CREATE);
    GobiiEnvelopeRestResource<AnalysisDTO> gobiiEnvelopeRestResource = new GobiiEnvelopeRestResource<>(GobiiClientContext.getInstance(null, false)
            .getUriFactory()
            .resourceColl(GobiiServiceRequestId.URL_ANALYSIS));
    PayloadEnvelope<AnalysisDTO> analysisDTOResponseEnvelope = gobiiEnvelopeRestResource.post(AnalysisDTO.class,
            payloadEnvelopeAnalysis);
    AnalysisDTO callingAnalysisDTO = analysisDTOResponseEnvelope.getPayload().getData().get(0);
    Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(analysisDTOResponseEnvelope.getHeader()));

    // Three more analyses to attach to the dataset.
    List<AnalysisDTO> analyses = new ArrayList<>();
    analyses.add(TestDtoFactory
            .makePopulatedAnalysisDTO(GobiiProcessType.CREATE,
                    2,
                    entityParamValues));
    analyses.add(TestDtoFactory
            .makePopulatedAnalysisDTO(GobiiProcessType.CREATE,
                    3,
                    entityParamValues));
    analyses.add(TestDtoFactory
            .makePopulatedAnalysisDTO(GobiiProcessType.CREATE,
                    4,
                    entityParamValues));

    List<Integer> analysisIds = new ArrayList<>();
    for (AnalysisDTO currentAnalysis : analyses) {
        payloadEnvelopeAnalysis = new PayloadEnvelope<>(currentAnalysis, GobiiProcessType.CREATE);
        gobiiEnvelopeRestResource = new GobiiEnvelopeRestResource<>(GobiiClientContext.getInstance(null, false)
                .getUriFactory()
                .resourceColl(GobiiServiceRequestId.URL_ANALYSIS));
        analysisDTOResponseEnvelope = gobiiEnvelopeRestResource.post(AnalysisDTO.class,
                payloadEnvelopeAnalysis);
        AnalysisDTO createdAnalysis = analysisDTOResponseEnvelope.getPayload().getData().get(0);
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(analysisDTOResponseEnvelope.getHeader()));
        analysisIds.add(createdAnalysis.getAnalysisId());
    }

    // ********** make raw data set dto and add analyses
    DataSetDTO dataSetDTORequest = TestDtoFactory
            .makePopulatedDataSetDTO(1,
                    callingAnalysisDTO.getAnalysisId(),
                    analysisIds);

    RestUri projectsCollUri = GobiiClientContext.getInstance(null, false)
            .getUriFactory()
            .resourceColl(GobiiServiceRequestId.URL_DATASETS);
    GobiiEnvelopeRestResource<DataSetDTO> gobiiEnvelopeRestResourceForDataSetPost = new GobiiEnvelopeRestResource<>(projectsCollUri);
    PayloadEnvelope<DataSetDTO> resultEnvelope = gobiiEnvelopeRestResourceForDataSetPost
            .post(DataSetDTO.class, new PayloadEnvelope<>(dataSetDTORequest, GobiiProcessType.CREATE));

    Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
    DataSetDTO dataSetDTOResponse = resultEnvelope.getPayload().getData().get(0);

    Assert.assertNotEquals(null, dataSetDTOResponse);
    Assert.assertTrue(dataSetDTOResponse.getDataSetId() > 0);
    Assert.assertTrue(dataSetDTOResponse.getCallingAnalysisId() > 0);
    Assert.assertNotNull(dataSetDTOResponse.getAnalysesIds());
    Assert.assertTrue(dataSetDTOResponse.getAnalysesIds().size() > 0);
    Assert.assertTrue(dataSetDTOResponse.getTypeId() > 0);

    // Record the new PK so other tests can reuse this dataset.
    GlobalPkValues.getInstance().addPkVal(GobiiEntityNameType.DATASETS,dataSetDTOResponse.getDataSetId());

    // Re-fetch the dataset by id and verify the round trip.
    RestUri projectsByIdUri = GobiiClientContext.getInstance(null, false)
            .getUriFactory()
            .resourceByUriIdParam(GobiiServiceRequestId.URL_DATASETS);
    GobiiEnvelopeRestResource<DataSetDTO> gobiiEnvelopeRestResourceForDataSetGet = new GobiiEnvelopeRestResource<>(projectsByIdUri);
    gobiiEnvelopeRestResourceForDataSetGet.setParamValue("id", dataSetDTOResponse.getDataSetId().toString());
    resultEnvelope = gobiiEnvelopeRestResourceForDataSetGet
            .get(DataSetDTO.class);
    Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
    DataSetDTO dataSetDTOReResponse = resultEnvelope.getPayload().getData().get(0);

    Assert.assertNotEquals(null, dataSetDTOReResponse);
    Assert.assertTrue(dataSetDTOReResponse.getDataSetId() > 0);
    Assert.assertTrue(dataSetDTOReResponse.getCallingAnalysisId() > 0);
    Assert.assertNotNull(dataSetDTOReResponse.getAnalysesIds());
    Assert.assertTrue(dataSetDTOReResponse.getAnalysesIds().size() > 0);

    // No analysis id in the list may be null.
    Assert.assertTrue(0 == dataSetDTOReResponse
            .getAnalysesIds()
            .stream()
            .filter(a -> a == null)
            .count());
}
@Test
@Override
public void update() throws Exception {
// ******** make analyses we'll need for the new data set
RestUri namesUri = GobiiClientContext.getInstance(null, false).getUriFactory().nameIdListByQueryParams();
namesUri.setParamValue("entity", GobiiEntityNameType.CVTERMS.toString().toLowerCase());
namesUri.setParamValue("filterType", StringUtils.capitalize(GobiiFilterType.BYTYPENAME.toString()));
namesUri.setParamValue("filterValue", "analysis_type");
GobiiEnvelopeRestResource<NameIdDTO> gobiiEnvelopeRestResourceForAnalysisTerms = new GobiiEnvelopeRestResource<>(namesUri);
PayloadEnvelope<NameIdDTO> resultEnvelopeAnalysis = gobiiEnvelopeRestResourceForAnalysisTerms
.get(NameIdDTO.class);
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeAnalysis.getHeader()));
List<NameIdDTO> analysisTypes = resultEnvelopeAnalysis.getPayload().getData();
List<NameIdDTO> analysisProperTerms = new ArrayList<>(analysisTypes);
EntityParamValues entityParamValues = TestDtoFactory
.makeConstrainedEntityParams(analysisProperTerms, 1);
AnalysisDTO analysisDTORequest = TestDtoFactory
.makePopulatedAnalysisDTO(GobiiProcessType.CREATE, 1, entityParamValues);
PayloadEnvelope<AnalysisDTO> payloadEnvelopeAnalysis = new PayloadEnvelope<>(analysisDTORequest, GobiiProcessType.CREATE);
GobiiEnvelopeRestResource<AnalysisDTO> gobiiEnvelopeRestResource = new GobiiEnvelopeRestResource<>(GobiiClientContext.getInstance(null, false)
.getUriFactory()
.resourceColl(GobiiServiceRequestId.URL_ANALYSIS));
PayloadEnvelope<AnalysisDTO> analysisDTOResponseEnvelope = gobiiEnvelopeRestResource.post(AnalysisDTO.class,
payloadEnvelopeAnalysis);
AnalysisDTO newCallingAnalysisDTO = analysisDTOResponseEnvelope.getPayload().getData().get(0);
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(analysisDTOResponseEnvelope.getHeader()));
List<AnalysisDTO> analysesToCreate = new ArrayList<>();
List<AnalysisDTO> analysesNew = new ArrayList<>();
analysesToCreate.add(TestDtoFactory
.makePopulatedAnalysisDTO(GobiiProcessType.CREATE,
2,
entityParamValues));
analysesToCreate.add(TestDtoFactory
.makePopulatedAnalysisDTO(GobiiProcessType.CREATE,
3,
entityParamValues));
analysesToCreate.add(TestDtoFactory
.makePopulatedAnalysisDTO(GobiiProcessType.CREATE,
4,
entityParamValues));
List<Integer> analysisIds = new ArrayList<>();
for (AnalysisDTO currentAnalysis : analysesToCreate) {
payloadEnvelopeAnalysis = new PayloadEnvelope<>(currentAnalysis, GobiiProcessType.CREATE);
gobiiEnvelopeRestResource = new GobiiEnvelopeRestResource<>(GobiiClientContext.getInstance(null, false)
.getUriFactory()
.resourceColl(GobiiServiceRequestId.URL_ANALYSIS));
analysisDTOResponseEnvelope = gobiiEnvelopeRestResource.post(AnalysisDTO.class,
payloadEnvelopeAnalysis);
AnalysisDTO newAnalysis = analysisDTOResponseEnvelope.getPayload().getData().get(0);
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(analysisDTOResponseEnvelope.getHeader()));
analysesNew.add(newAnalysis);
analysisIds.add(newAnalysis.getAnalysisId());
}
//DtoRequestDataSet dtoRequestDataSet = new DtoRequestDataSet();
// create a new aataSet for our test
DataSetDTO newDataSetDto = TestDtoFactory
.makePopulatedDataSetDTO(1,
newCallingAnalysisDTO.getAnalysisId(),
analysisIds);
//DataSetDTO newDataSetDTOResponse = dtoRequestDataSet.process(newDataSetDto);
RestUri projectsCollUri = GobiiClientContext.getInstance(null, false)
.getUriFactory()
.resourceColl(GobiiServiceRequestId.URL_DATASETS);
GobiiEnvelopeRestResource<DataSetDTO> gobiiEnvelopeRestResourceForDataSetPost = new GobiiEnvelopeRestResource<>(projectsCollUri);
PayloadEnvelope<DataSetDTO> resultEnvelope = gobiiEnvelopeRestResourceForDataSetPost
.post(DataSetDTO.class, new PayloadEnvelope<>(newDataSetDto, GobiiProcessType.CREATE));
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
DataSetDTO newDataSetDTOResponse = resultEnvelope.getPayload().getData().get(0);
// re-retrieve the aataSet we just created so we start with a fresh READ mode dto
// DataSetDTO dataSetDTORequest = new DataSetDTO();
// dataSetDTORequest.setDataSetId(newDataSetDTOResponse.getDataSetId());
// DataSetDTO dataSetDTOReceived = dtoRequestDataSet.process(dataSetDTORequest);
RestUri projectsByIdUri = GobiiClientContext.getInstance(null, false)
.getUriFactory()
.resourceByUriIdParam(GobiiServiceRequestId.URL_DATASETS);
GobiiEnvelopeRestResource<DataSetDTO> gobiiEnvelopeRestResourceForDataSetById = new GobiiEnvelopeRestResource<>(projectsByIdUri);
gobiiEnvelopeRestResourceForDataSetById.setParamValue("id", newDataSetDTOResponse.getDataSetId().toString());
resultEnvelope = gobiiEnvelopeRestResourceForDataSetById
.get(DataSetDTO.class);
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
DataSetDTO dataSetDTOReceived = resultEnvelope.getPayload().getData().get(0);
// so this would be the typical workflow for the client app
String newDataFile = UUID.randomUUID().toString();
dataSetDTOReceived.setDataFile(newDataFile);
Integer anlysisIdRemovedFromList = dataSetDTOReceived.getAnalysesIds().remove(0);
Integer newCallingAnalysisId = anlysisIdRemovedFromList;
dataSetDTOReceived.setCallingAnalysisId(newCallingAnalysisId);
//DataSetDTO dataSetDTOResponse = dtoRequestDataSet.process(dataSetDTOReceived);
resultEnvelope = gobiiEnvelopeRestResourceForDataSetById
.put(DataSetDTO.class, new PayloadEnvelope<>(dataSetDTOReceived, GobiiProcessType.UPDATE));
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
DataSetDTO dataSetDTOResponse = resultEnvelope.getPayload().getData().get(0);
gobiiEnvelopeRestResourceForDataSetById.setParamValue("id", dataSetDTOResponse.getDataSetId().toString());
resultEnvelope = gobiiEnvelopeRestResourceForDataSetById
.get(DataSetDTO.class);
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
DataSetDTO dtoRequestDataSetReRetrieved = resultEnvelope.getPayload().getData().get(0);
// dataSetDTORequest.setGobiiProcessType(GobiiProcessType.READ);
// dataSetDTORequest.setDataSetId(dataSetDTOResponse.getDataSetId());
// DataSetDTO dtoRequestDataSetReRetrieved =
// dtoRequestDataSet.process(dataSetDTORequest);
Assert.assertTrue(dtoRequestDataSetReRetrieved.getDataSetId().equals(dataSetDTOReceived.getDataSetId()));
Assert.assertTrue(dtoRequestDataSetReRetrieved.getDataFile().equals(newDataFile));
Assert.assertTrue(dtoRequestDataSetReRetrieved.getCallingAnalysisId().equals(newCallingAnalysisId));
Assert.assertTrue(dtoRequestDataSetReRetrieved
.getAnalysesIds()
.stream()
.filter(a -> a.equals(anlysisIdRemovedFromList))
.toArray().length == 0);
}
@Test
@Override
public void getList() throws Exception {
RestUri restUriDataSet = GobiiClientContext.getInstance(null, false)
.getUriFactory().resourceColl(GobiiServiceRequestId.URL_DATASETS);
GobiiEnvelopeRestResource<DataSetDTO> gobiiEnvelopeRestResource = new GobiiEnvelopeRestResource<>(restUriDataSet);
PayloadEnvelope<DataSetDTO> resultEnvelope = gobiiEnvelopeRestResource
.get(DataSetDTO.class);
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
List<DataSetDTO> dataSetDTOList = resultEnvelope.getPayload().getData();
Assert.assertNotNull(dataSetDTOList);
Assert.assertTrue(dataSetDTOList.size() > 0);
Assert.assertNotNull(dataSetDTOList.get(0).getName());
LinkCollection linkCollection = resultEnvelope.getPayload().getLinkCollection();
Assert.assertTrue(linkCollection.getLinksPerDataItem().size() == dataSetDTOList.size());
List<Integer> itemsToTest = new ArrayList<>();
if (dataSetDTOList.size() > 50) {
itemsToTest = TestUtils.makeListOfIntegersInRange(10, dataSetDTOList.size());
} else {
for (int idx = 0; idx < dataSetDTOList.size(); idx++) {
itemsToTest.add(idx);
}
}
for (Integer currentIdx : itemsToTest) {
DataSetDTO currentDataSetDto = dataSetDTOList.get(currentIdx);
Link currentLink = linkCollection.getLinksPerDataItem().get(currentIdx);
RestUri restUriDataSetForGetById = GobiiClientContext.getInstance(null, false)
.getUriFactory()
.RestUriFromUri(currentLink.getHref());
GobiiEnvelopeRestResource<DataSetDTO> gobiiEnvelopeRestResourceForGetById = new GobiiEnvelopeRestResource<>(restUriDataSetForGetById);
PayloadEnvelope<DataSetDTO> resultEnvelopeForGetByID = gobiiEnvelopeRestResourceForGetById
.get(DataSetDTO.class);
Assert.assertNotNull(resultEnvelopeForGetByID);
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeForGetByID.getHeader()));
DataSetDTO dataSetDTOFromLink = resultEnvelopeForGetByID.getPayload().getData().get(0);
Assert.assertTrue(currentDataSetDto.getName().equals(dataSetDTOFromLink.getName()));
Assert.assertTrue(currentDataSetDto.getDataSetId().equals(dataSetDTOFromLink.getDataSetId()));
Assert.assertNotNull(dataSetDTOFromLink.getAnalysesIds());
for (Integer currentAnalysisId : dataSetDTOFromLink.getAnalysesIds()) {
RestUri restUriAnalysisForGetById = GobiiClientContext.getInstance(null, false)
.getUriFactory()
.resourceByUriIdParam(GobiiServiceRequestId.URL_ANALYSIS);
restUriAnalysisForGetById.setParamValue("id", currentAnalysisId.toString());
GobiiEnvelopeRestResource<AnalysisDTO> gobiiEnvelopeRestResourceAnalysisForGetById = new GobiiEnvelopeRestResource<>(restUriAnalysisForGetById);
PayloadEnvelope<AnalysisDTO> resultEnvelopeForGetById = gobiiEnvelopeRestResourceAnalysisForGetById
.get(AnalysisDTO.class);
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
AnalysisDTO analysisDTO = resultEnvelopeForGetById.getPayload().getData().get(0);
Assert.assertTrue(analysisDTO.getAnalysisId() > 0);
Assert.assertNotNull(analysisDTO.getAnalysisName());
}
}
}
@Test
public void getDataSetsByTypeId() throws Exception {
Integer dataSetid = (new GlobalPkColl<DtoCrudRequestDataSetTest>().getAPkVal(DtoCrudRequestDataSetTest.class, GobiiEntityNameType.DATASETS));
RestUri restUriForDataSets = GobiiClientContext.getInstance(null, false)
.getUriFactory()
.resourceByUriIdParam(GobiiServiceRequestId.URL_DATASETS);
restUriForDataSets.setParamValue("id", dataSetid.toString());
GobiiEnvelopeRestResource<DataSetDTO> gobiiEnvelopeRestResourceForDataSet = new GobiiEnvelopeRestResource<>(restUriForDataSets);
PayloadEnvelope<DataSetDTO> resultEnvelopeDataSet = gobiiEnvelopeRestResourceForDataSet
.get(DataSetDTO.class);
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeDataSet.getHeader()));
DataSetDTO dataSetDTOResponse = resultEnvelopeDataSet.getPayload().getData().get(0);
Integer typeId = dataSetDTOResponse.getTypeId();
RestUri restUriForDataTypes = GobiiClientContext.getInstance(null, false)
.getUriFactory()
.resourceColl(GobiiServiceRequestId.URL_DATASETTYPES)
.addUriParam("id")
.setParamValue("id", typeId.toString());
GobiiEnvelopeRestResource<DataSetDTO> gobiiEnvelopeRestResourceForDataTypes = new GobiiEnvelopeRestResource<>(restUriForDataTypes);
PayloadEnvelope<DataSetDTO> resultEnvelopeDataTypes = gobiiEnvelopeRestResourceForDataTypes
.get(DataSetDTO.class);
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeDataTypes.getHeader()));
List<DataSetDTO> dataSetDTOList = resultEnvelopeDataTypes.getPayload().getData();
Assert.assertNotNull(dataSetDTOList);
Assert.assertTrue(dataSetDTOList.size() >= 0);
if(dataSetDTOList.size() > 0) {
Assert.assertNotNull(dataSetDTOList.get(0).getName());
}
LinkCollection linkCollection = resultEnvelopeDataTypes.getPayload().getLinkCollection();
Assert.assertTrue(linkCollection.getLinksPerDataItem().size() == dataSetDTOList.size());
List<Integer> itemsToTest = new ArrayList<>();
if (dataSetDTOList.size() > 50) {
itemsToTest = TestUtils.makeListOfIntegersInRange(10, dataSetDTOList.size());
} else {
for (int idx = 0; idx < dataSetDTOList.size(); idx++) {
itemsToTest.add(idx);
}
}
for (Integer currentIdx : itemsToTest) {
DataSetDTO currentDatasetDto = dataSetDTOList.get(currentIdx);
Link currentLink = linkCollection.getLinksPerDataItem().get(currentIdx);
RestUri restUriDForGetById = GobiiClientContext.getInstance(null, false)
.getUriFactory()
.RestUriFromUri(currentLink.getHref());
GobiiEnvelopeRestResource<DataSetDTO> gobiiEnvelopeRestResourceForGetById = new GobiiEnvelopeRestResource<>(restUriDForGetById);
PayloadEnvelope<DataSetDTO> resultEnvelopeForGetByID = gobiiEnvelopeRestResourceForGetById
.get(DataSetDTO.class);
Assert.assertNotNull(resultEnvelopeForGetByID);
Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeForGetByID.getHeader()));
DataSetDTO dataDTOFromLink = resultEnvelopeForGetByID.getPayload().getData().get(0);
Assert.assertTrue(currentDatasetDto.getName().equals(dataDTOFromLink.getName()));
Assert.assertTrue(currentDatasetDto.getDataSetId().equals(dataDTOFromLink.getDataSetId()));
}
}
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 NBCO Yandex.Money LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.yandex.money.api.net.clients;
import com.yandex.money.api.authorization.AuthorizationData;
import com.yandex.money.api.authorization.AuthorizationParameters;
import com.yandex.money.api.net.ApiRequest;
import com.yandex.money.api.net.DefaultUserAgent;
import com.yandex.money.api.net.UserAgent;
import com.yandex.money.api.net.providers.DefaultApiV1HostsProvider;
import com.yandex.money.api.net.providers.HostsProvider;
import com.yandex.money.api.util.HttpHeaders;
import com.yandex.money.api.util.Language;
import com.yandex.money.api.util.Strings;
import okhttp3.CacheControl;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import java.util.Map;
import static com.yandex.money.api.util.Common.checkNotNull;
/**
* Default implementation of {@link ApiClient} interface. This implementation is suitable in most cases. To create an
* instance of this class use {@link DefaultApiClient.Builder}.
*
* @author Slava Yasevich (vyasevich@yamoney.ru)
*/
public class DefaultApiClient implements ApiClient {

    // Attached to every outgoing request so responses bypass any HTTP cache.
    private final CacheControl cacheControl = new CacheControl.Builder().noCache().build();

    private final String clientId;
    private final HostsProvider hostsProvider;
    private final UserAgent userAgent;
    private final Language language;
    private final boolean debugMode;
    private final OkHttpClient httpClient;

    // OAuth2 access token; null or empty means "not authorized".
    private String accessToken;

    /**
     * Constructor.
     *
     * @param builder provides required data to create an object
     */
    protected DefaultApiClient(Builder builder) {
        clientId = checkNotNull(builder.clientId, "clientId");
        hostsProvider = checkNotNull(builder.hostsProvider, "hostsProvider");
        userAgent = checkNotNull(builder.userAgent, "userAgent");
        language = checkNotNull(builder.language, "language");
        debugMode = builder.debugMode;
        if (builder.httpClient == null) {
            // NOTE(review): this writes back into the caller's builder, so a reused
            // builder will share the lazily created client — confirm that is intended.
            builder.httpClient = HttpClientFactory.newOkHttpClient(debugMode);
        }
        httpClient = builder.httpClient;
    }

    @Override
    public String getClientId() {
        return clientId;
    }

    @Override
    public Language getLanguage() {
        return language;
    }

    @Override
    public HostsProvider getHostsProvider() {
        return hostsProvider;
    }

    @Override
    public UserAgent getUserAgent() {
        return userAgent;
    }

    /**
     * Executes {@code request} synchronously and returns its parsed result.
     *
     * <p>NOTE(review): the OkHttp {@link Response} is handed to
     * {@code OkHttpClientResponse}; closing of the response body is presumably
     * handled there or by {@code request.parse} — verify.
     *
     * @param request the API request to execute
     * @return the parsed response value
     * @throws Exception if the HTTP call or the response parsing fails
     */
    @Override
    public <T> T execute(ApiRequest<T> request) throws Exception {
        Response response = httpClient.newCall(prepareRequest(request)).execute();
        return request.parse(new OkHttpClientResponse(response, debugMode));
    }

    /**
     * Builds OAuth2 authorization data: adds this client's id to the supplied
     * parameters and targets the money host's {@code /oauth/authorize} endpoint.
     */
    @Override
    public AuthorizationData createAuthorizationData(AuthorizationParameters parameters) {
        parameters.add("client_id", getClientId());
        return new AuthorizationDataImpl(getHostsProvider().getMoney(), parameters.build());
    }

    @Override
    public final void setAccessToken(String accessToken) {
        this.accessToken = accessToken;
    }

    @Override
    public final boolean isAuthorized() {
        // Authorized as soon as a non-empty access token has been set.
        return !Strings.isNullOrEmpty(accessToken);
    }

    /**
     * @return {@code true} if debug mode is enabled
     */
    protected final boolean isDebugMode() {
        return debugMode;
    }

    /**
     * Translates an {@link ApiRequest} into an OkHttp {@link Request}: no-cache
     * directive, URL, User-Agent and Accept-Language headers, a Bearer token when
     * authorized, per-request headers (entries with a null value are skipped) and,
     * for POST/PUT, a request body built from the request's content type and bytes.
     */
    private Request prepareRequest(ApiRequest<?> request) {
        checkNotNull(request, "request");

        Request.Builder builder = new Request.Builder()
                .cacheControl(cacheControl)
                .url(request.requestUrl(getHostsProvider()))
                .addHeader(HttpHeaders.USER_AGENT, getUserAgent().getName())
                .addHeader(HttpHeaders.ACCEPT_LANGUAGE, getLanguage().iso6391Code);

        if (isAuthorized()) {
            builder.addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + accessToken);
        }

        for (Map.Entry<String, String> entry : request.getHeaders().entrySet()) {
            String value = entry.getValue();
            if (value != null) {
                builder.addHeader(entry.getKey(), value);
            }
        }

        ApiRequest.Method method = request.getMethod();
        if (method != ApiRequest.Method.GET) {
            RequestBody body = RequestBody.create(MediaType.parse(request.getContentType()), request.getBody());
            switch (method) {
                case POST:
                    builder.post(body);
                    break;
                case PUT:
                    builder.put(body);
                    break;
                case DELETE:
                    // NOTE(review): the body built above is discarded for DELETE.
                    builder.delete();
                    break;
            }
        }

        return builder.build();
    }

    /**
     * Builder for {@link DefaultApiClient}.
     */
    public static class Builder {

        boolean debugMode = false;
        String clientId;
        UserAgent userAgent = new DefaultUserAgent("Java");
        HostsProvider hostsProvider = new DefaultApiV1HostsProvider(false);
        Language language = Language.getDefault();
        OkHttpClient httpClient; // lazily created in DefaultApiClient's constructor when null

        /**
         * Sets debug mode. Enables logging. Default value is {@code false}.
         *
         * @param debugMode {@code true}, if debug mode is enabled
         * @return itself
         */
        public final Builder setDebugMode(boolean debugMode) {
            this.debugMode = debugMode;
            return this;
        }

        /**
         * Sets client id of {@link DefaultApiClient}.
         *
         * @param clientId client id
         * @return itself
         */
        public final Builder setClientId(String clientId) {
            this.clientId = clientId;
            return this;
        }

        /**
         * Sets hosts provider. Default value is an instance of {@link DefaultApiV1HostsProvider}.
         *
         * @param hostsProvider hosts provider
         * @return itself
         */
        public final Builder setHostsProvider(HostsProvider hostsProvider) {
            this.hostsProvider = hostsProvider;
            return this;
        }

        /**
         * Sets {@link UserAgent} to use.
         *
         * @param userAgent user agent
         * @return itself
         */
        public final Builder setUserAgent(UserAgent userAgent) {
            this.userAgent = userAgent;
            return this;
        }

        /**
         * Sets language for API responses.
         *
         * @param language language to use
         * @return itself
         */
        public final Builder setLanguage(Language language) {
            this.language = language;
            return this;
        }

        /**
         * Sets HTTP client to use.
         *
         * @param httpClient HTTP client
         * @return itself
         */
        public final Builder setHttpClient(OkHttpClient httpClient) {
            this.httpClient = httpClient;
            return this;
        }

        /**
         * Creates instance of {@link DefaultApiClient}.
         *
         * @return implementation of {@link ApiClient}
         */
        public DefaultApiClient create() {
            return new DefaultApiClient(this);
        }
    }

    /**
     * Immutable holder for the authorization endpoint URL
     * ({@code host + "/oauth/authorize"}) and the serialized request parameters.
     */
    private static final class AuthorizationDataImpl implements AuthorizationData {

        private final String url;
        private final byte[] parameters;

        AuthorizationDataImpl(String host, byte[] parameters) {
            this.url = host + "/oauth/authorize";
            this.parameters = parameters;
        }

        @Override
        public String getUrl() {
            return url;
        }

        @Override
        public byte[] getParameters() {
            return parameters;
        }
    }
}
| |
package water.fvec;
import static org.junit.Assert.*;
import org.junit.*;
import java.io.File;
import water.*;
import water.DException.DistributedException;
import water.util.ArrayUtils;
/**
 * Tests of the fluid-vector (FVec) layer: byte-vector construction, chunk-level
 * reads and writes, parsing, rollup statistics and approximate percentiles.
 */
public class FVecTest extends TestUtil {
  @BeforeClass public static void setup() { stall_till_cloudsize(1); }

  static final double EPSILON = 1e-6;

  /**
   * Builds a single-column Frame keyed by {@code k} whose ByteVec chunks hold
   * the bytes of each string in {@code data} (one chunk per string).
   *
   * @param k    key under which the frame is installed in the DKV
   * @param data chunk payloads; chunk i holds {@code data[i].getBytes()}
   * @return the frame's key (same as {@code k})
   */
  public static Key makeByteVec(Key k, String... data) {
    byte [][] chunks = new byte[data.length][];
    long [] espc = new long[data.length+1];   // element-start-per-chunk layout
    for(int i = 0; i < chunks.length; ++i){
      chunks[i] = data[i].getBytes();
      espc[i+1] = espc[i] + data[i].length();
    }
    Futures fs = new Futures();
    Key key = Vec.newKey();
    ByteVec bv = new ByteVec(key,Vec.ESPC.rowLayout(key,espc));
    for(int i = 0; i < chunks.length; ++i){
      Key chunkKey = bv.chunkKey(i);
      DKV.put(chunkKey, new Value(chunkKey,chunks[i].length,chunks[i],TypeMap.C1NCHUNK,Value.ICE),fs);
    }
    DKV.put(bv._key,bv,fs);
    Frame fr = new Frame(k,new String[]{"makeByteVec"},new Vec[]{bv});
    DKV.put(k, fr, fs);
    fs.blockForPending();
    return k;
  }

  // ==========================================================================
  /** Reads a CSV through an NFSFileVec; the byte histogram must sum to the file length. */
  @Test public void testBasicCRUD() {
    // Make and insert a FileVec to the global store
    File file = find_test_file("./smalldata/junit/cars.csv");
    NFSFileVec nfs = NFSFileVec.make(file);
    int sum = ArrayUtils.sum(new ByteHisto().doAll(nfs)._x);
    assertEquals(file.length(),sum);
    nfs.remove();
  }

  /** Counts occurrences of each byte value across all chunks. */
  private static class ByteHisto extends MRTask<ByteHisto> {
    public int[] _x;
    // Count occurrences of bytes
    @Override public void map( Chunk bv ) {
      _x = new int[256];        // One-time set histogram array
      for( int i=0; i< bv._len; i++ )
        _x[(int)bv.atd(i)]++;
    }
    // ADD together all results
    @Override public void reduce( ByteHisto bh ) { ArrayUtils.add(_x,bh._x); }
  }

  // ==========================================================================
  /** Writes into a freshly parsed frame to exercise chunk set() on writable vecs. */
  @Test public void testSet() {
    Frame fr = null;
    try {
      fr = parse_test_file("./smalldata/airlines/allyears2k_headers.zip");
      double[] mins =new double[fr.numCols()];
      for (int i=0; i < mins.length; i++)
        mins[i] = fr.vecs()[i].min();
      // Scribble into a freshly parsed frame
      new SetDoubleInt(mins).doAll(fr);
    } finally {
      if( fr != null ) fr.delete();
    }
  }

  /** Rotates the values of the first C2Chunk column down by one row. */
  // FIX: parameterized the previously raw MRTask supertype.
  static class SetDoubleInt extends MRTask<SetDoubleInt> {
    final double _mins[];
    public SetDoubleInt(double [] mins) {_mins = mins;}
    @Override public void map( Chunk chks[] ) {
      Chunk c=null;
      int i;
      for(i=0; i < chks.length; i++) {
        if( chks[i].getClass()==water.fvec.C2Chunk.class )
          { c=chks[i]; break; }
      }
      Assert.assertNotNull("Expect to find a C2Chunk", c);
      assertTrue(c._vec.writable());
      double d=_mins[i];
      for(i=0; i< c._len; i++ ) {
        double e = c.atd(i);
        c.set(i, d);
        d=e;
      }
    }
  }

  // ==========================================================================
  // Test making a appendable vector from a plain vector
  /** Derives a new Vec where bytes >= ' ' are incremented by one. */
  @Test public void testNewVec() {
    // Make and insert a File8Vec to the global store
    File file = find_test_file("./smalldata/junit/cars.csv");
    NFSFileVec nfs = NFSFileVec.make(file);
    Vec res = new TestNewVec().doAll(new byte[]{Vec.T_NUM},nfs).outputFrame(new String[]{"v"},new String[][]{null}).anyVec();
    assertEquals(nfs.at8(0)+1,res.at8(0));
    assertEquals(nfs.at8(1)+1,res.at8(1));
    assertEquals(nfs.at8(2)+1,res.at8(2));
    nfs.remove();
    res.remove();
  }

  /** Appends each input byte, bumped by one when it is a printable character. */
  private static class TestNewVec extends MRTask<TestNewVec> {
    @Override public void map( Chunk in, NewChunk out ) {
      for( int i=0; i< in._len; i++ )
        out.append2( in.at8_abs(i)+(in.at8_abs(i) >= ' ' ? 1 : 0),0);
    }
  }

  // ==========================================================================
  /** Parses a gzipped CSV, checks dimensions and column sums, then sums two columns. */
  @Test public void testParse2() {
    Frame fr = null;
    Vec vz = null;
    try {
      fr = parse_test_file("smalldata/junit/syn_2659x1049.csv.gz");
      assertEquals(fr.numCols(),1050); // Count of columns
      assertEquals(fr.numRows(),2659); // Count of rows

      double[] sums = new Sum().doAll(fr)._sums;
      assertEquals(3949,sums[0],EPSILON);
      assertEquals(3986,sums[1],EPSILON);
      assertEquals(3993,sums[2],EPSILON);

      // Create a temp column of zeros
      Vec v0 = fr.vecs()[0];
      Vec v1 = fr.vecs()[1];
      vz = v0.makeZero();
      // Add column 0 & 1 into the temp column
      new PairSum().doAll(vz,v0,v1);
      // Add the temp to frame
      // Now total the temp col
      fr.delete();              // Remove all other columns
      fr = new Frame(Key.make(),new String[]{"tmp"},new Vec[]{vz}); // Add just this one
      sums = new Sum().doAll(fr)._sums;
      assertEquals(3949+3986,sums[0],EPSILON);
    } finally {
      if( vz != null ) vz.remove();
      if( fr != null ) fr.delete();
    }
  }

  // Sum each column independently
  private static class Sum extends MRTask<Sum> {
    double _sums[];
    @Override public void map( Chunk[] bvs ) {
      _sums = new double[bvs.length];
      int len = bvs[0]._len;
      for( int i=0; i<len; i++ )
        for( int j=0; j<bvs.length; j++ )
          _sums[j] += bvs[j].atd(i);
    }
    @Override public void reduce( Sum mrt ) { ArrayUtils.add(_sums, mrt._sums); }
  }

  // Simple vector sum C=A+B
  // FIX: self-type parameter was MRTask<Sum> (copy-paste); corrected to PairSum.
  private static class PairSum extends MRTask<PairSum> {
    @Override public void map( Chunk out, Chunk in1, Chunk in2 ) {
      for( int i=0; i< out._len; i++ )
        out.set(i, in1.at8(i) + in2.at8(i));
    }
  }

  /** Exercises rollup statistics (min/max/mean, histogram bins) and the guard
   *  against requesting rollups while the vector is open for writing. */
  @Test public void testRollups() {
    Key rebalanced = Key.make("rebalanced");
    Vec v = null;
    Frame fr = null;
    try {
      v = Vec.makeVec(new double[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}, Vec.newKey());
      Futures fs = new Futures();
      assertEquals(0, v.min(), 0);
      assertEquals(9, v.max(), 0);
      assertEquals(4.5,v.mean(),1e-8);
      H2O.submitTask(new RebalanceDataSet(new Frame(v), rebalanced, 10)).join();
      fr = DKV.get(rebalanced).get();
      Vec v2 = fr.anyVec();
      assertEquals(0, v2.min(), 0);
      assertEquals(9, v2.max(), 0);
      // FIX: check the rebalanced vector, not the original (copy-paste slip).
      assertEquals(4.5, v2.mean(), 1e-8);
      v2.set(5, -100);
      assertEquals(-100, v2.min(), 0);
      v2.set(5, 5);
      // make several rollups requests in parallel with and without histo and then get histo
      v2.startRollupStats(fs);
      v2.startRollupStats(fs);
      v2.startRollupStats(fs,true);
      assertEquals(0, v2.min(), 0);
      long [] bins = v2.bins();
      assertEquals(10,bins.length);
      // TODO: should test percentiles?
      for(long l:bins) assertEquals(1,l);
      Vec.Writer w = v2.open();
      try {
        v2.min();
        // FIX: use fail() instead of assertTrue(msg, false).
        fail("should have thrown IAE since we're requesting rollups while changing the Vec (got Vec.Writer)");
      } catch( DistributedException de ) {
        // expect to get IAE since we're requesting rollups while also changing the vec
        assertTrue(de.getMessage().contains("IllegalArgumentException"));
      } catch( IllegalArgumentException ie ) {
        // if on local node can get iae directly
      }
      w.close(fs);
      fs.blockForPending();
      assertEquals(0,v2.min(),0);
      fr.delete();
      v.remove();
      fr = null;
    } finally {
      if( v != null)v.remove();
      if(fr != null)fr.delete();
    }
  }

  // The rollups only compute approximate quantiles, not exact.
  @Test public void test50pct() {
    Vec vec = null;
    try {
      double[] d = new double[]{0.812834256224, 1.56386606237, 3.12702210880, 3.68417563302, 5.51277746586};
      vec = Vec.makeVec(d,Vec.newKey());
      double pct[] = vec.pctiles();
      double eps = (vec.max()-vec.min())/1e-3;
      Assert.assertEquals(pct[0],d[0],eps); // 0.01
      Assert.assertEquals(pct[1],d[0],eps); // 0.1
      Assert.assertEquals(pct[2],d[0],eps); // 0.25
      Assert.assertEquals(pct[3],d[1],eps); // 1/3
      Assert.assertEquals(pct[4],d[2],eps); // 0.5
      Assert.assertEquals(pct[5],d[2],eps); // 2/3
      Assert.assertEquals(pct[6],d[3],eps); // 0.75
      Assert.assertEquals(pct[7],d[4],eps); // 0.9
      Assert.assertEquals(pct[8],d[4],eps); // 0.99
      vec.remove();
      d = new double[]{490,492,494,496,498};
      vec = Vec.makeVec(d,Vec.newKey());
      pct = vec.pctiles();
      eps = (vec.max()-vec.min())/1e-3;
      System.out.println(java.util.Arrays.toString(pct));
      Assert.assertEquals(pct[0],d[0],eps); // 0.01
    } finally {
      if( vec != null ) vec.remove();
    }
  }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/websecurityscanner/v1/web_security_scanner.proto
package com.google.cloud.websecurityscanner.v1;
/**
*
*
* <pre>
* Request for the `UpdateScanConfigRequest` method.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1.UpdateScanConfigRequest}
*/
public final class UpdateScanConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.websecurityscanner.v1.UpdateScanConfigRequest)
UpdateScanConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateScanConfigRequest.newBuilder() to construct.
  // Builder-based constructor used by newBuilder()/build(); delegates state
  // transfer to the GeneratedMessageV3 superclass. (Generated code — do not edit logic.)
  private UpdateScanConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor; all fields retain their Java default values.
  private UpdateScanConfigRequest() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Used by the protobuf runtime to allocate fresh, empty instances.
    return new UpdateScanConfigRequest();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    // Fields seen on the wire that are not defined in this message's schema.
    return this.unknownFields;
  }
  // Wire-format parsing constructor: reads tagged fields from `input` until end
  // of stream, preserving unrecognized fields in `unknownFields`.
  private UpdateScanConfigRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 signals end of the input stream.
            done = true;
            break;
          case 18:
            {
              // Tag 18 = field 2 (scan_config), length-delimited. On a repeated
              // occurrence, merge the new message into the previously parsed one.
              com.google.cloud.websecurityscanner.v1.ScanConfig.Builder subBuilder = null;
              if (scanConfig_ != null) {
                subBuilder = scanConfig_.toBuilder();
              }
              scanConfig_ =
                  input.readMessage(
                      com.google.cloud.websecurityscanner.v1.ScanConfig.parser(),
                      extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(scanConfig_);
                scanConfig_ = subBuilder.buildPartial();
              }
              break;
            }
          case 26:
            {
              // Tag 26 = field 3 (update_mask), length-delimited; same
              // merge-on-repeat semantics as above.
              com.google.protobuf.FieldMask.Builder subBuilder = null;
              if (updateMask_ != null) {
                subBuilder = updateMask_.toBuilder();
              }
              updateMask_ =
                  input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(updateMask_);
                updateMask_ = subBuilder.buildPartial();
              }
              break;
            }
          default:
            {
              // Unknown field: retain its bytes, or stop if it cannot be parsed.
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always freeze whatever was parsed so far, even on error.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1_UpdateScanConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1_UpdateScanConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.class,
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.Builder.class);
}
public static final int SCAN_CONFIG_FIELD_NUMBER = 2;
private com.google.cloud.websecurityscanner.v1.ScanConfig scanConfig_;
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*
* @return Whether the scanConfig field is set.
*/
@java.lang.Override
public boolean hasScanConfig() {
return scanConfig_ != null;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*
* @return The scanConfig.
*/
@java.lang.Override
public com.google.cloud.websecurityscanner.v1.ScanConfig getScanConfig() {
return scanConfig_ == null
? com.google.cloud.websecurityscanner.v1.ScanConfig.getDefaultInstance()
: scanConfig_;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
@java.lang.Override
public com.google.cloud.websecurityscanner.v1.ScanConfigOrBuilder getScanConfigOrBuilder() {
return getScanConfig();
}
public static final int UPDATE_MASK_FIELD_NUMBER = 3;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return updateMask_ != null;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return getUpdateMask();
}
  // Tri-state cache for isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  /**
   * Always returns {@code true}: the generated code validates no fields for this
   * message, so the cached result is simply set to 1 on first call.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes the set (non-null) message fields in field-number order, then any
   * unknown fields, to {@code output}.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (scanConfig_ != null) {
      output.writeMessage(2, getScanConfig()); // field 2: scan_config
    }
    if (updateMask_ != null) {
      output.writeMessage(3, getUpdateMask()); // field 3: update_mask
    }
    unknownFields.writeTo(output);
  }
  /**
   * Computes the serialized byte size, mirroring exactly what {@link #writeTo} emits,
   * and memoizes it in {@code memoizedSize} (-1 means not yet computed).
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (scanConfig_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getScanConfig());
    }
    if (updateMask_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateMask());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Value equality: two requests are equal when each field agrees on both presence
   * and contents, and their unknown field sets are equal.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest other =
        (com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest) obj;

    if (hasScanConfig() != other.hasScanConfig()) return false;
    if (hasScanConfig()) {
      if (!getScanConfig().equals(other.getScanConfig())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  /**
   * Hash consistent with {@link #equals}: mixes the descriptor, each set field tagged
   * by its field number, and the unknown fields. Memoized in {@code memoizedHashCode}
   * (0 means not yet computed).
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasScanConfig()) {
      hash = (37 * hash) + SCAN_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getScanConfig().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for the `UpdateScanConfigRequest` method.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1.UpdateScanConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.websecurityscanner.v1.UpdateScanConfigRequest)
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1_UpdateScanConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1_UpdateScanConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.class,
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.Builder.class);
}
// Construct using com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (scanConfigBuilder_ == null) {
scanConfig_ = null;
} else {
scanConfig_ = null;
scanConfigBuilder_ = null;
}
if (updateMaskBuilder_ == null) {
updateMask_ = null;
} else {
updateMask_ = null;
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.websecurityscanner.v1.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1_UpdateScanConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest
getDefaultInstanceForType() {
return com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest build() {
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest buildPartial() {
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest result =
new com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest(this);
if (scanConfigBuilder_ == null) {
result.scanConfig_ = scanConfig_;
} else {
result.scanConfig_ = scanConfigBuilder_.build();
}
if (updateMaskBuilder_ == null) {
result.updateMask_ = updateMask_;
} else {
result.updateMask_ = updateMaskBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest) {
return mergeFrom((com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest other) {
if (other
== com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest.getDefaultInstance())
return this;
if (other.hasScanConfig()) {
mergeScanConfig(other.getScanConfig());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest)
e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private com.google.cloud.websecurityscanner.v1.ScanConfig scanConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.websecurityscanner.v1.ScanConfig,
com.google.cloud.websecurityscanner.v1.ScanConfig.Builder,
com.google.cloud.websecurityscanner.v1.ScanConfigOrBuilder>
scanConfigBuilder_;
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*
* @return Whether the scanConfig field is set.
*/
public boolean hasScanConfig() {
return scanConfigBuilder_ != null || scanConfig_ != null;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*
* @return The scanConfig.
*/
public com.google.cloud.websecurityscanner.v1.ScanConfig getScanConfig() {
if (scanConfigBuilder_ == null) {
return scanConfig_ == null
? com.google.cloud.websecurityscanner.v1.ScanConfig.getDefaultInstance()
: scanConfig_;
} else {
return scanConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public Builder setScanConfig(com.google.cloud.websecurityscanner.v1.ScanConfig value) {
if (scanConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
scanConfig_ = value;
onChanged();
} else {
scanConfigBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public Builder setScanConfig(
com.google.cloud.websecurityscanner.v1.ScanConfig.Builder builderForValue) {
if (scanConfigBuilder_ == null) {
scanConfig_ = builderForValue.build();
onChanged();
} else {
scanConfigBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public Builder mergeScanConfig(com.google.cloud.websecurityscanner.v1.ScanConfig value) {
if (scanConfigBuilder_ == null) {
if (scanConfig_ != null) {
scanConfig_ =
com.google.cloud.websecurityscanner.v1.ScanConfig.newBuilder(scanConfig_)
.mergeFrom(value)
.buildPartial();
} else {
scanConfig_ = value;
}
onChanged();
} else {
scanConfigBuilder_.mergeFrom(value);
}
return this;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public Builder clearScanConfig() {
if (scanConfigBuilder_ == null) {
scanConfig_ = null;
onChanged();
} else {
scanConfig_ = null;
scanConfigBuilder_ = null;
}
return this;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public com.google.cloud.websecurityscanner.v1.ScanConfig.Builder getScanConfigBuilder() {
onChanged();
return getScanConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
public com.google.cloud.websecurityscanner.v1.ScanConfigOrBuilder getScanConfigOrBuilder() {
if (scanConfigBuilder_ != null) {
return scanConfigBuilder_.getMessageOrBuilder();
} else {
return scanConfig_ == null
? com.google.cloud.websecurityscanner.v1.ScanConfig.getDefaultInstance()
: scanConfig_;
}
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>.google.cloud.websecurityscanner.v1.ScanConfig scan_config = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.websecurityscanner.v1.ScanConfig,
com.google.cloud.websecurityscanner.v1.ScanConfig.Builder,
com.google.cloud.websecurityscanner.v1.ScanConfigOrBuilder>
getScanConfigFieldBuilder() {
if (scanConfigBuilder_ == null) {
scanConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.websecurityscanner.v1.ScanConfig,
com.google.cloud.websecurityscanner.v1.ScanConfig.Builder,
com.google.cloud.websecurityscanner.v1.ScanConfigOrBuilder>(
getScanConfig(), getParentForChildren(), isClean());
scanConfig_ = null;
}
return scanConfigBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return updateMaskBuilder_ != null || updateMask_ != null;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
onChanged();
} else {
updateMaskBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
onChanged();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (updateMask_ != null) {
updateMask_ =
com.google.protobuf.FieldMask.newBuilder(updateMask_).mergeFrom(value).buildPartial();
} else {
updateMask_ = value;
}
onChanged();
} else {
updateMaskBuilder_.mergeFrom(value);
}
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public Builder clearUpdateMask() {
if (updateMaskBuilder_ == null) {
updateMask_ = null;
onChanged();
} else {
updateMask_ = null;
updateMaskBuilder_ = null;
}
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.websecurityscanner.v1.UpdateScanConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1.UpdateScanConfigRequest)
  // Singleton empty message shared by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest();
  }

  public static com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser; delegates to the stream-parsing constructor above.
  private static final com.google.protobuf.Parser<UpdateScanConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateScanConfigRequest>() {
        @java.lang.Override
        public UpdateScanConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new UpdateScanConfigRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<UpdateScanConfigRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateScanConfigRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.websecurityscanner.v1.UpdateScanConfigRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
package com.glitchcog.fontificator.gui.chat;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.io.IOException;

import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JRootPane;
import javax.swing.KeyStroke;

import com.glitchcog.fontificator.config.ConfigChat;
import com.glitchcog.fontificator.config.FontificatorProperties;
import com.glitchcog.fontificator.gui.FontificatorError;
import com.glitchcog.fontificator.gui.controls.ControlWindow;
/**
* This window displays the chat. It is intended to be minimalistic and solely display the chat.
*
* @author Matt Yanos
*/
public class ChatWindow extends JFrame
{
    private static final long serialVersionUID = 1L;

    /**
     * A stored reference to the chat panel, used whenever some part of the system has a reference to this ChatWindow,
     * but needs to modify something on the panel. This is in lieu of putting a bunch of accessor methods here to do
     * pass through the function to the panel in an encapsulated way.
     */
    private ChatPanel chatPanel;

    /**
     * A static copy of this ChatWindow for accessing it globally
     */
    public static ChatWindow me;

    /**
     * The popup for submitting errors that the user needs to see
     */
    public static FontificatorError popup;

    /**
     * Mouse listeners for dragging the Chat Window around when dragging the mouse inside the chat
     */
    private ChatMouseListeners mouseListeners;

    /**
     * Escape keystroke used by {@link #setupHideOnEscape(JDialog)} to close popups
     */
    private static final KeyStroke escapeStroke = KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0);

    /**
     * Construct the Chat Window and publish the global static reference
     */
    public ChatWindow()
    {
        // NOTE(review): publishing 'this' from the constructor exposes a partially constructed
        // window; kept as-is because existing code relies on the static handle being set here
        me = this;
        setTitle("Fontificator Chat");
        popup = new FontificatorError(null);
    }

    /**
     * Sets the properties to get hooks into the properties' configuration models; Sets the ControlWindow to get hooks
     * back into the controls; wires up mouse and window listeners and sizes the window from the chat configuration
     *
     * @param fProps
     *            the loaded application properties
     * @param ctrlWindow
     *            the control window that owns the exit confirmation logic
     * @throws IOException
     */
    public void initChat(final FontificatorProperties fProps, final ControlWindow ctrlWindow) throws IOException
    {
        chatPanel = new ChatPanel();
        add(chatPanel);

        mouseListeners = new ChatMouseListeners(this, ctrlWindow);
        addMouseListener(mouseListeners);
        addMouseMotionListener(mouseListeners);
        addMouseWheelListener(chatPanel);

        // Closing is vetoed here and routed through the control window below so the exit can be confirmed
        setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);

        setChatSize(fProps.getChatConfig());
        setResizable(fProps.getChatConfig().isResizable());
        setAlwaysOnTop(fProps.getChatConfig().isAlwaysOnTop());

        chatPanel.setConfig(fProps);
        chatPanel.initExpirationTimer();

        // WindowAdapter replaces the previous raw WindowListener, which required six empty stub methods
        addWindowListener(new WindowAdapter()
        {
            @Override
            public void windowClosing(WindowEvent e)
            {
                // Delegate the exit attempt to the control window
                ctrlWindow.attemptToExit(e.getComponent());
            }
        });
    }

    /**
     * Handles sizing the Chat Window, clearing out old JFrame-based config values in the ConfigChat model if they were
     * the only ones available
     *
     * @param chatConfig
     *            the chat configuration holding either legacy whole-window dimensions or content-pane dimensions
     */
    public void setChatSize(ConfigChat chatConfig)
    {
        if (chatConfig.getWindowWidth() != null && chatConfig.getWindowHeight() != null)
        {
            // Legacy config stored the outer frame size; convert it to content-pane size and discard the old values
            setSize(chatConfig.getWindowWidth(), chatConfig.getWindowHeight());
            chatConfig.setWidth(getContentPane().getWidth());
            chatConfig.setHeight(getContentPane().getHeight());
            chatConfig.clearLegacyWindowSize();
        }
        else if (chatConfig.getWidth() > 0 && chatConfig.getHeight() > 0)
        {
            getContentPane().setPreferredSize(new Dimension(chatConfig.getWidth(), chatConfig.getHeight()));
            getContentPane().setSize(chatConfig.getWidth(), chatConfig.getHeight());
            pack();
        }
    }

    /**
     * Does the work required to make the parameter JDialog be hidden when pressing escape
     *
     * @param popup
     *            the dialog to hide when escape is pressed
     */
    public static void setupHideOnEscape(final JDialog popup)
    {
        Action aa = new AbstractAction()
        {
            private static final long serialVersionUID = 1L;

            @Override
            public void actionPerformed(ActionEvent event)
            {
                popup.setVisible(false);
            }
        };
        final String mapKey = "escapePressed";
        JRootPane root = popup.getRootPane();
        root.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(escapeStroke, mapKey);
        root.getActionMap().put(mapKey, aa);
    }

    /**
     * Access to the chat panel through this window, for any places there is a reference to just the ChatWindow, but
     * that needs to affect the ChatPanel, which has all the actual chat options
     *
     * @return chatPanel
     */
    public ChatPanel getChatPanel()
    {
        return chatPanel;
    }

    /**
     * This is a small hack to expose the ctrl window to the message control panel so its username case map can be
     * cleared out if the user changes the type of username case resolution
     */
    public void clearUsernameCases()
    {
        mouseListeners.clearUsernameCases();
    }
}
| |
package com.bangtoven.cookievideoview;
/*
* Copyright (C) 2006 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.annotation.TargetApi;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnCompletionListener;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnInfoListener;
import android.net.Uri;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityNodeInfo;
import android.widget.MediaController;
import android.widget.MediaController.MediaPlayerControl;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.http.cookie.Cookie;
import com.loopj.android.http.PersistentCookieStore;
/**
 * A VideoView variant that attaches the application's persisted cookies (via
 * {@link PersistentCookieStore}) as an HTTP "Cookie" request header when opening a
 * stream, so cookie-authenticated video URLs can play. Based on the AOSP VideoView:
 * it sizes itself from the video's dimensions and drives a MediaPlayer through an
 * explicit state machine (see the STATE_* constants below).
 */
public class CookieVideoView extends SurfaceView implements MediaPlayerControl {
    private String TAG = "CookieVideoView";
    // settable by the client
    private Uri mUri;
    private Map<String, String> mHeaders; // extra request headers: the cookie header
    // all possible internal states
    private static final int STATE_ERROR = -1;
    private static final int STATE_IDLE = 0;
    private static final int STATE_PREPARING = 1;
    private static final int STATE_PREPARED = 2;
    private static final int STATE_PLAYING = 3;
    private static final int STATE_PAUSED = 4;
    private static final int STATE_PLAYBACK_COMPLETED = 5;
    // mCurrentState is a VideoView object's current state.
    // mTargetState is the state that a method caller intends to reach.
    // For instance, regardless the VideoView object's current state,
    // calling pause() intends to bring the object to a target state
    // of STATE_PAUSED.
    private int mCurrentState = STATE_IDLE;
    private int mTargetState = STATE_IDLE;
    // All the stuff we need for playing and showing a video
    private SurfaceHolder mSurfaceHolder = null;
    private MediaPlayer mMediaPlayer = null;
    private int mVideoWidth;
    private int mVideoHeight;
    private int mSurfaceWidth;
    private int mSurfaceHeight;
    private MediaController mMediaController;
    private OnCompletionListener mOnCompletionListener;
    private MediaPlayer.OnPreparedListener mOnPreparedListener;
    private int mCurrentBufferPercentage;
    private OnErrorListener mOnErrorListener;
    private OnInfoListener mOnInfoListener;
    private int mSeekWhenPrepared; // recording the seek position while preparing
    private boolean mCanPause;
    private boolean mCanSeekBack;
    private boolean mCanSeekForward;
    private Context mContext;
public CookieVideoView(Context context) {
super(context);
initVideoView(context);
}
public CookieVideoView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
initVideoView(context);
}
public CookieVideoView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
initVideoView(context);
}
    /**
     * Measures the view so it matches the aspect ratio of the current video: start from
     * the default size on each axis, then shrink whichever dimension makes the view
     * taller/wider than the video's width:height ratio.
     */
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        //Log.i("@@@@", "onMeasure");
        int width = getDefaultSize(mVideoWidth, widthMeasureSpec);
        int height = getDefaultSize(mVideoHeight, heightMeasureSpec);
        if (mVideoWidth > 0 && mVideoHeight > 0) {
            // Cross-multiplied comparison avoids integer-division rounding error
            if ( mVideoWidth * height > width * mVideoHeight ) {
                //Log.i("@@@", "image too tall, correcting");
                height = width * mVideoHeight / mVideoWidth;
            } else if ( mVideoWidth * height < width * mVideoHeight ) {
                //Log.i("@@@", "image too wide, correcting");
                width = height * mVideoWidth / mVideoHeight;
            } else {
                // aspect ratio already correct; keep the default sizes
                //Log.i("@@@", "aspect ratio is correct: " +
                        //width+"/"+height+"="+
                        //mVideoWidth+"/"+mVideoHeight);
            }
        }
        //Log.i("@@@@@@@@@@", "setting size: " + width + 'x' + height);
        setMeasuredDimension(width, height);
    }
@Override
public void onInitializeAccessibilityEvent(AccessibilityEvent event) {
super.onInitializeAccessibilityEvent(event);
event.setClassName(CookieVideoView.class.getName());
}
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
@Override
public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) {
super.onInitializeAccessibilityNodeInfo(info);
info.setClassName(CookieVideoView.class.getName());
}
public int resolveAdjustedSize(int desiredSize, int measureSpec) {
int result = desiredSize;
int specMode = MeasureSpec.getMode(measureSpec);
int specSize = MeasureSpec.getSize(measureSpec);
switch (specMode) {
case MeasureSpec.UNSPECIFIED:
/* Parent says we can be as big as we want. Just don't be larger
* than max size imposed on ourselves.
*/
result = desiredSize;
break;
case MeasureSpec.AT_MOST:
/* Parent says we can be as big as we want, up to specSize.
* Don't be larger than specSize, and don't be larger than
* the max size imposed on ourselves.
*/
result = Math.min(desiredSize, specSize);
break;
case MeasureSpec.EXACTLY:
// No choice. Do what we are told.
result = specSize;
break;
}
return result;
}
    /**
     * Shared constructor initialization: registers the surface callback, makes the view
     * focusable so media key events reach onKeyDown, and resets the state machine.
     */
    private void initVideoView(Context context) {
        mVideoWidth = 0;
        mVideoHeight = 0;
        getHolder().addCallback(mSHCallback);
        // NOTE(review): setType() is deprecated and documented as a no-op on API 11+;
        // presumably kept here for pre-Honeycomb devices — confirm minSdk before removing.
        getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        setFocusable(true);
        setFocusableInTouchMode(true);
        requestFocus();
        mCurrentState = STATE_IDLE;
        mTargetState = STATE_IDLE;
        mContext = context;
    }
    /** Convenience wrapper around {@link #setVideoURI} for a path/URL string. */
    public void setVideoPath(String path) {
        setVideoURI(Uri.parse(path));
    }

    /**
     * Sets the video URI, builds the cookie header for its host, and — if the surface
     * is already available — begins asynchronous preparation via openVideo().
     */
    public void setVideoURI(Uri uri) {
        mUri = uri;
        mHeaders = getCookieHeader();
        mSeekWhenPrepared = 0;
        openVideo();
        requestLayout();
        invalidate();
    }
private Map<String,String> getCookieHeader() {
String hostDomain = mUri.getHost();
List<Cookie> cookieList = new PersistentCookieStore(mContext).getCookies();
for(Cookie cookie : cookieList) {
if (cookie.getDomain().equalsIgnoreCase(hostDomain)) {
Map<String,String> header = new HashMap<String, String>();
header.put("Cookie", cookie.getName() + "=" + cookie.getValue());
Log.d(TAG,"Cookie: "+header.toString());
return header;
}
}
return null;
}
public void stopPlayback() {
if (mMediaPlayer != null) {
mMediaPlayer.stop();
mMediaPlayer.release();
mMediaPlayer = null;
mCurrentState = STATE_IDLE;
mTargetState = STATE_IDLE;
}
}
    /**
     * (Re)creates the MediaPlayer for the current URI and starts asynchronous
     * preparation, passing the cookie header via setDataSource. Requires both a URI and
     * a live surface; otherwise it is a no-op and is retried when the surface appears.
     */
    private void openVideo() {
        if (mUri == null || mSurfaceHolder == null) {
            // not ready for playback just yet, will try again later
            return;
        }
        // Tell the music playback service to pause
        // TODO: these constants need to be published somewhere in the framework.
        Intent i = new Intent("com.android.music.musicservicecommand");
        i.putExtra("command", "pause");
        mContext.sendBroadcast(i);
        // we shouldn't clear the target state, because somebody might have
        // called start() previously
        release(false);
        try {
            mMediaPlayer = new MediaPlayer();
            mMediaPlayer.setOnPreparedListener(mPreparedListener);
            mMediaPlayer.setOnVideoSizeChangedListener(mSizeChangedListener);
            mMediaPlayer.setOnCompletionListener(mCompletionListener);
            mMediaPlayer.setOnErrorListener(mErrorListener);
            mMediaPlayer.setOnInfoListener(mOnInfoListener);
            mMediaPlayer.setOnBufferingUpdateListener(mBufferingUpdateListener);
            mCurrentBufferPercentage = 0;
            // mHeaders carries the cookie header built in setVideoURI (may be null)
            mMediaPlayer.setDataSource(mContext, mUri, mHeaders);
            mMediaPlayer.setDisplay(mSurfaceHolder);
            mMediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
            mMediaPlayer.setScreenOnWhilePlaying(true);
            mMediaPlayer.prepareAsync();
            // we don't set the target state here either, but preserve the
            // target state that was there before.
            mCurrentState = STATE_PREPARING;
            attachMediaController();
        } catch (IOException ex) {
            // Both catch blocks are identical; report the error through the normal path
            Log.w(TAG, "Unable to open content: " + mUri, ex);
            mCurrentState = STATE_ERROR;
            mTargetState = STATE_ERROR;
            mErrorListener.onError(mMediaPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, 0);
            return;
        } catch (IllegalArgumentException ex) {
            Log.w(TAG, "Unable to open content: " + mUri, ex);
            mCurrentState = STATE_ERROR;
            mTargetState = STATE_ERROR;
            mErrorListener.onError(mMediaPlayer, MediaPlayer.MEDIA_ERROR_UNKNOWN, 0);
            return;
        }
    }
public void setMediaController(MediaController controller) {
if (mMediaController != null) {
mMediaController.hide();
}
mMediaController = controller;
attachMediaController();
}
private void attachMediaController() {
if (mMediaPlayer != null && mMediaController != null) {
mMediaController.setMediaPlayer(this);
View anchorView = this.getParent() instanceof View ?
(View)this.getParent() : this;
mMediaController.setAnchorView(anchorView);
mMediaController.setEnabled(isInPlaybackState());
}
}
    // Keeps the surface's fixed size in sync with the decoded video dimensions whenever
    // the player reports a size change, and triggers a re-measure (see onMeasure).
    MediaPlayer.OnVideoSizeChangedListener mSizeChangedListener =
        new MediaPlayer.OnVideoSizeChangedListener() {
            public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
                mVideoWidth = mp.getVideoWidth();
                mVideoHeight = mp.getVideoHeight();
                if (mVideoWidth != 0 && mVideoHeight != 0) {
                    getHolder().setFixedSize(mVideoWidth, mVideoHeight);
                    requestLayout();
                }
            }
    };
    // Runs when prepareAsync() finishes: records capabilities and video size, forwards
    // the event to the client listener, applies any seek requested while preparing, and
    // starts playback if start() was already called (mTargetState == STATE_PLAYING).
    MediaPlayer.OnPreparedListener mPreparedListener = new MediaPlayer.OnPreparedListener() {
        public void onPrepared(MediaPlayer mp) {
            mCurrentState = STATE_PREPARED;
            // Get the capabilities of the player for this stream
            // (the hidden Metadata API below is unavailable, so all three
            // capabilities are simply assumed true)
            // Metadata data = mp.getMetadata(MediaPlayer.METADATA_ALL,
            //                      MediaPlayer.BYPASS_METADATA_FILTER);
            //
            // if (data != null) {
            //      mCanPause = !data.has(Metadata.PAUSE_AVAILABLE)
            //              || data.getBoolean(Metadata.PAUSE_AVAILABLE);
            //      mCanSeekBack = !data.has(Metadata.SEEK_BACKWARD_AVAILABLE)
            //              || data.getBoolean(Metadata.SEEK_BACKWARD_AVAILABLE);
            //      mCanSeekForward = !data.has(Metadata.SEEK_FORWARD_AVAILABLE)
            //              || data.getBoolean(Metadata.SEEK_FORWARD_AVAILABLE);
            // } else {
            mCanPause = mCanSeekBack = mCanSeekForward = true;
            // }
            if (mOnPreparedListener != null) {
                mOnPreparedListener.onPrepared(mMediaPlayer);
            }
            if (mMediaController != null) {
                mMediaController.setEnabled(true);
            }
            mVideoWidth = mp.getVideoWidth();
            mVideoHeight = mp.getVideoHeight();
            int seekToPosition = mSeekWhenPrepared;  // mSeekWhenPrepared may be changed after seekTo() call
            if (seekToPosition != 0) {
                seekTo(seekToPosition);
            }
            if (mVideoWidth != 0 && mVideoHeight != 0) {
                //Log.i("@@@@", "video size: " + mVideoWidth +"/"+ mVideoHeight);
                getHolder().setFixedSize(mVideoWidth, mVideoHeight);
                if (mSurfaceWidth == mVideoWidth && mSurfaceHeight == mVideoHeight) {
                    // We didn't actually change the size (it was already at the size
                    // we need), so we won't get a "surface changed" callback, so
                    // start the video here instead of in the callback.
                    if (mTargetState == STATE_PLAYING) {
                        start();
                        if (mMediaController != null) {
                            mMediaController.show();
                        }
                    } else if (!isPlaying() &&
                               (seekToPosition != 0 || getCurrentPosition() > 0)) {
                       if (mMediaController != null) {
                           // Show the media controls when we're paused into a video and make 'em stick.
                           mMediaController.show(0);
                       }
                   }
                }
            } else {
                // We don't know the video size yet, but should start anyway.
                // The video size might be reported to us later.
                if (mTargetState == STATE_PLAYING) {
                    start();
                }
            }
        }
    };
    // End-of-stream: move both current and target state to PLAYBACK_COMPLETED, hide the
    // controller, and forward the event to the client's completion listener.
    private MediaPlayer.OnCompletionListener mCompletionListener =
        new MediaPlayer.OnCompletionListener() {
        public void onCompletion(MediaPlayer mp) {
            mCurrentState = STATE_PLAYBACK_COMPLETED;
            mTargetState = STATE_PLAYBACK_COMPLETED;
            if (mMediaController != null) {
                mMediaController.hide();
            }
            if (mOnCompletionListener != null) {
                mOnCompletionListener.onCompletion(mMediaPlayer);
            }
        }
    };
    // Playback/setup failure: enter the error state, give the client's error listener
    // first refusal, and otherwise show an error dialog (only while attached to a
    // window). Always returns true so MediaPlayer does not also fire onCompletion.
    private MediaPlayer.OnErrorListener mErrorListener =
        new MediaPlayer.OnErrorListener() {
        public boolean onError(MediaPlayer mp, int framework_err, int impl_err) {
            Log.d(TAG, "Error: " + framework_err + "," + impl_err);
            mCurrentState = STATE_ERROR;
            mTargetState = STATE_ERROR;
            if (mMediaController != null) {
                mMediaController.hide();
            }
            /* If an error handler has been supplied, use it and finish. */
            if (mOnErrorListener != null) {
                if (mOnErrorListener.onError(mMediaPlayer, framework_err, impl_err)) {
                    return true;
                }
            }
            /* Otherwise, pop up an error dialog so the user knows that
             * something bad has happened. Only try and pop up the dialog
             * if we're attached to a window. When we're going away and no
             * longer have a window, don't bother showing the user an error.
             */
            if (getWindowToken() != null) {
                // Resources r = mContext.getResources();
                int messageId;
                if (framework_err == MediaPlayer.MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK) {
                    messageId = R.string.VideoView_error_text_invalid_progressive_playback;
                } else {
                    messageId = R.string.VideoView_error_text_unknown;
                }
                new AlertDialog.Builder(mContext)
                        .setMessage(messageId)
                        .setPositiveButton(R.string.VideoView_error_button,
                                new DialogInterface.OnClickListener() {
                                    public void onClick(DialogInterface dialog, int whichButton) {
                                        /* If we get here, there is no onError listener, so
                                         * at least inform them that the video is over.
                                         */
                                        if (mOnCompletionListener != null) {
                                            mOnCompletionListener.onCompletion(mMediaPlayer);
                                        }
                                    }
                                })
                        .setCancelable(false)
                        .show();
            }
            return true;
        }
    };
    // Caches the latest buffering percentage so getBufferPercentage() can report it
    // to the attached MediaController.
    private MediaPlayer.OnBufferingUpdateListener mBufferingUpdateListener =
        new MediaPlayer.OnBufferingUpdateListener() {
        public void onBufferingUpdate(MediaPlayer mp, int percent) {
            mCurrentBufferPercentage = percent;
        }
    };
/**
* Register a callback to be invoked when the media file
* is loaded and ready to go.
*
* @param l The callback that will be run
*/
public void setOnPreparedListener(MediaPlayer.OnPreparedListener l)
{
mOnPreparedListener = l;
}
/**
* Register a callback to be invoked when the end of a media file
* has been reached during playback.
*
* @param l The callback that will be run
*/
public void setOnCompletionListener(OnCompletionListener l)
{
mOnCompletionListener = l;
}
/**
* Register a callback to be invoked when an error occurs
* during playback or setup. If no listener is specified,
* or if the listener returned false, VideoView will inform
* the user of any errors.
*
* @param l The callback that will be run
*/
public void setOnErrorListener(OnErrorListener l)
{
mOnErrorListener = l;
}
/**
* Register a callback to be invoked when an informational event
* occurs during playback or setup.
*
* @param l The callback that will be run
*/
public void setOnInfoListener(OnInfoListener l) {
mOnInfoListener = l;
}
    // Surface lifecycle: created -> (re)open the video; changed -> start playback once
    // the surface matches the video size and a start was requested; destroyed -> tear
    // down the player (the surface is unusable after this callback returns).
    SurfaceHolder.Callback mSHCallback = new SurfaceHolder.Callback()
    {
        public void surfaceChanged(SurfaceHolder holder, int format,
                                    int w, int h)
        {
            mSurfaceWidth = w;
            mSurfaceHeight = h;
            boolean isValidState =  (mTargetState == STATE_PLAYING);
            boolean hasValidSize = (mVideoWidth == w && mVideoHeight == h);
            if (mMediaPlayer != null && isValidState && hasValidSize) {
                if (mSeekWhenPrepared != 0) {
                    seekTo(mSeekWhenPrepared);
                }
                start();
            }
        }

        public void surfaceCreated(SurfaceHolder holder)
        {
            mSurfaceHolder = holder;
            openVideo();
        }

        public void surfaceDestroyed(SurfaceHolder holder)
        {
            // after we return from this we can't use the surface any more
            mSurfaceHolder = null;
            if (mMediaController != null) mMediaController.hide();
            release(true);
        }
    };
/*
* release the media player in any state
*/
private void release(boolean cleartargetstate) {
if (mMediaPlayer != null) {
mMediaPlayer.reset();
mMediaPlayer.release();
mMediaPlayer = null;
mCurrentState = STATE_IDLE;
if (cleartargetstate) {
mTargetState = STATE_IDLE;
}
}
}
@Override
public boolean onTouchEvent(MotionEvent ev) {
if (isInPlaybackState() && mMediaController != null) {
toggleMediaControlsVisiblity();
}
return false;
}
@Override
public boolean onTrackballEvent(MotionEvent ev) {
if (isInPlaybackState() && mMediaController != null) {
toggleMediaControlsVisiblity();
}
return false;
}
    /**
     * Handles media keys while in a playback state: headset hook / play-pause toggles,
     * play starts, stop/pause pauses, and any other supported key simply toggles the
     * controller. System keys (back, volume, menu, call) fall through to the superclass.
     */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event)
    {
        boolean isKeyCodeSupported = keyCode != KeyEvent.KEYCODE_BACK &&
                                     keyCode != KeyEvent.KEYCODE_VOLUME_UP &&
                                     keyCode != KeyEvent.KEYCODE_VOLUME_DOWN &&
                                     keyCode != KeyEvent.KEYCODE_VOLUME_MUTE &&
                                     keyCode != KeyEvent.KEYCODE_MENU &&
                                     keyCode != KeyEvent.KEYCODE_CALL &&
                                     keyCode != KeyEvent.KEYCODE_ENDCALL;
        if (isInPlaybackState() && isKeyCodeSupported && mMediaController != null) {
            if (keyCode == KeyEvent.KEYCODE_HEADSETHOOK ||
                    keyCode == KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE) {
                // Toggle: show controls when pausing, hide them when resuming
                if (mMediaPlayer.isPlaying()) {
                    pause();
                    mMediaController.show();
                } else {
                    start();
                    mMediaController.hide();
                }
                return true;
            } else if (keyCode == KeyEvent.KEYCODE_MEDIA_PLAY) {
                if (!mMediaPlayer.isPlaying()) {
                    start();
                    mMediaController.hide();
                }
                return true;
            } else if (keyCode == KeyEvent.KEYCODE_MEDIA_STOP
                    || keyCode == KeyEvent.KEYCODE_MEDIA_PAUSE) {
                if (mMediaPlayer.isPlaying()) {
                    pause();
                    mMediaController.show();
                }
                return true;
            } else {
                toggleMediaControlsVisiblity();
            }
        }

        return super.onKeyDown(keyCode, event);
    }
private void toggleMediaControlsVisiblity() {
if (mMediaController.isShowing()) {
mMediaController.hide();
} else {
mMediaController.show();
}
}
public void start() {
if (isInPlaybackState()) {
mMediaPlayer.start();
mCurrentState = STATE_PLAYING;
}
mTargetState = STATE_PLAYING;
}
public void pause() {
if (isInPlaybackState()) {
if (mMediaPlayer.isPlaying()) {
mMediaPlayer.pause();
mCurrentState = STATE_PAUSED;
}
}
mTargetState = STATE_PAUSED;
}
    /** Releases the player but keeps the target state, so resume() can pick up again. */
    public void suspend() {
        release(false);
    }

    /** Re-opens the video after a suspend (requires URI and surface to be available). */
    public void resume() {
        openVideo();
    }
public int getDuration() {
if (isInPlaybackState()) {
return mMediaPlayer.getDuration();
}
return -1;
}
public int getCurrentPosition() {
if (isInPlaybackState()) {
return mMediaPlayer.getCurrentPosition();
}
return 0;
}
public void seekTo(int msec) {
if (isInPlaybackState()) {
mMediaPlayer.seekTo(msec);
mSeekWhenPrepared = 0;
} else {
mSeekWhenPrepared = msec;
}
}
public boolean isPlaying() {
return isInPlaybackState() && mMediaPlayer.isPlaying();
}
public int getBufferPercentage() {
if (mMediaPlayer != null) {
return mCurrentBufferPercentage;
}
return 0;
}
private boolean isInPlaybackState() {
return (mMediaPlayer != null &&
mCurrentState != STATE_ERROR &&
mCurrentState != STATE_IDLE &&
mCurrentState != STATE_PREPARING);
}
    // Capability flags reported to the MediaController. All three are set to true in
    // onPrepared because the hidden Metadata query there is commented out.
    public boolean canPause() {
        return mCanPause;
    }

    public boolean canSeekBackward() {
        return mCanSeekBack;
    }

    public boolean canSeekForward() {
        return mCanSeekForward;
    }
}
| |
/*
Part of the G4P library for Processing
http://www.lagers.org.uk/g4p/index.html
http://sourceforge.net/projects/g4p/files/?source=navbar
Copyright (c) 2012 Peter Lager
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place, Suite 330,
Boston, MA 02111-1307 USA
*/
package g4p_controls;
import g4p_controls.HotSpot.HSalpha;
import processing.core.PApplet;
import processing.core.PImage;
import processing.event.MouseEvent;
/**
* Buttons created from this class have 2 or more toggle states. If the number of states
* is N then the button's value will be in the range 0 to N-1. Most toggle buttons will
* have just two states and these have values 0 and 1. <br>
* Clicking on the button advances the state by one, restarting at zero after the last
* state. <br>
* Each state must have its own 'picture' and the user must supply these as a tiled image
* where the pictures are tiled in 1D or 2D arrangement without 'empty space' around the
* tiles. <br>
* If for any reason the library is unable to use the specified graphics then it will
* provide a default two state toggle switch. <br>
* It is also possible to provide an over-button image set for when the mouse moves
* over the button - this is optional. <br>
* The button control will always be resized to suit the state picture size (tile size). <br>
* The mouse is considered to be over the button it its position is over an opaque pixel
* in the state picture. Since transparent pixels are not included then the button shape
* can be different for each state. <br>
*
*
*
* Three types of event can be generated :- <br>
* <b> GEvent.PRESSED GEvent.RELEASED GEvent.CLICKED </b><br>
*
* To simplify event handling the button only fires off CLICKED events
* when the mouse button is pressed and released over the button face
* (the default behaviour). <br>
*
* Using <pre>button1.fireAllEvents(true);</pre> enables the other 2 events
* for button <b>button1</b>. A PRESSED event is created if the mouse button
* is pressed down over the button face, the CLICKED event is then generated
* if the mouse button is released over the button face. Releasing the
* button off the button face creates a RELEASED event. This is included for
* completeness since it is unlikely you will need to detect these events
* for this type of control. <br>
*
*
* @author Peter Lager
*
*/
public class GImageToggleButton extends GAbstractControl {
	private static PImage toggle = null;               // lazily loaded default 2-state graphic
	private static final String TOGGLE = "toggle.png"; // filename of the default graphic

	protected int nbrStates = 2;    // number of toggle states (tiles in the image)
	protected int stateValue = 0;   // current state, in the range 0 .. nbrStates-1

	protected PImage[] offImage;    // one picture per state
	protected PImage[] overImage;   // optional mouse-over pictures (null if not supplied)

	protected int status;           // mouse interaction status (OFF/OVER/PRESS control)
	protected boolean reportAllButtonEvents = false; // fire PRESSED/RELEASED as well as CLICKED
	/**
	 * Create the library default image-toggle-button at the stated position. <br>
	 * Delegates to the master constructor with no image files, which forces the
	 * built-in two-state toggle graphic.
	 *
	 * @param theApplet the PApplet that will display this control
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1){
		this(theApplet, p0, p1, null, null, 1, 1);
	}
	/**
	 * Create an image-toggle-button from a single row of tiles, with no
	 * mouse-over pictures. Delegates to the master constructor.
	 *
	 * @param theApplet the PApplet that will display this control
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, int nbrCols){
		this(theApplet, p0, p1, offPicture, null, nbrCols, 1);
	}
	/**
	 * Create an image-toggle-button from a 2D tile grid, with no mouse-over
	 * pictures. Delegates to the master constructor.
	 *
	 * @param theApplet the PApplet that will display this control
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 * @param nbrRows number of tiles vertically
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, int nbrCols, int nbrRows){
		this(theApplet, p0, p1, offPicture, null, nbrCols, nbrRows);
	}
	/**
	 * Create an image-toggle-button from a single row of tiles, with optional
	 * mouse-over pictures. Delegates to the master constructor.
	 *
	 * @param theApplet the PApplet that will display this control
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param overPicture the filename of bitmap containing mouse-over button toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, String overPicture, int nbrCols){
		this(theApplet, p0, p1, offPicture, overPicture, nbrCols, 1);
	}
	/**
	 * Master constructor: create an image-toggle-button. <br>
	 * Loads the state pictures (falling back to the library's default two-state toggle
	 * when the supplied data is invalid), resizes the control to the tile size, sets up
	 * the alpha-aware hotspot, and registers the control with G4P.
	 *
	 * @param theApplet the PApplet that will display this control
	 * @param p0 horizontal position of the control
	 * @param p1 vertical position of the control
	 * @param offPicture the filename of bitmap containing toggle state pictures
	 * @param overPicture the filename of bitmap containing mouse-over button toggle state pictures
	 * @param nbrCols number of tiles horizontally
	 * @param nbrRows number of tiles vertically
	 */
	public GImageToggleButton(PApplet theApplet, float p0, float p1, String offPicture, String overPicture, int nbrCols, int nbrRows){
		super(theApplet, p0, p1);
		// Attempt to get off-control image data
		PImage temp = null;
		if(nbrCols < 1 || nbrRows < 1 || offPicture == null || null == (temp = ImageManager.loadImage(winApp, offPicture))){
			// Invalid data use default
			nbrStates = 2;
			if(toggle == null)
				toggle = ImageManager.loadImage(winApp, TOGGLE);
			offImage = ImageManager.makeTiles1D(winApp, toggle, 2, 1);
		}
		else {
			// Off-control image data valid
			nbrStates = nbrCols * nbrRows;
			offImage = ImageManager.makeTiles1D(winApp, temp, nbrCols, nbrRows);
			// Now check for over-control image data (optional; overImage stays null otherwise)
			if(overPicture != null && null != (temp = ImageManager.loadImage(winApp, overPicture))){
				overImage = ImageManager.makeTiles1D(winApp, temp, nbrCols, nbrRows);
			}
		}
		// The control will always be resized to match the image size
		resize(offImage[0].width, offImage[0].height);
		//========================================================================
		// Setup the hotspots: only opaque pixels of the current state picture count
		hotspots = new HotSpot[]{
				new HSalpha(1, 0, 0, offImage[stateValue], PApplet.CORNER)
		};
		//========================================================================
		z = Z_SLIPPY;
		// Now register control with applet
		createEventHandler(G4P.sketchApplet, "handleToggleButtonEvents",
				new Class<?>[]{ GImageToggleButton.class, GEvent.class },
				new String[]{ "button", "event" }
		);
		registeredMethods = DRAW_METHOD | MOUSE_METHOD;
		cursorOver = HAND;
		G4P.addControl(this);
	}
	/**
	 * Draws the picture for the current state, applying the control's rotation and
	 * alpha, and using the mouse-over picture set when the cursor is over the control.
	 */
	public void draw(){
		if(!visible) return;

		// Update buffer if invalid
		//updateBuffer();
		winApp.pushStyle();

		winApp.pushMatrix();
		// Perform the rotation
		winApp.translate(cx, cy);
		winApp.rotate(rotAngle);
		// Move matrix to line up with top-left corner
		winApp.translate(-halfWidth, -halfHeight);
		// Draw buffer
		winApp.imageMode(PApplet.CORNER);
		if(alphaLevel < 255)
			winApp.tint(TINT_FOR_ALPHA, alphaLevel);
		// Prefer the over-image set while hovered, when one was supplied
		if(status == OVER_CONTROL && overImage != null)
			winApp.image(overImage[stateValue], 0, 0);
		else
			winApp.image(offImage[stateValue], 0, 0);
		winApp.popMatrix();
		winApp.popStyle();
	}
	/**
	 *
	 * When a mouse button is clicked on a GImageToggleButton it generates the GEvent.CLICKED event. If
	 * you also want the button to generate GEvent.PRESSED and GEvent.RELEASED events
	 * then you need the following statement.<br>
	 * <pre>btnName.fireAllEvents(true); </pre><br>
	 * <pre>
	 * void handleToggleButtonEvents(GImageToggleButton button, GEvent event) {
	 *   if(button == btnName &amp;&amp; event == GEvent.CLICKED){
	 *     int buttonState = btnName.getState();
	 *   }
	 * }
	 * </pre> <br>
	 * Where <pre><b>btnName</b></pre> is the GImageToggleButton identifier (variable name) <br><br>
	 *
	 */
	public void mouseEvent(MouseEvent event){
		if(!visible || !enabled || !available) return;

		// Map the mouse position into this control's coordinate space
		calcTransformedOrigin(winApp.mouseX, winApp.mouseY);
		currSpot = whichHotSpot(ox, oy);
		if(currSpot >= 0 || focusIsWith == this)
			cursorIsOver = this;
		else if(cursorIsOver == this)
			cursorIsOver = null;

		switch(event.getAction()){
		case MouseEvent.PRESS:
			if(focusIsWith != this && currSpot >= 0 && z > focusObjectZ()){
				dragging = false;
				status = PRESS_CONTROL;
				takeFocus();
				if(reportAllButtonEvents)
					fireEvent(this, GEvent.PRESSED);
			}
			break;
		case MouseEvent.CLICK:
			// No need to test for isOver() since if the component has focus
			// and the mouse has not moved since MOUSE_PRESSED otherwise we
			// would not get the Java MouseEvent.MOUSE_CLICKED event
			if(focusIsWith == this){
				status = OFF_CONTROL;
				loseFocus(null);
				dragging = false;
				nextState();
				fireEvent(this, GEvent.CLICKED);
			}
			break;
		case MouseEvent.RELEASE:
			// if the mouse has moved then release focus otherwise
			// MOUSE_CLICKED will handle it
			if(focusIsWith == this && dragging){
				// Released over the button face counts as a click; off the face
				// it is only reported when fireAllEvents(true) was requested
				if(currSpot >= 0){
					nextState();
					fireEvent(this, GEvent.CLICKED);
				}
				else {
					if(reportAllButtonEvents){
						fireEvent(this, GEvent.RELEASED);
					}
				}
				dragging = false;
				loseFocus(null);
				status = OFF_CONTROL;
			}
			break;
		case MouseEvent.MOVE:
			// If dragged state will stay as PRESSED
			if(currSpot >= 0)
				status = OVER_CONTROL;
			else
				status = OFF_CONTROL;
			break;
		case MouseEvent.DRAG:
			dragging = (focusIsWith == this);
			break;
		}
	}
/**
* Advance to the next state and adjust the hotspot to use the current image
*/
private void nextState(){
stateValue++;
stateValue %= nbrStates;
hotspots[0].adjust(0,0,offImage[stateValue]);
}
	/**
	 * Get the current state value of the button (0 .. nbrStates-1).
	 * @deprecated use getState()
	 */
	@Deprecated
	public int stateValue(){
		return stateValue;
	}
/**
* Get the current state value of the button.
*/
public int getState(){
return stateValue;
}
/**
* Change the current toggle state. <br>
* If the parameter is not a valid toggle state value then it
* is ignored and the button's state value is unchanged.
* @deprecated use setState(int)
* @param newState
*/
@Deprecated
public void stateValue(int newState){
if(newState >= 0 && newState < nbrStates && newState != stateValue){
stateValue = newState;
hotspots[0].adjust(0,0,offImage[stateValue]);
bufferInvalid = true;
}
}
/**
* Change the current toggle state. <br>
* If the parameter is not a valid toggle state value then it
* is ignored and the button's state value is unchanged.
* @param newState
*/
public void setState(int newState){
if(newState >= 0 && newState < nbrStates && newState != stateValue){
stateValue = newState;
hotspots[0].adjust(0,0,offImage[stateValue]);
bufferInvalid = true;
}
}
/**
* If the parameter is true all 3 event types are generated, if false
* only CLICKED events are generated (default behaviour). <br>
* For this toggle control I can't see the need for anything but
* CLICKED events
* @param all
*/
public void fireAllEvents(boolean all){
reportAllButtonEvents = all;
}
}
| |
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
package com.sun.org.apache.bcel.internal.generic;
/* ====================================================================
* The Apache Software License, Version 1.1
*
* Copyright (c) 2001 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Apache" and "Apache Software Foundation" and
* "Apache BCEL" must not be used to endorse or promote products
* derived from this software without prior written permission. For
* written permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache",
* "Apache BCEL", nor may "Apache" appear in their name, without
* prior written permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
import java.io.*;
import com.sun.org.apache.bcel.internal.util.ByteSequence;
import com.sun.org.apache.bcel.internal.classfile.Utility;
import com.sun.org.apache.bcel.internal.Constants;
/**
 * Abstract super class for instructions dealing with local variables,
 * i.e. the xLOAD/xSTORE instruction families. Manages the choice between
 * the one-byte compact encoding (e.g. ILOAD_0), the normal two-byte
 * encoding, and the WIDE-prefixed four-byte encoding.
 *
 * @author <A HREF="mailto:markus.dahm@berlin.de">M. Dahm</A>
 */
public abstract class LocalVariableInstruction extends Instruction
    implements TypedInstruction, IndexedInstruction {
    protected int n = -1; // index of referenced variable
    private short c_tag = -1; // compact version, such as ILOAD_0
    private short canon_tag = -1; // canonical tag such as ILOAD

    // A WIDE prefix is needed when the index does not fit in one unsigned byte.
    private final boolean wide() { return n > Constants.MAX_BYTE; }

    /**
     * Constructor used by subclasses that know their canonical and compact
     * opcodes but not yet the variable index.
     * tag and length are defined in readInstruction and initFromFile, respectively.
     */
    LocalVariableInstruction(short canon_tag, short c_tag) {
        super();
        this.canon_tag = canon_tag;
        this.c_tag = c_tag;
    }

    /**
     * Empty constructor needed for the Class.newInstance() statement in
     * Instruction.readInstruction(). Also used by IINC()!
     */
    LocalVariableInstruction() {
    }

    /**
     * @param opcode Instruction opcode
     * @param c_tag Instruction number for compact version, ALOAD_0, e.g.
     * @param n local variable index (unsigned short)
     */
    protected LocalVariableInstruction(short opcode, short c_tag, int n) {
        super(opcode, (short)2);
        this.c_tag = c_tag;
        canon_tag = opcode;
        setIndex(n); // may rewrite opcode/length to the compact or wide form
    }

    /**
     * Dump instruction as byte code to stream out.
     * Emits a WIDE prefix and a two-byte index when the index is large;
     * the compact forms (length == 1) carry the index in the opcode itself.
     * @param out Output stream
     */
    public void dump(DataOutputStream out) throws IOException {
        if(wide()) // Need WIDE prefix ?
            out.writeByte(Constants.WIDE);
        out.writeByte(opcode);
        if(length > 1) { // Otherwise ILOAD_n, instruction, e.g.
            if(wide())
                out.writeShort(n);
            else
                out.writeByte(n);
        }
    }

    /**
     * Long output format:
     *
     * &lt;name of opcode&gt; "["&lt;opcode number&gt;"]"
     * "("&lt;length of instruction&gt;")" "&lt;"&lt;local variable index&gt;"&gt;"
     *
     * The compact forms (ILOAD_0 .. ASTORE_3) encode the index in the
     * opcode, so no separate index is printed for them.
     *
     * @param verbose long/short format switch
     * @return mnemonic for instruction
     */
    public String toString(boolean verbose) {
        if(((opcode >= Constants.ILOAD_0) &&
            (opcode <= Constants.ALOAD_3)) ||
           ((opcode >= Constants.ISTORE_0) &&
            (opcode <= Constants.ASTORE_3)))
            return super.toString(verbose);
        else
            return super.toString(verbose) + " " + n;
    }

    /**
     * Read needed data (e.g. index) from file.
     * For compact forms the index is recovered from the opcode value
     * itself (the four variants of each family are consecutive).
     * PRE: (ILOAD &lt;= tag &lt;= ALOAD_3) || (ISTORE &lt;= tag &lt;= ASTORE_3)
     */
    protected void initFromFile(ByteSequence bytes, boolean wide)
        throws IOException
    {
        if(wide) {
            n = bytes.readUnsignedShort();
            length = 4;
        } else if(((opcode >= Constants.ILOAD) &&
                   (opcode <= Constants.ALOAD)) ||
                  ((opcode >= Constants.ISTORE) &&
                   (opcode <= Constants.ASTORE))) {
            n = bytes.readUnsignedByte();
            length = 2;
        } else if(opcode <= Constants.ALOAD_3) { // compact load instruction such as ILOAD_2
            n = (opcode - Constants.ILOAD_0) % 4;
            length = 1;
        } else { // Assert ISTORE_0 <= tag <= ASTORE_3
            n = (opcode - Constants.ISTORE_0) % 4;
            length = 1;
        }
    }

    /**
     * @return local variable index referred by this instruction.
     */
    public final int getIndex() { return n; }

    /**
     * Set the local variable index and re-derive opcode and length:
     * indices 0..3 use the compact one-byte form, larger indices the
     * canonical form (with WIDE prefix when above MAX_BYTE).
     * @throws ClassGenException if the index is negative or above MAX_SHORT
     */
    public void setIndex(int n) {
        if((n < 0) || (n > Constants.MAX_SHORT))
            throw new ClassGenException("Illegal value: " + n);
        this.n = n;
        if(n >= 0 && n <= 3) { // Use more compact instruction xLOAD_n
            opcode = (short)(c_tag + n);
            length = 1;
        } else {
            opcode = canon_tag;
            if(wide()) // Need WIDE prefix ?
                length = 4;
            else
                length = 2;
        }
    }

    /** @return canonical tag for instruction, e.g., ALOAD for ALOAD_0
     */
    public short getCanonicalTag() {
        return canon_tag;
    }

    /**
     * Returns the type associated with the instruction -
     * in case of ALOAD or ASTORE Type.OBJECT is returned.
     * This is just a bit incorrect, because ALOAD and ASTORE
     * may work on every ReferenceType (including Type.NULL) and
     * ASTORE may even work on a ReturnaddressType .
     * @return type associated with the instruction
     * @throws ClassGenException if the canonical tag is not a known
     *         load/store opcode
     */
    public Type getType(ConstantPoolGen cp) {
        switch(canon_tag) {
        case Constants.ILOAD: case Constants.ISTORE:
            return Type.INT;
        case Constants.LLOAD: case Constants.LSTORE:
            return Type.LONG;
        case Constants.DLOAD: case Constants.DSTORE:
            return Type.DOUBLE;
        case Constants.FLOAD: case Constants.FSTORE:
            return Type.FLOAT;
        case Constants.ALOAD: case Constants.ASTORE:
            return Type.OBJECT;
        default: throw new ClassGenException("Oops: unknown case in switch" + canon_tag);
        }
    }
}
| |
package com.baidu.cloud.push.transform;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;
import com.baidu.cloud.core.annotation.CollectionRestrict;
import com.baidu.cloud.core.annotation.HttpParamKeyName;
import com.baidu.cloud.core.annotation.HttpPathKeyName;
import com.baidu.cloud.core.annotation.R;
import com.baidu.cloud.core.annotation.RangeRestrict;
import com.baidu.cloud.core.annotation.RegexRestrict;
import com.baidu.cloud.core.utility.StringUtility;
import com.baidu.cloud.push.exception.PushClientException;
import com.baidu.cloud.push.model.PushRequest;
/**
 * Validates a {@link PushRequest} against the restriction annotations
 * declared on its fields (and its direct superclass's fields):
 * required-ness ({@code HttpPathKeyName}/{@code HttpParamKeyName} with
 * {@code R.REQUIRE}), value/length ranges ({@code RangeRestrict}),
 * regular expressions ({@code RegexRestrict}) and collection sizes
 * ({@code CollectionRestrict}).
 */
public class PushRestRequestChecker {

    /**
     * Validate every annotated field of the request.
     *
     * @param request the request whose fields are to be checked
     * @return true if all restrictions pass
     * @throws PushClientException on the first violated restriction
     */
    public boolean validate(PushRequest request) throws PushClientException {
        // Collect declared fields of the concrete class and its direct superclass.
        List<Field> fieldList = new LinkedList<Field>();
        fieldList.addAll(Arrays.asList(request.getClass().getDeclaredFields()));
        fieldList.addAll(Arrays.asList(request.getClass().getSuperclass().getDeclaredFields()));
        for (Field field : fieldList) {
            try {
                field.setAccessible(true);
                Object obj = field.get(request);
                // Path parameters: only null-ness of required fields is checked.
                if (field.isAnnotationPresent(HttpPathKeyName.class)) {
                    if (obj == null
                            && field.getAnnotation(HttpPathKeyName.class).param() == R.REQUIRE) {
                        throw new PushClientException(
                                field.getName() + " is null, default require");
                    }
                }
                // Query/body parameters: null-ness plus per-type restrictions.
                if (field.isAnnotationPresent(HttpParamKeyName.class)) {
                    if (obj == null) {
                        if (field.getAnnotation(HttpParamKeyName.class).param() == R.REQUIRE) {
                            throw new PushClientException(
                                    field.getName() + " is null, default require");
                        }
                    } else {
                        validateParamValue(field, obj);
                    }
                }
            } catch (IllegalArgumentException e) {
                // Preserved from the original implementation: reflection failures
                // are reported but do not abort validation of the remaining fields.
                e.printStackTrace();
            } catch (IllegalAccessException e) {
                // Cannot normally happen after setAccessible(true); see above.
                e.printStackTrace();
            }
        }
        return true;
    }

    /**
     * Dispatch the type-specific restriction checks for a non-null
     * parameter value.
     *
     * @throws PushClientException on the first violated restriction, or when
     *         the field's declared type is not supported
     */
    @SuppressWarnings("unchecked")
    private void validateParamValue(Field field, Object obj) throws PushClientException {
        Class<?> zlass = field.getType();
        if (zlass.equals(Long.class) || "long".equalsIgnoreCase(zlass.getName())) {
            checkValueRange(field, (Long) obj);
        } else if (zlass.equals(Integer.class) || "int".equalsIgnoreCase(zlass.getName())) {
            checkValueRange(field, (Integer) obj);
        } else if (zlass.equals(String.class)) {
            if (StringUtility.isNull((String) obj)) {
                throw new PushClientException(
                        field.getName() + " is Zero Length String");
            }
            checkStringRestricts(field, (String) obj);
        } else if (zlass.equals(java.util.Date.class)) {
            // no restrictions defined for dates
        } else if (zlass.isEnum()) {
            // no restrictions defined for enums
        } else if (zlass.equals(java.util.List.class)) {
            java.util.List<Object> list = (java.util.List<Object>) obj;
            checkCollectionSize(field, list.size());
            for (Object o : list) {
                if (o instanceof String) {
                    checkStringRestricts(field, (String) o);
                } else if (o instanceof Integer) {
                    checkValueRange(field, (Integer) o);
                } else if (o instanceof Long) {
                    checkValueRange(field, (Long) o);
                }
                // other element types carry no restrictions
            }
        } else if (zlass.equals(java.util.Set.class)) {
            java.util.Set<String> set = (java.util.Set<String>) obj;
            checkCollectionSize(field, set.size());
            for (String ele : set) {
                checkStringRestricts(field, ele);
            }
        } else {
            throw new PushClientException("no " + field.getName() + " field type");
        }
    }

    /**
     * Check a numeric value against the field's RangeRestrict bounds,
     * if present. (minLength/maxLength are interpreted as value bounds.)
     */
    private void checkValueRange(Field field, long value) throws PushClientException {
        if (field.isAnnotationPresent(RangeRestrict.class)) {
            RangeRestrict range = field.getAnnotation(RangeRestrict.class);
            if (value < range.minLength() || value > range.maxLength()) {
                throw new PushClientException(
                        field.getName() + " value isn't in range [" + range.minLength()
                                + ", " + range.maxLength() + "].");
            }
        }
    }

    /**
     * Check a string's length against RangeRestrict and its content
     * against RegexRestrict, where those annotations are present.
     */
    private void checkStringRestricts(Field field, String value) throws PushClientException {
        if (field.isAnnotationPresent(RangeRestrict.class)) {
            RangeRestrict range = field.getAnnotation(RangeRestrict.class);
            if (value.length() < range.minLength() || value.length() > range.maxLength()) {
                throw new PushClientException(
                        field.getName() + " length isn't in range [" + range.minLength()
                                + ", " + range.maxLength() + "].");
            }
        }
        if (field.isAnnotationPresent(RegexRestrict.class)) {
            RegexRestrict regex = field.getAnnotation(RegexRestrict.class);
            if (!Pattern.matches(regex.regex(), value)) {
                throw new PushClientException(
                        field.getName() + " doesn't meet the regular expression " + regex.regex());
            }
        }
    }

    /**
     * Reject empty collections, then check the size against the field's
     * CollectionRestrict bounds, if present.
     */
    private void checkCollectionSize(Field field, int size) throws PushClientException {
        if (size == 0) {
            throw new PushClientException(
                    field.getName() + " size is zero.");
        }
        if (field.isAnnotationPresent(CollectionRestrict.class)) {
            CollectionRestrict rcollection = field.getAnnotation(CollectionRestrict.class);
            if (size < rcollection.minLength() || size > rcollection.maxLength()) {
                throw new PushClientException(
                        field.getName() + " size isn't in range [" + rcollection.minLength()
                                + ", " + rcollection.maxLength() + "].");
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.datasource.jaxb;
import java.util.ArrayList;
import java.util.List;
import javax.xml.namespace.NamespaceContext;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * In some cases, we want to marshal an array of objects as a series
 * of elements instead of a single element containing items.
 *
 * Unfortunately there is no way to tell the JAXB marshal method to
 * marshal an array or List as a series of elements.
 *
 * Instead, we use the JAXB marshal method to output a single element
 * with items and then filter the write events to transform it into
 * a series of elements.
 *
 * Thus
 * <myElement>
 * <item>Hello</item>
 * <item>World</item>
 * </myElement>
 * Becomes
 * <myElement>Hello</myElement>
 * <myElement>World</myElement>
 *
 * Special care is taken to ensure that namespace declarations are property preserved.
 *
 * Implementation notes: while reading the outermost element (isBuffering)
 * the start-element and namespace events are buffered as string "commands";
 * each time a depth-2 child starts, the buffered commands are replayed so
 * the child is emitted under the outer element's name. The outer element's
 * own start/end tags are suppressed.
 */
public class XMLStreamWriterArrayFilter implements XMLStreamWriter {
    private static final Log log = LogFactory.getLog(XMLStreamWriterArrayFilter.class);
    // Underlying writer that all (non-suppressed) events are forwarded to.
    XMLStreamWriter writer;
    // Current element nesting depth; 1 = the outer (suppressed) element.
    int depth = 0;
    // True until the first child element is seen.
    boolean isBuffering = true;
    // Buffered commands: each entry is [methodName, arg1, arg2, ...].
    List<List<String>> bufferedCommands = new ArrayList<List<String>>();
    // The extensive debug was used during development but is not necessary in production.
    // Change DEBUG_ENABLED = log.isDebugEnabled();
    // to get extensive debug output.
    private static boolean DEBUG_ENABLED = false;
    private static final String XSI_URI = "http://www.w3.org/2001/XMLSchema-instance";

    public XMLStreamWriterArrayFilter(XMLStreamWriter writer)
        throws XMLStreamException {
        super();
        if (DEBUG_ENABLED) {
            log.debug("XMLStreamWriterArrayFilter " + writer);
        }
        this.writer = writer;
    }

    public void close() throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("close");
        }
        if (writer != null) {
            writer.close();
        }
    }

    public void flush() throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("flush");
        }
        if (writer != null) {
            writer.flush();
        }
    }

    public NamespaceContext getNamespaceContext() {
        if (DEBUG_ENABLED) {
            log.debug("getNamespaceContext");
        }
        return writer.getNamespaceContext();
    }

    public String getPrefix(String arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("getPrefix " + arg0);
        }
        return writer.getPrefix(arg0);
    }

    public Object getProperty(String arg0) throws IllegalArgumentException {
        if (DEBUG_ENABLED) {
            log.debug("getProperty " + arg0);
        }
        return writer.getProperty(arg0);
    }

    public void setDefaultNamespace(String arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("setDefaultNamespace " + arg0);
        }
        writer.setDefaultNamespace(arg0);
    }

    public void setNamespaceContext(NamespaceContext arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("setNamespaceContext " + arg0);
        }
        writer.setNamespaceContext(arg0);
    }

    public void setPrefix(String arg0, String arg1) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("setPrefix " + arg0 + " " + arg1);
        }
        writer.setPrefix(arg0, arg1);
    }

    public void writeAttribute(String prefix, String uri, String localName, String value)
        throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeAttribute " + prefix + " " + uri + " " + localName + " " + value);
        }
        writer.writeAttribute(prefix, uri, localName, value);
    }

    public void writeAttribute(String arg0, String arg1, String arg2)
        throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeAttribute " + arg0 + " " + arg1 + " " + arg2 );
        }
        writer.writeAttribute(arg0, arg1, arg2);
    }

    public void writeAttribute(String arg0, String arg1) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeAttribute " + arg0 + " " + arg1);
        }
        writer.writeAttribute(arg0, arg1);
    }

    public void writeCData(String arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeCData " + arg0 );
        }
        writer.writeCData(arg0);
    }

    public void writeCharacters(char[] arg0, int arg1, int arg2)
        throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeCharacters " + arg0 + " " + arg1 + " " + arg2 );
        }
        writer.writeCharacters(arg0, arg1, arg2);
    }

    public void writeCharacters(String arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeCharacters " + arg0);
        }
        writer.writeCharacters(arg0);
    }

    public void writeComment(String arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeComment " + arg0 );
        }
        writer.writeComment(arg0);
    }

    // Default-namespace declarations on the suppressed top element are
    // dropped; inside items (depth >= 2) they pass through unchanged.
    public void writeDefaultNamespace(String uri) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeDefaultNamespace (" + uri + ")" );
        }
        if (isBuffering) {
            if (DEBUG_ENABLED) {
                log.debug("  Supress writeDefaultNamespace on top element");
            }
        } else if (depth >= 2) {
            writer.writeDefaultNamespace(uri);
        }
    }

    public void writeDTD(String arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeDTD " + arg0 );
        }
        writer.writeDTD(arg0);
    }

    // Empty elements are routed through this class's own start/end methods
    // (rather than the delegate) so depth tracking stays correct.
    public void writeEmptyElement(String arg0, String arg1, String arg2)
        throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeEmptyElement" + arg0 + " " + arg1 + " " + arg2 );
        }
        writeStartElement(arg0, arg1, arg2);
        writeEndElement();
    }

    public void writeEmptyElement(String arg0, String arg1) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeEmptyElement " + arg0 + " " + arg1);
        }
        writeStartElement(arg0, arg1);
        writeEndElement();
    }

    public void writeEmptyElement(String arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeEmptyElement " + arg0 );
        }
        writeStartElement(arg0);
        writeEndElement();
    }

    public void writeEndDocument() throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeEndDocument " );
        }
        writer.writeEndDocument();
    }

    // The end tag of the suppressed top element (depth returning to 0) is
    // not written; a flush is issued instead so buffered output is pushed.
    public void writeEndElement() throws XMLStreamException {
        depth--;
        if (DEBUG_ENABLED) {
            log.debug("writeEndElement " );
        }
        if (depth != 0) {
            writer.writeEndElement();
        } else {
            if (DEBUG_ENABLED) {
                log.debug("  Suppress writeEndElement for the top element..but performing a flush");
            }
            writer.flush();
        }
    }

    public void writeEntityRef(String arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeEntityRef " + arg0 );
        }
        writer.writeEntityRef(arg0);
    }

    public void writeNamespace(String prefix, String uri) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeNamespace (" + prefix + ") (" + uri + ")" );
        }
        // The namespaces are buffered while reading the root element.
        // They will be written out by writeCommands when the child
        // elements are encountered.
        if (isBuffering) {
            if ("".equals(prefix)) {
                if (DEBUG_ENABLED) {
                    log.debug("  Supress default write namespace on top element to avoid collision");
                }
            } else {
                List<String> command = new ArrayList<String>();
                command.add("writeNamespace");
                command.add(prefix);
                command.add(uri);
                bufferCommand(command);
            }
        } else if (depth == 2 && XSI_URI.equals(uri) && "xsi".equals(prefix)) {
            // The operation element already has an xsi namespace declaration;
            // thus this one is redundant and only makes the message larger.
            if (DEBUG_ENABLED) {
                log.debug("  Supressing xsi namespace declaration on array item");
            }
        } else if (depth >= 2) {
            writer.writeNamespace(prefix, uri);
        }
    }

    public void writeProcessingInstruction(String arg0, String arg1)
        throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeProcessingInstruction " + arg0 + " " + arg1 );
        }
        writer.writeProcessingInstruction(arg0, arg1);
    }

    public void writeProcessingInstruction(String arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeProcessingInstruction " + arg0);
        }
        writer.writeProcessingInstruction(arg0);
    }

    public void writeStartDocument() throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeStartDocument " );
        }
        writer.writeStartDocument();
    }

    public void writeStartDocument(String arg0, String arg1) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeStartDocument " + arg0 + " " + arg1 );
        }
        writer.writeStartDocument(arg0, arg1);
    }

    public void writeStartDocument(String arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeStartDocument " + arg0 );
        }
        writer.writeStartDocument(arg0);
    }

    public void writeStartElement(String arg0, String arg1, String arg2)
        throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeStartElement " + arg0 + " " + arg1 + " " + arg2 );
        }
        depth++;
        if (depth > 1) {
            isBuffering = false;
        }
        // The start element is buffered when we encounter the
        // outermost element. The buffered event is written
        // (instead of the child element tag).
        if (isBuffering) {
            List<String> command = new ArrayList<String>();
            command.add("writeStartElement");
            command.add(arg0);
            command.add(arg1);
            command.add(arg2);
            bufferCommand(command);
        } else if (depth == 2) {
            writeCommands();
        } else {
            writer.writeStartElement(arg0, arg1, arg2);
        }
    }

    public void writeStartElement(String arg0, String arg1) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeStartElement " + arg0 + " " + arg1);
        }
        depth++;
        if (depth > 1) {
            isBuffering = false;
        }
        // The start element is buffered when we encounter the
        // outermost element. The buffered event is written
        // (instead of the child element tag).
        if (isBuffering) {
            List<String> command = new ArrayList<String>();
            command.add("writeStartElement");
            command.add(arg0);
            command.add(arg1);
            bufferCommand(command);
        } else if (depth == 2) {
            writeCommands();
        } else {
            writer.writeStartElement(arg0, arg1);
        }
    }

    public void writeStartElement(String arg0) throws XMLStreamException {
        if (DEBUG_ENABLED) {
            log.debug("writeStartElement " + arg0 );
        }
        depth++;
        if (depth > 1) {
            isBuffering = false;
        }
        // The start element is buffered when we encounter the
        // outermost element. The buffered event is written
        // (instead of the child element tag).
        if (isBuffering) {
            List<String> command = new ArrayList<String>();
            command.add("writeStartElement");
            command.add(arg0);
            bufferCommand(command);
        } else if (depth == 2) {
            writeCommands();
        } else {
            writer.writeStartElement(arg0);
        }
    }

    // Record a buffered event for later replay by writeCommands().
    void bufferCommand(List<String> command) {
        if (DEBUG_ENABLED) {
            log.debug("  Buffering command " + command);
        }
        bufferedCommands.add(command);
    }

    // Replay the buffered top-element start/namespace events; the argument
    // count of each writeStartElement command selects the right overload.
    // Note: the buffer is intentionally NOT cleared — it is replayed once
    // per array item.
    void writeCommands() throws XMLStreamException {
        for (int i=0;i<bufferedCommands.size(); i++) {
            List<String> command = bufferedCommands.get(i);
            String m = command.get(0);
            if ("writeNamespace".equals(m)) {
                if (DEBUG_ENABLED) {
                    log.debug("  Sending buffered writeNamespace " + command.get(1) +
                              " " + command.get(2));
                }
                writer.writeNamespace(command.get(1), command.get(2));
            } else if ("writeStartElement".equals(m)) {
                int len = command.size();
                if (len == 2) {
                    if (DEBUG_ENABLED) {
                        log.debug("  Sending buffered writeStartElement " + command.get(1));
                    }
                    writer.writeStartElement(command.get(1));
                } else if (len ==3) {
                    if (DEBUG_ENABLED) {
                        log.debug("  Sending buffered writeStartElement " + command.get(1) +
                                  " " + command.get(2));
                    }
                    writer.writeStartElement(command.get(1), command.get(2));
                } else if (len == 4) {
                    if (DEBUG_ENABLED) {
                        log.debug("  Sending buffered writeStartElement " + command.get(1) +
                                  " " + command.get(2) + " " + command.get(3));
                    }
                    writer.writeStartElement(command.get(1), command.get(2), command.get(3));
                }
            }
        }
    }
}
| |
/*
* Copyright 2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.powermock.modules.test.junit4.rule.xstream;
import org.junit.Rule;
import org.junit.Test;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.rule.PowerMockRule;
import org.powermock.reflect.Whitebox;
import org.powermock.reflect.exceptions.ConstructorNotFoundException;
import samples.Service;
import samples.expectnew.ExpectNewDemo;
import samples.expectnew.ExpectNewServiceUser;
import samples.expectnew.VarArgsConstructorDemo;
import samples.newmocking.MyClass;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import static org.mockito.Mockito.doNothing;
import static org.powermock.api.mockito.PowerMockito.mock;
import static org.powermock.api.mockito.PowerMockito.*;
import static org.powermock.api.mockito.PowerMockito.when;
import static org.powermock.api.support.membermodification.MemberMatcher.constructor;
/**
* Test class to demonstrate new instance mocking using whenConstructionOf(..).
*
*/
@PrepareForTest({ MyClass.class, ExpectNewDemo.class, DataInputStream.class })
public class WhenNewTest {
@Rule
public PowerMockRule powerMockRule = new PowerMockRule(); // enables PowerMock instrumentation without using the PowerMockRunner
@Test
public void testNewWithCheckedException() throws Exception {
    ExpectNewDemo demo = new ExpectNewDemo();
    final String failureMessage = "testing checked exception";
    // Stub the construction of MyClass so it throws a checked exception.
    whenNew(MyClass.class).withNoArguments().thenThrow(new IOException(failureMessage));
    try {
        demo.throwExceptionAndWrapInRunTimeWhenInvoction();
        fail("Should throw a checked Exception!");
    } catch (RuntimeException e) {
        // The checked IOException must have been wrapped.
        assertTrue(e.getCause() instanceof IOException);
        assertEquals(failureMessage, e.getMessage());
    }
    verifyNew(MyClass.class).withNoArguments();
}
@Test
public void testGetMessage() throws Exception {
    ExpectNewDemo demo = new ExpectNewDemo();
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    String expected = "Hello altered World";
    when(constructed.getMessage()).thenReturn(expected);
    String actual = demo.getMessage();
    verify(constructed).getMessage();
    verifyNew(MyClass.class).withNoArguments();
    assertEquals("Expected and actual did not match", expected, actual);
}
@Test
public void testGetMessageWithArgument() throws Exception {
    ExpectNewDemo demo = new ExpectNewDemo();
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    String expected = "Hello altered World";
    when(constructed.getMessage("test")).thenReturn(expected);
    String actual = demo.getMessageWithArgument();
    verify(constructed).getMessage("test");
    verifyNew(MyClass.class).withNoArguments();
    assertEquals("Expected and actual did not match", expected, actual);
}
@Test
public void testInvokeVoidMethod() throws Exception {
    ExpectNewDemo demo = new ExpectNewDemo();
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    // The mocked void method must be a no-op.
    doNothing().when(constructed).voidMethod();
    demo.invokeVoidMethod();
    verify(constructed).voidMethod();
    verifyNew(MyClass.class).withNoArguments();
}
@Test
public void testNewWithRuntimeException() throws Exception {
    ExpectNewDemo demo = new ExpectNewDemo();
    final String failureMessage = "testing";
    whenNew(MyClass.class).withNoArguments().thenThrow(new RuntimeException(failureMessage));
    try {
        demo.throwExceptionWhenInvoction();
        fail("Should throw RuntimeException!");
    } catch (RuntimeException e) {
        assertEquals(failureMessage, e.getMessage());
    }
    verifyNew(MyClass.class).withNoArguments();
}
@Test
public void testMultipleNew() throws Exception {
    ExpectNewDemo demo = new ExpectNewDemo();
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    when(constructed.getMessage()).thenReturn("Hello");
    final String result = demo.multipleNew();
    // Two constructions, two getMessage() calls expected.
    verify(constructed, times(2)).getMessage();
    verifyNew(MyClass.class, times(2)).withNoArguments();
    assertEquals("HelloHello", result);
}
@Test
public void testSimpleMultipleNew() throws Exception {
    ExpectNewDemo demo = new ExpectNewDemo();
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    demo.simpleMultipleNew();
    verifyNew(MyClass.class, times(3)).withNoArguments();
}
@Test
public void testSimpleMultipleNew_tooManyTimesExpected() throws Exception {
    ExpectNewDemo demo = new ExpectNewDemo();
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    demo.simpleMultipleNew();
    try {
        // Only 3 constructions actually happen; expecting 4 must fail.
        verifyNew(MyClass.class, times(4)).withNoArguments();
        fail("Should throw AssertionError.");
    } catch (AssertionError e) {
        assertEquals("samples.newmocking.MyClass();\nWanted 4 times but was 3 times.", e.getMessage());
    }
}
@Test
public void testSimpleMultipleNew_tooFewTimesExpected() throws Exception {
    ExpectNewDemo demo = new ExpectNewDemo();
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    demo.simpleMultipleNew();
    try {
        // 3 constructions actually happen; expecting exactly 1 must fail.
        verifyNew(MyClass.class, times(1)).withNoArguments();
        fail("Should throw AssertionError.");
    } catch (AssertionError e) {
        assertEquals("samples.newmocking.MyClass();\nWanted 1 time but was 3 times.", e.getMessage());
    }
}
/**
 * Verifies that the issue
 * http://code.google.com/p/powermock/issues/detail?id=10 is solved.
 */
@Test
public void testSimpleMultipleNewPrivate_tooFewTimesExpected() throws Exception {
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    // Drive the private method via reflection; it constructs MyClass three times.
    Whitebox.invokeMethod(new ExpectNewDemo(), "simpleMultipleNewPrivate");
    try {
        verifyNew(MyClass.class, times(2)).withNoArguments();
        fail("Should throw AssertionError.");
    } catch (AssertionError e) {
        assertEquals("samples.newmocking.MyClass();\nWanted 2 times but was 3 times.", e.getMessage());
    }
}
@Test
public void testSimpleMultipleNewPrivate_ok() throws Exception {
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    // Drive the private method via reflection; it constructs MyClass three times.
    Whitebox.invokeMethod(new ExpectNewDemo(), "simpleMultipleNewPrivate");
    verifyNew(MyClass.class, times(3)).withNoArguments();
}
@Test
public void testSimpleSingleNew_withOnce() throws Exception {
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    new ExpectNewDemo().simpleSingleNew();
    // Default verification mode is exactly one construction.
    verifyNew(MyClass.class).withNoArguments();
}
@Test
public void testSimpleSingleNew_withAtLeastOnce() throws Exception {
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    new ExpectNewDemo().simpleSingleNew();
    // atLeastOnce() is satisfied by the single construction.
    verifyNew(MyClass.class, atLeastOnce()).withNoArguments();
}
@Test
public void testSimpleMultipleNew_withAtLeastOnce() throws Exception {
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    new ExpectNewDemo().simpleMultipleNew();
    // atLeastOnce() is satisfied by the three constructions.
    verifyNew(MyClass.class, atLeastOnce()).withNoArguments();
}
//
@Test
public void testAlternativeFlow() throws Exception {
    // Force the DataInputStream constructor to fail so the fallback path runs.
    whenNew(DataInputStream.class).withArguments(null).thenThrow(new RuntimeException("error"));
    InputStream stream = new ExpectNewDemo().alternativePath();
    verifyNew(DataInputStream.class).withArguments(null);
    assertNotNull("The returned inputstream should not be null.", stream);
    assertTrue("The returned inputstream should be an instance of ByteArrayInputStream.",
            stream instanceof ByteArrayInputStream);
}
@Test
public void testSimpleMultipleNewPrivate_tooManyTimesExpected() throws Exception {
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    Whitebox.invokeMethod(new ExpectNewDemo(), "simpleMultipleNewPrivate");
    try {
        // Only three constructions happen, so expecting four must fail.
        verifyNew(MyClass.class, times(4)).withNoArguments();
        fail("Should throw an exception!.");
    } catch (AssertionError e) {
        assertEquals("samples.newmocking.MyClass();\nWanted 4 times but was 3 times.", e.getMessage());
    }
}
@Test
public void testNewWithArguments() throws Exception {
    final int numberOfTimes = 2;
    final String expected = "used";
    Service serviceMock = mock(Service.class);
    ExpectNewServiceUser serviceUserMock = mock(ExpectNewServiceUser.class);
    // Intercept `new ExpectNewServiceUser(serviceMock, 2)` and hand back the mock.
    whenNew(ExpectNewServiceUser.class)
            .withArguments(serviceMock, numberOfTimes)
            .thenReturn(serviceUserMock);
    when(serviceUserMock.useService()).thenReturn(expected);
    assertEquals(expected, new ExpectNewDemo().newWithArguments(serviceMock, numberOfTimes));
    verifyNew(ExpectNewServiceUser.class).withArguments(serviceMock, numberOfTimes);
}
@Test
public void testNewWithParameterTypesAndArguments() throws Exception {
    final int numberOfTimes = 2;
    final String expected = "used";
    Service serviceMock = mock(Service.class);
    ExpectNewServiceUser serviceUserMock = mock(ExpectNewServiceUser.class);
    // Disambiguate the target constructor explicitly via its parameter types.
    whenNew(ExpectNewServiceUser.class)
            .withParameterTypes(Service.class, int.class)
            .withArguments(serviceMock, numberOfTimes)
            .thenReturn(serviceUserMock);
    when(serviceUserMock.useService()).thenReturn(expected);
    assertEquals(expected, new ExpectNewDemo().newWithArguments(serviceMock, numberOfTimes));
    verifyNew(ExpectNewServiceUser.class).withArguments(serviceMock, numberOfTimes);
}
@Test
public void testNewWithConstructorUsingParameterTypesAndArguments() throws Exception {
    final int numberOfTimes = 2;
    final String expected = "used";
    Service serviceMock = mock(Service.class);
    ExpectNewServiceUser serviceUserMock = mock(ExpectNewServiceUser.class);
    // Pin the exact Constructor object instead of letting PowerMock resolve it.
    whenNew(constructor(ExpectNewServiceUser.class, Service.class, int.class))
            .withArguments(serviceMock, numberOfTimes)
            .thenReturn(serviceUserMock);
    when(serviceUserMock.useService()).thenReturn(expected);
    assertEquals(expected, new ExpectNewDemo().newWithArguments(serviceMock, numberOfTimes));
    verifyNew(ExpectNewServiceUser.class).withArguments(serviceMock, numberOfTimes);
}
@Test
public void testNewUsingConstructorWithArguments() throws Exception {
    final int numberOfTimes = 2;
    final String expected = "used";
    Service serviceMock = mock(Service.class);
    ExpectNewServiceUser serviceUserMock = mock(ExpectNewServiceUser.class);
    // constructor(..) without parameter types resolves from the supplied arguments.
    whenNew(constructor(ExpectNewServiceUser.class))
            .withArguments(serviceMock, numberOfTimes)
            .thenReturn(serviceUserMock);
    when(serviceUserMock.useService()).thenReturn(expected);
    assertEquals(expected, new ExpectNewDemo().newWithArguments(serviceMock, numberOfTimes));
    verifyNew(ExpectNewServiceUser.class).withArguments(serviceMock, numberOfTimes);
}
@Test
public void testNewWithVarArgs() throws Exception {
    final String firstString = "hello";
    final String secondString = "world";
    VarArgsConstructorDemo varArgsMock = mock(VarArgsConstructorDemo.class);
    // Var-arg constructor expectation with two String arguments.
    whenNew(VarArgsConstructorDemo.class)
            .withArguments(firstString, secondString)
            .thenReturn(varArgsMock);
    when(varArgsMock.getAllMessages()).thenReturn(new String[] { firstString, secondString });
    String[] varArgs = new ExpectNewDemo().newVarArgs(firstString, secondString);
    assertEquals(2, varArgs.length);
    assertEquals(firstString, varArgs[0]);
    assertEquals(secondString, varArgs[1]);
    verifyNew(VarArgsConstructorDemo.class).withArguments(firstString, secondString);
}
@Test
public void testNewWhenTheExpectedConstructorIsNotFound() throws Exception {
    final Object object = new Object();
    try {
        // VarArgsConstructorDemo has no constructor taking a plain Object.
        whenNew(VarArgsConstructorDemo.class).withArguments(object);
        fail("Should throw ConstructorNotFoundException!");
    } catch (ConstructorNotFoundException e) {
        assertEquals("No constructor found in class '" + VarArgsConstructorDemo.class.getName()
                + "' with parameter types: [ " + object.getClass().getName() + " ].", e.getMessage());
    }
}
@Test
public void testNewWithVarArgsConstructorWhenOneArgumentIsOfASubType() throws Exception {
    Service serviceMock = mock(Service.class);
    VarArgsConstructorDemo varArgsMock = mock(VarArgsConstructorDemo.class);
    // An anonymous subclass of Service: expectation matching must accept subtypes.
    final Service serviceSubTypeInstance = new Service() {
        public String getServiceMessage() {
            return "message";
        }
    };
    whenNew(VarArgsConstructorDemo.class)
            .withArguments(serviceSubTypeInstance, serviceMock)
            .thenReturn(varArgsMock);
    when(varArgsMock.getAllServices()).thenReturn(new Service[] { serviceMock });
    Service[] varArgs = new ExpectNewDemo().newVarArgs(serviceSubTypeInstance, serviceMock);
    assertEquals(1, varArgs.length);
    assertSame(serviceMock, varArgs[0]);
    verifyNew(VarArgsConstructorDemo.class).withArguments(serviceSubTypeInstance, serviceMock);
}
@Test
public void testNewWithArrayVarArgs() throws Exception {
    final byte[] byteArrayOne = new byte[] { 42 };
    final byte[] byteArrayTwo = new byte[] { 17 };
    VarArgsConstructorDemo varArgsMock = mock(VarArgsConstructorDemo.class);
    // Array-typed var-args: both arrays form the expected argument list.
    whenNew(VarArgsConstructorDemo.class)
            .withArguments(byteArrayOne, byteArrayTwo)
            .thenReturn(varArgsMock);
    when(varArgsMock.getByteArrays()).thenReturn(new byte[][] { byteArrayOne });
    byte[][] varArgs = new ExpectNewDemo().newVarArgs(byteArrayOne, byteArrayTwo);
    assertEquals(1, varArgs.length);
    assertSame(byteArrayOne, varArgs[0]);
    verifyNew(VarArgsConstructorDemo.class).withArguments(byteArrayOne, byteArrayTwo);
}
@Test
public void testNewWithArrayVarArgsAndMatchers() throws Exception {
    final byte[] byteArrayOne = new byte[] { 42 };
    final byte[] byteArrayTwo = new byte[] { 17 };
    VarArgsConstructorDemo varArgsMock = mock(VarArgsConstructorDemo.class);
    whenNew(VarArgsConstructorDemo.class)
            .withArguments(byteArrayOne, byteArrayTwo)
            .thenReturn(varArgsMock);
    when(varArgsMock.getByteArrays()).thenReturn(new byte[][] { byteArrayOne });
    // The production code supplies the arguments through argument matchers.
    byte[][] varArgs = new ExpectNewDemo().newVarArgsWithMatchers();
    assertEquals(1, varArgs.length);
    assertSame(byteArrayOne, varArgs[0]);
    verifyNew(VarArgsConstructorDemo.class).withArguments(byteArrayOne, byteArrayTwo);
}
@Test
public void testNewWithArrayVarArgsWhenFirstArgumentIsNullAndSubseqentArgumentsAreNotNull() throws Exception {
    // A leading null var-arg must still match the expectation.
    final byte[] byteArrayOne = null;
    final byte[] byteArrayTwo = new byte[] { 17 };
    VarArgsConstructorDemo varArgsMock = mock(VarArgsConstructorDemo.class);
    whenNew(VarArgsConstructorDemo.class)
            .withArguments(byteArrayOne, byteArrayTwo)
            .thenReturn(varArgsMock);
    when(varArgsMock.getByteArrays()).thenReturn(new byte[][] { byteArrayTwo });
    byte[][] varArgs = new ExpectNewDemo().newVarArgs(byteArrayOne, byteArrayTwo);
    assertEquals(1, varArgs.length);
    assertSame(byteArrayTwo, varArgs[0]);
    verifyNew(VarArgsConstructorDemo.class).withArguments(byteArrayOne, byteArrayTwo);
}
@Test
public void testNewWithArrayVarArgsWhenFirstArgumentIsNotNullButSubseqentArgumentsAreNull() throws Exception {
    // A trailing null var-arg must still match the expectation.
    final byte[] byteArrayOne = new byte[] { 42 };
    final byte[] byteArrayTwo = null;
    VarArgsConstructorDemo varArgsMock = mock(VarArgsConstructorDemo.class);
    whenNew(VarArgsConstructorDemo.class)
            .withArguments(byteArrayOne, byteArrayTwo)
            .thenReturn(varArgsMock);
    when(varArgsMock.getByteArrays()).thenReturn(new byte[][] { byteArrayOne });
    byte[][] varArgs = new ExpectNewDemo().newVarArgs(byteArrayOne, byteArrayTwo);
    assertEquals(1, varArgs.length);
    assertSame(byteArrayOne, varArgs[0]);
    verifyNew(VarArgsConstructorDemo.class).withArguments(byteArrayOne, byteArrayTwo);
}
@Test
public void testNewWithArrayVarArgsWhenFirstArgumentIsNullSecondArgumentIsNotNullAndThirdArgumentIsNull()
        throws Exception {
    // Nulls interleaved with real arrays must all be matched positionally.
    final byte[] byteArrayOne = null;
    final byte[] byteArrayTwo = new byte[] { 42 };
    final byte[] byteArrayThree = null;
    VarArgsConstructorDemo varArgsMock = mock(VarArgsConstructorDemo.class);
    whenNew(VarArgsConstructorDemo.class)
            .withArguments(byteArrayOne, byteArrayTwo, byteArrayThree)
            .thenReturn(varArgsMock);
    when(varArgsMock.getByteArrays()).thenReturn(new byte[][] { byteArrayTwo });
    byte[][] varArgs = new ExpectNewDemo().newVarArgs(byteArrayOne, byteArrayTwo, byteArrayThree);
    assertEquals(1, varArgs.length);
    assertSame(byteArrayTwo, varArgs[0]);
    verifyNew(VarArgsConstructorDemo.class).withArguments(byteArrayOne, byteArrayTwo, byteArrayThree);
}
@Test
public void testNewWithArrayVarArgsWhenAllArgumentsAreNull() throws Exception {
    // An all-null var-arg list must still be a valid expectation.
    final byte[] byteArrayOne = null;
    final byte[] byteArrayTwo = null;
    VarArgsConstructorDemo varArgsMock = mock(VarArgsConstructorDemo.class);
    whenNew(VarArgsConstructorDemo.class)
            .withArguments(byteArrayOne, byteArrayTwo)
            .thenReturn(varArgsMock);
    when(varArgsMock.getByteArrays()).thenReturn(new byte[][] { byteArrayTwo });
    byte[][] varArgs = new ExpectNewDemo().newVarArgs(byteArrayOne, byteArrayTwo);
    assertEquals(1, varArgs.length);
    assertSame(byteArrayTwo, varArgs[0]);
    verifyNew(VarArgsConstructorDemo.class).withArguments(byteArrayOne, byteArrayTwo);
}
@Test(expected = NullPointerException.class)
public void testNewWithWrongArgument() throws Exception {
    final int numberOfTimes = 2;
    final String expected = "used";
    Service serviceMock = mock(Service.class);
    ExpectNewServiceUser serviceUserMock = mock(ExpectNewServiceUser.class);
    whenNew(ExpectNewServiceUser.class)
            .withArguments(serviceMock, numberOfTimes)
            .thenReturn(serviceUserMock);
    when(serviceUserMock.useService()).thenReturn(expected);
    // newWithWrongArguments() constructs the object with other arguments than
    // those expected above, so the substitute constructor returns null.
    assertEquals(expected, new ExpectNewDemo().newWithWrongArguments(serviceMock, numberOfTimes));
    verifyNew(ExpectNewServiceUser.class).withArguments(serviceMock, numberOfTimes);
    /*
     * Should throw NPE because the default behavior of Mockito when a
     * something isn't expected is to return a default value. In this case
     * whenConstructionOf
     * (ExpectNewServiceUser.class).withArguments(serviceMock,
     * numberOfTimes) is the wrong expectation and thus null is returned
     * from the substitute mock which is the correct behavior.
     */
    fail("Should throw NPE!");
}
@Test
public void testExpectNewButNoNewCallWasMade() throws Exception {
    MyClass constructed = mock(MyClass.class);
    whenNew(MyClass.class).withNoArguments().thenReturn(constructed);
    // makeDate() never constructs MyClass, so the verification must fail.
    new ExpectNewDemo().makeDate();
    try {
        verifyNew(MyClass.class).withNoArguments();
        fail("Should throw AssertionError!");
    } catch (AssertionError e) {
        assertEquals(
                "Wanted but not invoked samples.newmocking.MyClass();\nActually, there were zero interactions with this mock.",
                e.getMessage());
    }
}
}
| |
package ev3dev.sensors;
import ev3dev.hardware.EV3DevFileSystem;
import ev3dev.hardware.EV3DevPlatform;
import fake_ev3dev.ev3dev.sensors.FakeBattery;
import fake_ev3dev.ev3dev.sensors.FakeLegoSensor;
import lejos.hardware.port.SensorPort;
import lejos.hardware.sensor.SensorMode;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.File;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
/**
 * Tests for {@code BaseSensor} mode handling (lookup by name/index, current-mode
 * switching, sample size and sample fetching) against the fake ev3dev filesystem.
 *
 * <p>The {@code FakeBattery}/{@code FakeLegoSensor} locals in each test look unused,
 * but their constructors set up the fake sysfs entries the sensor reads —
 * presumably required side effects; do not remove them (TODO confirm against the
 * fake_ev3dev implementation).
 */
public class BaseSensorTest {

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    /** Resets the fake ev3dev infrastructure and points the driver at it before every test. */
    @Before
    public void resetTest() throws Exception {
        FakeBattery.resetEV3DevInfrastructure();
        System.setProperty(EV3DevFileSystem.EV3DEV_TESTING_KEY, FakeBattery.EV3DEV_FAKE_SYSTEM_PATH);
    }

    @Test
    public void getModeOkWithStringTest() throws Exception {
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        assertThat(baseSensor.getMode("Test").getName(), is("Test"));
    }

    @Test
    public void getModeOkWithIntegerTest() throws Exception {
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        assertThat(baseSensor.getMode(0).getName(), is("Test"));
    }

    @Test
    public void getModeKoWithStringTest() throws Exception {
        // Unknown mode name must be rejected.
        thrown.expect(IllegalArgumentException.class);
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        baseSensor.getMode("NoTest");
    }

    @Test
    public void getModeKoWithIntegerTest() throws Exception {
        // Negative index must be rejected.
        thrown.expect(IllegalArgumentException.class);
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        baseSensor.getMode(-1);
    }

    @Test
    public void getModeKoWithIntegerTest2() throws Exception {
        // Index past the end of the mode array must be rejected.
        thrown.expect(IllegalArgumentException.class);
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        baseSensor.getMode(2);
    }

    @Test
    public void setModeTest() throws Exception {
        // NOTE(review): this setProperty is redundant — resetTest() already sets it.
        System.setProperty(EV3DevFileSystem.EV3DEV_TESTING_KEY, FakeBattery.EV3DEV_FAKE_SYSTEM_PATH);
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        baseSensor.setCurrentMode("Test");
        assertThat(baseSensor.getCurrentMode(), is(0));
    }

    @Test
    public void setModeIntegerTest() throws Exception {
        // NOTE(review): this setProperty is redundant — resetTest() already sets it.
        System.setProperty(EV3DevFileSystem.EV3DEV_TESTING_KEY, FakeBattery.EV3DEV_FAKE_SYSTEM_PATH);
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        baseSensor.setCurrentMode(0);
        assertThat(baseSensor.getCurrentMode(), is(0));
    }

    @Test
    public void setModeIntegerKoTest() throws Exception {
        // Switching to an out-of-range mode index must be rejected.
        thrown.expect(IllegalArgumentException.class);
        System.setProperty(EV3DevFileSystem.EV3DEV_TESTING_KEY, FakeBattery.EV3DEV_FAKE_SYSTEM_PATH);
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        baseSensor.setCurrentMode(99);
    }

    @Test
    public void setModeCountTest() throws Exception {
        System.setProperty(EV3DevFileSystem.EV3DEV_TESTING_KEY, FakeBattery.EV3DEV_FAKE_SYSTEM_PATH);
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        assertThat(baseSensor.getModeCount(), is(1));
    }

    @Test
    public void setModeKoTest() throws Exception {
        // Switching to an unknown mode name must be rejected.
        thrown.expect(IllegalArgumentException.class);
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        baseSensor.setCurrentMode("TestKo");
    }

    @Test
    public void setNameTest() throws Exception {
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        baseSensor.setCurrentMode("Test");
        assertThat(baseSensor.getName(), is("Test"));
    }

    @Test
    public void getSampleSizeTest() throws Exception {
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        baseSensor.setCurrentMode("Test");
        assertThat(baseSensor.sampleSize(), is(1));
    }

    @Test
    public void geFetchSampleTest() throws Exception {
        final FakeBattery fakeBattery = new FakeBattery(EV3DevPlatform.EV3BRICK);
        final FakeLegoSensor fakeLegoSensor = new FakeLegoSensor(EV3DevPlatform.EV3BRICK);
        BaseSensor baseSensor = new BaseSensor(SensorPort.S1, "");
        baseSensor.setModes(new SensorMode[] {new ModeTestMode(new File(""))});
        baseSensor.setCurrentMode("Test");
        float[] sample = new float[baseSensor.sampleSize()];
        baseSensor.fetchSample(sample, 0);
        assertThat(sample[0], is(10f));
    }

    /**
     * Minimal SensorMode stub used by the tests above: one-element sample, constant
     * value 10f, name "Test". Declared static — it does not touch the enclosing test
     * instance, so it should not hold a hidden reference to it. (Also dropped the
     * unused private MODE constant the original carried.)
     */
    private static class ModeTestMode implements SensorMode {

        // Stored for parity with real modes; never read in this stub.
        private final File pathDevice;

        public ModeTestMode(final File pathDevice) {
            this.pathDevice = pathDevice;
        }

        @Override
        public int sampleSize() {
            return 1;
        }

        @Override
        public void fetchSample(float[] sample, int offset) {
            sample[offset] = 10f;
        }

        @Override
        public String getName() {
            return "Test";
        }
    }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import com.facebook.buck.cli.BuildTargetNodeToBuildRuleTransformer;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.CommandTool;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.HashedFileTool;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.rules.RuleKeyBuilder;
import com.facebook.buck.rules.RuleKeyBuilderFactory;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.args.RuleKeyAppendableFunction;
import com.facebook.buck.rules.coercer.FrameworkPath;
import com.facebook.buck.rules.keys.DefaultRuleKeyBuilderFactory;
import com.facebook.buck.testutil.FakeFileHashCache;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
public class CxxPreprocessAndCompileTest {
/** Preprocessor stub that advertises a single color-diagnostics flag. */
private static class PreprocessorWithColorSupport
    extends DefaultPreprocessor {
    static final String COLOR_FLAG = "-use-color-in-preprocessor";

    public PreprocessorWithColorSupport(Tool tool) {
        super(tool);
    }

    @Override
    public Optional<ImmutableList<String>> getFlagsForColorDiagnostics() {
        ImmutableList<String> colorFlags = ImmutableList.of(COLOR_FLAG);
        return Optional.of(colorFlags);
    }
}
/** Compiler stub that advertises a single color-diagnostics flag. */
private static class CompilerWithColorSupport
    extends DefaultCompiler {
    static final String COLOR_FLAG = "-use-color-in-compiler";

    public CompilerWithColorSupport(Tool tool) {
        super(tool);
    }

    @Override
    public Optional<ImmutableList<String>> getFlagsForColorDiagnostics() {
        ImmutableList<String> colorFlags = ImmutableList.of(COLOR_FLAG);
        return Optional.of(colorFlags);
    }
}
// Shared fixture values: baseline tool/flag/path inputs for the rule-key tests below.
private static final Preprocessor DEFAULT_PREPROCESSOR =
new DefaultPreprocessor(new HashedFileTool(Paths.get("preprocessor")));
private static final Compiler DEFAULT_COMPILER =
new DefaultCompiler(new HashedFileTool(Paths.get("compiler")));
private static final Preprocessor PREPROCESSOR_WITH_COLOR_SUPPORT =
new PreprocessorWithColorSupport(new HashedFileTool(Paths.get("preprocessor")));
private static final Compiler COMPILER_WITH_COLOR_SUPPORT =
new CompilerWithColorSupport(new HashedFileTool(Paths.get("compiler")));
// Baseline flags: one platform-level and one rule-level flag so either can be varied.
private static final CxxToolFlags DEFAULT_TOOL_FLAGS = CxxToolFlags.explicitBuilder()
.addPlatformFlags("-fsanitize=address")
.addRuleFlags("-O3")
.build();
private static final CxxToolFlags DEFAULT_PREPROCESSOR_FLAGS = CxxToolFlags.explicitBuilder()
.addRuleFlags("-DTEST")
.build();
private static final Path DEFAULT_OUTPUT = Paths.get("test.o");
private static final SourcePath DEFAULT_INPUT = new FakeSourcePath("test.cpp");
private static final CxxSource.Type DEFAULT_INPUT_TYPE = CxxSource.Type.CXX;
private static final ImmutableList<CxxHeaders> DEFAULT_INCLUDES =
ImmutableList.of(
CxxHeaders.builder()
.putNameToPathMap(Paths.get("test.h"), new FakeSourcePath("foo/test.h"))
.build());
private static final ImmutableSet<Path> DEFAULT_INCLUDE_ROOTS = ImmutableSet.of(
Paths.get("foo/bar"),
Paths.get("test"));
private static final ImmutableSet<Path> DEFAULT_SYSTEM_INCLUDE_ROOTS = ImmutableSet.of(
Paths.get("/usr/include"),
Paths.get("/include"));
private static final ImmutableSet<Path> DEFAULT_HEADER_MAPS = ImmutableSet.of(
Paths.get("some/thing.hmap"),
Paths.get("another/file.hmap"));
private static final ImmutableSet<FrameworkPath> DEFAULT_FRAMEWORK_ROOTS = ImmutableSet.of();
private static final DebugPathSanitizer DEFAULT_SANITIZER =
CxxPlatforms.DEFAULT_DEBUG_PATH_SANITIZER;
private static final Path DEFAULT_WORKING_DIR = Paths.get(System.getProperty("user.dir"));
// Framework-path search function that contributes nothing to the rule key and maps
// every framework path under a fixed test prefix.
private static final
RuleKeyAppendableFunction<FrameworkPath, Path> DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION =
new RuleKeyAppendableFunction<FrameworkPath, Path>() {
@Override
public RuleKeyBuilder appendToRuleKey(RuleKeyBuilder builder) {
return builder;
}
@Override
public Path apply(FrameworkPath input) {
return Paths.get("test", "framework", "path", input.toString());
}
};
/**
 * Verifies that each meaningful input to a compile rule — the compiler binary, the
 * operation (compile vs. preprocess), the platform flags, the rule flags and the
 * source file — perturbs the computed rule key relative to a baseline.
 */
@Test
public void inputChangesCauseRuleKeyChangesForCompilation() {
    SourcePathResolver pathResolver =
        new SourcePathResolver(
            new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer()));
    BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
    BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
    // Deterministic fake hashes for every file the rule key may reference.
    FakeFileHashCache hashCache = FakeFileHashCache.createFromStrings(
        ImmutableMap.<String, String>builder()
            .put("preprocessor", Strings.repeat("a", 40))
            .put("compiler", Strings.repeat("a", 40))
            .put("test.o", Strings.repeat("b", 40))
            .put("test.cpp", Strings.repeat("c", 40))
            .put("different", Strings.repeat("d", 40))
            .put("foo/test.h", Strings.repeat("e", 40))
            .put("path/to/a/plugin.so", Strings.repeat("f", 40))
            .put("path/to/a/different/plugin.so", Strings.repeat("a0", 40))
            .build());

    // Generate a rule key for the defaults.
    RuleKey defaultRuleKey = compileRuleKey(
        params, pathResolver, hashCache, DEFAULT_COMPILER, DEFAULT_TOOL_FLAGS, DEFAULT_INPUT);

    // Verify that changing the compiler causes a rulekey change.
    RuleKey compilerChange = compileRuleKey(
        params,
        pathResolver,
        hashCache,
        new DefaultCompiler(new HashedFileTool(Paths.get("different"))),
        DEFAULT_TOOL_FLAGS,
        DEFAULT_INPUT);
    assertNotEquals(defaultRuleKey, compilerChange);

    // Verify that changing the operation (preprocess instead of compile) causes a
    // rulekey change. This variant cannot share the helper since it builds a
    // preprocess rule.
    RuleKey operationChange = new DefaultRuleKeyBuilderFactory(hashCache, pathResolver).build(
        CxxPreprocessAndCompile.preprocess(
            params,
            pathResolver,
            new PreprocessorDelegate(
                pathResolver,
                DEFAULT_SANITIZER,
                DEFAULT_WORKING_DIR,
                DEFAULT_PREPROCESSOR,
                DEFAULT_TOOL_FLAGS,
                DEFAULT_INCLUDE_ROOTS,
                DEFAULT_SYSTEM_INCLUDE_ROOTS,
                DEFAULT_HEADER_MAPS,
                DEFAULT_FRAMEWORK_ROOTS,
                DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
                Optional.<SourcePath>absent(),
                DEFAULT_INCLUDES),
            new CompilerDelegate(
                pathResolver,
                DEFAULT_SANITIZER,
                DEFAULT_COMPILER,
                DEFAULT_TOOL_FLAGS),
            DEFAULT_OUTPUT,
            DEFAULT_INPUT,
            DEFAULT_INPUT_TYPE,
            DEFAULT_SANITIZER));
    assertNotEquals(defaultRuleKey, operationChange);

    // Verify that changing the platform flags causes a rulekey change.
    RuleKey platformFlagsChange = compileRuleKey(
        params,
        pathResolver,
        hashCache,
        DEFAULT_COMPILER,
        CxxToolFlags.explicitBuilder()
            .addPlatformFlags("-different")
            .setRuleFlags(DEFAULT_TOOL_FLAGS.getRuleFlags())
            .build(),
        DEFAULT_INPUT);
    assertNotEquals(defaultRuleKey, platformFlagsChange);

    // Verify that changing the rule flags causes a rulekey change.
    RuleKey ruleFlagsChange = compileRuleKey(
        params,
        pathResolver,
        hashCache,
        DEFAULT_COMPILER,
        CxxToolFlags.explicitBuilder()
            .setPlatformFlags(DEFAULT_TOOL_FLAGS.getPlatformFlags())
            .addRuleFlags("-other", "flags")
            .build(),
        DEFAULT_INPUT);
    assertNotEquals(defaultRuleKey, ruleFlagsChange);

    // Verify that changing the input causes a rulekey change.
    RuleKey inputChange = compileRuleKey(
        params, pathResolver, hashCache, DEFAULT_COMPILER, DEFAULT_TOOL_FLAGS,
        new FakeSourcePath("different"));
    assertNotEquals(defaultRuleKey, inputChange);
}

/**
 * Builds the rule key of a compile-only {@code CxxPreprocessAndCompile} rule with the
 * given compiler, flags and input, holding all other inputs at their defaults.
 */
private RuleKey compileRuleKey(
    BuildRuleParams params,
    SourcePathResolver pathResolver,
    FakeFileHashCache hashCache,
    Compiler compiler,
    CxxToolFlags flags,
    SourcePath input) {
    return new DefaultRuleKeyBuilderFactory(hashCache, pathResolver).build(
        CxxPreprocessAndCompile.compile(
            params,
            pathResolver,
            new CompilerDelegate(
                pathResolver,
                DEFAULT_SANITIZER,
                compiler,
                flags),
            DEFAULT_OUTPUT,
            input,
            DEFAULT_INPUT_TYPE,
            DEFAULT_SANITIZER));
}
/**
 * Rule keys for preprocess rules must ignore header search paths (include roots,
 * system include roots, and header maps) because header changes are tracked by a
 * separate mechanism, but must change when the framework roots change.
 */
@Test
public void inputChangesCauseRuleKeyChangesForPreprocessing() {
  SourcePathResolver pathResolver =
      new SourcePathResolver(
          new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer()));
  BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
  BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
  // Fixed content hashes so any rule-key difference can only come from the
  // constructor arguments varied below, never from file contents.
  FakeFileHashCache hashCache = FakeFileHashCache.createFromStrings(
      ImmutableMap.<String, String>builder()
          .put("preprocessor", Strings.repeat("a", 40))
          .put("compiler", Strings.repeat("a", 40))
          .put("test.o", Strings.repeat("b", 40))
          .put("test.cpp", Strings.repeat("c", 40))
          .put("different", Strings.repeat("d", 40))
          .put("foo/test.h", Strings.repeat("e", 40))
          .put("path/to/a/plugin.so", Strings.repeat("f", 40))
          .put("path/to/a/different/plugin.so", Strings.repeat("a0", 40))
          .build());

  // Generate a rule key for the defaults.
  RuleKey defaultRuleKey = new DefaultRuleKeyBuilderFactory(hashCache, pathResolver).build(
      CxxPreprocessAndCompile.preprocess(
          params,
          pathResolver,
          new PreprocessorDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_WORKING_DIR,
              DEFAULT_PREPROCESSOR,
              DEFAULT_PREPROCESSOR_FLAGS,
              DEFAULT_INCLUDE_ROOTS,
              DEFAULT_SYSTEM_INCLUDE_ROOTS,
              DEFAULT_HEADER_MAPS,
              DEFAULT_FRAMEWORK_ROOTS,
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.<SourcePath>absent(),
              DEFAULT_INCLUDES),
          new CompilerDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_COMPILER,
              CxxToolFlags.of()),
          DEFAULT_OUTPUT,
          DEFAULT_INPUT,
          DEFAULT_INPUT_TYPE,
          DEFAULT_SANITIZER));

  // Verify that changing the includes does *not* cause a rulekey change, since we use a
  // different mechanism to track header changes.
  RuleKey includesChange = new DefaultRuleKeyBuilderFactory(hashCache, pathResolver).build(
      CxxPreprocessAndCompile.preprocess(
          params,
          pathResolver,
          new PreprocessorDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_WORKING_DIR,
              DEFAULT_PREPROCESSOR,
              DEFAULT_PREPROCESSOR_FLAGS,
              ImmutableSet.of(Paths.get("different")),
              DEFAULT_SYSTEM_INCLUDE_ROOTS,
              DEFAULT_HEADER_MAPS,
              DEFAULT_FRAMEWORK_ROOTS,
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.<SourcePath>absent(),
              DEFAULT_INCLUDES),
          new CompilerDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_COMPILER,
              CxxToolFlags.of()),
          DEFAULT_OUTPUT,
          DEFAULT_INPUT,
          DEFAULT_INPUT_TYPE,
          DEFAULT_SANITIZER));
  assertEquals(defaultRuleKey, includesChange);

  // Verify that changing the system includes does *not* cause a rulekey change, since we use a
  // different mechanism to track header changes.
  RuleKey systemIncludesChange = new DefaultRuleKeyBuilderFactory(hashCache, pathResolver).build(
      CxxPreprocessAndCompile.preprocess(
          params,
          pathResolver,
          new PreprocessorDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_WORKING_DIR,
              DEFAULT_PREPROCESSOR,
              DEFAULT_PREPROCESSOR_FLAGS,
              DEFAULT_INCLUDE_ROOTS,
              ImmutableSet.of(Paths.get("different")),
              DEFAULT_HEADER_MAPS,
              DEFAULT_FRAMEWORK_ROOTS,
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.<SourcePath>absent(),
              DEFAULT_INCLUDES),
          new CompilerDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_COMPILER,
              CxxToolFlags.of()),
          DEFAULT_OUTPUT,
          DEFAULT_INPUT,
          DEFAULT_INPUT_TYPE,
          DEFAULT_SANITIZER));
  assertEquals(defaultRuleKey, systemIncludesChange);

  // Verify that changing the header maps does *not* cause a rulekey change, since we use a
  // different mechanism to track header changes.
  RuleKey headerMapsChange = new DefaultRuleKeyBuilderFactory(hashCache, pathResolver).build(
      CxxPreprocessAndCompile.preprocess(
          params,
          pathResolver,
          new PreprocessorDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_WORKING_DIR,
              DEFAULT_PREPROCESSOR,
              DEFAULT_PREPROCESSOR_FLAGS,
              DEFAULT_INCLUDE_ROOTS,
              DEFAULT_SYSTEM_INCLUDE_ROOTS,
              ImmutableSet.of(Paths.get("different")),
              DEFAULT_FRAMEWORK_ROOTS,
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.<SourcePath>absent(),
              DEFAULT_INCLUDES),
          new CompilerDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_COMPILER,
              CxxToolFlags.of()),
          DEFAULT_OUTPUT,
          DEFAULT_INPUT,
          DEFAULT_INPUT_TYPE,
          DEFAULT_SANITIZER));
  assertEquals(defaultRuleKey, headerMapsChange);

  // Verify that changing the framework roots causes a rulekey change.
  // Framework roots ARE part of the key, unlike the include paths above.
  RuleKey frameworkRootsChange = new DefaultRuleKeyBuilderFactory(hashCache, pathResolver).build(
      CxxPreprocessAndCompile.preprocess(
          params,
          pathResolver,
          new PreprocessorDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_WORKING_DIR,
              DEFAULT_PREPROCESSOR,
              DEFAULT_PREPROCESSOR_FLAGS,
              DEFAULT_INCLUDE_ROOTS,
              DEFAULT_SYSTEM_INCLUDE_ROOTS,
              DEFAULT_HEADER_MAPS,
              ImmutableSet.of(FrameworkPath.ofSourcePath(new FakeSourcePath("different"))),
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.<SourcePath>absent(),
              DEFAULT_INCLUDES),
          new CompilerDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_COMPILER,
              CxxToolFlags.of()),
          DEFAULT_OUTPUT,
          DEFAULT_INPUT,
          DEFAULT_INPUT_TYPE,
          DEFAULT_SANITIZER));
  assertNotEquals(defaultRuleKey, frameworkRootsChange);
}
/**
 * Two preprocess rules whose flags differ only in paths that each rule's
 * DebugPathSanitizer maps to the same sanitized name ("A") must produce
 * identical rule keys: the sanitized form, not the raw path, is hashed.
 */
@Test
public void sanitizedPathsInFlagsDoNotAffectRuleKey() {
  SourcePathResolver pathResolver =
      new SourcePathResolver(
          new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer()));
  BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
  BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
  RuleKeyBuilderFactory ruleKeyBuilderFactory =
      new DefaultRuleKeyBuilderFactory(
          FakeFileHashCache.createFromStrings(
              ImmutableMap.<String, String>builder()
                  .put("preprocessor", Strings.repeat("a", 40))
                  .put("compiler", Strings.repeat("a", 40))
                  .put("test.o", Strings.repeat("b", 40))
                  .put("test.cpp", Strings.repeat("c", 40))
                  .put("different", Strings.repeat("d", 40))
                  .put("foo/test.h", Strings.repeat("e", 40))
                  .put("path/to/a/plugin.so", Strings.repeat("f", 40))
                  .put("path/to/a/different/plugin.so", Strings.repeat("a0", 40))
                  .build()),
          pathResolver);

  // Set up a map to sanitize the differences in the flags.
  // sanitizer1 maps "something" -> "A"; sanitizer2 maps "different" -> "A",
  // so the two flag sets below sanitize to the same strings.
  int pathSize = 10;
  DebugPathSanitizer sanitizer1 = new DebugPathSanitizer(
      pathSize,
      File.separatorChar,
      Paths.get("PWD"),
      ImmutableBiMap.of(Paths.get("something"), Paths.get("A")));
  DebugPathSanitizer sanitizer2 = new DebugPathSanitizer(
      pathSize,
      File.separatorChar,
      Paths.get("PWD"),
      ImmutableBiMap.of(Paths.get("different"), Paths.get("A")));

  // Generate a rule key using the first sanitizer and "something"-based paths.
  CxxToolFlags flags1 = CxxToolFlags.explicitBuilder()
      .addPlatformFlags("-Isomething/foo")
      .addRuleFlags("-Isomething/bar")
      .build();
  RuleKey ruleKey1 = ruleKeyBuilderFactory.build(
      CxxPreprocessAndCompile.preprocess(
          params,
          pathResolver,
          new PreprocessorDelegate(
              pathResolver,
              sanitizer1,
              DEFAULT_WORKING_DIR,
              DEFAULT_PREPROCESSOR,
              flags1,
              DEFAULT_INCLUDE_ROOTS,
              DEFAULT_SYSTEM_INCLUDE_ROOTS,
              DEFAULT_HEADER_MAPS,
              DEFAULT_FRAMEWORK_ROOTS,
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.<SourcePath>absent(),
              DEFAULT_INCLUDES),
          new CompilerDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_COMPILER,
              CxxToolFlags.of()),
          DEFAULT_OUTPUT,
          DEFAULT_INPUT,
          DEFAULT_INPUT_TYPE,
          sanitizer1));

  // Generate a second rule key using the other sanitizer and "different"-based
  // paths, which sanitize to the same values as flags1.
  CxxToolFlags flags2 = CxxToolFlags.explicitBuilder()
      .addPlatformFlags("-Idifferent/foo")
      .addRuleFlags("-Idifferent/bar")
      .build();
  RuleKey ruleKey2 = ruleKeyBuilderFactory.build(
      CxxPreprocessAndCompile.preprocess(
          params,
          pathResolver,
          new PreprocessorDelegate(
              pathResolver,
              sanitizer2,
              DEFAULT_WORKING_DIR,
              DEFAULT_PREPROCESSOR,
              flags2,
              DEFAULT_INCLUDE_ROOTS,
              DEFAULT_SYSTEM_INCLUDE_ROOTS,
              DEFAULT_HEADER_MAPS,
              DEFAULT_FRAMEWORK_ROOTS,
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.<SourcePath>absent(),
              DEFAULT_INCLUDES),
          new CompilerDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_COMPILER,
              CxxToolFlags.of()),
          DEFAULT_OUTPUT,
          DEFAULT_INPUT,
          DEFAULT_INPUT_TYPE,
          sanitizer2));

  assertEquals(ruleKey1, ruleKey2);
}
/**
 * The main step built by {@link CxxPreprocessAndCompile#compile} must emit the
 * full compiler command line: tool, platform flags, rule flags, source
 * language, dep-file plumbing, input file, and output file.
 */
@Test
public void usesCorrectCommandForCompile() {
  // Dummy inputs for the rule under test.
  SourcePathResolver resolver =
      new SourcePathResolver(
          new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer()));
  BuildTarget buildTarget = BuildTargetFactory.newInstance("//foo:bar");
  BuildRuleParams buildRuleParams = new FakeBuildRuleParamsBuilder(buildTarget).build();
  Path outputPath = Paths.get("test.o");
  Path depFilePath = Paths.get("test.o.dep");
  Path inputPath = Paths.get("test.ii");
  CxxToolFlags compilerFlags = CxxToolFlags.explicitBuilder()
      .addPlatformFlags("-ffunction-sections")
      .addRuleFlags("-O3")
      .build();

  CxxPreprocessAndCompile rule =
      CxxPreprocessAndCompile.compile(
          buildRuleParams,
          resolver,
          new CompilerDelegate(resolver, DEFAULT_SANITIZER, DEFAULT_COMPILER, compilerFlags),
          outputPath,
          new FakeSourcePath(inputPath.toString()),
          DEFAULT_INPUT_TYPE,
          DEFAULT_SANITIZER);

  // Expected shape: tool, platform flags before rule flags, then language,
  // compile mode, dep-file args, the input, and finally the output.
  ImmutableList<String> expectedCommand =
      ImmutableList.of(
          "compiler",
          "-ffunction-sections",
          "-O3",
          "-x", "c++",
          "-c",
          "-MD",
          "-MF",
          depFilePath.toString() + ".tmp",
          inputPath.toString(),
          "-o", outputPath.toString());
  assertEquals(expectedCommand, rule.makeMainStep().getCommand());
}
/**
 * The main step built by {@link CxxPreprocessAndCompile#preprocess} must emit
 * the full preprocessor command line, including the "-include" prefix header
 * and dep-file plumbing, ending with the input file.
 */
@Test
public void usesCorrectCommandForPreprocess() {
  // Setup some dummy values for inputs to the CxxPreprocessAndCompile.
  SourcePathResolver pathResolver =
      new SourcePathResolver(
          new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer()));
  BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
  BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
  ProjectFilesystem filesystem = new FakeProjectFilesystem();
  CxxToolFlags preprocessorFlags = CxxToolFlags.explicitBuilder()
      .addPlatformFlags("-Dtest=blah")
      .addRuleFlags("-Dfoo=bar")
      .build();
  Path output = Paths.get("test.ii");
  // depFile is only referenced for the expected "-MF" argument below.
  Path depFile = Paths.get("test.ii.dep");
  Path input = Paths.get("test.cpp");
  Path prefixHeader = Paths.get("prefix.pch");

  CxxPreprocessAndCompile buildRule =
      CxxPreprocessAndCompile.preprocess(
          params,
          pathResolver,
          new PreprocessorDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_WORKING_DIR,
              DEFAULT_PREPROCESSOR,
              preprocessorFlags,
              ImmutableSet.<Path>of(),
              ImmutableSet.<Path>of(),
              ImmutableSet.<Path>of(),
              DEFAULT_FRAMEWORK_ROOTS,
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.<SourcePath>of(new FakeSourcePath(filesystem, prefixHeader.toString())),
              ImmutableList.of(CxxHeaders.builder().build())),
          new CompilerDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_COMPILER,
              CxxToolFlags.of()),
          output,
          new FakeSourcePath(input.toString()),
          DEFAULT_INPUT_TYPE,
          DEFAULT_SANITIZER);

  // Verify it uses the expected command.
  ImmutableList<String> expectedPreprocessCommand = ImmutableList.<String>builder()
      .add("preprocessor")
      .add("-Dtest=blah")
      .add("-Dfoo=bar")
      .add("-include")
      // The prefix header is resolved against the project filesystem root.
      .add(filesystem.resolve(prefixHeader).toString())
      .add("-x", "c++")
      .add("-E")
      .add("-MD")
      .add("-MF")
      .add(depFile.toString() + ".tmp")
      .add(input.toString())
      .build();
  ImmutableList<String> actualPreprocessCommand = buildRule.makeMainStep().getCommand();
  assertEquals(expectedPreprocessCommand, actualPreprocessCommand);
}
/**
 * getInputsAfterBuildingLocally() must always include the preprocessor and
 * compiler binaries themselves, for both preprocess and compile rules, so
 * that tool changes are tracked even after a local build.
 */
@Test
public void compilerAndPreprocessorAreAlwaysReturnedFromGetInputsAfterBuildingLocally()
    throws IOException {
  ProjectFilesystem filesystem = new FakeProjectFilesystem();
  // Wrap each tool binary path in a CommandTool so it surfaces as an input.
  SourcePath preprocessor = new PathSourcePath(filesystem, Paths.get("preprocessor"));
  Tool preprocessorTool =
      new CommandTool.Builder()
          .addInput(preprocessor)
          .build();
  SourcePath compiler = new PathSourcePath(filesystem, Paths.get("compiler"));
  Tool compilerTool =
      new CommandTool.Builder()
          .addInput(compiler)
          .build();

  SourcePathResolver pathResolver =
      new SourcePathResolver(
          new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer()));
  BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
  BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();

  // Preprocess rule: the preprocessor binary must be reported as an input.
  CxxPreprocessAndCompile cxxPreprocess =
      CxxPreprocessAndCompile.preprocess(
          params,
          pathResolver,
          new PreprocessorDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_WORKING_DIR,
              new DefaultPreprocessor(preprocessorTool),
              CxxToolFlags.of(),
              DEFAULT_INCLUDE_ROOTS,
              DEFAULT_SYSTEM_INCLUDE_ROOTS,
              DEFAULT_HEADER_MAPS,
              DEFAULT_FRAMEWORK_ROOTS,
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.<SourcePath>absent(),
              DEFAULT_INCLUDES),
          new CompilerDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_COMPILER,
              CxxToolFlags.of()),
          DEFAULT_OUTPUT,
          DEFAULT_INPUT,
          DEFAULT_INPUT_TYPE,
          DEFAULT_SANITIZER);
  assertThat(
      cxxPreprocess.getInputsAfterBuildingLocally(),
      hasItem(preprocessor));

  // Compile rule: the compiler binary must be reported as an input.
  CxxPreprocessAndCompile cxxCompile =
      CxxPreprocessAndCompile.compile(
          params,
          pathResolver,
          new CompilerDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              new DefaultCompiler(compilerTool),
              CxxToolFlags.of()),
          DEFAULT_OUTPUT,
          DEFAULT_INPUT,
          DEFAULT_INPUT_TYPE,
          DEFAULT_SANITIZER);
  assertThat(
      cxxCompile.getInputsAfterBuildingLocally(),
      hasItem(compiler));
}
/**
 * The compile command contains the compiler's color flag exactly when colored
 * diagnostics are allowed.
 */
@Test
public void usesColorFlagForCompilationWhenRequested() {
  SourcePathResolver resolver =
      new SourcePathResolver(
          new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer()));
  BuildTarget buildTarget = BuildTargetFactory.newInstance("//foo:bar");
  BuildRuleParams buildRuleParams = new FakeBuildRuleParamsBuilder(buildTarget).build();
  Path outputPath = Paths.get("test.o");
  Path inputPath = Paths.get("test.ii");

  CxxPreprocessAndCompile rule =
      CxxPreprocessAndCompile.compile(
          buildRuleParams,
          resolver,
          new CompilerDelegate(
              resolver,
              DEFAULT_SANITIZER,
              COMPILER_WITH_COLOR_SUPPORT,
              CxxToolFlags.of()),
          outputPath,
          new FakeSourcePath(inputPath.toString()),
          DEFAULT_INPUT_TYPE,
          DEFAULT_SANITIZER);

  // With colors disallowed the flag must be absent...
  ImmutableList<String> plainCommand = rule.makeMainStep().makeCompileCommand(
      inputPath.toString(),
      "c++",
      /* preprocessable */ true,
      /* allowColorsInDiagnostics */ false);
  assertThat(plainCommand, not(hasItem(CompilerWithColorSupport.COLOR_FLAG)));

  // ...and with colors allowed it must be present.
  ImmutableList<String> colorCommand = rule.makeMainStep().makeCompileCommand(
      inputPath.toString(),
      "c++",
      /* preprocessable */ true,
      /* allowColorsInDiagnostics */ true);
  assertThat(colorCommand, hasItem(CompilerWithColorSupport.COLOR_FLAG));
}
/**
 * The preprocess command contains the preprocessor's color flag exactly when
 * colored diagnostics are allowed.
 */
@Test
public void usesColorFlagForPreprocessingWhenRequested() {
  SourcePathResolver pathResolver =
      new SourcePathResolver(
          new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer()));
  BuildTarget target = BuildTargetFactory.newInstance("//foo:bar");
  BuildRuleParams params = new FakeBuildRuleParamsBuilder(target).build();
  ProjectFilesystem filesystem = new FakeProjectFilesystem();
  Path output = Paths.get("test.ii");
  Path input = Paths.get("test.cpp");
  Path prefixHeader = Paths.get("prefix.pch");

  CxxPreprocessAndCompile buildRule =
      CxxPreprocessAndCompile.preprocess(
          params,
          pathResolver,
          new PreprocessorDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_WORKING_DIR,
              PREPROCESSOR_WITH_COLOR_SUPPORT,
              CxxToolFlags.of(),
              ImmutableSet.<Path>of(),
              ImmutableSet.<Path>of(),
              ImmutableSet.<Path>of(),
              DEFAULT_FRAMEWORK_ROOTS,
              DEFAULT_FRAMEWORK_PATH_SEARCH_PATH_FUNCTION,
              Optional.<SourcePath>of(new FakeSourcePath(filesystem, prefixHeader.toString())),
              ImmutableList.of(CxxHeaders.builder().build())),
          new CompilerDelegate(
              pathResolver,
              DEFAULT_SANITIZER,
              DEFAULT_COMPILER,
              CxxToolFlags.of()),
          output,
          new FakeSourcePath(input.toString()),
          DEFAULT_INPUT_TYPE,
          DEFAULT_SANITIZER);

  // Color flag absent when colors are disallowed...
  ImmutableList<String> command = buildRule.makeMainStep().makePreprocessCommand(
      /* allowColorsInDiagnostics */ false);
  assertThat(command, not(hasItem(PreprocessorWithColorSupport.COLOR_FLAG)));

  // ...and present when they are allowed.
  command = buildRule.makeMainStep().makePreprocessCommand(
      /* allowColorsInDiagnostics */ true);
  assertThat(command, hasItem(PreprocessorWithColorSupport.COLOR_FLAG));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.coprocessor;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
import org.apache.hadoop.hbase.regionserver.wal.WALCoprocessorHost;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdge;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests invocation of the
 * {@link org.apache.hadoop.hbase.coprocessor.WALObserver} interface hooks at
 * all appropriate times during WAL writes, rolls, and replays.
 */
@Category({CoprocessorTests.class, MediumTests.class})
public class TestWALObserver {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestWALObserver.class);
private static final Logger LOG = LoggerFactory.getLogger(TestWALObserver.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
private static byte[] TEST_TABLE = Bytes.toBytes("observedTable");
private static byte[][] TEST_FAMILY = { Bytes.toBytes("fam1"),
Bytes.toBytes("fam2"), Bytes.toBytes("fam3"), };
private static byte[][] TEST_QUALIFIER = { Bytes.toBytes("q1"),
Bytes.toBytes("q2"), Bytes.toBytes("q3"), };
private static byte[][] TEST_VALUE = { Bytes.toBytes("v1"),
Bytes.toBytes("v2"), Bytes.toBytes("v3"), };
private static byte[] TEST_ROW = Bytes.toBytes("testRow");
@Rule
public TestName currentTest = new TestName();
private Configuration conf;
private FileSystem fs;
private Path hbaseRootDir;
private Path hbaseWALRootDir;
private Path oldLogDir;
private Path logDir;
private WALFactory wals;
@BeforeClass
public static void setupBeforeClass() throws Exception {
Configuration conf = TEST_UTIL.getConfiguration();
conf.setStrings(CoprocessorHost.WAL_COPROCESSOR_CONF_KEY,
SampleRegionWALCoprocessor.class.getName());
conf.set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
SampleRegionWALCoprocessor.class.getName());
conf.setInt("dfs.client.block.recovery.retries", 2);
TEST_UTIL.startMiniCluster(1);
Path hbaseRootDir = TEST_UTIL.getDFSCluster().getFileSystem()
.makeQualified(new Path("/hbase"));
Path hbaseWALRootDir = TEST_UTIL.getDFSCluster().getFileSystem()
.makeQualified(new Path("/hbaseLogRoot"));
LOG.info("hbase.rootdir=" + hbaseRootDir);
CommonFSUtils.setRootDir(conf, hbaseRootDir);
CommonFSUtils.setWALRootDir(conf, hbaseWALRootDir);
}
@AfterClass
public static void teardownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
@Before
public void setUp() throws Exception {
this.conf = HBaseConfiguration.create(TEST_UTIL.getConfiguration());
// this.cluster = TEST_UTIL.getDFSCluster();
this.fs = TEST_UTIL.getDFSCluster().getFileSystem();
this.hbaseRootDir = CommonFSUtils.getRootDir(conf);
this.hbaseWALRootDir = CommonFSUtils.getWALRootDir(conf);
this.oldLogDir = new Path(this.hbaseWALRootDir,
HConstants.HREGION_OLDLOGDIR_NAME);
String serverName = ServerName.valueOf(currentTest.getMethodName(), 16010,
System.currentTimeMillis()).toString();
this.logDir = new Path(this.hbaseWALRootDir,
AbstractFSWALProvider.getWALDirectoryName(serverName));
if (TEST_UTIL.getDFSCluster().getFileSystem().exists(this.hbaseRootDir)) {
TEST_UTIL.getDFSCluster().getFileSystem().delete(this.hbaseRootDir, true);
}
if (TEST_UTIL.getDFSCluster().getFileSystem().exists(this.hbaseWALRootDir)) {
TEST_UTIL.getDFSCluster().getFileSystem().delete(this.hbaseWALRootDir, true);
}
this.wals = new WALFactory(conf, serverName);
}
@After
public void tearDown() throws Exception {
try {
wals.shutdown();
} catch (IOException exception) {
// one of our tests splits out from under our wals.
LOG.warn("Ignoring failure to close wal factory. " + exception.getMessage());
LOG.debug("details of failure to close wal factory.", exception);
}
TEST_UTIL.getDFSCluster().getFileSystem().delete(this.hbaseRootDir, true);
TEST_UTIL.getDFSCluster().getFileSystem().delete(this.hbaseWALRootDir, true);
}
/**
* Test WAL write behavior with WALObserver. The coprocessor monitors a
* WALEdit written to WAL, and ignore, modify, and add KeyValue's for the
* WALEdit.
*/
@Test
public void testWALObserverWriteToWAL() throws Exception {
final WAL log = wals.getWAL(null);
verifyWritesSeen(log, getCoprocessor(log, SampleRegionWALCoprocessor.class), false);
}
private void verifyWritesSeen(final WAL log, final SampleRegionWALCoprocessor cp,
final boolean seesLegacy) throws Exception {
RegionInfo hri = createBasicHRegionInfo(Bytes.toString(TEST_TABLE));
TableDescriptor htd = createBasic3FamilyHTD(Bytes
.toString(TEST_TABLE));
NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
for (byte[] fam : htd.getColumnFamilyNames()) {
scopes.put(fam, 0);
}
Path basedir = new Path(this.hbaseRootDir, Bytes.toString(TEST_TABLE));
deleteDir(basedir);
fs.mkdirs(new Path(basedir, hri.getEncodedName()));
// TEST_FAMILY[0] shall be removed from WALEdit.
// TEST_FAMILY[1] value shall be changed.
// TEST_FAMILY[2] shall be added to WALEdit, although it's not in the put.
cp.setTestValues(TEST_TABLE, TEST_ROW, TEST_FAMILY[0], TEST_QUALIFIER[0],
TEST_FAMILY[1], TEST_QUALIFIER[1], TEST_FAMILY[2], TEST_QUALIFIER[2]);
assertFalse(cp.isPreWALWriteCalled());
assertFalse(cp.isPostWALWriteCalled());
// TEST_FAMILY[2] is not in the put, however it shall be added by the tested
// coprocessor.
// Use a Put to create familyMap.
Put p = creatPutWith2Families(TEST_ROW);
Map<byte[], List<Cell>> familyMap = p.getFamilyCellMap();
WALEdit edit = new WALEdit();
edit.add(familyMap);
boolean foundFamily0 = false;
boolean foundFamily2 = false;
boolean modifiedFamily1 = false;
List<Cell> cells = edit.getCells();
for (Cell cell : cells) {
if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) {
foundFamily0 = true;
}
if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) {
foundFamily2 = true;
}
if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) {
if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) {
modifiedFamily1 = true;
}
}
}
assertTrue(foundFamily0);
assertFalse(foundFamily2);
assertFalse(modifiedFamily1);
// it's where WAL write cp should occur.
long now = EnvironmentEdgeManager.currentTime();
// we use HLogKey here instead of WALKeyImpl directly to support legacy coprocessors.
long txid = log.appendData(hri, new WALKeyImpl(hri.getEncodedNameAsBytes(), hri.getTable(), now,
new MultiVersionConcurrencyControl(), scopes), edit);
log.sync(txid);
// the edit shall have been change now by the coprocessor.
foundFamily0 = false;
foundFamily2 = false;
modifiedFamily1 = false;
for (Cell cell : cells) {
if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[0])) {
foundFamily0 = true;
}
if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[2])) {
foundFamily2 = true;
}
if (Arrays.equals(CellUtil.cloneFamily(cell), TEST_FAMILY[1])) {
if (!Arrays.equals(CellUtil.cloneValue(cell), TEST_VALUE[1])) {
modifiedFamily1 = true;
}
}
}
assertFalse(foundFamily0);
assertTrue(foundFamily2);
assertTrue(modifiedFamily1);
assertTrue(cp.isPreWALWriteCalled());
assertTrue(cp.isPostWALWriteCalled());
}
/**
* Coprocessors shouldn't get notice of empty waledits.
*/
@Test
public void testEmptyWALEditAreNotSeen() throws Exception {
RegionInfo hri = createBasicHRegionInfo(Bytes.toString(TEST_TABLE));
TableDescriptor htd = createBasic3FamilyHTD(Bytes.toString(TEST_TABLE));
MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
for(byte[] fam : htd.getColumnFamilyNames()) {
scopes.put(fam, 0);
}
WAL log = wals.getWAL(null);
try {
SampleRegionWALCoprocessor cp = getCoprocessor(log, SampleRegionWALCoprocessor.class);
cp.setTestValues(TEST_TABLE, null, null, null, null, null, null, null);
assertFalse(cp.isPreWALWriteCalled());
assertFalse(cp.isPostWALWriteCalled());
final long now = EnvironmentEdgeManager.currentTime();
long txid = log.appendData(hri,
new WALKeyImpl(hri.getEncodedNameAsBytes(), hri.getTable(), now, mvcc, scopes),
new WALEdit());
log.sync(txid);
assertFalse("Empty WALEdit should skip coprocessor evaluation.", cp.isPreWALWriteCalled());
assertFalse("Empty WALEdit should skip coprocessor evaluation.", cp.isPostWALWriteCalled());
} finally {
log.close();
}
}
/**
* Test WAL replay behavior with WALObserver.
*/
@Test
public void testWALCoprocessorReplay() throws Exception {
// WAL replay is handled at HRegion::replayRecoveredEdits(), which is
// ultimately called by HRegion::initialize()
TableName tableName = TableName.valueOf(currentTest.getMethodName());
TableDescriptor htd = getBasic3FamilyHTableDescriptor(tableName);
MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
// final HRegionInfo hri =
// createBasic3FamilyHRegionInfo(Bytes.toString(tableName));
// final HRegionInfo hri1 =
// createBasic3FamilyHRegionInfo(Bytes.toString(tableName));
RegionInfo hri = RegionInfoBuilder.newBuilder(tableName).build();
final Path basedir = CommonFSUtils.getTableDir(this.hbaseRootDir, tableName);
deleteDir(basedir);
fs.mkdirs(new Path(basedir, hri.getEncodedName()));
final Configuration newConf = HBaseConfiguration.create(this.conf);
// WAL wal = new WAL(this.fs, this.dir, this.oldLogDir, this.conf);
WAL wal = wals.getWAL(null);
// Put p = creatPutWith2Families(TEST_ROW);
WALEdit edit = new WALEdit();
long now = EnvironmentEdgeManager.currentTime();
final int countPerFamily = 1000;
NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
for (byte[] fam : htd.getColumnFamilyNames()) {
scopes.put(fam, 0);
}
for (byte[] fam : htd.getColumnFamilyNames()) {
addWALEdits(tableName, hri, TEST_ROW, fam, countPerFamily,
EnvironmentEdgeManager.getDelegate(), wal, scopes, mvcc);
}
wal.appendData(hri, new WALKeyImpl(hri.getEncodedNameAsBytes(), tableName, now, mvcc, scopes),
edit);
// sync to fs.
wal.sync();
User user = HBaseTestingUtility.getDifferentUser(newConf,
".replay.wal.secondtime");
user.runAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
Path p = runWALSplit(newConf);
LOG.info("WALSplit path == " + p);
// Make a new wal for new region open.
final WALFactory wals2 = new WALFactory(conf,
ServerName.valueOf(currentTest.getMethodName() + "2", 16010, System.currentTimeMillis())
.toString());
WAL wal2 = wals2.getWAL(null);
HRegion region = HRegion.openHRegion(newConf, FileSystem.get(newConf), hbaseRootDir,
hri, htd, wal2, TEST_UTIL.getHBaseCluster().getRegionServer(0), null);
SampleRegionWALCoprocessor cp2 =
region.getCoprocessorHost().findCoprocessor(SampleRegionWALCoprocessor.class);
// TODO: asserting here is problematic.
assertNotNull(cp2);
assertTrue(cp2.isPreWALRestoreCalled());
assertTrue(cp2.isPostWALRestoreCalled());
region.close();
wals2.close();
return null;
}
});
}
/**
* Test to see CP loaded successfully or not. There is a duplication at
* TestHLog, but the purpose of that one is to see whether the loaded CP will
* impact existing WAL tests or not.
*/
@Test
public void testWALObserverLoaded() throws Exception {
WAL log = wals.getWAL(null);
assertNotNull(getCoprocessor(log, SampleRegionWALCoprocessor.class));
}
@Test
public void testWALObserverRoll() throws Exception {
final WAL wal = wals.getWAL(null);
final SampleRegionWALCoprocessor cp = getCoprocessor(wal, SampleRegionWALCoprocessor.class);
cp.setTestValues(TEST_TABLE, null, null, null, null, null, null, null);
assertFalse(cp.isPreWALRollCalled());
assertFalse(cp.isPostWALRollCalled());
wal.rollWriter(true);
assertTrue(cp.isPreWALRollCalled());
assertTrue(cp.isPostWALRollCalled());
}
private SampleRegionWALCoprocessor getCoprocessor(WAL wal,
Class<? extends SampleRegionWALCoprocessor> clazz) throws Exception {
WALCoprocessorHost host = wal.getCoprocessorHost();
Coprocessor c = host.findCoprocessor(clazz.getName());
return (SampleRegionWALCoprocessor) c;
}
/**
* Creates an HRI around an HTD that has <code>tableName</code>.
* @param tableName Name of table to use.
*/
private RegionInfo createBasicHRegionInfo(String tableName) {
return RegionInfoBuilder.newBuilder(TableName.valueOf(tableName)).build();
}
/*
* @param p Directory to cleanup
*/
private void deleteDir(final Path p) throws IOException {
if (this.fs.exists(p)) {
if (!this.fs.delete(p, true)) {
throw new IOException("Failed remove of " + p);
}
}
}
private Put creatPutWith2Families(byte[] row) throws IOException {
Put p = new Put(row);
for (int i = 0; i < TEST_FAMILY.length - 1; i++) {
p.addColumn(TEST_FAMILY[i], TEST_QUALIFIER[i], TEST_VALUE[i]);
}
return p;
}
private Path runWALSplit(final Configuration c) throws IOException {
List<Path> splits = WALSplitter.split(
hbaseRootDir, logDir, oldLogDir, FileSystem.get(c), c, wals);
// Split should generate only 1 file since there's only 1 region
assertEquals(1, splits.size());
// Make sure the file exists
assertTrue(fs.exists(splits.get(0)));
LOG.info("Split file=" + splits.get(0));
return splits.get(0);
}
private void addWALEdits(final TableName tableName, final RegionInfo hri, final byte[] rowName,
final byte[] family, final int count, EnvironmentEdge ee, final WAL wal,
final NavigableMap<byte[], Integer> scopes, final MultiVersionConcurrencyControl mvcc)
throws IOException {
String familyStr = Bytes.toString(family);
long txid = -1;
for (int j = 0; j < count; j++) {
byte[] qualifierBytes = Bytes.toBytes(Integer.toString(j));
byte[] columnBytes = Bytes.toBytes(familyStr + ":" + Integer.toString(j));
WALEdit edit = new WALEdit();
edit.add(new KeyValue(rowName, family, qualifierBytes, ee.currentTime(), columnBytes));
// uses WALKeyImpl instead of HLogKey on purpose. will only work for tests where we don't care
// about legacy coprocessors
txid = wal.appendData(hri,
new WALKeyImpl(hri.getEncodedNameAsBytes(), tableName, ee.currentTime(), mvcc), edit);
}
if (-1 != txid) {
wal.sync(txid);
}
}
private TableDescriptor getBasic3FamilyHTableDescriptor(TableName tableName) {
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
Arrays.stream(TEST_FAMILY).map(ColumnFamilyDescriptorBuilder::of)
.forEachOrdered(builder::setColumnFamily);
return builder.build();
}
/**
 * Builds a table descriptor named {@code tableName} with the three fixed families
 * "a", "b" and "c", in that order.
 *
 * @param tableName table name as a string
 * @return descriptor with families a, b, c
 */
private TableDescriptor createBasic3FamilyHTD(String tableName) {
  final TableDescriptorBuilder builder =
      TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName));
  for (final String family : new String[] { "a", "b", "c" }) {
    builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
  }
  return builder.build();
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.bugs;
import com.intellij.codeInsight.daemon.impl.UnusedSymbolUtil;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.Processor;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.psiutils.CollectionUtils;
import com.siyeh.ig.psiutils.ConstructionUtils;
import com.siyeh.ig.psiutils.ParenthesesUtils;
import com.siyeh.ig.psiutils.VariableAccessUtils;
import com.siyeh.ig.ui.ExternalizableStringSet;
import org.intellij.lang.annotations.Pattern;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
 * Base class of the "Mismatched query and update of collection" inspection: it reports private
 * collection fields and local collection variables whose contents appear to be only updated but
 * never queried, or only queried but never updated.
 *
 * NOTE(review): the helper visitors CollectionQueryUpdateCalledVisitor and
 * ThisPassedAsArgumentVisitor are declared elsewhere in this package; their exact matching rules
 * are not visible here.
 */
public class MismatchedCollectionQueryUpdateInspectionBase extends BaseInspection {
// Methods declared on these classes do NOT count as queries of a collection argument.
static final Set<String> QUERY_EXCLUDES = Collections.singleton("java.util.Collections");
// Methods declared on these classes do NOT count as updates of a collection argument.
static final Set<String> UPDATE_EXCLUDES = new HashSet<>(CollectionUtils.getAllCollectionNames());
static {
UPDATE_EXCLUDES.add("java.util.Collections");
}
// User-configurable method-name fragments that are treated as querying a collection.
@SuppressWarnings("PublicField")
public final ExternalizableStringSet queryNames =
new ExternalizableStringSet("compute", "copyInto", "drainTo", "forEach", "parallelStream", "propertyNames", "replaceAll", "save",
"store", "stream", "toArray", "write");
// User-configurable method-name fragments that are treated as updating a collection.
@SuppressWarnings("PublicField")
public final ExternalizableStringSet updateNames =
new ExternalizableStringSet("add", "clear", "compute", "drainTo", "insert", "load", "merge", "offer", "poll", "push", "put", "remove",
"replace", "retain", "set", "take");
// Collection classes the user chose to exclude from this inspection entirely.
@SuppressWarnings("PublicField")
public final ExternalizableStringSet ignoredClasses = new ExternalizableStringSet();
/**
 * Returns true when {@code initializer} produces an empty collection: either a recognized
 * empty-collection construction, or a {@code new} expression none of whose arguments could
 * supply contents (no collection-typed and no array-typed argument).
 */
static boolean isEmptyCollectionInitializer(PsiExpression initializer) {
if (!(initializer instanceof PsiNewExpression)) {
// Not a 'new' expression: delegate to the generic empty-initializer check.
return ConstructionUtils.isEmptyCollectionInitializer(initializer);
}
final PsiNewExpression newExpression = (PsiNewExpression)initializer;
final PsiExpressionList argumentList = newExpression.getArgumentList();
if (argumentList == null) {
return false;
}
final PsiExpression[] arguments = argumentList.getExpressions();
for (final PsiExpression argument : arguments) {
final PsiType argumentType = argument.getType();
if (argumentType == null) {
// Unresolvable argument type: conservatively assume it may populate the collection.
return false;
}
if (CollectionUtils.isCollectionClassOrInterface(argumentType)) {
// e.g. new ArrayList<>(otherCollection) — contents copied in, not empty.
return false;
}
if (argumentType instanceof PsiArrayType) {
// e.g. a constructor taking an array of initial elements.
return false;
}
}
return true;
}
/** Stable inspection id used in suppression comments and profiles. */
@Pattern(VALID_ID_PATTERN)
@Override
@NotNull
public String getID() {
return "MismatchedQueryAndUpdateOfCollection";
}
@Override
@NotNull
public String getDisplayName() {
return InspectionGadgetsBundle.message("mismatched.update.collection.display.name");
}
/**
 * infos[0] is a Boolean: true = updated but never queried, false = queried but never updated.
 */
@Override
@NotNull
public String buildErrorString(Object... infos) {
final boolean updated = ((Boolean)infos[0]).booleanValue();
if (updated) {
return InspectionGadgetsBundle.message("mismatched.update.collection.problem.descriptor.updated.not.queried");
}
else {
return InspectionGadgetsBundle.message("mismatched.update.collection.problem.description.queried.not.updated");
}
}
@Override
public boolean isEnabledByDefault() {
return true;
}
/** Whole-file analysis is needed: a field's uses may be anywhere in the class. */
@Override
public boolean runForWholeFile() {
return true;
}
@Override
public BaseInspectionVisitor buildVisitor() {
return new MismatchedCollectionQueryUpdateVisitor();
}
/**
 * Detects indirect queries of a collection variable performed through assignment contexts,
 * in particular string concatenation ("" + list), which calls toString() on the collection.
 * Walking stops as soon as one querying use is found.
 */
private static class CollectionQueriedByAssignmentVisitor extends JavaRecursiveElementWalkingVisitor {
private boolean mayBeQueried;
@NotNull private final PsiVariable variable;
CollectionQueriedByAssignmentVisitor(@NotNull PsiVariable variable) {
this.variable = variable;
}
@Override
public void visitElement(@NotNull PsiElement element) {
// Short-circuit the walk once a query has been found.
if (mayBeQueried) {
return;
}
super.visitElement(element);
}
@Override
public void visitReferenceExpression(PsiReferenceExpression expression) {
if (mayBeQueried) {
return;
}
super.visitReferenceExpression(expression);
// Look for the variable appearing as an operand of a String '+' concatenation.
final PsiElement parent = ParenthesesUtils.getParentSkipParentheses(expression);
if (!(parent instanceof PsiPolyadicExpression)) {
return;
}
final PsiPolyadicExpression polyadicExpression = (PsiPolyadicExpression)parent;
final IElementType tokenType = polyadicExpression.getOperationTokenType();
if (JavaTokenType.PLUS != tokenType) {
return;
}
final PsiElement target = expression.resolve();
if (!variable.equals(target)) {
return;
}
final PsiType type = polyadicExpression.getType();
if (type == null || !type.equalsToText(CommonClassNames.JAVA_LANG_STRING)) {
return;
}
mayBeQueried = true; // query by concatenation ("" + list)
}
@Override
public void visitAssignmentExpression(@NotNull PsiAssignmentExpression assignment) {
if (mayBeQueried) {
return;
}
super.visitAssignmentExpression(assignment);
final PsiExpression lhs = assignment.getLExpression();
if (!VariableAccessUtils.mayEvaluateToVariable(lhs, variable)) {
return;
}
// Reassigning to a fresh empty collection is not a query of the old contents.
// NOTE(review): getRExpression() may return null for incomplete code — assumed to be
// handled by isEmptyCollectionInitializer's delegation; confirm.
final PsiExpression rhs = assignment.getRExpression();
if (isEmptyCollectionInitializer(rhs)) {
return;
}
mayBeQueried = true;
}
boolean mayBeQueried() {
return mayBeQueried;
}
}
/**
 * Main visitor: checks private fields against their top-level class and local variables
 * against their enclosing code block, reporting when query/update usage is one-sided.
 */
private class MismatchedCollectionQueryUpdateVisitor extends BaseInspectionVisitor {
@Override
public void visitField(@NotNull PsiField field) {
super.visitField(field);
// Non-private fields may be used from outside the file; skip them.
if (!field.hasModifierProperty(PsiModifier.PRIVATE)) {
return;
}
final PsiClass containingClass = PsiUtil.getTopLevelClass(field);
if (!checkVariable(field, containingClass)) {
return;
}
final boolean written = collectionContentsAreUpdated(field, containingClass);
final boolean read = collectionContentsAreQueried(field, containingClass);
// read == written means balanced usage (or neither, which other inspections cover);
// implicit writes (e.g. injection frameworks) are also exempt.
if (read == written || UnusedSymbolUtil.isImplicitWrite(field)) {
return;
}
registerFieldError(field, Boolean.valueOf(written));
}
@Override
public void visitLocalVariable(@NotNull PsiLocalVariable variable) {
super.visitLocalVariable(variable);
final PsiCodeBlock codeBlock = PsiTreeUtil.getParentOfType(variable, PsiCodeBlock.class);
if (!checkVariable(variable, codeBlock)) {
return;
}
final boolean written = collectionContentsAreUpdated(variable, codeBlock);
final boolean read = collectionContentsAreQueried(variable, codeBlock);
if (read != written) {
registerVariableError(variable, Boolean.valueOf(written));
}
}
/**
 * Pre-filters candidates: must be a collection type that never escapes via assignment-from,
 * return, or array initializer, and must not be on the user's ignore list.
 */
private boolean checkVariable(PsiVariable variable, PsiElement context) {
if (context == null) {
return false;
}
final PsiType type = variable.getType();
if (!CollectionUtils.isCollectionClassOrInterface(type)) {
return false;
}
if (VariableAccessUtils.variableIsAssignedFrom(variable, context)) {
return false;
}
if (VariableAccessUtils.variableIsReturned(variable, context)) {
return false;
}
if (VariableAccessUtils.variableIsUsedInArrayInitializer(variable, context)) {
return false;
}
return ignoredClasses.stream().noneMatch(className -> InheritanceUtil.isInheritor(type, className));
}
/**
 * True when the collection's contents may be written: update-named call, non-empty
 * initializer, update inside an anonymous-class initializer, 'this' escaping from it,
 * or reassignment of the variable.
 */
private boolean collectionContentsAreUpdated(PsiVariable variable, PsiElement context) {
if (VariableAccessUtils.variableIsPassedAsMethodArgument(variable, context, new UpdateCallProcessor()) ||
collectionUpdateCalled(variable, context)) {
return true;
}
final PsiExpression initializer = variable.getInitializer();
if (initializer != null && !isEmptyCollectionInitializer(initializer)) {
return true;
}
if (initializer instanceof PsiNewExpression) {
final PsiNewExpression newExpression = (PsiNewExpression)initializer;
final PsiAnonymousClass anonymousClass = newExpression.getAnonymousClass();
if (anonymousClass != null) {
// An anonymous subclass may update itself from within (instance initializers etc.).
if (collectionUpdateCalled(null, anonymousClass)) {
return true;
}
// 'this' passed out of the anonymous class could be updated by the callee.
final ThisPassedAsArgumentVisitor visitor = new ThisPassedAsArgumentVisitor();
anonymousClass.accept(visitor);
if (visitor.isPassed()) {
return true;
}
}
}
return VariableAccessUtils.variableIsAssigned(variable, context);
}
/**
 * True when the collection's contents may be read: query-named call, non-empty initializer
 * (contents observable elsewhere), or query through assignment/concatenation.
 */
private boolean collectionContentsAreQueried(PsiVariable variable, PsiElement context) {
if (VariableAccessUtils.variableIsPassedAsMethodArgument(variable, context, new QueryCallProcessor()) ||
collectionQueryCalled(variable, context)) {
return true;
}
final PsiExpression initializer = variable.getInitializer();
if (initializer != null && !isEmptyCollectionInitializer(initializer)) {
return true;
}
return collectionQueriedByAssignment(variable, context);
}
private boolean collectionQueryCalled(PsiVariable variable, PsiElement context) {
final CollectionQueryUpdateCalledVisitor visitor = new CollectionQueryUpdateCalledVisitor(variable, queryNames, true);
context.accept(visitor);
return visitor.isQueriedUpdated();
}
private boolean collectionUpdateCalled(@Nullable PsiVariable variable, PsiElement context) {
final CollectionQueryUpdateCalledVisitor visitor = new CollectionQueryUpdateCalledVisitor(variable, updateNames, false);
context.accept(visitor);
return visitor.isQueriedUpdated();
}
private boolean collectionQueriedByAssignment(@NotNull PsiVariable variable, @NotNull PsiElement context) {
final CollectionQueriedByAssignmentVisitor visitor = new CollectionQueriedByAssignmentVisitor(variable);
context.accept(visitor);
return visitor.mayBeQueried();
}
}
/**
 * Filters argument-passing call sites for the query check: calls into QUERY_EXCLUDES classes
 * (java.util.Collections) do not count as queries.
 */
private static class QueryCallProcessor implements Processor<PsiCall> {
@Override
public boolean process(PsiCall call) {
final PsiMethod method = call.resolveMethod();
if (method == null) {
return false;
}
final PsiClass aClass = method.getContainingClass();
return aClass != null && QUERY_EXCLUDES.contains(aClass.getQualifiedName());
}
}
/**
 * Filters argument-passing call sites for the update check: calls into UPDATE_EXCLUDES classes
 * do not count as updates, except BlockingQueue.drainTo(target), which fills the argument.
 */
private static class UpdateCallProcessor implements Processor<PsiCall> {
@Override
public boolean process(PsiCall call) {
final PsiMethod method = call.resolveMethod();
if (method == null) {
return false;
}
final PsiClass aClass = method.getContainingClass();
if (aClass == null || !UPDATE_EXCLUDES.contains(aClass.getQualifiedName())) {
return false;
}
// drainTo on a BlockingQueue writes into the passed collection, so it IS an update.
return !"drainTo".equals(method.getName()) || !InheritanceUtil.isInheritor(aClass, "java.util.concurrent.BlockingQueue");
}
}
}
| |
/*
* Copyright 2014 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.logic.behavior.nui;
import com.google.common.collect.Lists;
import org.terasology.entitySystem.Component;
import org.terasology.entitySystem.entity.EntityRef;
import org.terasology.logic.behavior.BehaviorComponent;
import org.terasology.logic.behavior.BehaviorNodeComponent;
import org.terasology.logic.behavior.BehaviorNodeFactory;
import org.terasology.logic.behavior.BehaviorSystem;
import org.terasology.logic.behavior.asset.BehaviorTree;
import org.terasology.logic.behavior.tree.Interpreter;
import org.terasology.registry.In;
import org.terasology.rendering.nui.CoreScreenLayer;
import org.terasology.rendering.nui.NUIManager;
import org.terasology.rendering.nui.UIWidget;
import org.terasology.rendering.nui.WidgetUtil;
import org.terasology.rendering.nui.databinding.Binding;
import org.terasology.rendering.nui.databinding.ReadOnlyBinding;
import org.terasology.rendering.nui.itemRendering.ToStringTextRenderer;
import org.terasology.rendering.nui.layers.mainMenu.EnterTextPopup;
import org.terasology.rendering.nui.layouts.PropertyLayout;
import org.terasology.rendering.nui.properties.OneOfProviderFactory;
import org.terasology.rendering.nui.properties.PropertyProvider;
import org.terasology.rendering.nui.widgets.ActivateEventListener;
import org.terasology.rendering.nui.widgets.UIDropdown;
import org.terasology.rendering.nui.widgets.UIList;
import java.awt.*;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.util.List;
/**
*/
/**
 * NUI screen for editing behavior trees: wires up the tree canvas, the node palette, the
 * entity/tree selection dropdowns, the property panels, and the debugger buttons.
 *
 * NOTE(review): BehaviorEditor, RenderableNode and BehaviorDebugger are declared elsewhere;
 * their contracts are not visible here.
 */
public class BehaviorEditorScreen extends CoreScreenLayer {
// Palette category markers: "--NAME--" = expanded category, "++NAME++" = collapsed category.
public static final String PALETTE_ITEM_OPEN = "--";
public static final String PALETTE_ITEM_CLOSE = "++";
private PropertyLayout entityProperties;
private BehaviorEditor behaviorEditor;
private PropertyLayout properties;
private UIDropdown<BehaviorTree> selectTree;
private UIDropdown<Interpreter> selectEntity;
private UIList<BehaviorNodeComponent> palette;
private BehaviorTree selectedTree;
private Interpreter selectedInterpreter;
private RenderableNode selectedNode;
private BehaviorDebugger debugger;
// Flat list backing the palette: category headers followed by their node entries.
private List<BehaviorNodeComponent> paletteItems;
@In
private NUIManager nuiManager;
@In
private BehaviorNodeFactory nodeFactory;
@In
private BehaviorSystem behaviorSystem;
@In
private OneOfProviderFactory providerFactory;
/**
 * Looks up all widgets by id and installs every binding and button listener.
 * Runs once when the screen is created.
 */
@Override
public void initialise() {
debugger = new BehaviorDebugger(nodeFactory);
entityProperties = find("entity_properties", PropertyLayout.class);
behaviorEditor = find("tree", BehaviorEditor.class);
properties = find("properties", PropertyLayout.class);
selectTree = find("select_tree", UIDropdown.class);
selectEntity = find("select_entity", UIDropdown.class);
palette = find("palette", UIList.class);
// Selecting a node in the editor shows its properties in the side panel.
behaviorEditor.bindSelection(new Binding<RenderableNode>() {
private PropertyProvider provider = new PropertyProvider();
@Override
public RenderableNode get() {
return selectedNode;
}
@Override
public void set(RenderableNode value) {
selectedNode = value;
properties.clear();
if (value != null) {
properties.addProperties("Behavior Node", provider.createProperties(value.getNode()));
}
}
});
// Tree dropdown options come live from the behavior system; also registered as a
// named provider so other UIs can offer the same list.
Binding<List<BehaviorTree>> treeBinding = new ReadOnlyBinding<List<BehaviorTree>>() {
@Override
public List<BehaviorTree> get() {
return behaviorSystem.getTrees();
}
};
selectTree.bindOptions(treeBinding);
providerFactory.register("behaviorTrees", treeBinding);
// Choosing a tree loads it into the editor and re-targets the debugger.
selectTree.bindSelection(new Binding<BehaviorTree>() {
@Override
public BehaviorTree get() {
return behaviorEditor.getTree();
}
@Override
public void set(BehaviorTree value) {
selectedTree = value;
behaviorEditor.setTree(value);
updateDebugger();
}
});
selectEntity.bindOptions(new ReadOnlyBinding<List<Interpreter>>() {
@Override
public List<Interpreter> get() {
return behaviorSystem.getInterpreter();
}
});
// Choosing an entity detaches the debugger from the old interpreter, shows the new
// minion's components in the entity panel, and re-targets the debugger.
selectEntity.bindSelection(new Binding<Interpreter>() {
private PropertyProvider provider = new PropertyProvider();
@Override
public Interpreter get() {
return selectedInterpreter;
}
@Override
public void set(Interpreter value) {
if (selectedInterpreter != null) {
selectedInterpreter.setDebugger(null);
}
selectedInterpreter = value;
if (selectedInterpreter != null) {
EntityRef minion = value.actor().minion();
entityProperties.clear();
for (Component component : minion.iterateComponents()) {
// Panel headers are the component class names without the "Component" suffix.
String name = component.getClass().getSimpleName().replace("Component", "");
entityProperties.addProperties(name, provider.createProperties(component));
}
}
updateDebugger();
}
});
// Palette clicks: toggle a category open/closed by its 2-char prefix, otherwise
// create the clicked node in the editor.
palette.bindSelection(new Binding<BehaviorNodeComponent>() {
@Override
public BehaviorNodeComponent get() {
return null;
}
@Override
public void set(BehaviorNodeComponent value) {
switch (value.name.substring(0, 2)) {
case PALETTE_ITEM_OPEN:
// Collapse: remove this category's entries (up to the next open header),
// then replace the header with a closed one.
int pos = paletteItems.indexOf(value) + 1;
while (pos < paletteItems.size() && !paletteItems.get(pos).name.startsWith(PALETTE_ITEM_OPEN)) {
paletteItems.remove(pos);
}
paletteItems.remove(pos - 1);
paletteItems.add(pos - 1, createCategory(value.category, false));
break;
case PALETTE_ITEM_CLOSE:
// Expand: replace the closed header with an open one and re-insert the
// category's node entries after it.
pos = paletteItems.indexOf(value);
paletteItems.remove(pos);
BehaviorNodeComponent categoryItem = createCategory(value.category, true);
paletteItems.add(pos, categoryItem);
paletteItems.addAll(pos + 1, nodeFactory.getNodesComponents(value.category));
break;
default:
behaviorEditor.createNode(value);
break;
}
}
});
palette.bindList(new ReadOnlyBinding<List<BehaviorNodeComponent>>() {
@Override
public List<BehaviorNodeComponent> get() {
return paletteItems;
}
});
palette.setItemRenderer(new ToStringTextRenderer<BehaviorNodeComponent>() {
@Override
public String getTooltip(BehaviorNodeComponent value) {
return value.description;
}
});
// "copy": serialize the current tree and put it on the system clipboard.
WidgetUtil.trySubscribe(this, "copy", new ActivateEventListener() {
@Override
public void onActivated(UIWidget button) {
Clipboard systemClipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
String data = behaviorEditor.save();
StringSelection contents = new StringSelection(data);
systemClipboard.setContents(contents, contents);
}
});
// "layout": auto-layout the selected tree starting from the selected node.
WidgetUtil.trySubscribe(this, "layout", new ActivateEventListener() {
@Override
public void onActivated(UIWidget button) {
BehaviorTree selection = selectTree.getSelection();
if (selection != null) {
selection.layout(selectedNode);
}
}
});
// "new": ask for a name and create a new tree rooted at the selected node.
// NOTE(review): requires a node to be selected — confirm that is intended.
WidgetUtil.trySubscribe(this, "new", new ActivateEventListener() {
@Override
public void onActivated(UIWidget button) {
if (selectedNode != null) {
nuiManager.pushScreen("engine:enterTextPopup", EnterTextPopup.class).bindInput(new Binding<String>() {
@Override
public String get() {
return null;
}
@Override
public void set(String value) {
behaviorSystem.createTree(value, selectedNode.getNode());
}
});
}
}
});
// "assign": attach the selected tree to the selected entity's minion by replacing its
// BehaviorComponent, then re-select the interpreter now driving that minion.
WidgetUtil.trySubscribe(this, "assign", new ActivateEventListener() {
@Override
public void onActivated(UIWidget button) {
if (selectedTree != null && selectedInterpreter != null) {
EntityRef minion = selectedInterpreter.actor().minion();
minion.removeComponent(BehaviorComponent.class);
BehaviorComponent component = new BehaviorComponent();
component.tree = selectedTree;
minion.addComponent(component);
List<Interpreter> interpreter = behaviorSystem.getInterpreter();
selectEntity.setSelection(null);
for (Interpreter i : interpreter) {
if (i.actor().minion() == minion) {
selectEntity.setSelection(i);
break;
}
}
}
}
});
// "remove": detach the selected node from its parent, remove its widgets (recursively),
// clear the selection and persist the modified tree.
WidgetUtil.trySubscribe(this, "remove", new ActivateEventListener() {
@Override
public void onActivated(UIWidget button) {
if (selectedNode != null && selectedTree != null) {
RenderableNode targetNode = selectedNode.getInputPort().getTargetNode();
if (targetNode != null) {
for (int i = 0; i < targetNode.getChildrenCount(); i++) {
if (targetNode.getChild(i) == selectedNode) {
targetNode.withModel().removeChild(i);
break;
}
}
}
removeWidget(selectedNode);
behaviorEditor.nodeClicked(null);
behaviorSystem.treeModified(selectedTree);
}
}
});
// Debugger transport buttons: run / pause / reset / single-step.
WidgetUtil.trySubscribe(this, "debug_run", new ActivateEventListener() {
@Override
public void onActivated(UIWidget button) {
if (debugger != null) {
debugger.run();
}
}
});
WidgetUtil.trySubscribe(this, "debug_pause", new ActivateEventListener() {
@Override
public void onActivated(UIWidget button) {
if (debugger != null) {
debugger.pause();
}
}
});
WidgetUtil.trySubscribe(this, "debug_reset", new ActivateEventListener() {
@Override
public void onActivated(UIWidget button) {
if (selectedInterpreter != null) {
selectedInterpreter.reset();
}
}
});
WidgetUtil.trySubscribe(this, "debug_step", new ActivateEventListener() {
@Override
public void onActivated(UIWidget button) {
if (debugger != null) {
debugger.step();
}
}
});
paletteItems = findPaletteItems();
}
/** This screen fully covers lower layers. */
@Override
public boolean isLowerLayerVisible() {
return false;
}
// Removes the node's widget and, recursively, the widgets of all its children.
private void removeWidget(RenderableNode node) {
behaviorEditor.removeWidget(node);
for (RenderableNode renderableNode : node.children()) {
removeWidget(renderableNode);
}
}
// Points the debugger at the current tree/interpreter pair once both are selected.
private void updateDebugger() {
if (selectedInterpreter != null && selectedTree != null) {
debugger.setTree(selectedTree);
selectedInterpreter.setDebugger(debugger);
}
}
// Builds the initial palette: every category expanded, header followed by its nodes.
private List<BehaviorNodeComponent> findPaletteItems() {
List<BehaviorNodeComponent> items = Lists.newArrayList();
for (String category : nodeFactory.getCategories()) {
BehaviorNodeComponent categoryItem = createCategory(category, true);
items.add(categoryItem);
items.addAll(nodeFactory.getNodesComponents(category));
}
return items;
}
// Builds a pseudo-node acting as a palette category header, e.g. "--MOVEMENT--" or "++MOVEMENT++".
private BehaviorNodeComponent createCategory(String category, boolean open) {
String prefix = open ? PALETTE_ITEM_OPEN : PALETTE_ITEM_CLOSE;
BehaviorNodeComponent categoryItem = new BehaviorNodeComponent();
categoryItem.category = category;
categoryItem.name = prefix + category.toUpperCase() + prefix;
return categoryItem;
}
}
| |
/* ownCloud Android Library is available under MIT license
* Copyright (C) 2014 ownCloud Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
package com.owncloud.android.lib.common;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.httpclient.cookie.CookiePolicy;
import android.accounts.Account;
import android.accounts.AuthenticatorException;
import android.accounts.OperationCanceledException;
import android.content.Context;
import android.net.Uri;
import android.util.Log;
import com.owncloud.android.lib.common.OwnCloudClient;
import com.owncloud.android.lib.common.OwnCloudClientFactory;
import com.owncloud.android.lib.common.accounts.AccountUtils;
import com.owncloud.android.lib.common.accounts.AccountUtils.AccountNotFoundException;
/**
* Implementation of {@link OwnCloudClientManager}
*
* TODO check multithreading safety
*
* @author David A. Velasco
* @author masensio
*/
/**
 * Implementation of {@link OwnCloudClientManager} that reuses a single {@link OwnCloudClient}
 * per account session.
 *
 * Clients are cached in two maps: by account name once the username is known, and by a
 * session name (base URI + auth token) while it is not. A client created before the username
 * was known is migrated from the second map to the first as soon as the account name is
 * available.
 *
 * Thread-safety: every method that touches the two cache maps is {@code synchronized} on this
 * instance. (Previously {@code removeClientFor} was not, allowing it to race against
 * {@code getClientFor}/{@code saveAllClients} on the plain {@link HashMap}s.)
 *
 * @author David A. Velasco
 * @author masensio
 */
public class SingleSessionManager implements OwnCloudClientManager {

    private static final String TAG = SingleSessionManager.class.getSimpleName();

    /** Clients keyed by account name (username already known). */
    private Map<String, OwnCloudClient> mClientsWithKnownUsername =
            new HashMap<String, OwnCloudClient>();

    /** Clients keyed by session name (base URI + auth token) while the username is unknown. */
    private Map<String, OwnCloudClient> mClientsWithUnknownUsername =
            new HashMap<String, OwnCloudClient>();

    /**
     * Returns a client for {@code account}, reusing a cached one when possible; otherwise
     * creates, configures and caches a new client.
     *
     * @param account account to get a client for; must not be null
     * @param context Android context used to create a new client if needed
     * @return a client bound to the account's credentials and base URI
     * @throws IllegalArgumentException if {@code account} is null
     */
    @Override
    public synchronized OwnCloudClient getClientFor(OwnCloudAccount account, Context context) {
        Log.d(TAG, "getClientFor(OwnCloudAccount ... : ");
        if (account == null) {
            throw new IllegalArgumentException("Cannot get an OwnCloudClient for a null account");
        }

        OwnCloudClient client = null;
        String accountName = account.getName();
        String sessionName = AccountUtils.buildAccountName(
                account.getBaseUri(),
                account.getCredentials().getAuthToken());

        if (accountName != null) {
            client = mClientsWithKnownUsername.get(accountName);
        }
        boolean reusingKnown = false; // just for logs
        if (client == null) {
            if (accountName != null) {
                // The account name just became known: migrate any client cached under the
                // session name to the known-username map.
                client = mClientsWithUnknownUsername.remove(sessionName);
                if (client != null) {
                    Log.d(TAG, " reusing client {" + sessionName + ", " +
                            client.hashCode() + "}");
                    mClientsWithKnownUsername.put(accountName, client);
                    Log.d(TAG, " moved client to {" + accountName + ", " +
                            client.hashCode() + "}");
                }
            } else {
                client = mClientsWithUnknownUsername.get(sessionName);
            }
        } else {
            Log.d(TAG, " reusing client {" + accountName + ", " + client.hashCode() + "}");
            reusingKnown = true;
        }

        if (client == null) {
            // no client to reuse - create a new one
            client = OwnCloudClientFactory.createOwnCloudClient(
                    account.getBaseUri(),
                    context.getApplicationContext(),
                    true); // TODO remove dependency on OwnCloudClientFactory
            client.getParams().setCookiePolicy(CookiePolicy.BROWSER_COMPATIBILITY);
            // enable cookie tracking

            // Restore Cookies ??
            AccountUtils.restoreCookies(accountName, client, context);

            client.setCredentials(account.getCredentials());
            if (accountName != null) {
                mClientsWithKnownUsername.put(accountName, client);
                Log.d(TAG, " new client {" + accountName + ", " + client.hashCode() + "}");
            } else {
                mClientsWithUnknownUsername.put(sessionName, client);
                Log.d(TAG, " new client {" + sessionName + ", " + client.hashCode() + "}");
            }
        } else {
            if (!reusingKnown) {
                Log.d(TAG, " reusing client {" + sessionName + ", " + client.hashCode() + "}");
            }
            // Reused clients may hold stale credentials / base URI; refresh both.
            keepCredentialsUpdated(account, client);
            keepUriUpdated(account, client);
        }

        return client;
    }

    /**
     * Removes and returns the client cached for {@code account}, trying the known-username
     * map first and the session-name map second; returns null when none is tracked.
     *
     * FIX: now {@code synchronized} — this method mutates the same maps that
     * {@code getClientFor} and {@code saveAllClients} guard with {@code synchronized}.
     *
     * @param account account whose client should be evicted; null returns null
     * @return the evicted client, or null if none was cached
     */
    @Override
    public synchronized OwnCloudClient removeClientFor(OwnCloudAccount account) {
        if (account == null) {
            return null;
        }

        OwnCloudClient client = null;
        String accountName = account.getName();
        if (accountName != null) {
            client = mClientsWithKnownUsername.remove(accountName);
            if (client != null) {
                Log.d(TAG, "Removed client {" + accountName + ", " + client.hashCode() + "}");
                return client;
            } else {
                Log.d(TAG, "No client tracked for {" + accountName + "}");
            }
        }

        String sessionName = AccountUtils.buildAccountName(
                account.getBaseUri(),
                account.getCredentials().getAuthToken());
        client = mClientsWithUnknownUsername.remove(sessionName);
        if (client != null) {
            Log.d(TAG, "Removed client {" + sessionName + ", " + client.hashCode() + "}");
            return client;
        }
        Log.d(TAG, "No client tracked for {" + sessionName + "}");

        Log.d(TAG, "No client removed");
        return null;
    }

    /**
     * Persists the session state (cookies) of every client with a known username.
     *
     * @param context     Android context for account access
     * @param accountType account type used to reconstruct each {@link Account}
     */
    @Override
    public synchronized void saveAllClients(Context context, String accountType)
            throws AccountNotFoundException, AuthenticatorException, IOException,
            OperationCanceledException {

        Iterator<String> accountNames = mClientsWithKnownUsername.keySet().iterator();
        String accountName = null;
        Account account = null;
        while (accountNames.hasNext()) {
            accountName = accountNames.next();
            account = new Account(accountName, accountType);
            AccountUtils.saveClient(
                    mClientsWithKnownUsername.get(accountName),
                    account,
                    context);
        }
    }

    /** Refreshes the reused client's auth token when the account carries a newer one. */
    private void keepCredentialsUpdated(OwnCloudAccount account, OwnCloudClient reusedClient) {
        OwnCloudCredentials recentCredentials = account.getCredentials();
        if (!recentCredentials.getAuthToken().equals(
                reusedClient.getCredentials().getAuthToken())) {
            reusedClient.setCredentials(recentCredentials);
        }
    }

    // this method is just a patch; we need to distinguish accounts in the same host but
    // different paths; but that requires updating the accountNames for apps upgrading
    private void keepUriUpdated(OwnCloudAccount account, OwnCloudClient reusedClient) {
        Uri recentUri = account.getBaseUri();
        if (!recentUri.equals(reusedClient.getBaseUri())) {
            reusedClient.setBaseUri(recentUri);
        }
    }
}
| |
/**
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "{}"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright {yyyy} {name of copyright owner}
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.deleidos.rtws.core.framework;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import net.sf.json.JSONObject;
import org.apache.log4j.Logger;
import com.deleidos.rtws.commons.exception.InitializationException;
import com.deleidos.rtws.commons.util.Configurable;
import com.deleidos.rtws.commons.util.Initializable;
import com.deleidos.rtws.core.framework.factory.ParserFactory;
import com.deleidos.rtws.core.framework.factory.PipelineFactory;
import com.deleidos.rtws.core.framework.parser.CanonicalFormat;
import com.deleidos.rtws.core.framework.parser.Parser;
import com.deleidos.rtws.core.management.ManagedPipeline;
import com.deleidos.rtws.core.management.ManagedPipelineName;
import com.deleidos.rtws.core.management.ManagementContext;
/**
 * Drives the parse stage of the ingest pipeline: resolves a {@link Parser} for the
 * incoming stream's declared data format, parses records one at a time, routes each
 * parsed record to the {@link ProcessingPipeline} matching its model, and accumulates
 * per-stage timing statistics through the JMX management context.
 */
public class ParsingPipeline implements Configurable, Initializable {

    private static final Logger log = Logger.getLogger(ParsingPipeline.class);

    /** System property that names the JMS header carrying the stream's data format. */
    private static final String MESSAGE_FORMAT_HEADER_NAME_SYSTEM_PROPERTY_NAME = "messaging.header.format";

    private Properties systemProperties = new Properties();

    private ParserFactory parserFactory;

    private PipelineFactory pipelineFactory;

    private ManagementContext jmx;

    /** Raw input stream for the current message; set by the caller before runPipeline(). */
    private InputStream stream;

    /** Per-stream (message) properties; expected to contain the data-format header. */
    private Properties streamProperties;

    /** Name of the stream property holding the data format; resolved in initialize(). */
    private String jmsDataFormatHeader;

    public ParsingPipeline() {
        super();
    }

    public Properties getProperties() {
        return systemProperties;
    }

    public void setProperties(Properties value) {
        systemProperties = value;
    }

    public void setParserFactory(ParserFactory value) {
        parserFactory = value;
    }

    public void setPipelineFactory(PipelineFactory value) {
        pipelineFactory = value;
    }

    public void setManagementContext(ManagementContext value) {
        jmx = value;
    }

    public void setStream(InputStream value) {
        stream = value;
    }

    public void setStreamProperties(Properties value) {
        streamProperties = value;
    }

    /**
     * Resolves configuration and initializes both factories.
     *
     * @throws InitializationException if the required format-header property is not set.
     */
    public void initialize() {
        jmsDataFormatHeader = systemProperties.getProperty(MESSAGE_FORMAT_HEADER_NAME_SYSTEM_PROPERTY_NAME);
        if (jmsDataFormatHeader == null) {
            throw new InitializationException("Required system property is not set '" + MESSAGE_FORMAT_HEADER_NAME_SYSTEM_PROPERTY_NAME + "'.");
        }
        parserFactory.setProperties(systemProperties);
        pipelineFactory.setProperties(systemProperties);
        parserFactory.initialize();
        pipelineFactory.initialize();
    }

    /** Flushes any buffered output and releases both factories. */
    public void dispose() {
        parserFactory.dispose();
        pipelineFactory.flush();
        pipelineFactory.dispose();
    }

    /**
     * Parses every record available on the configured stream and runs each through its
     * model's processing pipeline.
     *
     * @return the list of per-record errors encountered; empty when everything succeeded.
     */
    public List<PipelineError> runPipeline() {
        List<PipelineError> pipelineErrors = new ArrayList<PipelineError>();
        String format = streamProperties.getProperty(jmsDataFormatHeader);
        Parser parser = parserFactory.getInstance(format);
        if (parser == null) {
            log.warn("No parser is defined for specified data format '" + format + "'.");
            // Capture the raw payload (best effort) so the error report shows what arrived.
            StringBuilder record = new StringBuilder();
            byte[] buff = new byte[512];
            int read = 0;
            try {
                while ((read = stream.read(buff)) != -1) {
                    // Only the bytes actually read belong to the payload; appending the
                    // whole buffer would duplicate stale data from earlier iterations.
                    // NOTE(review): platform default charset is used here, as before —
                    // confirm whether the payload should be decoded as UTF-8 instead.
                    record.append(new String(buff, 0, read));
                }
            } catch (Exception e) {
                // Best effort only: a read failure must not mask the "no parser" error.
                log.debug("Failed to fully read content of unparseable stream.", e);
            }
            PipelineError err = new PipelineError(record.toString(), "No parser is defined for specified data format '" + format + "'.");
            pipelineErrors.add(err);
            return pipelineErrors;
        }
        ProcessorName name = new ProcessorName(0, format, parser.getClass());
        JSONObject record;
        parser.setInputStream(stream);
        parser.setStreamProperties(streamProperties);
        parser.parseHeaders();
        while (true) {
            try {
                long begin = System.currentTimeMillis();
                record = parser.parse();
                long parseComplete = System.currentTimeMillis();
                if (record != null) {
                    String model = CanonicalFormat.getSimpleModelName(record);
                    long parseDuration = parseComplete - begin;
                    ManagedPipeline stats = jmx.pipeline(new ManagedPipelineName(format, model));
                    stats.accumulate(name, parseDuration);
                    ProcessingPipeline workflow = pipelineFactory.getInstance(model);
                    if (workflow != null) {
                        workflow.runPipeline(record, stats);
                    } else {
                        log.warn("No processing pipeline is defined for the specified model '" + model + "'.");
                        throw new PipelineException("No processing pipeline is defined for the specified model '" + model + "'.");
                    }
                    long pipelineComplete = System.currentTimeMillis();
                    stats.total(pipelineComplete - begin);
                } else {
                    // Parser signals end-of-stream with a null record.
                    break;
                }
            } catch (PipelineException pe) {
                // Record the failure and keep going; one bad record must not abort the batch.
                pipelineErrors.add(new PipelineError(parser.getCurrentRecord(), pe.getMessage()));
            }
        }
        // TODO Rethink. This flush is intended to preserve the original bundling of a set
        // of json records, so that what comes in in a single message goes back out in a
        // single message. This however prevents one from being able to elect to rebundle
        // message to optimize size (i.e. if one expects the size to change significantly
        // during the pipeline processing). Not sure which is best.
        pipelineFactory.flush();
        return pipelineErrors;
    }
}
| |
/*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.framework;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.net.MalformedURLException;
import java.sql.Connection;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import junit.framework.ComparisonFailure;
import org.dbunit.Assertion;
import org.dbunit.database.DatabaseConnection;
import org.dbunit.dataset.FilteredDataSet;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.filter.ExcludeTableFilter;
import org.dbunit.dataset.xml.FlatXmlDataSet;
import org.dbunit.dataset.xml.FlatXmlDataSetBuilder;
import org.hibernate.SessionFactory;
import org.hibernate.stat.Statistics;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.runner.RunWith;
import org.mifos.application.master.business.MifosCurrency;
import org.mifos.config.FiscalCalendarRules;
import org.mifos.customers.office.business.OfficeBO;
import org.mifos.customers.office.persistence.OfficePersistence;
import org.mifos.customers.personnel.business.PersonnelBO;
import org.mifos.customers.personnel.persistence.LegacyPersonnelDao;
import org.mifos.customers.personnel.util.helpers.PersonnelConstants;
import org.mifos.framework.exceptions.PersistenceException;
import org.mifos.framework.hibernate.helper.AuditInterceptorFactory;
import org.mifos.framework.hibernate.helper.DatabaseDependentTest;
import org.mifos.framework.hibernate.helper.StaticHibernateUtil;
import org.mifos.framework.util.ConfigurationLocator;
import org.mifos.framework.util.StandardTestingService;
import org.mifos.framework.util.helpers.FilePaths;
import org.mifos.framework.util.helpers.Money;
import org.mifos.framework.util.helpers.TestCaseInitializer;
import org.mifos.framework.util.helpers.TestObjectFactory;
import org.mifos.service.test.TestMode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.util.Log4jConfigurer;
/**
 * All classes extending this class must be named as <b>*IntegrationTest.java</b> to support maven-surefire-plugin autofind
 * feature.
 * <br />
 * <br />
 * This base class initializes the database and various other things and so any class derived from this is an
 * integration test. If a test is not an integration test and does not need the database, then it should not derive from
 * this class.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "/integration-test-context.xml",
        "/org/mifos/config/resources/applicationContext.xml",
        "/org/mifos/config/resources/apponly-services.xml",
        "classpath*:META-INF/spring/DbUpgradeContext.xml"})
public class MifosIntegrationTestCase {

    // Primitive boolean: the nullable Boolean wrapper added nothing but unboxing overhead.
    private static boolean isTestingModeSet = false;

    /** Snapshot of the database taken before a test, compared against in tear-down. */
    private static IDataSet latestDataDump;

    @Autowired
    protected LegacyPersonnelDao legacyPersonnelDao;

    @Autowired
    protected SessionFactory sessionFactory;

    /**
     * This is a switch to enable verification of database (cleanup) at the end of an integration tests. i.e. if a test
     * leaves database in dirty state (not rolling back properly) then it will force it to fail. This is used for
     * figuring out which test leaving database in inconsistent state for an another test causing it to fail.
     */
    protected static boolean verifyDatabaseState;

    protected static ExcludeTableFilter excludeTables = new ExcludeTableFilter();

    /** Working-days setting saved by enableCustomWorkingDays(), restored after each test. */
    private static String savedFiscalCalendarRulesWorkingDays;

    @BeforeClass
    public static void init() throws Exception {
        Log4jConfigurer.initLogging(new ConfigurationLocator().getFilePath(FilePaths.LOG_CONFIGURATION_FILE));
        verifyDatabaseState = false;
        if (!isTestingModeSet) {
            new StandardTestingService().setTestMode(TestMode.INTEGRATION);
            isTestingModeSet = true;
        }
    }

    @Before
    public void before() throws Exception {
        new TestCaseInitializer().initialize(sessionFactory);
        dbVerificationSetUp();
        DatabaseDependentTest.before(new AuditInterceptorFactory(), sessionFactory);
        Money.setDefaultCurrency(TestUtils.RUPEE);
    }

    @After
    public void after() throws Exception {
        disableCustomWorkingDays();
        TestUtils.dereferenceObjects(this);
        DatabaseDependentTest.after(new AuditInterceptorFactory(), sessionFactory);
        dbVerificationTearDown();
    }

    private Statistics statisticsService;

    /** Money-aware assertEquals; throws ComparisonFailure with both string forms on mismatch. */
    protected void assertEquals(String s, Money one, Money two) {
        if (one.equals(two)) {
            return;
        }
        throw new ComparisonFailure(s, one.toString(), two.toString());
    }

    /** Parses a dd/MM/yyyy date string. */
    protected Date getDate(String date) throws ParseException {
        SimpleDateFormat format = new SimpleDateFormat("dd/MM/yyyy");
        return format.parse(date);
    }

    protected Statistics getStatisticsService() {
        return this.statisticsService;
    }

    protected void setStatisticsService(Statistics service) {
        this.statisticsService = service;
    }

    protected void initializeStatisticsService() {
        statisticsService = StaticHibernateUtil.getSessionFactory().getStatistics();
        statisticsService.setStatisticsEnabled(true);
    }

    /*
     * Gets the test data office with office_id == 1
     */
    protected OfficeBO getHeadOffice() {
        try {
            return new OfficePersistence().getOffice(TestObjectFactory.HEAD_OFFICE);
        } catch (PersistenceException e) {
            throw new RuntimeException(e);
        }
    }

    /*
     * Gets the test data office with office_id == 3
     */
    protected OfficeBO getBranchOffice() {
        try {
            return new OfficePersistence().getOffice(TestObjectFactory.SAMPLE_BRANCH_OFFICE);
        } catch (PersistenceException e) {
            throw new RuntimeException(e);
        }
    }

    /*
     * Gets the test data user personnel_id == 1
     */
    protected PersonnelBO getSystemUser() {
        try {
            return legacyPersonnelDao.getPersonnel(PersonnelConstants.SYSTEM_USER);
        } catch (PersistenceException e) {
            throw new RuntimeException(e);
        }
    }

    /*
     * Gets the test data user personnel_id == 3
     */
    protected PersonnelBO getTestUser() {
        try {
            return legacyPersonnelDao.getPersonnel(PersonnelConstants.TEST_USER);
        } catch (PersistenceException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * see MIFOS-2659 <br><br>
     * This will be disabled automatically at the end of a test case
     *
     */
    protected static void enableCustomWorkingDays() {
        savedFiscalCalendarRulesWorkingDays = new FiscalCalendarRules().getWorkingDaysAsString();
        new FiscalCalendarRules().setWorkingDays("MONDAY,TUESDAY,WEDNESDAY,THURSDAY,FRIDAY,SATURDAY,SUNDAY");
    }

    /**
     * see MIFOS-2659
     */
    // Renamed from the misspelled "diableCustomWorkingDays"; private, so no external callers.
    private static void disableCustomWorkingDays() {
        if (savedFiscalCalendarRulesWorkingDays != null) {
            new FiscalCalendarRules().setWorkingDays(savedFiscalCalendarRulesWorkingDays);
        }
        savedFiscalCalendarRulesWorkingDays = null;
    }

    /** Dumps the current database state to XML and reloads it as the pre-test snapshot. */
    private void dbVerificationSetUp() throws Exception {
        if (verifyDatabaseState) {
            excludeTables.excludeTable("BATCH_JOB_EXECUTION");
            Connection connection = StaticHibernateUtil.getSessionTL().connection();
            connection.setAutoCommit(false);
            DatabaseConnection dbUnitConnection = new DatabaseConnection(connection);
            latestDataDump = new FilteredDataSet(excludeTables, dbUnitConnection.createDataSet());
            String tmpDir = System.getProperty("java.io.tmpdir") + System.getProperty("file.separator");
            // Close the dump file explicitly; the original leaked the FileOutputStream.
            FileOutputStream dumpOut = new FileOutputStream(tmpDir + "latestDataDump.xml");
            try {
                FlatXmlDataSet.write(latestDataDump, dumpOut);
            } finally {
                dumpOut.close();
            }
            FlatXmlDataSetBuilder fxmlBuilder = new FlatXmlDataSetBuilder();
            latestDataDump = fxmlBuilder.build(new File(tmpDir + "latestDataDump.xml"));
        }
    }

    /** Re-dumps the database and asserts it matches the pre-test snapshot. */
    private void dbVerificationTearDown() throws Exception {
        if (verifyDatabaseState) {
            Connection connection = StaticHibernateUtil.getSessionTL().connection();
            connection.setAutoCommit(false);
            DatabaseConnection dbUnitConnection = new DatabaseConnection(connection);
            IDataSet upgradeDataDump = new FilteredDataSet(excludeTables, dbUnitConnection.createDataSet());
            String tmpDir = System.getProperty("java.io.tmpdir") + System.getProperty("file.separator");
            // Close the dump file explicitly; the original leaked the FileOutputStream.
            FileOutputStream dumpOut = new FileOutputStream(tmpDir + "upgradeDataDump.xml");
            try {
                FlatXmlDataSet.write(upgradeDataDump, dumpOut);
            } finally {
                dumpOut.close();
            }
            FlatXmlDataSetBuilder fxmlBuilder = new FlatXmlDataSetBuilder();
            upgradeDataDump = fxmlBuilder.build(new File(tmpDir + "upgradeDataDump.xml"));
            Assertion.assertEquals(latestDataDump, upgradeDataDump);
        }
    }

    public MifosCurrency getCurrency() {
        // TODO: will be replaced by a better way to get currency for integration tests
        // NOTE: TestObjectFactory.getCurrency also exists
        return Money.getDefaultCurrency();
    }
}
| |
package no.ssb.vtl.script.operations;
/*-
* ========================LICENSE_START=================================
* Java VTL
* %%
* Copyright (C) 2016 - 2018 Hadrien Kohl
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =========================LICENSE_END==================================
*/
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.assertj.core.api.Assertions.assertThat;
public class ForwardingStreamTest {
@Test
public void filter() {
    // Only elements passing the predicate survive.
    assertThat(forwardedStreamOf(1, 2, 3).filter(n -> n > 2)).containsExactly(3);
}
@Test
public void map() {
    assertThat(forwardedStreamOf(1, 2, 3).map(n -> n * 2)).containsExactly(2, 4, 6);
}
@Test
public void mapToInt() {
    assertThat(forwardedStreamOf(1, 2, 3).mapToInt(Integer::intValue).sum()).isEqualTo(6);
}
@Test
public void mapToLong() {
    assertThat(forwardedStreamOf(1, 2, 3).mapToLong(Integer::longValue).sum()).isEqualTo(6);
}
@Test
public void mapToDouble() {
    assertThat(forwardedStreamOf(1, 2, 3).mapToDouble(n -> n * 0.5).sum()).isEqualTo(3);
}
@Test
public void flatMap() {
    // Each element n expands to n copies of itself.
    assertThat(forwardedStreamOf(1, 2, 3)
            .flatMap(n -> Collections.nCopies(n, n).stream()))
            .containsExactly(1, 2, 2, 3, 3, 3);
}
@Test
public void flatMapToInt() {
    // 1*1 + 2*2 + 3*3 = 14
    int sum = forwardedStreamOf(1, 2, 3)
            .flatMapToInt(n -> Collections.nCopies(n, n).stream().mapToInt(Integer::intValue))
            .sum();
    assertThat(sum).isEqualTo(14);
}
@Test
public void flatMapToLong() {
    // 1*1 + 2*2 + 3*3 = 14
    long sum = forwardedStreamOf(1, 2, 3)
            .flatMapToLong(n -> Collections.nCopies(n, n).stream().mapToLong(Integer::longValue))
            .sum();
    assertThat(sum).isEqualTo(14);
}
@Test
public void flatMapToDouble() {
    // (1*1 + 2*2 + 3*3) * 0.5 = 7
    double sum = forwardedStreamOf(1, 2, 3)
            .flatMapToDouble(n -> Collections.nCopies(n, n).stream().mapToDouble(v -> v * 0.5))
            .sum();
    assertThat(sum).isEqualTo(7);
}
@Test
public void distinct() {
    assertThat(forwardedStreamOf(1, 1, 2, 3).distinct()).containsExactly(1, 2, 3);
}
/** Builds a ForwardingStream whose delegate is a one-shot stream of the given values. */
private ForwardingStream<Integer> forwardedStreamOf(Integer... integers) {
    final Stream<Integer> delegate = Stream.of(integers);
    return new ForwardingStream<Integer>() {
        @Override
        protected Stream<Integer> delegate() {
            return delegate;
        }
    };
}
@Test
public void sorted() {
    assertThat(forwardedStreamOf(3, 2, 1).sorted()).containsExactly(1, 2, 3);
}
@Test
public void peek() {
    AtomicInteger sum = new AtomicInteger(0);
    Stream<Integer> peeked = forwardedStreamOf(1, 2, 3).peek(sum::getAndAdd);
    // peek is lazy: nothing accumulates until the stream is consumed.
    assertThat(sum.get()).isEqualTo(0);
    assertThat(peeked).containsExactly(1, 2, 3);
    assertThat(sum.get()).isEqualTo(6);
}
@Test
public void limit() {
    assertThat(forwardedStreamOf(1, 2, 3).limit(2)).containsExactly(1, 2);
}
@Test
public void skip() {
    assertThat(forwardedStreamOf(1, 2, 3).skip(2)).containsExactly(3);
}
@Test
public void forEach() {
    AtomicInteger sum = new AtomicInteger(0);
    forwardedStreamOf(1, 2, 3).forEach(sum::getAndAdd);
    assertThat(sum.get()).isEqualTo(6);
}
@Test
public void forEachOrdered() {
    AtomicInteger sum = new AtomicInteger(0);
    forwardedStreamOf(1, 2, 3).forEachOrdered(sum::getAndAdd);
    assertThat(sum.get()).isEqualTo(6);
}
@Test
public void toArray() {
    assertThat(forwardedStreamOf(1, 2, 3).toArray()).isEqualTo(new Object[]{1, 2, 3});
}
@Test
public void toArray1() {
    Integer[] dest = new Integer[3];
    // The generator hands back our array, so both the result and dest are filled.
    Integer[] result = forwardedStreamOf(1, 2, 3).toArray(size -> dest);
    assertThat(result).isEqualTo(new Object[]{1, 2, 3});
    assertThat(dest).isEqualTo(new Object[]{1, 2, 3});
}
@Test
public void reduce() {
    // -1 + 1 + 2 + 3 = 5
    assertThat(forwardedStreamOf(1, 2, 3).reduce(-1, Integer::sum)).isEqualTo(5);
}
@Test
public void reduce1() {
    assertThat(forwardedStreamOf(1, 2, 3).reduce(Integer::sum)).contains(6);
}
@Test
public void reduce2() {
    // Sequential: accumulator multiplies, combiner never runs; -1*1*2*3 = -6.
    assertThat(forwardedStreamOf(1, 2, 3).reduce(-1, (acc, n) -> acc * n, Integer::sum)).isEqualTo(-6);
}
@Test
public void collect() {
    assertThat(forwardedStreamOf(1, 2, 3).collect(Collectors.toList())).containsExactly(1, 2, 3);
}
@Test
public void collect1() {
    // Three-arg collect with explicit supplier/accumulator/combiner.
    ArrayList<Object> collected = forwardedStreamOf(1, 2, 3).collect(
            (Supplier<ArrayList<Object>>) ArrayList::new,
            ArrayList::add,
            ArrayList::addAll);
    assertThat(collected).containsExactly(1, 2, 3);
}
@Test
public void min() {
    assertThat(forwardedStreamOf(1, 2, 3).min(Integer::compareTo)).contains(1);
}
@Test
public void max() {
    assertThat(forwardedStreamOf(1, 2, 3).max(Integer::compareTo)).contains(3);
}
@Test
public void count() {
    assertThat(forwardedStreamOf(1, 2, 3).count()).isEqualTo(3);
}
@Test
public void anyMatch() {
    ForwardingStream<Integer> present = forwardedStreamOf(1, 2, 3);
    ForwardingStream<Integer> absent = forwardedStreamOf(1, 2, 3);
    assertThat(present.anyMatch(n -> n.equals(1))).isTrue();
    assertThat(absent.anyMatch(n -> n.equals(-1))).isFalse();
}
@Test
public void allMatch() {
    ForwardingStream<Integer> allPositive = forwardedStreamOf(1, 2, 3);
    ForwardingStream<Integer> nonePositive = forwardedStreamOf(1, 2, 3);
    assertThat(allPositive.allMatch(n -> n > 0)).isTrue();
    assertThat(nonePositive.allMatch(n -> n < 0)).isFalse();
}
@Test
public void noneMatch() {
    ForwardingStream<Integer> noZeros = forwardedStreamOf(1, 2, 3);
    ForwardingStream<Integer> allNonZero = forwardedStreamOf(1, 2, 3);
    assertThat(noZeros.noneMatch(n -> n == 0)).isTrue();
    assertThat(allNonZero.noneMatch(n -> n != 0)).isFalse();
}
@Test
public void findFirst() {
    // Use the shared forwardedStreamOf factory instead of duplicating the
    // anonymous ForwardingStream subclass it already encapsulates.
    assertThat(forwardedStreamOf(1, 2, 3).findFirst()).contains(1);
}
@Test
public void findAny() {
    // Consistency: use the shared forwardedStreamOf(...) helper like the other
    // tests in this class instead of an inline anonymous subclass over the
    // same Stream.of(1, 2, 3) delegate.
    ForwardingStream<Integer> forwardedStream = forwardedStreamOf(1, 2, 3);
    assertThat(forwardedStream.findAny()).isPresent();
}
@Test
public void iterator() {
    // Consistency: use the shared forwardedStreamOf(...) helper like the other
    // tests in this class instead of an inline anonymous subclass over the
    // same Stream.of(1, 2, 3) delegate.
    ForwardingStream<Integer> forwardedStream = forwardedStreamOf(1, 2, 3);
    assertThat(forwardedStream.iterator()).toIterable().containsExactly(
            1, 2, 3
    );
}
@Test
public void spliterator() {
    // Consistency: use the shared forwardedStreamOf(...) helper like the other
    // tests in this class instead of an inline anonymous subclass.
    ForwardingStream<Integer> forwardedStream = forwardedStreamOf(1, 2, 3);
    // Drain the spliterator and accumulate the elements: 1 + 2 + 3 = 6.
    AtomicInteger integer = new AtomicInteger(0);
    forwardedStream.spliterator().forEachRemaining(integer::getAndAdd);
    assertThat(integer.get()).isEqualTo(6);
}
@Test
public void isParallel() {
// An inline subclass is used (rather than the forwardedStreamOf helper) so the
// delegate's parallelism can be set explicitly.
ForwardingStream<Integer> forwardedStream = new ForwardingStream<Integer>() {
@Override
protected Stream<Integer> delegate() {
return Stream.of(1, 2, 3).parallel();
}
};
// A parallel delegate must be reported as parallel.
assertThat(forwardedStream.isParallel()).isTrue();
}
@Test
public void sequential() {
// Delegate starts parallel so the effect of sequential() is observable.
ForwardingStream<Integer> forwardedStream = new ForwardingStream<Integer>() {
@Override
protected Stream<Integer> delegate() {
return Stream.of(1, 2, 3).parallel();
}
};
assertThat(forwardedStream.sequential().isParallel()).isFalse();
}
@Test
public void parallel() {
// Delegate starts sequential so the effect of parallel() is observable.
ForwardingStream<Integer> forwardedStream = new ForwardingStream<Integer>() {
@Override
protected Stream<Integer> delegate() {
return Stream.of(1, 2, 3).sequential();
}
};
assertThat(forwardedStream.parallel().isParallel()).isTrue();
}
@Test
public void unordered() {
ForwardingStream<Integer> forwardedStream = new ForwardingStream<Integer>() {
@Override
protected Stream<Integer> delegate() {
return Stream.of(1, 2, 3).sequential();
}
};
// After unordered() no encounter order may be assumed, hence the
// order-insensitive assertion.
assertThat(forwardedStream.unordered()).containsExactlyInAnyOrder(
1, 2, 3
);
}
@Test
public void onClose() {
ForwardingStream<Integer> forwardedStream = new ForwardingStream<Integer>() {
@Override
protected Stream<Integer> delegate() {
return Stream.of(1, 2, 3).sequential();
}
};
AtomicBoolean closed = new AtomicBoolean(false);
Stream<Integer> onClose = forwardedStream.onClose(() -> closed.set(true));
// The close handler must run only on close(), not on registration.
assertThat(closed.get()).isFalse();
onClose.close();
assertThat(closed.get()).isTrue();
}
}
| |
package org.apache.maven.settings.validation;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.List;
import junit.framework.TestCase;
import org.apache.maven.settings.Mirror;
import org.apache.maven.settings.Profile;
import org.apache.maven.settings.Repository;
import org.apache.maven.settings.Server;
import org.apache.maven.settings.Settings;
import org.apache.maven.settings.building.SettingsProblemCollector;
import org.apache.maven.settings.building.SettingsProblem.Severity;
/**
* @author mkleint
*/
public class DefaultSettingsValidatorTest
extends TestCase
{
// Validator under test; recreated for every test in setUp().
private DefaultSettingsValidator validator;
protected void setUp()
throws Exception
{
super.setUp();
validator = new DefaultSettingsValidator();
}
protected void tearDown()
throws Exception
{
// Drop the validator so each test starts from a fresh fixture.
validator = null;
super.tearDown();
}
/**
 * Asserts that {@code msg} contains {@code substring}, failing with a
 * message that shows both values.
 */
private void assertContains( String msg, String substring )
{
assertTrue( "\"" + substring + "\" was not found in: " + msg, msg.contains( substring ) );
}
/**
 * Incrementally fixes an invalid profile repository (missing url and id)
 * and checks the reported problem count drops accordingly.
 */
public void testValidate()
{
Settings model = new Settings();
Profile prof = new Profile();
prof.setId( "xxx" );
model.addProfile( prof );
SimpleProblemCollector problems = new SimpleProblemCollector();
validator.validate( model, problems );
assertEquals( 0, problems.messages.size() );
// An empty repository is missing both url and id -> two problems.
Repository repo = new Repository();
prof.addRepository( repo );
problems = new SimpleProblemCollector();
validator.validate( model, problems );
assertEquals( 2, problems.messages.size() );
repo.setUrl( "http://xxx.xxx.com" );
problems = new SimpleProblemCollector();
validator.validate( model, problems );
assertEquals( 1, problems.messages.size() );
repo.setId( "xxx" );
problems = new SimpleProblemCollector();
validator.validate( model, problems );
assertEquals( 0, problems.messages.size() );
}
/**
 * Mirrors must not use the reserved id 'local', must declare url and
 * mirrorOf, and their id must not contain illegal characters.
 */
public void testValidateMirror()
throws Exception
{
Settings settings = new Settings();
Mirror mirror = new Mirror();
mirror.setId( "local" );
settings.addMirror( mirror );
mirror = new Mirror();
mirror.setId( "illegal\\:/chars" );
mirror.setUrl( "http://void" );
mirror.setMirrorOf( "void" );
settings.addMirror( mirror );
SimpleProblemCollector problems = new SimpleProblemCollector();
validator.validate( settings, problems );
assertEquals( 4, problems.messages.size() );
assertContains( problems.messages.get( 0 ), "'mirrors.mirror.id' must not be 'local'" );
assertContains( problems.messages.get( 1 ), "'mirrors.mirror.url' for local is missing" );
assertContains( problems.messages.get( 2 ), "'mirrors.mirror.mirrorOf' for local is missing" );
assertContains( problems.messages.get( 3 ), "'mirrors.mirror.id' must not contain any of these characters" );
}
/**
 * Same reserved-id / missing-url / illegal-character rules, applied to
 * repositories declared inside a profile.
 */
public void testValidateRepository()
throws Exception
{
Profile profile = new Profile();
Repository repo = new Repository();
repo.setId( "local" );
profile.addRepository( repo );
repo = new Repository();
repo.setId( "illegal\\:/chars" );
repo.setUrl( "http://void" );
profile.addRepository( repo );
Settings settings = new Settings();
settings.addProfile( profile );
SimpleProblemCollector problems = new SimpleProblemCollector();
validator.validate( settings, problems );
assertEquals( 3, problems.messages.size() );
assertContains( problems.messages.get( 0 ),
"'profiles.profile[default].repositories.repository.id' must not be 'local'" );
assertContains( problems.messages.get( 1 ),
"'profiles.profile[default].repositories.repository.url' for local is missing" );
assertContains( problems.messages.get( 2 ),
"'profiles.profile[default].repositories.repository.id' must not contain any of these characters" );
}
/** Duplicate server ids must produce exactly one problem. */
public void testValidateUniqueServerId()
throws Exception
{
Settings settings = new Settings();
Server server1 = new Server();
server1.setId( "test" );
settings.addServer( server1 );
Server server2 = new Server();
server2.setId( "test" );
settings.addServer( server2 );
SimpleProblemCollector problems = new SimpleProblemCollector();
validator.validate( settings, problems );
assertEquals( 1, problems.messages.size() );
assertContains( problems.messages.get( 0 ),
"'servers.server.id' must be unique but found duplicate server with id test" );
}
/** Duplicate profile ids must produce exactly one problem. */
public void testValidateUniqueProfileId()
throws Exception
{
Settings settings = new Settings();
Profile profile1 = new Profile();
profile1.setId( "test" );
settings.addProfile( profile1 );
Profile profile2 = new Profile();
profile2.setId( "test" );
settings.addProfile( profile2 );
SimpleProblemCollector problems = new SimpleProblemCollector();
validator.validate( settings, problems );
assertEquals( 1, problems.messages.size() );
assertContains( problems.messages.get( 0 ),
"'profiles.profile.id' must be unique but found duplicate profile with id test" );
}
/** Duplicate repository ids within one profile must produce exactly one problem. */
public void testValidateUniqueRepositoryId()
throws Exception
{
Settings settings = new Settings();
Profile profile = new Profile();
profile.setId( "pro" );
settings.addProfile( profile );
Repository repo1 = new Repository();
repo1.setUrl( "http://apache.org/" );
repo1.setId( "test" );
profile.addRepository( repo1 );
Repository repo2 = new Repository();
repo2.setUrl( "http://apache.org/" );
repo2.setId( "test" );
profile.addRepository( repo2 );
SimpleProblemCollector problems = new SimpleProblemCollector();
validator.validate( settings, problems );
assertEquals( 1, problems.messages.size() );
assertContains( problems.messages.get( 0 ), "'profiles.profile[pro].repositories.repository.id' must be unique"
+ " but found duplicate repository with id test" );
}
/** Records only the problem messages; severity, location and cause are ignored. */
private static class SimpleProblemCollector
implements SettingsProblemCollector
{
public List<String> messages = new ArrayList<String>();
public void add( Severity severity, String message, int line, int column, Exception cause )
{
messages.add( message );
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import com.google.common.base.Preconditions;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
import org.apache.hadoop.hdfs.protocol.SnapshotException;
import org.apache.hadoop.hdfs.server.namenode.FSDirectory.DirOp;
import java.io.IOException;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.List;
import static org.apache.hadoop.util.Time.now;
/**
* Restrictions for a concat operation:
* <pre>
* 1. the src file and the target file are in the same dir
* 2. all the source files are not in snapshot
* 3. any source file cannot be the same with the target file
* 4. source files cannot be under construction or empty
* 5. source file's preferred block size cannot be greater than the target file
* </pre>
*/
class FSDirConcatOp {
/**
 * Concatenates the blocks of the given source files onto the target file,
 * removes the source files and records the operation in the edit log.
 * Validation and permission checks happen before the FSDirectory write lock
 * is taken; the actual mutation happens under the lock.
 */
static FileStatus concat(FSDirectory fsd, FSPermissionChecker pc,
String target, String[] srcs, boolean logRetryCache) throws IOException {
validatePath(target, srcs);
assert srcs != null;
if (FSDirectory.LOG.isDebugEnabled()) {
FSDirectory.LOG.debug("concat {} to {}", Arrays.toString(srcs), target);
}
final INodesInPath targetIIP = fsd.resolvePath(pc, target, DirOp.WRITE);
// write permission for the target
if (fsd.isPermissionEnabled()) {
fsd.checkPathAccess(pc, targetIIP, FsAction.WRITE);
}
// check the target
verifyTargetFile(fsd, target, targetIIP);
// check the srcs
INodeFile[] srcFiles = verifySrcFiles(fsd, srcs, targetIIP, pc);
if(NameNode.stateChangeLog.isDebugEnabled()) {
NameNode.stateChangeLog.debug("DIR* NameSystem.concat: " +
Arrays.toString(srcs) + " to " + target);
}
long timestamp = now();
fsd.writeLock();
try {
unprotectedConcat(fsd, targetIIP, srcFiles, timestamp);
} finally {
fsd.writeUnlock();
}
fsd.getEditLog().logConcat(target, srcs, timestamp, logRetryCache);
return fsd.getAuditFileInfo(targetIIP);
}
/**
 * Rejects an empty target, a missing/empty source list, and any path under
 * the reserved (.reserved) namespace.
 */
private static void validatePath(String target, String[] srcs)
throws IOException {
Preconditions.checkArgument(!target.isEmpty(), "Target file name is empty");
Preconditions.checkArgument(srcs != null && srcs.length > 0,
"No sources given");
if (FSDirectory.isReservedRawName(target)
|| FSDirectory.isReservedInodesName(target)) {
throw new IOException("Concat operation doesn't support "
+ FSDirectory.DOT_RESERVED_STRING + " relative path : " + target);
}
for (String srcPath : srcs) {
if (FSDirectory.isReservedRawName(srcPath)
|| FSDirectory.isReservedInodesName(srcPath)) {
throw new IOException("Concat operation doesn't support "
+ FSDirectory.DOT_RESERVED_STRING + " relative path : " + srcPath);
}
}
}
/**
 * The target must be a regular file, outside any encryption zone and not
 * under construction.
 */
private static void verifyTargetFile(FSDirectory fsd, final String target,
final INodesInPath targetIIP) throws IOException {
// check the target
if (FSDirEncryptionZoneOp.getEZForPath(fsd, targetIIP) != null) {
throw new HadoopIllegalArgumentException(
"concat can not be called for files in an encryption zone.");
}
final INodeFile targetINode = INodeFile.valueOf(targetIIP.getLastINode(),
target);
if(targetINode.isUnderConstruction()) {
throw new HadoopIllegalArgumentException("concat: target file "
+ target + " is under construction");
}
}
/**
 * Checks every source against the restrictions listed in the class javadoc
 * and returns the resolved source files in encounter order, duplicates
 * rejected.
 */
private static INodeFile[] verifySrcFiles(FSDirectory fsd, String[] srcs,
INodesInPath targetIIP, FSPermissionChecker pc) throws IOException {
// to make sure no two files are the same
Set<INodeFile> si = new LinkedHashSet<>();
final INodeFile targetINode = targetIIP.getLastINode().asFile();
final INodeDirectory targetParent = targetINode.getParent();
// now check the srcs
for(String src : srcs) {
final INodesInPath iip = fsd.resolvePath(pc, src, DirOp.WRITE);
// permission check for srcs
if (pc != null) {
fsd.checkPathAccess(pc, iip, FsAction.READ); // read the file
fsd.checkParentAccess(pc, iip, FsAction.WRITE); // for delete
}
final INode srcINode = iip.getLastINode();
final INodeFile srcINodeFile = INodeFile.valueOf(srcINode, src);
// make sure the src file and the target file are in the same dir
if (srcINodeFile.getParent() != targetParent) {
throw new HadoopIllegalArgumentException("Source file " + src
+ " is not in the same directory with the target "
+ targetIIP.getPath());
}
// make sure all the source files are not in snapshot
if (srcINode.isInLatestSnapshot(iip.getLatestSnapshotId())) {
throw new SnapshotException("Concat: the source file " + src
+ " is in snapshot");
}
// check if the file has other references.
if (srcINode.isReference() && ((INodeReference.WithCount)
srcINode.asReference().getReferredINode()).getReferenceCount() > 1) {
throw new SnapshotException("Concat: the source file " + src
+ " is referred by some other reference in some snapshot.");
}
// source file cannot be the same with the target file
if (srcINode == targetINode) {
throw new HadoopIllegalArgumentException("concat: the src file " + src
+ " is the same with the target file " + targetIIP.getPath());
}
// source file cannot be under construction or empty
if(srcINodeFile.isUnderConstruction() || srcINodeFile.numBlocks() == 0) {
throw new HadoopIllegalArgumentException("concat: source file " + src
+ " is invalid or empty or underConstruction");
}
// source file's preferred block size cannot be greater than the target
// file
if (srcINodeFile.getPreferredBlockSize() >
targetINode.getPreferredBlockSize()) {
throw new HadoopIllegalArgumentException("concat: source file " + src
+ " has preferred block size " + srcINodeFile.getPreferredBlockSize()
+ " which is greater than the target file's preferred block size "
+ targetINode.getPreferredBlockSize());
}
// mixing different erasure coding policies is not supported
if(srcINodeFile.getErasureCodingPolicyID() !=
targetINode.getErasureCodingPolicyID()) {
throw new HadoopIllegalArgumentException("Source file " + src
+ " and target file " + targetIIP.getPath()
+ " have different erasure coding policy");
}
si.add(srcINodeFile);
}
// make sure no two files are the same
if(si.size() < srcs.length) {
// it means at least two files are the same
throw new HadoopIllegalArgumentException(
"concat: at least two of the source files are the same");
}
return si.toArray(new INodeFile[si.size()]);
}
/**
 * Computes the quota changes caused by moving all source blocks into the
 * target: storage/type space changes when the target replication differs
 * from a source's, and one namespace entry is freed per removed source.
 */
private static QuotaCounts computeQuotaDeltas(FSDirectory fsd,
INodeFile target, INodeFile[] srcList) {
QuotaCounts deltas = new QuotaCounts.Builder().build();
final short targetRepl = target.getPreferredBlockReplication();
for (INodeFile src : srcList) {
short srcRepl = src.getFileReplication();
long fileSize = src.computeFileSize();
if (targetRepl != srcRepl) {
deltas.addStorageSpace(fileSize * (targetRepl - srcRepl));
BlockStoragePolicy bsp =
fsd.getBlockStoragePolicySuite().getPolicy(src.getStoragePolicyID());
if (bsp != null) {
// Type-space moves from the source's chosen storage types to the
// target's chosen storage types.
List<StorageType> srcTypeChosen = bsp.chooseStorageTypes(srcRepl);
for (StorageType t : srcTypeChosen) {
if (t.supportTypeQuota()) {
deltas.addTypeSpace(t, -fileSize);
}
}
List<StorageType> targetTypeChosen = bsp.chooseStorageTypes(targetRepl);
for (StorageType t : targetTypeChosen) {
if (t.supportTypeQuota()) {
deltas.addTypeSpace(t, fileSize);
}
}
}
}
}
deltas.addNameSpace(-srcList.length);
return deltas;
}
/**
 * Verifies the quota deltas against the target's ancestors; skipped while
 * the image is not yet loaded or quota checks are disabled.
 */
private static void verifyQuota(FSDirectory fsd, INodesInPath targetIIP,
QuotaCounts deltas) throws QuotaExceededException {
if (!fsd.getFSNamesystem().isImageLoaded() || fsd.shouldSkipQuotaChecks()) {
// Do not check quota if editlog is still being processed
return;
}
FSDirectory.verifyQuota(targetIIP, targetIIP.length() - 1, deltas, null);
}
/**
 * Concat all the blocks from srcs to trg and delete the srcs files.
 * Caller must hold the FSDirectory write lock.
 * @param fsd FSDirectory
 */
static void unprotectedConcat(FSDirectory fsd, INodesInPath targetIIP,
INodeFile[] srcList, long timestamp) throws IOException {
assert fsd.hasWriteLock();
if (NameNode.stateChangeLog.isDebugEnabled()) {
NameNode.stateChangeLog.debug("DIR* FSNamesystem.concat to "
+ targetIIP.getPath());
}
final INodeFile trgInode = targetIIP.getLastINode().asFile();
QuotaCounts deltas = computeQuotaDeltas(fsd, trgInode, srcList);
verifyQuota(fsd, targetIIP, deltas);
// the target file can be included in a snapshot
trgInode.recordModification(targetIIP.getLatestSnapshotId());
INodeDirectory trgParent = targetIIP.getINode(-2).asDirectory();
trgInode.concatBlocks(srcList, fsd.getBlockManager());
// since we are in the same dir - we can use same parent to remove files
// NOTE(review): 'count' is incremented but never read below -- candidate
// for removal.
int count = 0;
for (INodeFile nodeToRemove : srcList) {
if(nodeToRemove != null) {
nodeToRemove.clearBlocks();
// Ensure the nodeToRemove is cleared from snapshot diff list
nodeToRemove.getParent().removeChild(nodeToRemove,
targetIIP.getLatestSnapshotId());
fsd.getINodeMap().remove(nodeToRemove);
count++;
}
}
trgInode.setModificationTime(timestamp, targetIIP.getLatestSnapshotId());
trgParent.updateModificationTime(timestamp, targetIIP.getLatestSnapshotId());
// update quota on the parent directory with deltas
FSDirectory.unprotectedUpdateCount(targetIIP, targetIIP.length() - 1, deltas);
}
}
| |
package macroutils.creator;
import java.util.ArrayList;
import macroutils.MacroUtils;
import macroutils.StaticDeclarations;
import macroutils.UserDeclarations;
import star.base.neo.DoubleVector;
import star.base.neo.NamedObject;
import star.common.FieldFunction;
import star.common.Region;
import star.common.Simulation;
import star.common.Units;
import star.vis.Annotation;
import star.vis.AnnotationProp;
import star.vis.AnnotationPropManager;
import star.vis.DisplayLocationMode;
import star.vis.Displayer;
import star.vis.DisplayerManager;
import star.vis.FixedAspectAnnotationProp;
import star.vis.PartColorMode;
import star.vis.PartDisplayer;
import star.vis.ReportAnnotation;
import star.vis.ScalarDisplayQuantity;
import star.vis.ScalarDisplayer;
import star.vis.ScalarFillMode;
import star.vis.Scene;
import star.vis.SimpleAnnotation;
import star.vis.StreamDisplayer;
import star.vis.StreamDisplayerMode;
import star.vis.StreamPart;
import star.vis.VectorDisplayMode;
import star.vis.VectorDisplayer;
/**
* Low-level class for creating Scenes with MacroUtils.
*
* @since April of 2016
* @author Fabio Kasper
*/
public class CreateScene {
// Collaborators below are wired from the MacroUtils instance by updateInstances().
private MainCreator _add = null;
private macroutils.checker.MainChecker _chk = null;
private macroutils.getter.MainGetter _get = null;
private macroutils.io.MainIO _io = null;
// Owning MacroUtils instance, set in the constructor.
private MacroUtils _mu = null;
private macroutils.setter.MainSetter _set = null;
// Current STAR-CCM+ Simulation, cached in the constructor.
private Simulation _sim = null;
private macroutils.UserDeclarations _ud = null;
/**
* Main constructor for this class.
*
* @param m given MacroUtils object.
*/
public CreateScene(MacroUtils m) {
_mu = m;
// Cache the Simulation; the remaining collaborators are wired in updateInstances().
_sim = m.getSimulation();
}
/**
* Creates a Report Annotation in a Scene.
*
* @param scn given Scene.
* @param ra given ReportAnnotation.
* @param height given font height. If 0, it will be ignored.
* @param fmt given string format. E.g.: new double[] {0.5, 0.2, 0}.
* @param pos given 3-components of position. E.g.: new double[] {0.5, 0.2, 0}.
* @return The FixedAspectAnnotationProp within the Scene.
*/
public FixedAspectAnnotationProp annotation(Scene scn, ReportAnnotation ra, double height,
String fmt, double[] pos) {
_creatingAnnot(scn, "Report Annotation", fmt);
// The number format is applied even when no Scene is given.
ra.setNumberFormat(fmt);
if (scn == null) {
return null;
}
AnnotationPropManager apm = scn.getAnnotationPropManager();
if (apm.hasPropForAnnotation(ra)) {
// Reuse the existing prop instead of creating a duplicate in this Scene.
_io.say.msg(true, "Annotation already exists. Skipping creation...");
return (FixedAspectAnnotationProp) apm.getAnnotationProp(ra.getPresentationName());
}
return _createAnnot(scn, ra, height, pos);
}
/**
* Creates a Text Annotation in a Scene.
*
* @param scn given Scene.
* @param text given text.
* @param height given font height. If 0, it will be ignored.
* @param pos given 3-components of position. E.g.: new double[] {0.5, 0.2, 0}.
* @return The FixedAspectAnnotationProp within the Scene.
*/
public FixedAspectAnnotationProp annotation(Scene scn, String text, double height,
double[] pos) {
_io.say.action("Creating a Simple Annotation text in a Scene", true);
_io.say.object(scn, true);
_io.say.value("String", text, true, true);
// The annotation's presentation name is the text with blanks removed.
String ns = text.replace(" ", "");
if (_sim.getAnnotationManager().has(ns)) {
_io.say.msg(true, "Annotation already exists...");
// NOTE(review): existence is checked on the simulation-wide AnnotationManager
// but the prop is fetched from this Scene -- verify the Scene already holds a
// prop named ns, otherwise this lookup may fail.
return (FixedAspectAnnotationProp) scn.getAnnotationPropManager().getAnnotationProp(ns);
}
SimpleAnnotation annot = _sim.getAnnotationManager().createSimpleAnnotation();
annot.setText(text);
annot.setPresentationName(ns);
return _createAnnot(scn, annot, height, pos);
}
/**
* Creates a Time annotation and adds it into a Scene.
*
* @param scn given Scene.
* @param u given time Units.
* @param fmt given time format string.
* @param pos 2-components position array. E.g.: new double[] {0.4, 0.8}
*/
public void annotation_Time(Scene scn, Units u, String fmt, double[] pos) {
    // Build the time Report Annotation first, then place it with height 0 (ignored).
    // NOTE(review): the Units argument "u" is currently unused; kept for API compatibility.
    ReportAnnotation timeAnnotation = _add.tools.annotation_Time(fmt);
    annotation(scn, timeAnnotation, 0, fmt, pos);
}
/**
* Adds a new Displayer into a Scene.
*
* @param scn given Scene.
* @param dt given Displayer Type. See {@link macroutils.StaticDeclarations.Displayer} for
* options.
* @param ano given ArrayList of NamedObjects.
* @param ff given Field Function, if applicable.
* @param u given variable Unit, if applicable.
* @return The Displayer.
*/
public Displayer displayer(Scene scn, StaticDeclarations.Displayer dt,
        ArrayList<NamedObject> ano, FieldFunction ff, Units u) {
    // Public entry point; always delegates with verbose output enabled.
    final boolean verbose = true;
    return _createDisplayer(scn, dt, ano, ff, u, verbose);
}
/**
* Creates a Geometry Displayer containing all Parts in a given Scene.
*
* @param scn given Scene.
* @return The PartDisplayer.
*/
public PartDisplayer displayer_Geometry(Scene scn) {
    // Collect every geometry object in the model, then delegate to the factory.
    ArrayList<NamedObject> allObjects = _getGeometryObjects();
    return _createDisplayer_Part(scn, allObjects, true);
}
/**
* Creates a Geometry Displayer containing the given Parts for a given Scene.
*
* @param scn given Scene.
* @param ano given ArrayList of NamedObjects.
* @return The PartDisplayer.
*/
public PartDisplayer displayer_Geometry(Scene scn, ArrayList<NamedObject> ano) {
    // Same as displayer_Geometry(Scene) but restricted to the given objects.
    final boolean verbose = true;
    return _createDisplayer_Part(scn, ano, verbose);
}
/**
* Creates a Streamline Displayer for a given Scene and with the given Objects, such as:
* <ul>
* <li> Objects that are 3D in Space will be assigned as Input Parts. E.g.: Regions or Parts;
* <li> Objects that are 2D in Space will be assigned as Seed Parts. E.g.: Boundaries or Part
* Surfaces;
* <li> The Streamline will be based on the Velocity field;
* </ul>
*
* @param scn given Scene.
* @param ano given ArrayList of NamedObjects.
* @param tubeOpt use tubes to represent the Streamlines.
* @return The StreamDisplayer.
*/
public StreamDisplayer displayer_Streamline(Scene scn, ArrayList<NamedObject> ano,
boolean tubeOpt) {
// Build the Streamline derived part from the given input/seed objects.
StreamPart sp = _add.derivedPart.streamline_PartSeed(ano);
StreamDisplayer sd = _createDisplayer_Streamline(scn, _get.objects.arrayList(sp),
_getVelocity(), _ud.defUnitVel, true);
// Color the streamlines by the velocity magnitude of the Stream part itself.
sd.setFieldFunction(sp.getFieldFunction().getMagnitudeFunction());
if (tubeOpt) {
sd.setMode(StreamDisplayerMode.TUBES);
sd.setWidth(_ud.postStreamlinesTubesWidth);
}
return sd;
}
/**
* Creates an empty Scene with no Displayers.
*
* @return The Scene.
*/
public Scene empty() {
    // An empty Scene: no objects and no Field Function.
    ArrayList<NamedObject> none = new ArrayList<>();
    return _createScene(StaticDeclarations.Scene.EMPTY, none, null, null, true);
}
/**
* Creates a Geometry Scene containing all Parts.
*
* @return The Scene.
*/
public Scene geometry() {
    // An empty list makes _createScene() fall back to every geometry object.
    ArrayList<NamedObject> everything = new ArrayList<>();
    return _createScene(StaticDeclarations.Scene.GEOMETRY, everything, null, null, true);
}
/**
* Creates a Geometry Scene containing the given input Objects.
*
* @param ano given ArrayList of NamedObjects.
* @return The Scene.
*/
public Scene geometry(ArrayList<NamedObject> ano) {
    // Geometry Scene restricted to the given objects.
    final boolean verbose = true;
    return _createScene(StaticDeclarations.Scene.GEOMETRY, ano, null, null, verbose);
}
/**
* Creates a Mesh Scene containing all Parts.
*
* @return The Scene.
*/
public Scene mesh() {
    // An empty list makes _createScene() fall back to every geometry object.
    ArrayList<NamedObject> everything = new ArrayList<>();
    return _createScene(StaticDeclarations.Scene.MESH, everything, null, null, true);
}
/**
* Creates a Mesh Scene containing the given input Objects.
*
* @param ano given ArrayList of NamedObjects.
* @return The Scene.
*/
public Scene mesh(ArrayList<NamedObject> ano) {
    // Mesh Scene restricted to the given objects.
    final boolean verbose = true;
    return _createScene(StaticDeclarations.Scene.MESH, ano, null, null, verbose);
}
/**
* Creates a Scalar Scene containing the given input Objects.
*
* @param ano given ArrayList of NamedObjects.
* @param ff given Field Function.
* @param u given variable Unit.
* @param sf Smooth Fill Displayer?
* @return The Scene.
*/
public Scene scalar(ArrayList<NamedObject> ano, FieldFunction ff, Units u, boolean sf) {
Scene scn = _createScene(StaticDeclarations.Scene.SCALAR, _getScalarObjects(ano), ff, u, true);
if (sf) {
// Smooth filling: switch the scalar displayer to node-based interpolation.
ScalarDisplayer sd = (ScalarDisplayer) _get.scenes.displayerByREGEX(scn,
"Scalar.*", false);
sd.setFillMode(ScalarFillMode.NODE_FILLED);
}
return scn;
}
/**
* Creates a Streamline Scene of Seed Part type with the given Objects, such as:
* <ul>
* <li> Objects that are 3D in Space will be assigned as Input Parts. E.g.: Regions or Parts;
* <li> Objects that are 2D in Space will be assigned as Seed Parts. E.g.: Boundaries or Part
* Surfaces;
* <li> The Streamline will be based on the Velocity field;
* <li> For the 3D objects, a Part Displayer will be automatically created using a Geometry
* Representation and with an Opacity of 0.2 and the default Color. See
* {@link UserDeclarations#defColor}.
* </ul>
*
* @param ano given ArrayList of NamedObjects. E.g.: a Region and an Inlet Boundary.
* @param tubeOpt use tubes to represent the Streamlines.
* @return The Scene.
*/
public Scene streamline(ArrayList<NamedObject> ano, boolean tubeOpt) {
    ArrayList<NamedObject> asp = new ArrayList<>();
    StreamPart sp = _add.derivedPart.streamline_PartSeed(ano);
    asp.add(sp);
    Scene scn = _createScene(StaticDeclarations.Scene.STREAMLINE, asp, _getVelocity(),
            _ud.defUnitVel, tubeOpt);
    // Consistency: use the cached _get collaborator like the rest of this class
    // (updateInstances() assigns _get = _mu.get, so this is the same object).
    StreamDisplayer sd = (StreamDisplayer) _get.scenes.displayerByREGEX(scn, ".*", false);
    // Show the 3D input parts as a translucent geometry displayer for context.
    PartDisplayer pd = _createDisplayer_Part(scn, _getInputPartsChildren(sp), true);
    pd.setRepresentation(_get.mesh.geometry());
    pd.setOpacity(0.2);
    pd.setColorMode(PartColorMode.CONSTANT);
    pd.setDisplayerColorColor(_ud.defColor);
    if (tubeOpt) {
        sd.setMode(StreamDisplayerMode.TUBES);
        sd.setWidth(_ud.postStreamlinesTubesWidth);
    }
    return scn;
}
/**
* This method is called automatically by {@link MacroUtils}.
*/
public void updateInstances() {
// Wire the collaborators from the current MacroUtils modules.
_add = _mu.add;
_chk = _mu.check;
_get = _mu.get;
_io = _mu.io;
_set = _mu.set;
_ud = _mu.userDeclarations;
}
/**
* Creates a Vector Scene containing the given input Objects.
*
* @param ano given ArrayList of NamedObjects.
* @param licOpt Linear Integral Convolution option?
* @return The Scene.
*/
public Scene vector(ArrayList<NamedObject> ano, boolean licOpt) {
// Vector Scene based on the Velocity field in the default velocity units.
Scene scn = _createScene(StaticDeclarations.Scene.VECTOR, ano,
_get.objects.fieldFunction(StaticDeclarations.Vars.VEL), _ud.defUnitVel, true);
if (licOpt) {
// Switch the vector displayer to Line Integral Convolution rendering.
VectorDisplayer vd = (VectorDisplayer) _get.scenes.displayerByREGEX(scn,
"Vector", false);
vd.setDisplayMode(VectorDisplayMode.VECTOR_DISPLAY_MODE_LIC);
}
return scn;
}
// Applies font/height/position to the Annotation, adds it to the Scene's
// annotation group and returns the resulting prop.
private FixedAspectAnnotationProp _createAnnot(Scene scn, Annotation an, double h,
double[] pos) {
an.setFont(StaticDeclarations.Fonts.DEFAULT.getFont());
// A height of exactly 0 means "keep the annotation's default height".
if (h != 0.) {
an.setDefaultHeight(h);
_io.say.value("Height", an.getDefaultHeight(), true);
}
an.setDefaultPosition(new DoubleVector(pos));
_io.say.value("Position", an.getDefaultPosition(), true);
_io.say.object(an, true);
_io.say.ok(true);
// Adding to the group makes the prop available from the prop manager below.
scn.getAnnotationPropManager().getAnnotationGroup().add(an);
return (FixedAspectAnnotationProp) scn.getAnnotationPropManager().getAnnotationProp(an);
}
// Dispatches to the type-specific displayer factory; returns null for an
// unrecognized type.
private Displayer _createDisplayer(Scene scn, StaticDeclarations.Displayer type,
ArrayList<NamedObject> ano, FieldFunction ff, Units u, boolean vo) {
// NOTE(review): a null Displayer is passed here -- verify _creatingDisplayer
// tolerates null before dereferencing its argument.
_creatingDisplayer(null, ano, ff, u, vo);
switch (type) {
case GEOMETRY:
return _createDisplayer_Part(scn, ano, vo);
case SCALAR:
return _createDisplayer_Scalar(scn, ano, ff, u, vo);
case STREAMLINE:
return _createDisplayer_Streamline(scn, ano, ff, u, vo);
case VECTOR:
return _createDisplayer_Vector(scn, ano, ff, u, vo);
default:
return null;
}
}
// Creates a geometry (Part) displayer: surfaces on, outlines off, per-part coloring.
private PartDisplayer _createDisplayer_Part(Scene scn, ArrayList<NamedObject> ano, boolean vo) {
PartDisplayer pd = _getDM(scn)
.createPartDisplayer(StaticDeclarations.Displayer.GEOMETRY.getType());
pd.initialize();
pd.setColorMode(PartColorMode.DP);
pd.setOutline(false);
pd.setSurface(true);
pd.addParts(ano);
_createdDisplayer(pd, ano, vo);
return pd;
}
// Creates a scalar displayer for the given field; a null Units keeps the defaults.
private ScalarDisplayer _createDisplayer_Scalar(Scene scn, ArrayList<NamedObject> ano,
FieldFunction ff, Units u, boolean vo) {
ScalarDisplayer sd = _getDM(scn)
.createScalarDisplayer(StaticDeclarations.Displayer.SCALAR.getType());
sd.initialize();
_setSDQ(sd.getScalarDisplayQuantity(), ff);
if (u != null) {
sd.getScalarDisplayQuantity().setUnits(u);
}
sd.addParts(ano);
_createdDisplayer(sd, ano, vo);
return sd;
}
// Creates a streamline displayer; a null Units keeps the defaults.
private StreamDisplayer _createDisplayer_Streamline(Scene scn, ArrayList<NamedObject> ano,
FieldFunction ff, Units u, boolean vo) {
StreamDisplayer sd = _getDM(scn)
.createStreamDisplayer(StaticDeclarations.Displayer.STREAMLINE.getType());
sd.initialize();
_setSDQ(sd.getScalarDisplayQuantity(), ff);
if (u != null) {
sd.getScalarDisplayQuantity().setUnits(u);
}
//-- Here, ano is a collection of StreamPart's.
sd.addParts(ano);
_createdDisplayer(sd, ano, vo);
return sd;
}
// Creates a vector displayer; warns (but proceeds) when ff is not a vector field.
private VectorDisplayer _createDisplayer_Vector(Scene scn, ArrayList<NamedObject> ano,
FieldFunction ff, Units u, boolean vo) {
VectorDisplayer vd = _getDM(scn).createVectorDisplayer("Vector");
vd.initialize();
if (!_chk.is.vector(ff)) {
_io.say.value("Field Function is not a Vector. Type",
ff.getType().getSelected().name(), true, true);
}
vd.getVectorDisplayQuantity().setFieldFunction(ff);
// A null Units keeps the defaults on the quantity and its min/max values.
if (u != null) {
vd.getVectorDisplayQuantity().setUnits(u);
vd.getVectorDisplayQuantity().getMinimumValue().setUnits(u);
vd.getVectorDisplayQuantity().getMaximumValue().setUnits(u);
}
vd.addParts(ano);
_createdDisplayer(vd, ano, vo);
return vd;
}
/**
 * Creates a Scene of the given type and populates it with the matching Displayer.
 *
 * @param type Scene type to create.
 * @param ano  objects to display; when empty, it is filled with the default geometry objects.
 * @param ff   Field Function for SCALAR/STREAMLINE/VECTOR scenes (unused otherwise).
 * @param u    Units for the displayed quantity; skipped when null.
 * @param vo   verbose option flag.
 * @return the created Scene.
 */
private Scene _createScene(StaticDeclarations.Scene type, ArrayList<NamedObject> ano,
        FieldFunction ff, Units u, boolean vo) {
    Scene scn = _initScene(type, vo);
    if (ano.isEmpty()) {
        ano.addAll(_getGeometryObjects());
    }
    switch (type) {
        case EMPTY:
            break;
        case GEOMETRY:
            // Consistency fix: call the local helper directly, as the MESH case already
            // does, instead of routing through "_add.scene." (same instance, same method).
            PartDisplayer geometry = _createDisplayer_Part(scn, ano, vo);
            geometry.setRepresentation(_get.mesh.latestSurfaceRepresentation());
            break;
        case MESH:
            PartDisplayer mesh = _createDisplayer_Part(scn, ano, vo);
            mesh.setMesh(true);
            break;
        case SCALAR:
            _createDisplayer_Scalar(scn, ano, ff, u, vo);
            break;
        case STREAMLINE:
            _createDisplayer_Streamline(scn, ano, ff, u, vo);
            break;
        case VECTOR:
            _createDisplayer_Vector(scn, ano, ff, u, vo);
            break;
    }
    _finalizeScene(scn, vo);
    return scn;
}
/**
 * Common post-creation step for every Displayer: trims the presentation name to its first
 * word (dropping any auto-generated suffix) and logs what was created.
 *
 * @param d   the newly created Displayer.
 * @param ano objects that were added to the Displayer (logged only).
 * @param vo  verbose option flag.
 */
private void _createdDisplayer(Displayer d, ArrayList<NamedObject> ano, boolean vo) {
    // Keep only the first whitespace-delimited token of the auto-assigned name.
    d.setPresentationName(d.getPresentationName().split(" ")[0]);
    _io.say.objects(ano, "Objects in Displayer", vo);
    _io.say.created(d, vo);
}
/**
 * Logs the beginning of an annotation creation in a Scene.
 *
 * @param scn  Scene receiving the annotation.
 * @param what description of the annotation being created.
 * @param fmt  format string of the annotation (logged only).
 */
private void _creatingAnnot(Scene scn, String what, String fmt) {
    String action = String.format("Creating a %s in a Scene", what);
    _io.say.action(action, true);
    _io.say.object(scn, true);
    _io.say.value("Format", fmt, false, true);
}
/**
 * Logs the beginning of a Displayer creation.
 *
 * @param d   Displayer being created (its presentation name is logged).
 * @param ano parts that will be added (logged only).
 * @param ff  Field Function involved; when null, neither ff nor u is logged.
 * @param u   Units involved; logged only together with a non-null ff.
 * @param vo  verbose option flag.
 */
private void _creatingDisplayer(Displayer d, ArrayList<NamedObject> ano,
        FieldFunction ff, Units u, boolean vo) {
    _io.say.action(String.format("Creating a %s Displayer", d.getPresentationName()), vo);
    _io.say.objects(ano, "Parts", vo);
    if (ff != null) {
        _io.say.object(ff, vo);
        // NOTE(review): u may still be null here (only ff is checked) -- confirm
        // _io.say.object tolerates a null argument.
        _io.say.object(u, vo);
    }
}
/**
 * Final touches applied to every created Scene: default camera view first, then an
 * automatic camera override matched by the Scene name, then the creation log message.
 *
 * @param scn Scene to finalize.
 * @param vo  verbose option flag.
 */
private void _finalizeScene(Scene scn, boolean vo) {
    _set.scene.cameraView(scn, _ud.defCamView, vo);
    //--
    //-- Automatic camera assignment based on Scene name.
    _set.scene.cameraView(scn, _get.cameras.byREGEX(scn.getPresentationName(), false), false);
    _io.say.created(scn, vo);
}
/**
 * Convenience accessor for the DisplayerManager of a Scene.
 *
 * @param scn Scene to query.
 * @return the Scene's DisplayerManager.
 */
private DisplayerManager _getDM(Scene scn) {
    return scn.getDisplayerManager();
}
/**
 * Default objects to display when none were given: all Part Surfaces when the simulation
 * has no Regions yet, otherwise all Boundaries.
 *
 * @return a fresh, mutable list of geometry objects.
 */
private ArrayList<NamedObject> _getGeometryObjects() {
    if (_sim.getRegionManager().isEmpty()) {
        return new ArrayList<>(_get.partSurfaces.all(false));
    }
    return new ArrayList<>(_get.boundaries.all(false));
}
/**
 * Collects the children of all input parts of a StreamPart.
 *
 * @param sp StreamPart to query.
 * @return children of the StreamPart's input parts.
 */
private ArrayList<NamedObject> _getInputPartsChildren(StreamPart sp) {
    return _get.objects.children(new ArrayList<>(sp.getInputPartsCollection()), false);
}
/**
 * Expands the objects to be used by a scalar displayer: Regions are replaced by their
 * Boundaries, except in a Two-Dimensional case where the original list is kept as-is.
 *
 * @param ano original objects.
 * @return the expanded list, or the original list untouched for 2D cases.
 */
private ArrayList<NamedObject> _getScalarObjects(ArrayList<NamedObject> ano) {
    ArrayList<NamedObject> expanded = new ArrayList<>();
    for (NamedObject obj : ano) {
        if (!(obj instanceof Region)) {
            expanded.add(obj);
        } else {
            Region region = (Region) obj;
            // Keep Regions when in 2D; insert their Boundaries otherwise.
            if (region.getPhysicsContinuum().getModelManager().has("Two Dimensional")) {
                return ano;
            }
            expanded.addAll(region.getBoundaryManager().getBoundaries());
        }
    }
    return expanded;
}
/**
 * Shortcut for the Velocity field function.
 *
 * @return the Field Function named by StaticDeclarations.Vars.VEL.
 */
private FieldFunction _getVelocity() {
    return _get.objects.fieldFunction(StaticDeclarations.Vars.VEL.getVar(), false);
}
/**
 * Creates and initializes a bare Scene named after its type. The call order below is
 * deliberate (initialize before camera reset and property tweaks) -- do not reorder.
 *
 * @param type Scene type; its type string becomes the presentation name.
 * @param vo   verbose option flag.
 * @return the initialized Scene.
 */
private Scene _initScene(StaticDeclarations.Scene type, boolean vo) {
    _io.say.action("Creating a Scene", vo);
    _io.say.value("Type", type.getType(), true, vo);
    Scene scn = _sim.getSceneManager().createScene();
    scn.setPresentationName(type.getType());
    ((PartDisplayer) scn.getCreatorDisplayer()).initialize();
    scn.initializeAndWait();
    scn.resetCamera();
    // Decouple hardcopy resolution from the current on-screen resolution.
    _get.objects.hardcopyProperties(scn, false).setUseCurrentResolution(false);
    scn.setDepthPeel(false);
    // Pin the logo annotation to the foreground.
    AnnotationProp ap = scn.getAnnotationPropManager().getAnnotationProp("Logo");
    ((FixedAspectAnnotationProp) ap).setLocation(DisplayLocationMode.FOREGROUND);
    return scn;
}
/**
 * Assigns a Field Function to a ScalarDisplayQuantity, substituting the magnitude
 * function for vector or position functions.
 *
 * @param sdq quantity to configure.
 * @param ff  Field Function to assign.
 */
private void _setSDQ(ScalarDisplayQuantity sdq, FieldFunction ff) {
    boolean needsMagnitude = _chk.is.vector(ff) || _chk.is.position(ff);
    sdq.setFieldFunction(needsMagnitude ? ff.getMagnitudeFunction() : ff);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import org.apache.ignite.*;
import org.apache.ignite.cache.store.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.processors.cache.store.*;
import org.apache.ignite.internal.processors.cache.transactions.*;
import org.apache.ignite.internal.processors.cache.version.*;
import org.apache.ignite.internal.util.*;
import org.apache.ignite.internal.util.lang.*;
import org.apache.ignite.internal.util.tostring.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.lifecycle.*;
import org.apache.ignite.transactions.*;
import org.jetbrains.annotations.*;
import javax.cache.*;
import javax.cache.integration.*;
import java.util.*;
/**
* Store manager.
*/
@SuppressWarnings("AssignmentToCatchBlockParameter")
public class GridCacheStoreManager extends GridCacheManagerAdapter {
/** */
private static final UUID SES_ATTR = UUID.randomUUID();
/** */
private final CacheStore<Object, Object> store;
/** */
private final CacheStore<?, ?> cfgStore;
/** */
private final CacheStoreBalancingWrapper<Object, Object> singleThreadGate;
/** */
private final ThreadLocal<SessionData> sesHolder;
/** */
private final boolean locStore;
/** */
private final boolean writeThrough;
/** */
private boolean convertPortable;
/**
 * @param ctx Kernal context.
 * @param sesHolders Session holders map to use the same session holder for different managers if they use
 *        the same store instance.
 * @param cfgStore Store provided in configuration.
 * @param cfg Cache configuration.
 * @throws IgniteCheckedException In case of error.
 */
@SuppressWarnings("unchecked")
public GridCacheStoreManager(
    GridKernalContext ctx,
    Map<CacheStore, ThreadLocal> sesHolders,
    @Nullable CacheStore<Object, Object> cfgStore,
    CacheConfiguration cfg
) throws IgniteCheckedException {
    this.cfgStore = cfgStore;
    // 'store' may be a write-behind wrapper around 'cfgStore'; 'cfgStore' stays unwrapped.
    store = cacheStoreWrapper(ctx, cfgStore, cfg);
    singleThreadGate = store == null ? null : new CacheStoreBalancingWrapper<>(store);
    writeThrough = cfg.isWriteThrough();
    ThreadLocal<SessionData> sesHolder0 = null;
    if (cfgStore != null) {
        // Reuse an existing session holder so caches sharing one store share one session.
        sesHolder0 = sesHolders.get(cfgStore);
        if (sesHolder0 == null) {
            ThreadLocalSession locSes = new ThreadLocalSession();
            // Holder is kept only if the session was actually injected into the store.
            if (ctx.resource().injectStoreSession(cfgStore, locSes)) {
                sesHolder0 = locSes.sesHolder;
                sesHolders.put(cfgStore, sesHolder0);
            }
        }
    }
    sesHolder = sesHolder0;
    locStore = U.hasAnnotation(cfgStore, CacheLocalStore.class);
}
/**
 * @return {@code True} if write-through is enabled.
 */
public boolean writeThrough() {
    return writeThrough;
}
/**
 * @return Unwrapped store provided in configuration (never the write-behind wrapper).
 */
public CacheStore<?, ?> configuredStore() {
    return cfgStore;
}
/**
 * Creates a wrapped cache store if write-behind cache is configured.
 *
 * @param ctx Kernal context.
 * @param cfgStore Store provided in configuration.
 * @param cfg Cache configuration.
 * @return Instance of {@link GridCacheWriteBehindStore} if write-behind store is configured,
 *      or the user-defined cache store unchanged.
 */
@SuppressWarnings({"unchecked"})
private CacheStore cacheStoreWrapper(GridKernalContext ctx,
    @Nullable CacheStore cfgStore,
    CacheConfiguration cfg) {
    // Nothing to wrap when there is no store or write-behind is off.
    if (cfgStore == null || !cfg.isWriteBehindEnabled())
        return cfgStore;
    GridCacheWriteBehindStore wrapped = new GridCacheWriteBehindStore(this,
        ctx.gridName(),
        cfg.getName(),
        ctx.log(GridCacheWriteBehindStore.class),
        cfgStore);
    wrapped.setFlushSize(cfg.getWriteBehindFlushSize());
    wrapped.setFlushFrequency(cfg.getWriteBehindFlushFrequency());
    wrapped.setFlushThreadCount(cfg.getWriteBehindFlushThreadCount());
    wrapped.setBatchSize(cfg.getWriteBehindBatchSize());
    return wrapped;
}
/** {@inheritDoc} */
@Override protected void start0() throws IgniteCheckedException {
    if (store instanceof LifecycleAware) {
        try {
            // Avoid second start() call on store in case when near cache is enabled.
            // NOTE(review): start() is invoked only when write-behind is enabled --
            // confirm whether a LifecycleAware store without write-behind is started elsewhere.
            if (cctx.config().isWriteBehindEnabled()) {
                if (!cctx.isNear())
                    ((LifecycleAware)store).start();
            }
        }
        catch (Exception e) {
            throw new IgniteCheckedException("Failed to start cache store: " + e, e);
        }
    }
    convertPortable = !cctx.cacheObjects().keepPortableInStore(cctx.name());
}
/** {@inheritDoc} */
@Override protected void stop0(boolean cancel) {
    if (store instanceof LifecycleAware) {
        try {
            // Avoid second stop() call on store in case when near cache is enabled.
            if (cctx.config().isWriteBehindEnabled()) {
                if (!cctx.isNear())
                    ((LifecycleAware)store).stop();
            }
        }
        catch (Exception e) {
            // Stop must not propagate failures; log and continue shutdown.
            U.error(log(), "Failed to stop cache store.", e);
        }
    }
}
/**
 * @return Convert-portable flag ({@code true} when keys/values are unwrapped before the store).
 */
public boolean convertPortable() {
    return convertPortable;
}
/**
 * @param convertPortable Convert-portable flag (see {@link #convertPortable()}).
 */
public void convertPortable(boolean convertPortable) {
    this.convertPortable = convertPortable;
}
/**
 * @return {@code true} If a local store (annotated with {@code @CacheLocalStore}) is configured.
 */
public boolean isLocalStore() {
    return locStore;
}
/**
 * @return {@code true} If store configured.
 */
public boolean configured() {
    return store != null;
}
/**
 * Loads data from persistent store, converting local-store tuples to plain values.
 *
 * @param tx Cache transaction.
 * @param key Cache key.
 * @return Loaded value, possibly <tt>null</tt>.
 * @throws IgniteCheckedException If data loading failed.
 */
@SuppressWarnings("unchecked")
@Nullable public Object loadFromStore(@Nullable IgniteInternalTx tx, KeyCacheObject key)
    throws IgniteCheckedException {
    return loadFromStore(tx, key, true);
}
/**
 * Loads data from persistent store.
 *
 * @param tx Cache transaction.
 * @param key Cache key.
 * @param convert If {@code true}, unwraps local-store tuples via {@link #convert(Object)}.
 * @return Loaded value, possibly <tt>null</tt>.
 * @throws IgniteCheckedException If data loading failed.
 */
@SuppressWarnings("unchecked")
@Nullable private Object loadFromStore(@Nullable IgniteInternalTx tx,
    KeyCacheObject key,
    boolean convert)
    throws IgniteCheckedException {
    if (store == null)
        return null;
    if (key.internal())
        // Never load internal keys from store as they are never persisted.
        return null;
    Object storeKey = key.value(cctx.cacheObjectContext(), false);
    if (convertPortable)
        storeKey = cctx.unwrapPortableIfNeeded(storeKey, false);
    if (log.isDebugEnabled())
        log.debug("Loading value from store for key: " + storeKey);
    initSession(tx);
    // Renamed from "thewEx" for consistency with endSession(tx, threwEx).
    boolean threwEx = true;
    Object val = null;
    try {
        val = singleThreadGate.load(storeKey);
        threwEx = false;
    }
    catch (ClassCastException e) {
        handleClassCastException(e);
    }
    catch (CacheLoaderException e) {
        throw new IgniteCheckedException(e);
    }
    catch (Exception e) {
        throw new IgniteCheckedException(new CacheLoaderException(e));
    }
    finally {
        endSession(tx, threwEx);
    }
    if (log.isDebugEnabled())
        log.debug("Loaded value from store [key=" + key + ", val=" + val + ']');
    // Both original branches returned 'val'; collapse the redundant if/else.
    return convert ? convert(val) : val;
}
/**
 * @param val Internal value (for a local store this is an (value, version) tuple).
 * @return User value.
 */
@SuppressWarnings("unchecked")
private Object convert(Object val) {
    if (val == null)
        return null;
    // Local stores persist IgniteBiTuple<value, version>; unwrap to the bare value.
    return locStore ? ((IgniteBiTuple<Object, GridCacheVersion>)val).get1() : val;
}
/**
 * @return Whether DHT transaction can write to store from DHT
 *      (write-behind or local store configured).
 */
public boolean writeToStoreFromDht() {
    return cctx.config().isWriteBehindEnabled() || locStore;
}
/**
 * Bulk-load from a local store, reporting values together with their versions.
 *
 * @param tx Cache transaction.
 * @param keys Cache keys.
 * @param vis Closure to apply for loaded elements (key, value, version).
 * @throws IgniteCheckedException If data loading failed.
 */
public void localStoreLoadAll(@Nullable IgniteInternalTx tx,
    Collection<? extends KeyCacheObject> keys,
    final GridInClosure3<KeyCacheObject, Object, GridCacheVersion> vis)
    throws IgniteCheckedException {
    assert store != null;
    assert locStore;
    loadAllFromStore(tx, keys, null, vis);
}
/**
 * Loads data from persistent store.
 *
 * @param tx Cache transaction.
 * @param keys Cache keys.
 * @param vis Closure.
 * @return {@code True} if there is a persistent storage.
 * @throws IgniteCheckedException If data loading failed.
 */
@SuppressWarnings({"unchecked"})
public boolean loadAllFromStore(@Nullable IgniteInternalTx tx,
    Collection<? extends KeyCacheObject> keys,
    final IgniteBiInClosure<KeyCacheObject, Object> vis) throws IgniteCheckedException {
    if (store != null) {
        loadAllFromStore(tx, keys, vis, null);
        return true;
    }
    else {
        // No store configured: still notify the closure with null for every key.
        for (KeyCacheObject key : keys)
            vis.apply(key, null);
    }
    return false;
}
/**
 * @param tx Cache transaction.
 * @param keys Keys to load.
 * @param vis Key/value closure (only one of vis or verVis can be specified).
 * @param verVis Key/value/version closure (only one of vis or verVis can be specified).
 * @throws IgniteCheckedException If failed.
 */
@SuppressWarnings("unchecked")
private void loadAllFromStore(@Nullable IgniteInternalTx tx,
    Collection<? extends KeyCacheObject> keys,
    @Nullable final IgniteBiInClosure<KeyCacheObject, Object> vis,
    @Nullable final GridInClosure3<KeyCacheObject, Object, GridCacheVersion> verVis)
    throws IgniteCheckedException {
    // Exactly one of the two closures must be provided; the versioned one implies a local store.
    assert vis != null ^ verVis != null;
    assert verVis == null || locStore;
    final boolean convert = verVis == null;
    if (!keys.isEmpty()) {
        // Single-key fast path delegates to the single-value load.
        if (keys.size() == 1) {
            KeyCacheObject key = F.first(keys);
            if (convert)
                vis.apply(key, loadFromStore(tx, key));
            else {
                IgniteBiTuple<Object, GridCacheVersion> t =
                    (IgniteBiTuple<Object, GridCacheVersion>)loadFromStore(tx, key, false);
                if (t != null)
                    verVis.apply(key, t.get1(), t.get2());
            }
            return;
        }
        // Lazily-converted view of the keys as seen by the user store.
        Collection<Object> keys0;
        if (convertPortable) {
            keys0 = F.viewReadOnly(keys, new C1<KeyCacheObject, Object>() {
                @Override public Object apply(KeyCacheObject key) {
                    return cctx.unwrapPortableIfNeeded(key.value(cctx.cacheObjectContext(), false), false);
                }
            });
        }
        else {
            keys0 = F.viewReadOnly(keys, new C1<KeyCacheObject, Object>() {
                @Override public Object apply(KeyCacheObject key) {
                    return key.value(cctx.cacheObjectContext(), false);
                }
            });
        }
        if (log.isDebugEnabled())
            log.debug("Loading values from store for keys: " + keys0);
        initSession(tx);
        boolean thewEx = true;
        try {
            // Dispatches each loaded (key, value) to whichever closure was supplied.
            IgniteBiInClosure<Object, Object> c = new CI2<Object, Object>() {
                @SuppressWarnings("ConstantConditions")
                @Override public void apply(Object k, Object val) {
                    if (convert) {
                        Object v = convert(val);
                        vis.apply(cctx.toCacheKeyObject(k), v);
                    }
                    else {
                        IgniteBiTuple<Object, GridCacheVersion> v = (IgniteBiTuple<Object, GridCacheVersion>)val;
                        if (v != null)
                            verVis.apply(cctx.toCacheKeyObject(k), v.get1(), v.get2());
                    }
                }
            };
            // Above the balancing threshold, bypass the single-thread gate and hit the store directly.
            if (keys.size() > singleThreadGate.loadAllThreshold()) {
                Map<Object, Object> map = store.loadAll(keys0);
                if (map != null) {
                    for (Map.Entry<Object, Object> e : map.entrySet())
                        c.apply(cctx.toCacheKeyObject(e.getKey()), e.getValue());
                }
            }
            else
                singleThreadGate.loadAll(keys0, c);
            thewEx = false;
        }
        catch (ClassCastException e) {
            handleClassCastException(e);
        }
        catch (CacheLoaderException e) {
            throw new IgniteCheckedException(e);
        }
        catch (Exception e) {
            throw new IgniteCheckedException(new CacheLoaderException(e));
        }
        finally {
            endSession(tx, thewEx);
        }
        if (log.isDebugEnabled())
            log.debug("Loaded values from store for keys: " + keys0);
    }
}
/**
 * Loads data from persistent store.
 *
 * @param vis Closure to invoke for each loaded element.
 * @param args User arguments.
 * @return {@code True} if there is a persistent storage.
 * @throws IgniteCheckedException If data loading failed.
 */
@SuppressWarnings({"ErrorNotRethrown", "unchecked"})
public boolean loadCache(final GridInClosure3<KeyCacheObject, Object, GridCacheVersion> vis, Object[] args)
    throws IgniteCheckedException {
    if (store != null) {
        if (log.isDebugEnabled())
            log.debug("Loading all values from store.");
        // No transaction for a full cache load.
        initSession(null);
        boolean thewEx = true;
        try {
            store.loadCache(new IgniteBiInClosure<Object, Object>() {
                @Override public void apply(Object k, Object o) {
                    Object v;
                    GridCacheVersion ver = null;
                    // Local stores deliver (value, version) tuples; split them apart.
                    if (locStore) {
                        IgniteBiTuple<Object, GridCacheVersion> t = (IgniteBiTuple<Object, GridCacheVersion>)o;
                        v = t.get1();
                        ver = t.get2();
                    }
                    else
                        v = o;
                    KeyCacheObject cacheKey = cctx.toCacheKeyObject(k);
                    vis.apply(cacheKey, v, ver);
                }
            }, args);
            thewEx = false;
        }
        catch (CacheLoaderException e) {
            throw new IgniteCheckedException(e);
        }
        catch (Exception e) {
            throw new IgniteCheckedException(new CacheLoaderException(e));
        }
        finally {
            endSession(null, thewEx);
        }
        if (log.isDebugEnabled())
            log.debug("Loaded all values from store.");
        return true;
    }
    LT.warn(log, null, "Calling Cache.loadCache() method will have no effect, " +
        "CacheConfiguration.getStore() is not defined for cache: " + cctx.namexx());
    return false;
}
/**
 * Puts key-value pair into storage.
 *
 * @param tx Cache transaction.
 * @param key Key.
 * @param val Value.
 * @param ver Version.
 * @return {@code true} If there is a persistent storage.
 * @throws IgniteCheckedException If storage failed.
 */
@SuppressWarnings("unchecked")
public boolean putToStore(@Nullable IgniteInternalTx tx, Object key, Object val, GridCacheVersion ver)
    throws IgniteCheckedException {
    if (store != null) {
        // Never persist internal keys.
        if (key instanceof GridCacheInternal)
            return true;
        if (convertPortable) {
            key = cctx.unwrapPortableIfNeeded(key, false);
            val = cctx.unwrapPortableIfNeeded(val, false);
        }
        if (log.isDebugEnabled())
            log.debug("Storing value in cache store [key=" + key + ", val=" + val + ']');
        initSession(tx);
        boolean thewEx = true;
        try {
            // Local stores persist the (value, version) tuple; others just the value.
            store.write(new CacheEntryImpl<>(key, locStore ? F.t(val, ver) : val));
            thewEx = false;
        }
        catch (ClassCastException e) {
            handleClassCastException(e);
        }
        catch (CacheWriterException e) {
            throw new IgniteCheckedException(e);
        }
        catch (Exception e) {
            throw new IgniteCheckedException(new CacheWriterException(e));
        }
        finally {
            endSession(tx, thewEx);
        }
        if (log.isDebugEnabled())
            log.debug("Stored value in cache store [key=" + key + ", val=" + val + ']');
        return true;
    }
    return false;
}
/**
 * Puts key-value pair into storage.
 *
 * @param tx Cache transaction.
 * @param map Map of keys to (value, version) tuples.
 * @return {@code True} if there is a persistent storage.
 * @throws IgniteCheckedException If storage failed.
 */
public boolean putAllToStore(@Nullable IgniteInternalTx tx,
    Map<Object, IgniteBiTuple<Object, GridCacheVersion>> map)
    throws IgniteCheckedException
{
    if (F.isEmpty(map))
        return true;
    // Single-entry fast path.
    if (map.size() == 1) {
        Map.Entry<Object, IgniteBiTuple<Object, GridCacheVersion>> e = map.entrySet().iterator().next();
        return putToStore(tx, e.getKey(), e.getValue().get1(), e.getValue().get2());
    }
    else {
        if (store != null) {
            // Mutable view: the store removes successfully written entries from it.
            EntriesView entries = new EntriesView((Map)map);
            if (log.isDebugEnabled())
                log.debug("Storing values in cache store [entries=" + entries + ']');
            initSession(tx);
            boolean thewEx = true;
            try {
                store.writeAll(entries);
                thewEx = false;
            }
            catch (ClassCastException e) {
                handleClassCastException(e);
            }
            catch (Exception e) {
                if (!(e instanceof CacheWriterException))
                    e = new CacheWriterException(e);
                // Entries still present in the view were not written -- report a partial update.
                if (!entries.isEmpty()) {
                    List<Object> keys = new ArrayList<>(entries.size());
                    for (Cache.Entry<?, ?> entry : entries)
                        keys.add(entry.getKey());
                    throw new CacheStorePartialUpdateException(keys, e);
                }
                throw new IgniteCheckedException(e);
            }
            finally {
                endSession(tx, thewEx);
            }
            if (log.isDebugEnabled())
                log.debug("Stored value in cache store [entries=" + entries + ']');
            return true;
        }
        return false;
    }
}
/**
 * @param tx Cache transaction.
 * @param key Key.
 * @return {@code True} if there is a persistent storage.
 * @throws IgniteCheckedException If storage failed.
 */
@SuppressWarnings("unchecked")
public boolean removeFromStore(@Nullable IgniteInternalTx tx, Object key) throws IgniteCheckedException {
    if (store != null) {
        // Never remove internal key from store as it is never persisted.
        if (key instanceof GridCacheInternal)
            return false;
        if (convertPortable)
            key = cctx.unwrapPortableIfNeeded(key, false);
        if (log.isDebugEnabled())
            log.debug("Removing value from cache store [key=" + key + ']');
        initSession(tx);
        boolean thewEx = true;
        try {
            store.delete(key);
            thewEx = false;
        }
        catch (ClassCastException e) {
            handleClassCastException(e);
        }
        catch (CacheWriterException e) {
            throw new IgniteCheckedException(e);
        }
        catch (Exception e) {
            throw new IgniteCheckedException(new CacheWriterException(e));
        }
        finally {
            endSession(tx, thewEx);
        }
        if (log.isDebugEnabled())
            log.debug("Removed value from cache store [key=" + key + ']');
        return true;
    }
    return false;
}
/**
 * @param tx Cache transaction.
 * @param keys Keys to remove.
 * @return {@code True} if there is a persistent storage.
 * @throws IgniteCheckedException If storage failed.
 */
@SuppressWarnings("unchecked")
public boolean removeAllFromStore(@Nullable IgniteInternalTx tx, Collection<Object> keys)
    throws IgniteCheckedException {
    if (F.isEmpty(keys))
        return true;
    // Single-key fast path.
    if (keys.size() == 1) {
        Object key = keys.iterator().next();
        return removeFromStore(tx, key);
    }
    if (store != null) {
        Collection<Object> keys0 = convertPortable ? cctx.unwrapPortablesIfNeeded(keys, false) : keys;
        if (log.isDebugEnabled())
            log.debug("Removing values from cache store [keys=" + keys0 + ']');
        initSession(tx);
        boolean thewEx = true;
        try {
            store.deleteAll(keys0);
            thewEx = false;
        }
        catch (ClassCastException e) {
            handleClassCastException(e);
        }
        catch (Exception e) {
            if (!(e instanceof CacheWriterException))
                e = new CacheWriterException(e);
            // Keys still in the collection were not deleted -- report a partial update.
            if (!keys0.isEmpty())
                throw new CacheStorePartialUpdateException(keys0, e);
            throw new IgniteCheckedException(e);
        }
        finally {
            endSession(tx, thewEx);
        }
        if (log.isDebugEnabled())
            log.debug("Removed values from cache store [keys=" + keys0 + ']');
        return true;
    }
    return false;
}
/**
 * @return Store (possibly the write-behind wrapper).
 */
public CacheStore<Object, Object> store() {
    return store;
}
/**
 * Forces the write-behind buffer to flush; no-op for any other store type.
 *
 * @throws IgniteCheckedException If failed.
 */
public void forceFlush() throws IgniteCheckedException {
    if (store instanceof GridCacheWriteBehindStore)
        ((GridCacheWriteBehindStore)store).forceFlush();
}
/**
 * Ends the store session bound to a transaction.
 *
 * @param tx Transaction.
 * @param commit Commit.
 * @throws IgniteCheckedException If failed.
 */
public void txEnd(IgniteInternalTx tx, boolean commit) throws IgniteCheckedException {
    assert store != null;
    initSession(tx);
    try {
        store.sessionEnd(commit);
    }
    finally {
        // Detach the session from both the thread and the transaction.
        if (sesHolder != null) {
            sesHolder.set(null);
            tx.removeMeta(SES_ATTR);
        }
    }
}
/**
 * @param e Class cast exception.
 * @throws IgniteCheckedException Thrown exception (wraps {@code e} when it has a message,
 *      otherwise rethrows {@code e} itself).
 */
private void handleClassCastException(ClassCastException e) throws IgniteCheckedException {
    assert e != null;
    if (e.getMessage() != null) {
        throw new IgniteCheckedException("Cache store must work with portable objects if portables are " +
            "enabled for cache [cacheName=" + cctx.namex() + ']', e);
    }
    else
        throw e;
}
/**
 * Clears session holder.
 *
 * @param tx Transaction; non-transactional sessions (tx == null) are ended here,
 *      transactional ones are ended in {@link #txEnd(IgniteInternalTx, boolean)}.
 * @param threwEx Whether the store operation threw; suppresses secondary sessionEnd failures.
 * @throws IgniteCheckedException If sessionEnd failed and no prior exception is pending.
 */
void endSession(@Nullable IgniteInternalTx tx, boolean threwEx) throws IgniteCheckedException {
    try {
        if (tx == null)
            store.sessionEnd(threwEx);
    }
    catch (Exception e) {
        // Do not mask the original store exception with a sessionEnd failure.
        if (!threwEx)
            throw U.cast(e);
    }
    finally {
        if (sesHolder != null)
            sesHolder.set(null);
    }
}
/**
 * Binds a store session to the current thread, reusing the session attached to
 * the transaction when one exists.
 *
 * @param tx Current transaction.
 */
void initSession(@Nullable IgniteInternalTx tx) {
    // No holder means session injection into the store did not happen.
    if (sesHolder == null)
        return;
    // A previous session must have been cleared via endSession()/txEnd().
    assert sesHolder.get() == null;
    SessionData ses;
    if (tx != null) {
        ses = tx.meta(SES_ATTR);
        if (ses == null) {
            ses = new SessionData(tx, cctx.name());
            tx.addMeta(SES_ATTR, ses);
        }
        else
            // Session cache name may change in cross-cache transaction.
            ses.cacheName(cctx.name());
    }
    else
        ses = new SessionData(null, cctx.name());
    sesHolder.set(ses);
}
/**
 * Per-session state: owning transaction, current cache name and user properties.
 */
private static class SessionData {
    /** Owning transaction; null for non-transactional sessions. */
    @GridToStringExclude
    private final IgniteInternalTx tx;
    /** Mutable: may be rebound in cross-cache transactions. */
    private String cacheName;
    /** Lazily created user properties. */
    @GridToStringInclude
    private Map<Object, Object> props;
    /**
     * @param tx Current transaction.
     * @param cacheName Cache name.
     */
    private SessionData(@Nullable IgniteInternalTx tx, @Nullable String cacheName) {
        this.tx = tx;
        this.cacheName = cacheName;
    }
    /**
     * @return Transaction proxy, or null when non-transactional.
     */
    @Nullable private Transaction transaction() {
        return tx != null ? tx.proxy() : null;
    }
    /**
     * @return Properties (created on first access).
     */
    private Map<Object, Object> properties() {
        if (props == null)
            props = new GridLeanMap<>();
        return props;
    }
    /**
     * @return Cache name.
     */
    private String cacheName() {
        return cacheName;
    }
    /**
     * @param cacheName Cache name.
     */
    private void cacheName(String cacheName) {
        this.cacheName = cacheName;
    }
    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(SessionData.class, this, "tx", CU.txString(tx));
    }
}
/**
 * CacheStoreSession implementation backed by a thread-local {@link SessionData};
 * every accessor returns null/absent when no session is bound to the calling thread.
 */
private static class ThreadLocalSession implements CacheStoreSession {
    /** Holder shared with the manager via the constructor's sesHolders map. */
    private final ThreadLocal<SessionData> sesHolder = new ThreadLocal<>();
    /** {@inheritDoc} */
    @Nullable @Override public Transaction transaction() {
        SessionData ses0 = sesHolder.get();
        return ses0 != null ? ses0.transaction() : null;
    }
    /** {@inheritDoc} */
    @Override public boolean isWithinTransaction() {
        return transaction() != null;
    }
    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public <K1, V1> Map<K1, V1> properties() {
        SessionData ses0 = sesHolder.get();
        return ses0 != null ? (Map<K1, V1>)ses0.properties() : null;
    }
    /** {@inheritDoc} */
    @Nullable @Override public String cacheName() {
        SessionData ses0 = sesHolder.get();
        return ses0 != null ? ses0.cacheName() : null;
    }
    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(ThreadLocalSession.class, this);
    }
}
/**
*
*/
@SuppressWarnings("unchecked")
private class EntriesView extends AbstractCollection<Cache.Entry<?, ?>> {
/** */
private final Map<?, IgniteBiTuple<?, GridCacheVersion>> map;
/** */
private Set<Object> rmvd;
/** */
private boolean cleared;
/**
 * @param map Map backing this view; never copied, so callers see removals via rmvd/cleared.
 */
private EntriesView(Map<?, IgniteBiTuple<?, GridCacheVersion>> map) {
    assert map != null;
    this.map = map;
}
/** {@inheritDoc} */
@Override public int size() {
    if (cleared)
        return 0;
    int removed = rmvd != null ? rmvd.size() : 0;
    return map.size() - removed;
}
/** {@inheritDoc} */
@Override public boolean isEmpty() {
    // Delegates to the iterator so entries hidden via rmvd are not counted.
    return cleared || !iterator().hasNext();
}
/** {@inheritDoc} */
@Override public boolean contains(Object o) {
    if (cleared)
        return false;
    if (!(o instanceof Cache.Entry))
        return false;
    Cache.Entry<?, ?> entry = (Cache.Entry<?, ?>)o;
    return map.containsKey(entry.getKey());
}
/** {@inheritDoc} */
@NotNull @Override public Iterator<Cache.Entry<?, ?>> iterator() {
if (cleared)
return F.emptyIterator();
final Iterator<Map.Entry<?, IgniteBiTuple<?, GridCacheVersion>>> it0 = (Iterator)map.entrySet().iterator();
return new Iterator<Cache.Entry<?, ?>>() {
/** */
private Cache.Entry<?, ?> cur;
/** */
private Cache.Entry<?, ?> next;
/**
*
*/
{
checkNext();
}
/**
*
*/
private void checkNext() {
while (it0.hasNext()) {
Map.Entry<?, IgniteBiTuple<?, GridCacheVersion>> e = it0.next();
Object k = e.getKey();
if (rmvd != null && rmvd.contains(k))
continue;
Object v = locStore ? e.getValue() : e.getValue().get1();
if (convertPortable) {
k = cctx.unwrapPortableIfNeeded(k, false);
v = cctx.unwrapPortableIfNeeded(v, false);
}
next = new CacheEntryImpl<>(k, v);
break;
}
}
@Override public boolean hasNext() {
return next != null;
}
@Override public Cache.Entry<?, ?> next() {
if (next == null)
throw new NoSuchElementException();
cur = next;
next = null;
checkNext();
return cur;
}
@Override public void remove() {
if (cur == null)
throw new IllegalStateException();
addRemoved(cur);
cur = null;
}
};
}
/** {@inheritDoc} The view is removal-only; additions are not supported. */
@Override public boolean add(Cache.Entry<?, ?> entry) {
    throw new UnsupportedOperationException();
}
/** {@inheritDoc} The view is removal-only; additions are not supported. */
@Override public boolean addAll(Collection<? extends Cache.Entry<?, ?>> col) {
    throw new UnsupportedOperationException();
}
/** {@inheritDoc} */
@Override public boolean remove(Object o) {
    if (cleared || !(o instanceof Cache.Entry))
        return false;
    Cache.Entry<?, ?> e = (Cache.Entry<?, ?>)o;
    // Already marked removed.
    if (rmvd != null && rmvd.contains(e.getKey()))
        return false;
    if (mapContains(e)) {
        addRemoved(e);
        return true;
    }
    return false;
}
/** {@inheritDoc} */
@Override public boolean containsAll(Collection<?> col) {
    // NOTE: a cleared view reports false even for an empty argument, preserving the
    // original guard's behavior.
    if (cleared)
        return false;
    for (Object o : col) {
        // BUG FIX: condition was inverted ("if (contains(o)) return false"), which made
        // the method return true only when NO element of 'col' was contained.
        if (!contains(o))
            return false;
    }
    return true;
}
/** {@inheritDoc} */
@Override public boolean removeAll(Collection<?> col) {
    if (cleared)
        return false;
    boolean modified = false;
    for (Object o : col) {
        if (remove(o))
            modified = true;
    }
    return modified;
}
/** {@inheritDoc} */
@Override public boolean retainAll(Collection<?> col) {
    if (cleared)
        return false;
    boolean modified = false;
    // Safe to mark entries removed while iterating: the look-ahead iterator has already
    // pre-fetched past the current element, so mutating 'rmvd' does not disturb it.
    for (Cache.Entry<?, ?> e : this) {
        if (!col.contains(e)) {
            addRemoved(e);
            modified = true;
        }
    }
    return modified;
}
/** {@inheritDoc} Marks the whole view empty without touching the backing map. */
@Override public void clear() {
    cleared = true;
}
/**
 * Marks an entry's key as removed from this view (lazily creating the removed-set).
 *
 * @param e Entry.
 */
private void addRemoved(Cache.Entry<?, ?> e) {
    if (rmvd == null)
        rmvd = new HashSet<>();
    rmvd.add(e.getKey());
}
/**
 * @param e Entry.
 * @return {@code True} if original map contains entry (ignores the removed-set).
 */
private boolean mapContains(Cache.Entry<?, ?> e) {
    return map.containsKey(e.getKey());
}
/** {@inheritDoc} */
// FIX: @Override was missing despite the {@inheritDoc} tag; added so the compiler
// verifies this actually overrides Object.toString().
@Override public String toString() {
    Iterator<Cache.Entry<?, ?>> it = iterator();
    if (!it.hasNext())
        return "[]";
    SB sb = new SB("[");
    while (true) {
        Cache.Entry<?, ?> e = it.next();
        sb.a(e.toString());
        if (!it.hasNext())
            return sb.a(']').toString();
        sb.a(", ");
    }
}
}
}
| |
package com.onelio.connectu.API;
import android.content.Context;
import com.onelio.connectu.API.Networking.UAWebService;
import com.onelio.connectu.App;
import com.onelio.connectu.Common;
import com.onelio.connectu.Containers.CalendarEvent;
import com.onelio.connectu.Managers.DatabaseManager;
import com.onelio.connectu.R;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
public class ScheduleRequest {
// Private definitions
private static String DATES_URL =
"https://cvnet.cpd.ua.es/uaHorarios/Home/ObtenerEventosCalendarioJson?calendario=";
private static String SIGUA_URL = "https://www.sigua.ua.es/api/pub/estancia/";
// Public
public static String CALENDAR_DOCENCIA = "docenciaalu";
public static String CALENDAR_EVALUACION = "evaluaalu";
public static String CALENDAR_EXAMENES = "examenesalu";
public static String CAlENDAR_FESTIVOS = "festivos";
// Private content
// Session
private Context context;
private App app;
// Content
private JSONObject schedule;
// define callback interface
public interface ScheduleCallback {
void onCompleted(boolean onResult, String message);
}
/**
 * Creates a request bound to the given Android context, with an initially empty
 * schedule JSON object.
 *
 * @param context any Context; its application context is used to reach the App singleton.
 */
public ScheduleRequest(Context context) {
    app = (App) context.getApplicationContext();
    this.context = context;
    schedule = new JSONObject();
}
/**
 * Persists the accumulated schedule JSON to preferences and publishes it on the App
 * singleton. Call only after all four calendars have been loaded into {@code schedule}.
 */
public void saveFullSchedule() {
    DatabaseManager db = new DatabaseManager(context);
    db.putString(Common.PREFERENCE_JSON_SCHEDULE, schedule.toString());
    app.schedule = schedule;
}
/**
 * Asynchronously downloads one calendar's events and stores the resulting JSON array
 * under {@code type} in the in-memory schedule object.
 *
 * @param start    range start, epoch milliseconds (passed through to the service).
 * @param stop     range end, epoch milliseconds.
 * @param type     calendar id, one of the CALENDAR_* constants; also the JSON key used.
 * @param callback invoked with (true, "") on success, or (false, errorMessage) on failure.
 */
public void loadSchedule(
    long start, long stop, final String type, final ScheduleCallback callback) {
    String url =
        DATES_URL + type + "&start=" + String.valueOf(start) + "&end=" + String.valueOf(stop);
    UAWebService.HttpWebGetRequest(
        context,
        url,
        new UAWebService.WebCallBack() {
            @Override
            public void onNavigationComplete(boolean isSuccessful, String body) {
                if (isSuccessful) {
                    try {
                        schedule.put(type, new JSONArray(body));
                        callback.onCompleted(true, "");
                    } catch (JSONException e) {
                        // Body was not valid JSON; surface the parse error to the caller.
                        callback.onCompleted(false, e.getMessage());
                    }
                } else {
                    callback.onCompleted(false, body);
                }
            }
        });
}
/**
 * Day-granularity containment check: {@code true} when {@code current}'s calendar day
 * lies within [{@code start}, {@code end}], ignoring the time-of-day component.
 *
 * <p>BUG FIX: the previous version compared year/month/day fields independently, which
 * wrongly rejected valid dates across month or year boundaries (e.g. start Jan 31,
 * current Feb 1: 31 &lt;= 1 failed even though Feb 1 is after Jan 31).
 *
 * @param start   event start date.
 * @param current date being tested.
 * @param end     event end date.
 * @return whether current's day falls inside the event's day span, inclusive.
 */
private boolean happensHere(Date start, Date current, Date end) {
    int day = _dayKey(current);
    return _dayKey(start) <= day && day <= _dayKey(end);
}

/** Encodes a Date's (year, month, day-of-month) as one monotonically increasing int. */
@SuppressWarnings("deprecation")
private int _dayKey(Date d) {
    // month is 0-11 (< 32) and day-of-month 1-31 (< 32), so the shifted fields cannot collide.
    return (d.getYear() << 10) | (d.getMonth() << 5) | d.getDate();
}
/**
 * Finds the insertion index that keeps {@code events} ordered by start time: the first
 * event whose offset from {@code date} is not smaller than {@code newevent}'s.
 *
 * <p>BUG FIX: the millisecond differences were cast to {@code int}, which overflows once
 * an event is more than ~24.8 days away from {@code date} and corrupts the ordering.
 * Comparisons are now done on {@code long}.
 *
 * @param newevent event to place.
 * @param events   list assumed sorted by start time.
 * @param date     reference date the offsets are measured from.
 * @return insertion index, or -1 when newevent belongs at the end.
 */
private int isNewerThan(CalendarEvent newevent, List<CalendarEvent> events, Date date) {
    long newDif = newevent.getStart().getTime() - date.getTime();
    for (int i = 0; i < events.size(); i++) {
        long eventDif = events.get(i).getStart().getTime() - date.getTime();
        if (newDif <= eventDif) {
            return i;
        }
    }
    return -1;
}
// Parses the cached feed of the given `type` (a CALENDAR_* key into
// app.schedule) and returns the events that fall on `date`, inserted in
// ascending start-time order via isNewerThan().
private List<CalendarEvent> getEvents(Date date, String type) {
    // Feed timestamps look like ISO-8601 ("2018-05-01T09:00:00"); the 'T' is
    // swapped for a space below so this pattern can parse them.
    // NOTE(review): uses the default locale — presumably fine for a purely
    // numeric pattern, but Locale.US would be safer; confirm.
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    List<CalendarEvent> events = new ArrayList<>();
    try {
        JSONArray array = app.schedule.getJSONArray(type);
        for (int i = 0; i < array.length(); i++) {
            CalendarEvent event = new CalendarEvent();
            JSONObject jdata = array.getJSONObject(i);
            String sstart = jdata.getString("start").replace("T", " ");
            Date start = sdf.parse(sstart);
            String send = jdata.getString("end").replace("T", " ");
            Date end = sdf.parse(send);
            event.setStart(start);
            event.setEnd(end);
            // Only keep events whose [start, end] day range covers `date`.
            if (happensHere(start, date, end)) {
                event.setType(type);
                event.setTitle(jdata.getString("uaAssNomass"));
                event.setText(jdata.getString("uaGacDesgac"));
                event.setLoc(jdata.getString("uaIdAula"));
                event.setSigua(jdata.getString("uaIdSigua"));
                event.setFullData(jdata.toString());
                // "allDay" is optional in some feeds; a missing key is tolerated.
                try {
                    event.setAllDay(jdata.getBoolean("allDay"));
                } catch (JSONException e) {
                    e.printStackTrace();
                }
                // Per-feed presentation tweaks (title/subtitle/location).
                if (type.equals(CALENDAR_DOCENCIA)) {
                    event.setSubtitle(jdata.getString("uaCacDescac"));
                }
                if (type.equals(CALENDAR_EVALUACION)) {
                    event.setSubtitle(jdata.getString("uaEvaluaTipo") + ": " + jdata.getString("title"));
                }
                if (type.equals(CALENDAR_EXAMENES)) {
                    event.setTitle(jdata.getString("uaAssNomass"));
                    event.setSubtitle(jdata.getString("title"));
                    event.setText(context.getString(R.string.view_horario_official_test));
                    event.setLoc(context.getString(R.string.view_horario_lotofplaces));
                }
                if (type.equals(CAlENDAR_FESTIVOS)) {
                    event.setTitle(jdata.getString("title"));
                    event.setSubtitle(context.getString(R.string.view_horario_holiday));
                    event.setText(context.getString(R.string.view_horario_not_class));
                    event.setLoc("");
                }
                // Insert keeping the list ordered by start time.
                int index = isNewerThan(event, events, date);
                if (index > -1) {
                    events.add(index, event);
                } else {
                    events.add(event);
                }
            }
        }
    } catch (ParseException | JSONException e) {
        // A malformed feed aborts parsing; events collected so far are returned.
        e.printStackTrace();
    }
    return events;
}
// Returns the events for `date` matching the feed types named in `filter`
// (a string containing any of the Common.SCHEDULE_FILTER_* tokens).
// Note: `date`'s time-of-day is zeroed in place — the argument is mutated.
// Feed order (festivos, exams, evaluation, teaching) matches the original
// display priority. The four copy loops were replaced with addAll — same
// behavior, less duplication.
public List<CalendarEvent> getDateEvent(Date date, String filter) {
    List<CalendarEvent> result = new ArrayList<>();
    date.setHours(0);
    date.setMinutes(0);
    date.setSeconds(0);
    // Festivo
    if (filter.contains(Common.SCHEDULE_FILTER_FESTIVO)) {
        result.addAll(getEvents(date, CAlENDAR_FESTIVOS));
    }
    // Examenes
    if (filter.contains(Common.SCHEDULE_FILTER_EXAMS)) {
        result.addAll(getEvents(date, CALENDAR_EXAMENES));
    }
    // Evaluacion
    if (filter.contains(Common.SCHEDULE_FILTER_EVALUACION)) {
        result.addAll(getEvents(date, CALENDAR_EVALUACION));
    }
    // Docencia
    if (filter.contains(Common.SCHEDULE_FILTER_DOCENCIA)) {
        result.addAll(getEvents(date, CALENDAR_DOCENCIA));
    }
    return result;
}
// Returns the events for `date` filtered by the user's saved preferences
// (one boolean per feed type, read via app.getPublicPreferenceB).
// Note: `date`'s time-of-day is zeroed in place — the argument is mutated.
// The four copy loops were replaced with addAll — same behavior and order,
// less duplication.
public List<CalendarEvent> getDateEvents(Date date) {
    List<CalendarEvent> result = new ArrayList<>();
    date.setHours(0);
    date.setMinutes(0);
    date.setSeconds(0);
    // Festivo
    if (app.getPublicPreferenceB(Common.SCHEDULE_FILTER_FESTIVO)) {
        result.addAll(getEvents(date, CAlENDAR_FESTIVOS));
    }
    // Examenes
    if (app.getPublicPreferenceB(Common.SCHEDULE_FILTER_EXAMS)) {
        result.addAll(getEvents(date, CALENDAR_EXAMENES));
    }
    // Evaluacion
    if (app.getPublicPreferenceB(Common.SCHEDULE_FILTER_EVALUACION)) {
        result.addAll(getEvents(date, CALENDAR_EVALUACION));
    }
    // Docencia
    if (app.getPublicPreferenceB(Common.SCHEDULE_FILTER_DOCENCIA)) {
        result.addAll(getEvents(date, CALENDAR_DOCENCIA));
    }
    return result;
}
// Looks up the SIGUA (room/location) details for an event and forwards the
// raw web response and success flag straight to the callback.
public void getSIGUA(CalendarEvent event, final ScheduleCallback callback) {
    final String requestUrl = SIGUA_URL + event.getSigua();
    UAWebService.WebCallBack relay =
        new UAWebService.WebCallBack() {
            @Override
            public void onNavigationComplete(boolean isSuccessful, String body) {
                callback.onCompleted(isSuccessful, body);
            }
        };
    UAWebService.HttpWebGetRequest(context, requestUrl, relay);
}
}
| |
package org.atlasapi.content;
import java.util.Optional;
import org.atlasapi.entity.Id;
import org.atlasapi.media.entity.Publisher;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.model.Column;
import com.netflix.astyanax.model.ColumnList;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import static org.hamcrest.Matchers.hasItems;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
// Tests the round trip of Content through the Astyanax/protobuf marshaller:
// marshallInto() writes one column per content aspect into a mocked
// ColumnListMutation; unmarshallCols() rebuilds the Content from the captured
// bytes replayed through a mocked ColumnList.
public class AstyanaxProtobufContentMarshallerTest {

    // System under test.
    private final ContentMarshaller marshaller = AstyanaxProtobufContentMarshaller.create(
            new ContentSerializer(new ContentSerializationVisitor())
    );

    // Marshalls with the final flag false: 6 columns are written (the ones
    // asserted below), and the round trip preserves id/title/activelyPublished.
    @Test
    @SuppressWarnings("unchecked")
    public void testMarshallsAndUnmarshallsContentWithoutNullifyingEmptyRepeatedFields() {
        Content content = new Episode();
        content.setId(Id.valueOf(1234));
        content.setPublisher(Publisher.BBC);
        content.setTitle("title");
        content.setActivelyPublished(false);
        content.setGenericDescription(true);

        ColumnListMutation<String> mutation = mock(ColumnListMutation.class);

        marshaller.marshallInto(content.getId(), mutation, content, Optional.empty(), false);

        // Capture every (column name, serialized bytes) pair the marshaller wrote.
        ArgumentCaptor<String> col = ArgumentCaptor.forClass(String.class);
        final ArgumentCaptor<byte[]> val = ArgumentCaptor.forClass(byte[].class);

        verify(mutation, times(6)).putColumn(col.capture(), val.capture());
        assertThat(col.getAllValues().size(), is(6));
        assertThat(
                col.getAllValues(),
                hasItems(
                        "IDENTIFICATION",
                        "DESCRIPTION",
                        "SOURCE",
                        "TYPE",
                        "ACTIVELY_PUBLISHED",
                        "GENERIC_DESCRIPTION"
                )
        );

        // Replay the captured bytes as a ColumnList and unmarshall.
        ImmutableList<Column<String>> columns = ImmutableList.of(
                column(val.getAllValues().get(0)),
                column(val.getAllValues().get(1)),
                column(val.getAllValues().get(2)),
                column(val.getAllValues().get(3)),
                column(val.getAllValues().get(4)),
                column(val.getAllValues().get(5))
        );

        ColumnList<String> cols = mock(ColumnList.class);
        when(cols.iterator())
                .thenReturn(
                        columns.iterator()
                );

        Content unmarshalled = marshaller.unmarshallCols(cols);
        assertThat(unmarshalled.getId(), is(content.getId()));
        assertThat(unmarshalled.getTitle(), is(content.getTitle()));
        assertThat(unmarshalled.isActivelyPublished(), is(false));
    }

    // Same round trip with the final flag true: 14 columns are written.
    @Test
    @SuppressWarnings("unchecked")
    public void testMarshallsAndUnmarshallsContent() {
        Content content = new Episode();
        content.setId(Id.valueOf(1234));
        content.setPublisher(Publisher.BBC);
        content.setTitle("title");
        content.setActivelyPublished(false);
        content.setGenericDescription(true);

        ColumnListMutation<String> mutation = mock(ColumnListMutation.class);

        marshaller.marshallInto(content.getId(), mutation, content, Optional.empty(), true);

        ArgumentCaptor<String> col = ArgumentCaptor.forClass(String.class);
        final ArgumentCaptor<byte[]> val = ArgumentCaptor.forClass(byte[].class);

        verify(mutation, times(14)).putColumn(col.capture(), val.capture());
        assertThat(col.getAllValues().size(), is(14));
        assertThat(
                col.getAllValues(),
                hasItems(
                        "IDENTIFICATION",
                        "DESCRIPTION",
                        "SOURCE",
                        "TYPE",
                        "ACTIVELY_PUBLISHED",
                        "GENERIC_DESCRIPTION"
                )
        );

        // Replay all 14 captured values and check the round trip.
        ImmutableList<Column<String>> columns = ImmutableList.of(
                column(val.getAllValues().get(0)),
                column(val.getAllValues().get(1)),
                column(val.getAllValues().get(2)),
                column(val.getAllValues().get(3)),
                column(val.getAllValues().get(4)),
                column(val.getAllValues().get(5)),
                column(val.getAllValues().get(6)),
                column(val.getAllValues().get(7)),
                column(val.getAllValues().get(8)),
                column(val.getAllValues().get(9)),
                column(val.getAllValues().get(10)),
                column(val.getAllValues().get(11)),
                column(val.getAllValues().get(12)),
                column(val.getAllValues().get(13))
        );

        ColumnList<String> cols = mock(ColumnList.class);
        when(cols.iterator())
                .thenReturn(
                        columns.iterator()
                );

        Content unmarshalled = marshaller.unmarshallCols(cols);
        assertThat(unmarshalled.getId(), is(content.getId()));
        assertThat(unmarshalled.getTitle(), is(content.getTitle()));
        assertThat(unmarshalled.isActivelyPublished(), is(false));
    }

    // Round trip of an item carrying an Encoding/Location/Policy graph; the
    // LOCATIONS column is expected among the 14 written columns.
    @Test
    @SuppressWarnings("unchecked")
    public void testMarshallsAndUnmarshallsLocations() {
        Item content = new Episode();
        content.setId(Id.valueOf(1234));
        content.setPublisher(Publisher.BBC);
        content.setTitle("title");
        content.setActivelyPublished(false);
        content.setGenericDescription(true);
        Policy policy = new Policy();
        Encoding encoding = new Encoding();
        Location location = new Location();
        location.setPolicy(policy);
        encoding.setAvailableAt(ImmutableSet.of(location));
        // content.setManifestedAs(ImmutableSet.of(encoding));

        ColumnListMutation<String> mutation = mock(ColumnListMutation.class);

        marshaller.marshallInto(content.getId(), mutation, content, Optional.empty(), true);

        ArgumentCaptor<String> col = ArgumentCaptor.forClass(String.class);
        final ArgumentCaptor<byte[]> val = ArgumentCaptor.forClass(byte[].class);

        verify(mutation, times(14)).putColumn(col.capture(), val.capture());
        assertThat(col.getAllValues().size(), is(14));
        assertThat(
                col.getAllValues(),
                hasItems(
                        "LOCATIONS",
                        "IDENTIFICATION",
                        "DESCRIPTION",
                        "SOURCE",
                        "TYPE",
                        "ACTIVELY_PUBLISHED",
                        "GENERIC_DESCRIPTION"
                )
        );

        // NOTE(review): captured value at index 5 is skipped here, so only 13
        // of the 14 written columns are replayed. Presumably deliberate
        // (unmarshalling must tolerate a missing column) — confirm; if not,
        // column(val.getAllValues().get(5)) is missing from this list.
        ImmutableList<Column<String>> columns = ImmutableList.of(
                column(val.getAllValues().get(0)),
                column(val.getAllValues().get(1)),
                column(val.getAllValues().get(2)),
                column(val.getAllValues().get(3)),
                column(val.getAllValues().get(4)),
                column(val.getAllValues().get(6)),
                column(val.getAllValues().get(7)),
                column(val.getAllValues().get(8)),
                column(val.getAllValues().get(9)),
                column(val.getAllValues().get(10)),
                column(val.getAllValues().get(11)),
                column(val.getAllValues().get(12)),
                column(val.getAllValues().get(13))
        );

        ColumnList<String> cols = mock(ColumnList.class);
        when(cols.iterator())
                .thenReturn(
                        columns.iterator()
                );

        Content unmarshalled = marshaller.unmarshallCols(cols);
        assertThat(unmarshalled.getId(), is(content.getId()));
        assertThat(unmarshalled.getTitle(), is(content.getTitle()));
        assertThat(unmarshalled.isActivelyPublished(), is(false));
    }

    // Wraps raw serialized bytes in a mocked Astyanax Column for replay.
    @SuppressWarnings("unchecked")
    private Column<String> column(byte[] bytes) {
        Column<String> mock = mock(Column.class);
        when(mock.getByteArrayValue()).thenReturn(bytes);
        return mock;
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.compute.v2020_06_01;
import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.management.compute.v2020_06_01.implementation.GalleryApplicationInner;
import com.microsoft.azure.arm.model.Indexable;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.compute.v2020_06_01.implementation.ComputeManager;
import java.util.Map;
import org.joda.time.DateTime;
/**
 * Type representing GalleryApplication.
 * <p>
 * Fluent model of an Azure Compute gallery Application Definition, with the
 * usual generated definition stages (gallery → location → supportedOSType →
 * create) and update stages.
 * <p>
 * NOTE: this interface is auto-generated by AutoRest; regenerate from the
 * service specification rather than hand-editing.
 */
public interface GalleryApplication extends HasInner<GalleryApplicationInner>, Indexable, Refreshable<GalleryApplication>, Updatable<GalleryApplication.Update>, HasManager<ComputeManager> {
    /**
     * @return the description value.
     */
    String description();

    /**
     * @return the endOfLifeDate value.
     */
    DateTime endOfLifeDate();

    /**
     * @return the eula value.
     */
    String eula();

    /**
     * @return the id value.
     */
    String id();

    /**
     * @return the location value.
     */
    String location();

    /**
     * @return the name value.
     */
    String name();

    /**
     * @return the privacyStatementUri value.
     */
    String privacyStatementUri();

    /**
     * @return the releaseNoteUri value.
     */
    String releaseNoteUri();

    /**
     * @return the supportedOSType value.
     */
    OperatingSystemTypes supportedOSType();

    /**
     * @return the tags value.
     */
    Map<String, String> tags();

    /**
     * @return the type value.
     */
    String type();

    /**
     * The entirety of the GalleryApplication definition.
     */
    interface Definition extends DefinitionStages.Blank, DefinitionStages.WithGallery, DefinitionStages.WithLocation, DefinitionStages.WithSupportedOSType, DefinitionStages.WithCreate {
    }

    /**
     * Grouping of GalleryApplication definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of a GalleryApplication definition.
         */
        interface Blank extends WithGallery {
        }

        /**
         * The stage of the galleryapplication definition allowing to specify Gallery.
         */
        interface WithGallery {
           /**
            * Specifies resourceGroupName, galleryName.
            * @param resourceGroupName The name of the resource group
            * @param galleryName The name of the Shared Application Gallery in which the Application Definition is to be created
            * @return the next definition stage
            */
            WithLocation withExistingGallery(String resourceGroupName, String galleryName);
        }

        /**
         * The stage of the galleryapplication definition allowing to specify Location.
         */
        interface WithLocation {
           /**
            * Specifies location.
            * @param location Resource location
            * @return the next definition stage
            */
            WithSupportedOSType withLocation(String location);
        }

        /**
         * The stage of the galleryapplication definition allowing to specify SupportedOSType.
         */
        interface WithSupportedOSType {
           /**
            * Specifies supportedOSType.
            * @param supportedOSType This property allows you to specify the supported type of the OS that application is built for. &lt;br&gt;&lt;br&gt; Possible values are: &lt;br&gt;&lt;br&gt; **Windows** &lt;br&gt;&lt;br&gt; **Linux**. Possible values include: 'Windows', 'Linux'
            * @return the next definition stage
            */
            WithCreate withSupportedOSType(OperatingSystemTypes supportedOSType);
        }

        /**
         * The stage of the galleryapplication definition allowing to specify Description.
         */
        interface WithDescription {
            /**
             * Specifies description.
             * @param description The description of this gallery Application Definition resource. This property is updatable
             * @return the next definition stage
             */
            WithCreate withDescription(String description);
        }

        /**
         * The stage of the galleryapplication definition allowing to specify EndOfLifeDate.
         */
        interface WithEndOfLifeDate {
            /**
             * Specifies endOfLifeDate.
             * @param endOfLifeDate The end of life date of the gallery Application Definition. This property can be used for decommissioning purposes. This property is updatable
             * @return the next definition stage
             */
            WithCreate withEndOfLifeDate(DateTime endOfLifeDate);
        }

        /**
         * The stage of the galleryapplication definition allowing to specify Eula.
         */
        interface WithEula {
            /**
             * Specifies eula.
             * @param eula The Eula agreement for the gallery Application Definition
             * @return the next definition stage
             */
            WithCreate withEula(String eula);
        }

        /**
         * The stage of the galleryapplication definition allowing to specify PrivacyStatementUri.
         */
        interface WithPrivacyStatementUri {
            /**
             * Specifies privacyStatementUri.
             * @param privacyStatementUri The privacy statement uri
             * @return the next definition stage
             */
            WithCreate withPrivacyStatementUri(String privacyStatementUri);
        }

        /**
         * The stage of the galleryapplication definition allowing to specify ReleaseNoteUri.
         */
        interface WithReleaseNoteUri {
            /**
             * Specifies releaseNoteUri.
             * @param releaseNoteUri The release note uri
             * @return the next definition stage
             */
            WithCreate withReleaseNoteUri(String releaseNoteUri);
        }

        /**
         * The stage of the galleryapplication definition allowing to specify Tags.
         */
        interface WithTags {
            /**
             * Specifies tags.
             * @param tags Resource tags
             * @return the next definition stage
             */
            WithCreate withTags(Map<String, String> tags);
        }

        /**
         * The stage of the definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends Creatable<GalleryApplication>, DefinitionStages.WithDescription, DefinitionStages.WithEndOfLifeDate, DefinitionStages.WithEula, DefinitionStages.WithPrivacyStatementUri, DefinitionStages.WithReleaseNoteUri, DefinitionStages.WithTags {
        }
    }
    /**
     * The template for a GalleryApplication update operation, containing all the settings that can be modified.
     */
    interface Update extends Appliable<GalleryApplication>, UpdateStages.WithDescription, UpdateStages.WithEndOfLifeDate, UpdateStages.WithEula, UpdateStages.WithPrivacyStatementUri, UpdateStages.WithReleaseNoteUri, UpdateStages.WithTags {
    }

    /**
     * Grouping of GalleryApplication update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the galleryapplication update allowing to specify Description.
         */
        interface WithDescription {
            /**
             * Specifies description.
             * @param description The description of this gallery Application Definition resource. This property is updatable
             * @return the next update stage
             */
            Update withDescription(String description);
        }

        /**
         * The stage of the galleryapplication update allowing to specify EndOfLifeDate.
         */
        interface WithEndOfLifeDate {
            /**
             * Specifies endOfLifeDate.
             * @param endOfLifeDate The end of life date of the gallery Application Definition. This property can be used for decommissioning purposes. This property is updatable
             * @return the next update stage
             */
            Update withEndOfLifeDate(DateTime endOfLifeDate);
        }

        /**
         * The stage of the galleryapplication update allowing to specify Eula.
         */
        interface WithEula {
            /**
             * Specifies eula.
             * @param eula The Eula agreement for the gallery Application Definition
             * @return the next update stage
             */
            Update withEula(String eula);
        }

        /**
         * The stage of the galleryapplication update allowing to specify PrivacyStatementUri.
         */
        interface WithPrivacyStatementUri {
            /**
             * Specifies privacyStatementUri.
             * @param privacyStatementUri The privacy statement uri
             * @return the next update stage
             */
            Update withPrivacyStatementUri(String privacyStatementUri);
        }

        /**
         * The stage of the galleryapplication update allowing to specify ReleaseNoteUri.
         */
        interface WithReleaseNoteUri {
            /**
             * Specifies releaseNoteUri.
             * @param releaseNoteUri The release note uri
             * @return the next update stage
             */
            Update withReleaseNoteUri(String releaseNoteUri);
        }

        /**
         * The stage of the galleryapplication update allowing to specify Tags.
         */
        interface WithTags {
            /**
             * Specifies tags.
             * @param tags Resource tags
             * @return the next update stage
             */
            Update withTags(Map<String, String> tags);
        }
    }
}
| |
/**
*/
package Linux.impl;
import Linux.Arquitectura;
import Linux.Configuracion;
import Linux.LineaComando;
import Linux.LinuxFactory;
import Linux.LinuxPackage;
import Linux.Parametro;
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.EReference;
import org.eclipse.emf.ecore.impl.EPackageImpl;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model <b>Package</b>.
 * <p>
 * NOTE(review): EMF-generated package implementation (note the {@code @generated}
 * tags) — change the .ecore model and regenerate rather than hand-editing.
 * <!-- end-user-doc -->
 * @generated
 */
public class LinuxPackageImpl extends EPackageImpl implements LinuxPackage {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private EClass configuracionEClass = null;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private EClass lineaComandoEClass = null;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private EClass parametroEClass = null;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private EEnum arquitecturaEEnum = null;

    /**
     * Creates an instance of the model <b>Package</b>, registered with
     * {@link org.eclipse.emf.ecore.EPackage.Registry EPackage.Registry} by the package
     * package URI value.
     * <p>Note: the correct way to create the package is via the static
     * factory method {@link #init init()}, which also performs
     * initialization of the package, or returns the registered package,
     * if one already exists.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see org.eclipse.emf.ecore.EPackage.Registry
     * @see Linux.LinuxPackage#eNS_URI
     * @see #init()
     * @generated
     */
    private LinuxPackageImpl() {
        super(eNS_URI, LinuxFactory.eINSTANCE);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private static boolean isInited = false;

    /**
     * Creates, registers, and initializes the <b>Package</b> for this model, and for any others upon which it depends.
     *
     * <p>This method is used to initialize {@link LinuxPackage#eINSTANCE} when that field is accessed.
     * Clients should not invoke it directly. Instead, they should simply access that field to obtain the package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #eNS_URI
     * @see #createPackageContents()
     * @see #initializePackageContents()
     * @generated
     */
    public static LinuxPackage init() {
        // Guard: the package is created and initialized at most once; later
        // calls return the registered instance.
        if (isInited) return (LinuxPackage)EPackage.Registry.INSTANCE.getEPackage(LinuxPackage.eNS_URI);

        // Obtain or create and register package
        LinuxPackageImpl theLinuxPackage = (LinuxPackageImpl)(EPackage.Registry.INSTANCE.get(eNS_URI) instanceof LinuxPackageImpl ? EPackage.Registry.INSTANCE.get(eNS_URI) : new LinuxPackageImpl());

        isInited = true;

        // Create package meta-data objects
        theLinuxPackage.createPackageContents();

        // Initialize created meta-data
        theLinuxPackage.initializePackageContents();

        // Mark meta-data to indicate it can't be changed
        theLinuxPackage.freeze();

        // Update the registry and return the package
        EPackage.Registry.INSTANCE.put(LinuxPackage.eNS_URI, theLinuxPackage);
        return theLinuxPackage;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getConfiguracion() {
        return configuracionEClass;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getConfiguracion_Version() {
        return (EAttribute)configuracionEClass.getEStructuralFeatures().get(0);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getConfiguracion_Descripcion() {
        return (EAttribute)configuracionEClass.getEStructuralFeatures().get(1);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getConfiguracion_Arquitectura() {
        return (EAttribute)configuracionEClass.getEStructuralFeatures().get(2);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EReference getConfiguracion_Repositorios() {
        return (EReference)configuracionEClass.getEStructuralFeatures().get(3);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EReference getConfiguracion_Aplicaciones() {
        return (EReference)configuracionEClass.getEStructuralFeatures().get(4);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getLineaComando() {
        return lineaComandoEClass;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EReference getLineaComando_Parametros() {
        return (EReference)lineaComandoEClass.getEStructuralFeatures().get(0);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getLineaComando_Id() {
        return (EAttribute)lineaComandoEClass.getEStructuralFeatures().get(1);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getLineaComando_Descripcion() {
        return (EAttribute)lineaComandoEClass.getEStructuralFeatures().get(2);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getLineaComando_RutaDestino() {
        return (EAttribute)lineaComandoEClass.getEStructuralFeatures().get(3);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getLineaComando_Gestor() {
        return (EAttribute)lineaComandoEClass.getEStructuralFeatures().get(4);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getLineaComando_Aplicacion() {
        return (EAttribute)lineaComandoEClass.getEStructuralFeatures().get(5);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getParametro() {
        return parametroEClass;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getParametro_Nombre() {
        return (EAttribute)parametroEClass.getEStructuralFeatures().get(0);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EAttribute getParametro_Valor() {
        return (EAttribute)parametroEClass.getEStructuralFeatures().get(1);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getArquitectura() {
        return arquitecturaEEnum;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public LinuxFactory getLinuxFactory() {
        return (LinuxFactory)getEFactoryInstance();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private boolean isCreated = false;

    /**
     * Creates the meta-model objects for the package. This method is
     * guarded to have no affect on any invocation but its first.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void createPackageContents() {
        if (isCreated) return;
        isCreated = true;

        // Create classes and their features
        configuracionEClass = createEClass(CONFIGURACION);
        createEAttribute(configuracionEClass, CONFIGURACION__VERSION);
        createEAttribute(configuracionEClass, CONFIGURACION__DESCRIPCION);
        createEAttribute(configuracionEClass, CONFIGURACION__ARQUITECTURA);
        createEReference(configuracionEClass, CONFIGURACION__REPOSITORIOS);
        createEReference(configuracionEClass, CONFIGURACION__APLICACIONES);

        lineaComandoEClass = createEClass(LINEA_COMANDO);
        createEReference(lineaComandoEClass, LINEA_COMANDO__PARAMETROS);
        createEAttribute(lineaComandoEClass, LINEA_COMANDO__ID);
        createEAttribute(lineaComandoEClass, LINEA_COMANDO__DESCRIPCION);
        createEAttribute(lineaComandoEClass, LINEA_COMANDO__RUTA_DESTINO);
        createEAttribute(lineaComandoEClass, LINEA_COMANDO__GESTOR);
        createEAttribute(lineaComandoEClass, LINEA_COMANDO__APLICACION);

        parametroEClass = createEClass(PARAMETRO);
        createEAttribute(parametroEClass, PARAMETRO__NOMBRE);
        createEAttribute(parametroEClass, PARAMETRO__VALOR);

        // Create enums
        arquitecturaEEnum = createEEnum(ARQUITECTURA);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    private boolean isInitialized = false;

    /**
     * Complete the initialization of the package and its meta-model. This
     * method is guarded to have no affect on any invocation but its first.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void initializePackageContents() {
        if (isInitialized) return;
        isInitialized = true;

        // Initialize package
        setName(eNAME);
        setNsPrefix(eNS_PREFIX);
        setNsURI(eNS_URI);

        // Create type parameters

        // Set bounds for type parameters

        // Add supertypes to classes

        // Initialize classes, features, and operations; add parameters
        initEClass(configuracionEClass, Configuracion.class, "Configuracion", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEAttribute(getConfiguracion_Version(), ecorePackage.getEString(), "version", null, 0, 1, Configuracion.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEAttribute(getConfiguracion_Descripcion(), ecorePackage.getEString(), "descripcion", null, 0, 1, Configuracion.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEAttribute(getConfiguracion_Arquitectura(), this.getArquitectura(), "arquitectura", null, 0, 1, Configuracion.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEReference(getConfiguracion_Repositorios(), this.getLineaComando(), null, "repositorios", null, 0, -1, Configuracion.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, !IS_ORDERED);
        initEReference(getConfiguracion_Aplicaciones(), this.getLineaComando(), null, "aplicaciones", null, 1, -1, Configuracion.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, !IS_ORDERED);

        initEClass(lineaComandoEClass, LineaComando.class, "LineaComando", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEReference(getLineaComando_Parametros(), this.getParametro(), null, "parametros", null, 0, 1, LineaComando.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, IS_COMPOSITE, !IS_RESOLVE_PROXIES, !IS_UNSETTABLE, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEAttribute(getLineaComando_Id(), ecorePackage.getEString(), "id", null, 0, 1, LineaComando.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEAttribute(getLineaComando_Descripcion(), ecorePackage.getEString(), "descripcion", null, 0, 1, LineaComando.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEAttribute(getLineaComando_RutaDestino(), ecorePackage.getEString(), "rutaDestino", null, 0, 1, LineaComando.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEAttribute(getLineaComando_Gestor(), ecorePackage.getEString(), "gestor", null, 1, 1, LineaComando.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEAttribute(getLineaComando_Aplicacion(), ecorePackage.getEString(), "aplicacion", null, 1, 1, LineaComando.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);

        initEClass(parametroEClass, Parametro.class, "Parametro", !IS_ABSTRACT, !IS_INTERFACE, IS_GENERATED_INSTANCE_CLASS);
        initEAttribute(getParametro_Nombre(), ecorePackage.getEString(), "nombre", null, 1, 1, Parametro.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);
        initEAttribute(getParametro_Valor(), ecorePackage.getEString(), "valor", null, 1, 1, Parametro.class, !IS_TRANSIENT, !IS_VOLATILE, IS_CHANGEABLE, !IS_UNSETTABLE, !IS_ID, IS_UNIQUE, !IS_DERIVED, IS_ORDERED);

        // Initialize enums and add enum literals
        initEEnum(arquitecturaEEnum, Arquitectura.class, "Arquitectura");
        addEEnumLiteral(arquitecturaEEnum, Arquitectura.X32);
        addEEnumLiteral(arquitecturaEEnum, Arquitectura.X64);

        // Create resource
        createResource(eNS_URI);
    }
} //LinuxPackageImpl
| |
package bboss.org.apache.velocity.runtime.directive;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.IOException;
import java.io.Writer;
import java.util.Iterator;
import bboss.org.apache.velocity.context.ChainedInternalContextAdapter;
import bboss.org.apache.velocity.context.InternalContextAdapter;
import bboss.org.apache.velocity.exception.MethodInvocationException;
import bboss.org.apache.velocity.exception.ParseErrorException;
import bboss.org.apache.velocity.exception.ResourceNotFoundException;
import bboss.org.apache.velocity.exception.TemplateInitException;
import bboss.org.apache.velocity.exception.VelocityException;
import bboss.org.apache.velocity.runtime.RuntimeConstants;
import bboss.org.apache.velocity.runtime.RuntimeServices;
import bboss.org.apache.velocity.runtime.log.Log;
import bboss.org.apache.velocity.runtime.parser.node.ASTReference;
import bboss.org.apache.velocity.runtime.parser.node.Node;
import bboss.org.apache.velocity.runtime.parser.node.SimpleNode;
import bboss.org.apache.velocity.util.introspection.Info;
/**
 * Foreach directive used for moving through arrays,
 * or objects that provide an Iterator.
 *
 * @author <a href="mailto:jvanzyl@apache.org">Jason van Zyl</a>
 * @author <a href="mailto:geirm@optonline.net">Geir Magnusson Jr.</a>
 * @author Daniel Rall
 * @version $Id: Foreach.java 945927 2010-05-18 22:21:41Z nbubna $
 */
public class Foreach extends Directive
{
    /**
     * A special context to use when the foreach iterator returns a null. This
     * is required since the standard context may not support nulls.
     * All puts and gets are passed through, except for the foreach iterator key.
     * @since 1.5
     */
    protected static class NullHolderContext extends ChainedInternalContextAdapter
    {
        /** The loop variable reference this wrapper intercepts (never null). */
        private String loopVariableKey = "";
        /** While true, get(loopVariableKey) answers null instead of delegating. */
        private boolean active = true;

        /**
         * Create the context as a wrapper to be used within the foreach
         * @param key the reference used in the foreach
         * @param context the parent context
         */
        private NullHolderContext( String key, InternalContextAdapter context )
        {
            super(context);
            if( key != null )
                loopVariableKey = key;
        }

        /**
         * Get an object from the context, or null if the key is equal to the loop variable
         * @see bboss.org.apache.velocity.context.InternalContextAdapter#get(java.lang.String)
         * @exception MethodInvocationException passes on potential exception from reference method call
         */
        public Object get( String key ) throws MethodInvocationException
        {
            return ( active && loopVariableKey.equals(key) )
                ? null
                : super.get(key);
        }

        /**
         * Put an object into the context. Re-arms the null holder when the loop
         * variable is explicitly set to null again.
         * @see bboss.org.apache.velocity.context.InternalContextAdapter#put(java.lang.String key, java.lang.Object value)
         */
        public Object put( String key, Object value )
        {
            if( loopVariableKey.equals(key) && (value == null) )
            {
                active = true;
            }

            return super.put( key, value );
        }

        /**
         * Allows callers to explicitly put objects in the local context.
         * Objects added to the context through this method always end up
         * in the top-level context of possible wrapped contexts.
         *
         * @param key name of item to set.
         * @param value object to set to key.
         * @see bboss.org.apache.velocity.context.InternalWrapperContext#localPut(String, Object)
         */
        public Object localPut(final String key, final Object value)
        {
            return put(key, value);
        }

        /**
         * Remove an object from the context. Removing the loop variable
         * deactivates the null-holding behavior so subsequent gets delegate.
         * @see bboss.org.apache.velocity.context.InternalContextAdapter#remove(java.lang.Object key)
         */
        public Object remove(Object key)
        {
           if( loopVariableKey.equals(key) )
           {
               active = false;
           }
           return super.remove(key);
        }
    }

    /**
     * Return name of this directive.
     * @return The name of this directive.
     */
    public String getName()
    {
        return "foreach";
    }

    /**
     * Return type of this directive.
     * @return The type of this directive.
     */
    public int getType()
    {
        return BLOCK;
    }

    /**
     * The name of the variable to use when placing
     * the counter value into the context. Right
     * now the default is $velocityCount.
     */
    private String counterName;

    /**
     * The name of the variable to use when placing
     * the iterator hasNext() value into the context. Right
     * now the default is $velocityHasNext.
     */
    private String hasNextName;

    /**
     * What value to start the loop counter at.
     */
    private int counterInitialValue;

    /**
     * The maximum number of times we're allowed to loop.
     */
    private int maxNbrLoops;

    /**
     * Whether or not to throw an Exception if the iterator is null.
     */
    private boolean skipInvalidIterator;

    /**
     * The reference name used to access each
     * of the elements in the list object. It
     * is the $item in the following:
     *
     * #foreach ($item in $list)
     *
     * This can be used class wide because
     * it is immutable.
     */
    private String elementKey;

    // track if we've done the deprecation warning thing already
    private boolean warned = false;

    /**
     * immutable, so create in init
     */
    protected Info uberInfo;

    /**
     * simple init - init the tree and get the elementKey from
     * the AST
     * @param rs
     * @param context
     * @param node
     * @throws TemplateInitException
     */
    public void init(RuntimeServices rs, InternalContextAdapter context, Node node)
        throws TemplateInitException
    {
        super.init(rs, context, node);

        // handle deprecated config settings
        counterName = rsvc.getString(RuntimeConstants.COUNTER_NAME);
        hasNextName = rsvc.getString(RuntimeConstants.HAS_NEXT_NAME);
        counterInitialValue = rsvc.getInt(RuntimeConstants.COUNTER_INITIAL_VALUE);
        // only warn once per instance...
        if (!warned && rsvc.getLog().isWarnEnabled())
        {
            warned = true;
            // ...and only if they customize these settings
            if (!"velocityCount".equals(counterName))
            {
                rsvc.getLog().warn("The "+RuntimeConstants.COUNTER_NAME+
                    " property has been deprecated. It will be removed"+
                    " (along with $velocityCount itself) in Velocity 2.0. "+
                    " Instead, please use $foreach.count to access"+
                    " the loop counter.");
            }
            if (!"velocityHasNext".equals(hasNextName))
            {
                rsvc.getLog().warn("The "+RuntimeConstants.HAS_NEXT_NAME+
                    " property has been deprecated. It will be removed"+
                    " (along with $velocityHasNext itself ) in Velocity 2.0. "+
                    " Instead, please use $foreach.hasNext to access"+
                    " this value from now on.");
            }
            if (counterInitialValue != 1)
            {
                rsvc.getLog().warn("The "+RuntimeConstants.COUNTER_INITIAL_VALUE+
                    " property has been deprecated. It will be removed"+
                    " (along with $velocityCount itself) in Velocity 2.0. "+
                    " Instead, please use $foreach.index to access"+
                    " the 0-based loop index and $foreach.count"+
                    " to access the 1-based loop counter.");
            }
        }

        maxNbrLoops = rsvc.getInt(RuntimeConstants.MAX_NUMBER_LOOPS,
                                  Integer.MAX_VALUE);
        if (maxNbrLoops < 1)
        {
            maxNbrLoops = Integer.MAX_VALUE;
        }
        skipInvalidIterator =
            rsvc.getBoolean(RuntimeConstants.SKIP_INVALID_ITERATOR, true);

        if (rsvc.getBoolean(RuntimeConstants.RUNTIME_REFERENCES_STRICT, false))
        {
          // If we are in strict mode then the default for skipInvalidIterator
          // is false. However, if the property is explicitly set, then honor the setting.
          skipInvalidIterator = rsvc.getBoolean(RuntimeConstants.SKIP_INVALID_ITERATOR, false);
        }

        /*
         *  this is really the only thing we can do here as everything
         *  else is context sensitive
         */
        SimpleNode sn = (SimpleNode) node.jjtGetChild(0);

        if (sn instanceof ASTReference)
        {
            elementKey = ((ASTReference) sn).getRootString();
        }
        else
        {
            /*
             * the default, error-prone way which we'll remove
             *  TODO : remove if all goes well
             */
            elementKey = sn.getFirstToken().image.substring(1);
        }

        /*
         * make an uberinfo - saves new's later on
         */
        uberInfo = new Info(this.getTemplateName(),
                getLine(),getColumn());
    }

    /**
     * Extension hook to allow subclasses to control whether loop vars
     * are set locally or not. So, those in favor of VELOCITY-285, can
     * make that happen easily by overriding this and having it use
     * context.localPut(k,v). See VELOCITY-630 for more on this.
     */
    protected void put(InternalContextAdapter context, String key, Object value)
    {
        context.put(key, value);
    }

    /**
     * renders the #foreach() block
     * @param context
     * @param writer
     * @param node
     * @return True if the directive rendered successfully.
     * @throws IOException
     * @throws MethodInvocationException
     * @throws ResourceNotFoundException
     * @throws ParseErrorException
     */
    public boolean render(InternalContextAdapter context,
                           Writer writer, Node node)
        throws IOException,  MethodInvocationException, ResourceNotFoundException,
        	ParseErrorException
    {
        /*
         *  do our introspection to see what our collection is
         */

        Object listObject = node.jjtGetChild(2).value(context);

        if (listObject == null)
             return false;

        Iterator i = null;

        try
        {
            i = rsvc.getUberspect().getIterator(listObject, uberInfo);
        }
        /**
         * pass through application level runtime exceptions
         */
        catch( RuntimeException e )
        {
            throw e;
        }
        catch(Exception ee)
        {
            String msg = "Error getting iterator for #foreach at "+uberInfo;
            rsvc.getLog().error(msg, ee);
            throw new VelocityException(msg, ee);
        }

        if (i == null)
        {
            if (skipInvalidIterator)
            {
                return false;
            }
            else
            {
                Node pnode = node.jjtGetChild(2);
                String msg = "#foreach parameter " + pnode.literal() + " at "
                    + Log.formatFileString(pnode)
                    + " is of type " + listObject.getClass().getName()
                    + " and is either of wrong type or cannot be iterated.";
                rsvc.getLog().error(msg);
                throw new VelocityException(msg);
            }
        }

        int counter = counterInitialValue;
        boolean maxNbrLoopsExceeded = false;

        /*
         *  save the element key if there is one, and the loop counter
         */
        Object o = context.get(elementKey);
        Object savedCounter = context.get(counterName);
        Object nextFlag = context.get(hasNextName);

        /*
         * roll our own scope class instead of using preRender(ctx)'s
         */
        ForeachScope foreach = null;
        if (isScopeProvided())
        {
            String name = getScopeName();
            foreach = new ForeachScope(this, context.get(name));
            context.put(name, foreach);
        }

        /*
         * Instantiate the null holder context if a null value
         * is returned by the foreach iterator.  Only one instance is
         * created - it's reused for every null value.
         */
        NullHolderContext nullHolderContext = null;

        while (!maxNbrLoopsExceeded && i.hasNext())
        {
            // Integer.valueOf() reuses cached boxes for small values,
            // unlike the new Integer() this formerly used (TODO resolved).
            put(context, counterName, Integer.valueOf(counter));
            Object value = i.next();
            put(context, hasNextName, Boolean.valueOf(i.hasNext()));
            put(context, elementKey, value);

            if (isScopeProvided())
            {
                // update the scope control
                foreach.index++;
                foreach.hasNext = i.hasNext();
            }

            try
            {
                /*
                 * If the value is null, use the special null holder context
                 */
                if (value == null)
                {
                    if (nullHolderContext == null)
                    {
                        // lazy instantiation
                        nullHolderContext = new NullHolderContext(elementKey, context);
                    }
                    node.jjtGetChild(3).render(nullHolderContext, writer);
                }
                else
                {
                    node.jjtGetChild(3).render(context, writer);
                }
            }
            catch (StopCommand stop)
            {
                if (stop.isFor(this))
                {
                    break;
                }
                else
                {
                    // clean up first
                    clean(context, o, savedCounter, nextFlag);
                    throw stop;
                }
            }

            counter++;

            // Determine whether we're allowed to continue looping.
            // ASSUMPTION: counterInitialValue is not negative!
            maxNbrLoopsExceeded = (counter - counterInitialValue) >= maxNbrLoops;
        }
        clean(context, o, savedCounter, nextFlag);
        return true;
    }

    /**
     * Restores the context entries that the loop shadowed: the element key,
     * the loop counter, and the hasNext flag. A saved value of null means
     * the key did not exist before the loop, so it is removed instead.
     * Also tears down the ForeachScope via postRender.
     *
     * @param context the rendering context to restore
     * @param o the pre-loop value of the element key, or null if absent
     * @param savedCounter the pre-loop counter value, or null if absent
     * @param nextFlag the pre-loop hasNext flag, or null if absent
     */
    protected void clean(InternalContextAdapter context,
                         Object o, Object savedCounter, Object nextFlag)
    {
        /*
         * restores element key if exists
         * otherwise just removes
         */
        if (o != null)
        {
            context.put(elementKey, o);
        }
        else
        {
            context.remove(elementKey);
        }

        /*
         * restores the loop counter (if we were nested)
         * if we have one, else just removes
         */
        if (savedCounter != null)
        {
            context.put(counterName, savedCounter);
        }
        else
        {
            context.remove(counterName);
        }

        /*
         * restores the "hasNext" boolean flag if it exists
         */
        if (nextFlag != null)
        {
            context.put(hasNextName, nextFlag);
        }
        else
        {
            context.remove(hasNextName);
        }

        // clean up after the ForeachScope
        postRender(context);
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.resourcemanager.network.models;
import com.azure.core.annotation.Fluent;
import com.azure.resourcemanager.network.NetworkManager;
import com.azure.resourcemanager.network.fluent.models.VirtualNetworkPeeringInner;
import com.azure.resourcemanager.resources.fluentcore.arm.models.IndependentChild;
import com.azure.resourcemanager.resources.fluentcore.model.Appliable;
import com.azure.resourcemanager.resources.fluentcore.model.Creatable;
import com.azure.resourcemanager.resources.fluentcore.model.HasInnerModel;
import com.azure.resourcemanager.resources.fluentcore.model.Refreshable;
import com.azure.resourcemanager.resources.fluentcore.model.Updatable;
import java.util.List;
import reactor.core.publisher.Mono;
/** A client-side representation of a network peering. */
@Fluent()
public interface NetworkPeering
    extends IndependentChild<NetworkManager>,
        HasInnerModel<VirtualNetworkPeeringInner>,
        Refreshable<NetworkPeering>,
        Updatable<NetworkPeering.Update> {

    /** @return the local virtual network's ID */
    String networkId();

    /** @return the address space prefixes of the remote virtual network */
    List<String> remoteAddressSpaces();

    /**
     * @return true if the peering enables IP addresses within the peered networks to be accessible from both networks,
     *     otherwise false
     *     <p>(Note this method makes a separate call to Azure.)
     */
    boolean checkAccessBetweenNetworks();

    /** @return the associated remote virtual network's ID */
    String remoteNetworkId();

    /** @return the remote network if it is in the same subscription, otherwise null. */
    Network getRemoteNetwork();

    /**
     * Gets the remote network associated with this peering asynchronously.
     *
     * @return a representation of the future computation of this call
     */
    Mono<Network> getRemoteNetworkAsync();

    /**
     * @return the associated matching peering on the remote network if it is in the same subscription, otherwise this
     *     future computation will evaluate to null.
     */
    NetworkPeering getRemotePeering();

    /**
     * Gets the associated matching peering on the remote network if it is in the same subscription.
     *
     * @return a representation of the future computation of this call.
     */
    Mono<NetworkPeering> getRemotePeeringAsync();

    /** @return the state of the peering between the two networks */
    VirtualNetworkPeeringState state();

    /** @return the type of gateway use enabled for this network */
    NetworkPeeringGatewayUse gatewayUse();

    /** @return true if traffic forwarding from the remote network is allowed into this network */
    boolean isTrafficForwardingFromRemoteNetworkAllowed();

    /** @return true if the peered networks are in the same subscription, otherwise false */
    boolean isSameSubscription();

    /** Grouping of all the network peering definition stages. */
    interface DefinitionStages {
        /** The first stage of a network peering definition. */
        interface Blank extends WithRemoteNetwork {
        }

        /** The stage of a network peering definition allowing to specify the remote virtual network. */
        interface WithRemoteNetwork {
            // The remote network can only be specified at the time of the peering creation, not update

            /**
             * Specifies the remote network to peer with.
             *
             * <p>The remote network will have the matching peering associated with it automatically.
             *
             * @param resourceId the resource ID of an existing network
             * @return the next stage of the definition
             */
            WithCreate withRemoteNetwork(String resourceId);

            /**
             * Specifies the remote network to peer with.
             *
             * <p>The remote network will have the matching peering associated with it automatically.
             *
             * @param network an existing network
             * @return the next stage of the definition
             */
            WithCreate withRemoteNetwork(Network network);
        }

        /**
         * The stage of a network peering definition allowing to control traffic forwarding from or to the remote
         * network.
         */
        interface WithTrafficForwarding {
            /**
             * Allows traffic forwarding from the remote network.
             *
             * @return the next stage of the definition
             */
            WithCreate withTrafficForwardingFromRemoteNetwork();

            /**
             * Allows traffic forwarding from this network to the remote network.
             *
             * <p>This setting will have effect only if the remote network is in the same subscription. Otherwise, it
             * will be ignored and you need to change the corresponding traffic forwarding setting on the remote
             * network's matching peering explicitly.
             *
             * @return the next stage of the definition
             */
            WithCreate withTrafficForwardingToRemoteNetwork();

            /**
             * Allows traffic forwarding from either peered network into the other.
             *
             * <p>This setting will have effect on the remote network only if the remote network is in the same
             * subscription. Otherwise, it will be ignored and you need to change the corresponding traffic forwarding
             * setting on the remote network's matching peering explicitly.
             *
             * @return the next stage of the definition
             */
            WithCreate withTrafficForwardingBetweenBothNetworks();
        }

        /**
         * The stage of a network peering definition allowing to control the gateway use by or on the remote network.
         */
        interface WithGatewayUse {
            /**
             * Allows the remote network to use this network's gateway (a.k.a. gateway transit), but does not start the
             * use of the gateway by the remote network.
             *
             * <p>If this network is currently configured to use the remote network's gateway, that use will be
             * automatically disabled, as these two settings cannot be used together.
             *
             * @return the next stage of the definition
             */
            WithCreate withGatewayUseByRemoteNetworkAllowed();

            /**
             * Allows and starts the use of this network's gateway by the remote network (a.k.a. gateway transit).
             *
             * <p>If the remote network is not in the same subscription as this network, then gateway use by the remote
             * gateway will only be allowed on this network, but not started. The matching peering on the remote network
             * must be modified explicitly to start it.
             *
             * <p>If this network is currently configured to use the remote network's gateway, that use will be
             * automatically disabled, as these two settings cannot be used together.
             *
             * <p>Before gateway use by a remote network can be started, a working gateway must already be in place
             * within this network.
             *
             * @return the next stage of the definition
             */
            WithCreate withGatewayUseByRemoteNetworkStarted();

            /**
             * Starts the use of the remote network's gateway.
             *
             * <p>If the remote network is in the same subscription, remote gateway use by this network (a.k.a. gateway
             * transit) will also be automatically allowed on the remote network's side. Otherwise, this network will
             * only be configured to use the remote gateway, but the matching peering on the remote network must still
             * be additionally modified explicitly to allow gateway use by this network.
             *
             * <p>If this network is currently configured to allow the remote network to use its gateway, that use will
             * be automatically disabled, as these two settings cannot be used together.
             *
             * <p>Before gateway use on a remote network can be started, a working gateway must already be in place
             * within the remote network.
             *
             * @return the next stage of the definition
             */
            WithCreate withGatewayUseOnRemoteNetworkStarted();

            /**
             * Disables any gateway use by this network and the remote one.
             *
             * @return the next stage of the definition
             */
            WithCreate withoutAnyGatewayUse();
        }

        /** The stage of a network peering definition allowing to control access from and to the remote network. */
        interface WithAccess {
            /**
             * Disallows access to either peered network from the other.
             *
             * <p>This setting will have effect on the remote network only if the remote network is in the same
             * subscription. Otherwise, it will be ignored and you need to change the corresponding access setting on
             * the remote network's matching peering explicitly.
             *
             * @return the next stage of the definition
             */
            WithCreate withoutAccessFromEitherNetwork();
        }

        /**
         * The stage of a network peering definition with sufficient inputs to create a new network peering in the
         * cloud, but exposing additional optional settings to specify.
         */
        interface WithCreate extends Creatable<NetworkPeering>, WithGatewayUse, WithTrafficForwarding, WithAccess {
        }
    }

    /** The entirety of the network peering definition. */
    interface Definition
        extends DefinitionStages.Blank, DefinitionStages.WithCreate, DefinitionStages.WithRemoteNetwork {
    }

    /** The template for a network peering update operation, containing all the settings that can be modified. */
    interface Update
        extends Appliable<NetworkPeering>,
            UpdateStages.WithTrafficForwarding,
            UpdateStages.WithAccess,
            UpdateStages.WithGatewayUse {
    }

    /** Grouping of all the network peering update stages. */
    interface UpdateStages {
        /** The stage of a network peering update allowing to control the gateway use by or on the remote network. */
        interface WithGatewayUse {
            /**
             * Allows the remote network to use this network's gateway (a.k.a. gateway transit), but does not start the
             * use of the gateway by the remote network.
             *
             * <p>If this network is currently configured to use the remote network's gateway, that use will be
             * automatically disabled, as these two settings cannot be used together.
             *
             * @return the next stage of the update
             */
            Update withGatewayUseByRemoteNetworkAllowed();

            /**
             * Allows and starts the use of this network's gateway by the remote network (a.k.a. gateway transit).
             *
             * <p>If the remote network is not in the same subscription as this network, then gateway use by the remote
             * gateway will only be allowed on this network, but not started. The matching peering on the remote network
             * must be modified explicitly to start it.
             *
             * <p>If this network is currently configured to use the remote network's gateway, that use will be
             * automatically disabled, as these two settings cannot be used together.
             *
             * <p>Before gateway use by a remote network can be started, a working gateway must already be in place
             * within this network.
             *
             * @return the next stage of the update
             */
            Update withGatewayUseByRemoteNetworkStarted();

            /**
             * Starts the use of the remote network's gateway.
             *
             * <p>If the remote network is in the same subscription, remote gateway use by this network (a.k.a. gateway
             * transit) will also be automatically allowed on the remote network's side. Otherwise, this network will
             * only be configured to use the remote gateway, but the matching peering on the remote network must still
             * be additionally modified explicitly to allow gateway use by this network.
             *
             * <p>If this network is currently configured to allow the remote network to use its gateway, that use will
             * be automatically disabled, as these two settings cannot be used together.
             *
             * <p>Before gateway use on a remote network can be started, a working gateway must already be in place
             * within the remote network.
             *
             * @return the next stage of the update
             */
            Update withGatewayUseOnRemoteNetworkStarted();

            /**
             * Stops this network's use of the remote network's gateway.
             *
             * @return the next stage of the update
             */
            Update withoutGatewayUseOnRemoteNetwork();

            /**
             * Disables any gateway use by this network and the remote one.
             *
             * <p>This will have effect on the remote network only if the remote network is in the same subscription as
             * this network. Otherwise, only this network's use of the remote network's gateway will be stopped, but the
             * use of this network's gateway by the remote network will only be disallowed. You will have to update the
             * remote network's peering explicitly to properly stop its use of this network's gateway.
             *
             * @return the next stage of the update
             */
            Update withoutAnyGatewayUse();

            /**
             * Stops and disallows the use of this network's gateway by the remote network.
             *
             * <p>If the remote network is not in the same subscription, then the use of that network's gateway by this
             * network will be stopped but not disallowed by the remote network. The matching peering on the remote
             * network must still be explicitly updated to also disallow such use.
             *
             * @return the next stage of the update
             */
            Update withoutGatewayUseByRemoteNetwork();
        }

        /**
         * The stage of a network peering update allowing to control traffic forwarding from or to the remote network.
         */
        interface WithTrafficForwarding {
            /**
             * Allows traffic forwarding from the remote network.
             *
             * @return the next stage of the update
             */
            Update withTrafficForwardingFromRemoteNetwork();

            /**
             * Prevents traffic forwarding from the remote network.
             *
             * @return the next stage of the update
             */
            Update withoutTrafficForwardingFromRemoteNetwork();

            /**
             * Allows traffic forwarding from this network to the remote network.
             *
             * <p>This setting will only work here if the remote network is in the same subscription. Otherwise, it will
             * be ignored and you need to change the corresponding traffic forwarding setting on the remote network's
             * matching peering explicitly.
             *
             * @return the next stage of the update
             */
            Update withTrafficForwardingToRemoteNetwork();

            /**
             * Disables traffic forwarding to the remote network.
             *
             * @return the next stage of the update
             */
            Update withoutTrafficForwardingToRemoteNetwork();

            /**
             * Allows traffic forwarding from either peered network to the other.
             *
             * <p>This setting will have effect on the remote network only if the remote network is in the same
             * subscription. Otherwise, it will be ignored and you need to change the corresponding traffic forwarding
             * setting on the remote network's matching peering explicitly.
             *
             * @return the next stage of the update
             */
            Update withTrafficForwardingBetweenBothNetworks();

            /**
             * Disables traffic forwarding from either peered network to the other.
             *
             * <p>This setting will have effect on the remote network only if the remote network is in the same
             * subscription. Otherwise, it will be ignored and you need to change the corresponding traffic forwarding
             * setting on the remote network's matching peering explicitly.
             *
             * @return the next stage of the update
             */
            Update withoutTrafficForwardingFromEitherNetwork();
        }

        /** The stage of a network peering update allowing to control access from and to the remote network. */
        interface WithAccess {
            /**
             * Enables access to either peered virtual network from the other.
             *
             * <p>This setting will have effect on the remote network only if the remote network is in the same
             * subscription. Otherwise, it will be ignored and you need to change the corresponding access setting on
             * the remote network's matching peering explicitly.
             *
             * @return the next stage of the update
             */
            Update withAccessBetweenBothNetworks();

            /**
             * Disallows access to either peered network from the other.
             *
             * <p>This setting will have effect on the remote network only if the remote network is in the same
             * subscription. Otherwise, it will be ignored and you need to change the corresponding access setting on
             * the remote network's matching peering explicitly.
             *
             * @return the next stage of the update
             */
            Update withoutAccessFromEitherNetwork();
        }
    }
}
| |
package com.eaw1805.www.shared.stores.map;
import com.eaw1805.data.dto.web.army.ArmyDTO;
import com.eaw1805.data.dto.web.army.BrigadeDTO;
import com.eaw1805.data.dto.web.army.CommanderDTO;
import com.eaw1805.data.dto.web.army.CorpDTO;
import com.eaw1805.data.dto.web.army.SpyDTO;
import com.eaw1805.data.dto.web.economy.BaggageTrainDTO;
import com.eaw1805.data.dto.web.fleet.FleetDTO;
import com.eaw1805.data.dto.web.fleet.ShipDTO;
import com.eaw1805.www.client.events.loading.ForeignUnitsLoadedEvent;
import com.eaw1805.www.client.events.loading.ForeignUnitsLoadedHandler;
import com.eaw1805.www.client.events.loading.LoadEventManager;
import com.eaw1805.www.shared.stores.map.units.MapArmyGroup;
import com.eaw1805.www.shared.stores.map.units.MapBtrainGroup;
import com.eaw1805.www.shared.stores.map.units.MapFleetGroup;
import com.eaw1805.www.shared.stores.map.units.MapSpyCommGroup;
import org.vaadin.gwtgraphics.client.Group;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Client-side store of the map widgets for all foreign (non-owned) units --
 * armies, fleets, baggage trains, spies and commanders -- grouped per region.
 * All groups are rebuilt from scratch each time a
 * {@link ForeignUnitsLoadedEvent} fires.
 */
public class ForeignUnitsGroup {
// Per-region container groups for directly observed foreign units.
private final Map<Integer, Group> regionUnitImages = new HashMap<Integer, Group>(4);
private final Map<Integer, MapArmyGroup> regionArmyImages = new HashMap<Integer, MapArmyGroup>(4);
private final Map<Integer, MapFleetGroup> regionFleetImages = new HashMap<Integer, MapFleetGroup>(4);
private final Map<Integer, MapBtrainGroup> regionBaggageTrains = new HashMap<Integer, MapBtrainGroup>(4);
private final Map<Integer, MapSpyCommGroup> regionSpyComm = new HashMap<Integer, MapSpyCommGroup>(4);
// Per-region groups for "reported" units, built from DTOs with negative ids
// -- presumably intelligence reports rather than direct sightings;
// TODO confirm against the server-side DTO id conventions.
private final Map<Integer, MapArmyGroup> regionReportedArmyImages = new HashMap<Integer, MapArmyGroup>(4);
private final Map<Integer, MapFleetGroup> regionReportedFleetImages = new HashMap<Integer, MapFleetGroup>(4);
private final Map<Integer, Group> regionReportedUnitImages = new HashMap<Integer, Group>(4);
/**
 * Registers a load handler that rebuilds every per-region unit group each
 * time the foreign units arrive from the server.
 */
public ForeignUnitsGroup() {
LoadEventManager.addForeignUnitsLoadedHandler(new ForeignUnitsLoadedHandler() {
public void onForeignUnitsLoaded(final ForeignUnitsLoadedEvent event) {
// Bucket commanders, spies and baggage trains by region id.
final Map<Integer, List<CommanderDTO>> regionToCommanders = new HashMap<Integer, List<CommanderDTO>>();
final Map<Integer, List<SpyDTO>> regionToSpies = new HashMap<Integer, List<SpyDTO>>();
final Map<Integer, List<BaggageTrainDTO>> regionToBaggageTrains = new HashMap<Integer, List<BaggageTrainDTO>>();
for (List<CommanderDTO> commanders : event.getForeignUnits().getCommanders().values()) {
for (CommanderDTO commander : commanders) {
if (regionToCommanders.containsKey(commander.getRegionId())) {
regionToCommanders.get(commander.getRegionId()).add(commander);
} else {
regionToCommanders.put(commander.getRegionId(), new ArrayList<CommanderDTO>());
regionToCommanders.get(commander.getRegionId()).add(commander);
}
}
}
for (List<SpyDTO> spies : event.getForeignUnits().getSpies().values()) {
for (SpyDTO spy : spies) {
if (regionToSpies.containsKey(spy.getRegionId())) {
regionToSpies.get(spy.getRegionId()).add(spy);
} else {
regionToSpies.put(spy.getRegionId(), new ArrayList<SpyDTO>());
regionToSpies.get(spy.getRegionId()).add(spy);
}
}
}
for (List<BaggageTrainDTO> bTrains : event.getForeignUnits().getBaggageTrains().values()) {
for (BaggageTrainDTO bTrain : bTrains) {
if (regionToBaggageTrains.containsKey(bTrain.getRegionId())) {
regionToBaggageTrains.get(bTrain.getRegionId()).add(bTrain);
} else {
regionToBaggageTrains.put(bTrain.getRegionId(), new ArrayList<BaggageTrainDTO>());
regionToBaggageTrains.get(bTrain.getRegionId()).add(bTrain);
}
}
}
// Bucket armies by region. Loose corps (armyId == 0) and loose brigades
// (corpId == 0) are wrapped per region into synthetic "zero" army/corps
// containers so they render like regular armies. Non-negative ids go to
// the regular maps, negative ids to the "reported" maps.
final Map<Integer, List<ArmyDTO>> regionToArmies = new HashMap<Integer, List<ArmyDTO>>();
final Map<Integer, ArmyDTO> regionToZeroArmy = new HashMap<Integer, ArmyDTO>();
final Map<Integer, CorpDTO> regionToZeroCorp = new HashMap<Integer, CorpDTO>();
final Map<Integer, List<ArmyDTO>> regionToReportedArmies = new HashMap<Integer, List<ArmyDTO>>();
final Map<Integer, ArmyDTO> regionToZeroReportedArmy = new HashMap<Integer, ArmyDTO>();
final Map<Integer, CorpDTO> regionToZeroReportedCorp = new HashMap<Integer, CorpDTO>();
for (List<ArmyDTO> armies : event.getForeignUnits().getArmies().values()) {
for (ArmyDTO army : armies) {
if (army.getArmyId() == 0) {
for (CorpDTO corps : army.getCorps().values()) {
if (corps.getCorpId() == 0) {
// Loose brigades: lazily create the zero corps (and, nested, the
// zero army) for the brigade's region, then attach the brigade.
for (BrigadeDTO brigade : corps.getBrigades().values()) {
if (brigade.getBrigadeId() >= 0) {
if (!regionToZeroCorp.containsKey(brigade.getRegionId())) {
final CorpDTO zeroCorp = new CorpDTO();
zeroCorp.setArmyId(0);
zeroCorp.setCorpId(0);
zeroCorp.setBrigades(new HashMap<Integer, BrigadeDTO>());
regionToZeroCorp.put(brigade.getRegionId(), zeroCorp);
if (!regionToZeroArmy.containsKey(brigade.getRegionId())) {
final ArmyDTO zeroArmy = new ArmyDTO();
zeroArmy.setArmyId(0);
zeroArmy.setCorps(new HashMap<Integer, CorpDTO>());
regionToZeroArmy.put(brigade.getRegionId(), zeroArmy);
if (!regionToArmies.containsKey(brigade.getRegionId())) {
regionToArmies.put(brigade.getRegionId(), new ArrayList<ArmyDTO>());
}
regionToArmies.get(brigade.getRegionId()).add(zeroArmy);
}
regionToZeroArmy.get(brigade.getRegionId()).getCorps().put(0, zeroCorp);
}
regionToZeroCorp.get(brigade.getRegionId()).getBrigades().put(brigade.getBrigadeId(), brigade);
} else {
// Negative brigade id: same wrapping, but into the reported maps.
if (!regionToZeroReportedCorp.containsKey(brigade.getRegionId())) {
final CorpDTO zeroCorp = new CorpDTO();
zeroCorp.setArmyId(0);
zeroCorp.setCorpId(0);
zeroCorp.setBrigades(new HashMap<Integer, BrigadeDTO>());
regionToZeroReportedCorp.put(brigade.getRegionId(), zeroCorp);
if (!regionToZeroReportedArmy.containsKey(brigade.getRegionId())) {
final ArmyDTO zeroArmy = new ArmyDTO();
zeroArmy.setArmyId(0);
zeroArmy.setCorps(new HashMap<Integer, CorpDTO>());
regionToZeroReportedArmy.put(brigade.getRegionId(), zeroArmy);
if (!regionToReportedArmies.containsKey(brigade.getRegionId())) {
regionToReportedArmies.put(brigade.getRegionId(), new ArrayList<ArmyDTO>());
}
regionToReportedArmies.get(brigade.getRegionId()).add(zeroArmy);
}
regionToZeroReportedArmy.get(brigade.getRegionId()).getCorps().put(0, zeroCorp);
}
regionToZeroReportedCorp.get(brigade.getRegionId()).getBrigades().put(brigade.getBrigadeId(), brigade);
}
}
} else {
// Loose corps: attach it to the per-region zero army (created lazily).
if (corps.getCorpId() > 0) {
if (!regionToZeroArmy.containsKey(corps.getRegionId())) {
final ArmyDTO zeroArmy = new ArmyDTO();
zeroArmy.setArmyId(0);
zeroArmy.setCorps(new HashMap<Integer, CorpDTO>());
regionToZeroArmy.put(corps.getRegionId(), zeroArmy);
if (!regionToArmies.containsKey(corps.getRegionId())) {
regionToArmies.put(corps.getRegionId(), new ArrayList<ArmyDTO>());
}
regionToArmies.get(corps.getRegionId()).add(zeroArmy);
}
regionToZeroArmy.get(corps.getRegionId()).getCorps().put(corps.getCorpId(), corps);
} else {
if (!regionToZeroReportedArmy.containsKey(corps.getRegionId())) {
final ArmyDTO zeroArmy = new ArmyDTO();
zeroArmy.setArmyId(0);
zeroArmy.setCorps(new HashMap<Integer, CorpDTO>());
regionToZeroReportedArmy.put(corps.getRegionId(), zeroArmy);
if (!regionToReportedArmies.containsKey(corps.getRegionId())) {
regionToReportedArmies.put(corps.getRegionId(), new ArrayList<ArmyDTO>());
}
regionToReportedArmies.get(corps.getRegionId()).add(zeroArmy);
}
regionToZeroReportedArmy.get(corps.getRegionId()).getCorps().put(corps.getCorpId(), corps);
}
}
}
} else {
// Complete armies: positive ids are regular, negative ids are reported.
if (army.getArmyId() > 0) {
if (!regionToArmies.containsKey(army.getRegionId())) {
regionToArmies.put(army.getRegionId(), new ArrayList<ArmyDTO>());
}
regionToArmies.get(army.getRegionId()).add(army);
} else if (army.getArmyId() < 0) {
if (!regionToReportedArmies.containsKey(army.getRegionId())) {
regionToReportedArmies.put(army.getRegionId(), new ArrayList<ArmyDTO>());
}
regionToReportedArmies.get(army.getRegionId()).add(army);
}
}
}
}
// Bucket fleets by region, mirroring the army handling: loose ships
// (fleetId == 0) are wrapped into a per-region synthetic zero fleet;
// negative ids go to the reported maps.
final Map<Integer, List<FleetDTO>> regionToFleets = new HashMap<Integer, List<FleetDTO>>();
final Map<Integer, FleetDTO> regionToZeroFleet = new HashMap<Integer, FleetDTO>();
final Map<Integer, List<FleetDTO>> regionToReportedFleets = new HashMap<Integer, List<FleetDTO>>();
final Map<Integer, FleetDTO> regionToZeroReportedFleet = new HashMap<Integer, FleetDTO>();
for (List<FleetDTO> fleets : event.getForeignUnits().getFleets().values()) {
for (FleetDTO fleet : fleets) {
if (fleet.getFleetId() > 0) {
if (!regionToFleets.containsKey(fleet.getRegionId())) {
regionToFleets.put(fleet.getRegionId(), new ArrayList<FleetDTO>());
}
regionToFleets.get(fleet.getRegionId()).add(fleet);
} else if (fleet.getFleetId() < 0) {
if (!regionToReportedFleets.containsKey(fleet.getRegionId())) {
regionToReportedFleets.put(fleet.getRegionId(), new ArrayList<FleetDTO>());
}
regionToReportedFleets.get(fleet.getRegionId()).add(fleet);
} else {
for (ShipDTO ship : fleet.getShips().values()) {
if (ship.getId() >= 0) {
if (!regionToZeroFleet.containsKey(ship.getRegionId())) {
final FleetDTO zeroFleet = new FleetDTO();
zeroFleet.setFleetId(0);
zeroFleet.setShips(new HashMap<Integer, ShipDTO>());
regionToZeroFleet.put(ship.getRegionId(), zeroFleet);
if (!regionToFleets.containsKey(ship.getRegionId())) {
regionToFleets.put(ship.getRegionId(), new ArrayList<FleetDTO>());
}
regionToFleets.get(ship.getRegionId()).add(zeroFleet);
}
regionToZeroFleet.get(ship.getRegionId()).getShips().put(ship.getId(), ship);
} else {
if (!regionToZeroReportedFleet.containsKey(ship.getRegionId())) {
final FleetDTO zeroFleet = new FleetDTO();
zeroFleet.setFleetId(0);
zeroFleet.setShips(new HashMap<Integer, ShipDTO>());
regionToZeroReportedFleet.put(ship.getRegionId(), zeroFleet);
if (!regionToReportedFleets.containsKey(ship.getRegionId())) {
regionToReportedFleets.put(ship.getRegionId(), new ArrayList<FleetDTO>());
}
regionToReportedFleets.get(ship.getRegionId()).add(zeroFleet);
}
regionToZeroReportedFleet.get(ship.getRegionId()).getShips().put(ship.getId(), ship);
}
}
}
}
}
// Build the actual map widget groups for every region. Region ids are
// hard-coded as 1..5 here -- TODO confirm this matches the game's
// region enumeration.
for (int regionId = 1; regionId <= 5; regionId++) {
final List<CommanderDTO> regionCommanders = new ArrayList<CommanderDTO>();
final List<SpyDTO> regionSpies = new ArrayList<SpyDTO>();
if (regionToCommanders.containsKey(regionId)) {
regionCommanders.addAll(regionToCommanders.get(regionId));
}
if (regionToSpies.containsKey(regionId)) {
regionSpies.addAll(regionToSpies.get(regionId));
}
if (!regionCommanders.isEmpty() || !regionSpies.isEmpty()) {
final MapSpyCommGroup mpcGroup = new MapSpyCommGroup(regionCommanders, regionSpies, regionId, false, true);
regionSpyComm.put(regionId, mpcGroup);
getByRegion(regionId).add(regionSpyComm.get(regionId));
}
if (regionToBaggageTrains.containsKey(regionId)
&& regionToBaggageTrains.get(regionId).size() > 0) {
final MapBtrainGroup mbGroup = new MapBtrainGroup(regionToBaggageTrains.get(regionId), regionId, false, true);
regionBaggageTrains.put(regionId, mbGroup);
getByRegion(regionId).add(regionBaggageTrains.get(regionId));
}
if (regionToArmies.containsKey(regionId)
&& regionToArmies.get(regionId).size() > 0) {
final MapArmyGroup maGroup = new MapArmyGroup(regionToArmies.get(regionId), regionId, false, true, false);
regionArmyImages.put(regionId, maGroup);
getByRegion(regionId).add(regionArmyImages.get(regionId));
}
//generate fleets group
// NOTE(review): any exception thrown while building the fleet group is
// silently swallowed (debug alert commented out); the region simply gets
// no fleet widgets. Consider at least logging here.
try {
if (regionToFleets.containsKey(regionId) &&
regionToFleets.get(regionId).size() > 0) {
final MapFleetGroup mfGroup = new MapFleetGroup(regionToFleets.get(regionId), regionId, false, true, false);
regionFleetImages.put(regionId, mfGroup);
getByRegion(regionId).add(regionFleetImages.get(regionId));
}
} catch (Exception e) {
// Window.alert("fleets" + e.toString());
}
//at last create new group for virtual reported armies
if (regionToReportedArmies.containsKey(regionId)
&& regionToReportedArmies.get(regionId).size() > 0) {
final MapArmyGroup maGroup = new MapArmyGroup(regionToReportedArmies.get(regionId), regionId, false, true, false);
regionReportedArmyImages.put(regionId, maGroup);
getReportedByRegion(regionId).add(regionReportedArmyImages.get(regionId));
}
if (regionToReportedFleets.containsKey(regionId)
&& regionToReportedFleets.get(regionId).size() > 0) {
final MapFleetGroup mfGroup = new MapFleetGroup(regionToReportedFleets.get(regionId), regionId, false, true, false);
regionReportedFleetImages.put(regionId, mfGroup);
getReportedByRegion(regionId).add(regionReportedFleetImages.get(regionId));
}
}
}
});
}
// Lazily creates and returns the container group for a region's units.
private Group getByRegion(final int regionId) {
if (!regionUnitImages.containsKey(regionId)) {
regionUnitImages.put(regionId, new Group());
}
return regionUnitImages.get(regionId);
}
// Lazily creates and returns the container group for a region's reported units.
private Group getReportedByRegion(final int regionId) {
if (!regionReportedUnitImages.containsKey(regionId)) {
regionReportedUnitImages.put(regionId, new Group());
}
return regionReportedUnitImages.get(regionId);
}
/** Returns the (lazily created) container group of all foreign units for the region. */
public Group getRegionForeignUnitImages(final int regionId) {
return getByRegion(regionId);
}
/** Returns the army group for the region, or null if none was built. */
public MapArmyGroup getRegionArmyImages(final int regionId) {
return regionArmyImages.get(regionId);
}
/** Returns the baggage-train group for the region, or null if none was built. */
public MapBtrainGroup getBaggageTrainsByRegionId(final int regionId) {
return regionBaggageTrains.get(regionId);
}
/** Returns the spy/commander group for the region, or null if none was built. */
public MapSpyCommGroup getSpiesCommandersByRegionId(final int regionId) {
return regionSpyComm.get(regionId);
}
/** Returns the fleet group for the region, or null if none was built. */
public MapFleetGroup getFleetsByRegionId(final int regionId) {
return regionFleetImages.get(regionId);
}
/** Returns the (lazily created) container group of reported foreign units for the region. */
public Group getRegionReportedForeignUnitImages(final int regionId) {
return getReportedByRegion(regionId);
}
/** Returns the reported-army group for the region, or null if none was built. */
public MapArmyGroup getRegionReportedArmyImages(final int regionId) {
return regionReportedArmyImages.get(regionId);
}
/** Returns the reported-fleet group for the region, or null if none was built. */
public MapFleetGroup getRegionReportedFleetImages(final int regionId) {
return regionReportedFleetImages.get(regionId);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.conf;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.RetryNTimes;
import org.apache.curator.test.TestingServer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.ha.HAServiceProtocol;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.service.Service;
import org.apache.hadoop.util.curator.ZKCuratorManager;
import org.apache.hadoop.yarn.conf.HAUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.records.Version;
import org.apache.hadoop.yarn.server.records.impl.pb.VersionPBImpl;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.ZKRMStateStore;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.MutableConfScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.MutableConfigurationProvider;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.conf.YarnConfigurationStore.LogMutation;
import org.apache.hadoop.yarn.webapp.dao.QueueConfigInfo;
import org.apache.hadoop.yarn.webapp.dao.SchedConfUpdateInfo;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Tests {@link ZKConfigurationStore} against an in-process Curator
 * {@link TestingServer}, including RM HA failover scenarios.
 */
public class TestZKConfigurationStore extends
PersistentConfigurationStoreBaseTest {
public static final Logger LOG =
LoggerFactory.getLogger(TestZKConfigurationStore.class);
// Upper bound (ms) used when polling for HA state transitions below.
private static final int ZK_TIMEOUT_MS = 10000;
private TestingServer curatorTestingServer;
private CuratorFramework curatorFramework;
private ResourceManager rm;
/** Starts and returns an in-process ZooKeeper testing server. */
public static TestingServer setupCuratorServer() throws Exception {
TestingServer curatorTestingServer = new TestingServer();
curatorTestingServer.start();
return curatorTestingServer;
}
/** Builds and starts a Curator client connected to the given testing server. */
public static CuratorFramework setupCuratorFramework(
TestingServer curatorTestingServer) throws Exception {
CuratorFramework curatorFramework = CuratorFrameworkFactory.builder()
.connectString(curatorTestingServer.getConnectString())
.retryPolicy(new RetryNTimes(100, 100))
.build();
curatorFramework.start();
return curatorFramework;
}
@Before
@Override
public void setUp() throws Exception {
super.setUp();
// Fresh ZK server + client per test; point the RM at it and start a MockRM
// so rmContext is available to the config store.
curatorTestingServer = setupCuratorServer();
curatorFramework = setupCuratorFramework(curatorTestingServer);
conf.set(CommonConfigurationKeys.ZK_ADDRESS,
curatorTestingServer.getConnectString());
rm = new MockRM(conf);
rm.start();
rmContext = rm.getRMContext();
}
@After
public void cleanup() throws IOException {
rm.stop();
curatorFramework.close();
curatorTestingServer.stop();
}
// Writes a foreign version directly into ZK and expects checkVersion() to
// reject it as incompatible.
@Test(expected = YarnConfStoreVersionIncompatibleException.class)
public void testIncompatibleVersion() throws Exception {
confStore.initialize(conf, schedConf, rmContext);
Version otherVersion = Version.newInstance(1, 1);
String zkVersionPath = getZkPath("VERSION");
byte[] versionData =
((VersionPBImpl) otherVersion).getProto().toByteArray();
((ZKConfigurationStore) confStore).safeCreateZkData(zkVersionPath,
versionData);
assertEquals("The configuration store should have stored the new" +
"version.", otherVersion, confStore.getConfStoreVersion());
confStore.checkVersion();
}
// format() must wipe the stored configuration.
@Test
public void testFormatConfiguration() throws Exception {
schedConf.set("key", "val");
confStore.initialize(conf, schedConf, rmContext);
assertEquals("val", confStore.retrieve().get("key"));
confStore.format();
assertNull(confStore.retrieve());
}
@Test(expected = IllegalStateException.class)
public void testGetConfigurationVersionOnSerializedNullData()
throws Exception {
confStore.initialize(conf, schedConf, rmContext);
String confVersionPath = getZkPath("CONF_VERSION");
((ZKConfigurationStore) confStore).setZkData(confVersionPath, null);
confStore.getConfigVersion();
}
/**
 * The correct behavior of logMutation should be, that even though an
 * Exception is thrown during serialization, the log data must not be
 * overridden.
 *
 * NOTE(review): because the test expects {@code ClassCastException} from
 * {@code logMutation}, the trailing {@code assertEquals} is never executed.
 * Also, {@code assertEquals} on {@code byte[]} compares references, not
 * contents ({@code assertArrayEquals} would compare contents) -- worth
 * revisiting if this assertion is ever made reachable.
 *
 * @throws Exception
 */
@Test(expected = ClassCastException.class)
public void testLogMutationAfterSerializationError() throws Exception {
byte[] data = null;
String logs = "NOT_LINKED_LIST";
confStore.initialize(conf, schedConf, rmContext);
// Serialize a plain String where a LinkedList of mutations is expected,
// so deserialization inside logMutation fails with ClassCastException.
try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos)) {
oos.writeObject(logs);
oos.flush();
baos.flush();
data = baos.toByteArray();
}
String logsPath = getZkPath("LOGS");
((ZKConfigurationStore)confStore).setZkData(logsPath, data);
Map<String, String> update = new HashMap<>();
update.put("valid_key", "valid_value");
confStore.logMutation(new LogMutation(update, TEST_USER));
assertEquals(data, ((ZKConfigurationStore)confStore).getZkData(logsPath));
}
// With max logs set to 0, logMutation must not write any audit log data.
@Test
public void testDisableAuditLogs() throws Exception {
conf.setLong(YarnConfiguration.RM_SCHEDCONF_MAX_LOGS, 0);
confStore.initialize(conf, schedConf, rmContext);
String logsPath = getZkPath("LOGS");
byte[] data = null;
((ZKConfigurationStore) confStore).setZkData(logsPath, data);
prepareLogMutation("key1", "val1");
data = ((ZKConfigurationStore) confStore).getZkData(logsPath);
assertNull("Failed to Disable Audit Logs", data);
}
/**
 * Builds an HA-enabled RM configuration backed by the test's ZK server.
 *
 * NOTE(review): the scheduler class is set on the outer test's
 * {@code this.conf}, not on the local {@code conf} being built -- looks
 * unintentional; TODO confirm whether MockRM relies on it elsewhere.
 *
 * @param rmIds comma-separated RM HA ids (e.g. "rm1,rm2")
 * @param rmId the id this configuration is for
 * @param adminPort admin address port for this RM
 */
public Configuration createRMHAConf(String rmIds, String rmId,
int adminPort) {
Configuration conf = new YarnConfiguration();
this.conf.setClass(YarnConfiguration.RM_SCHEDULER,
CapacityScheduler.class, CapacityScheduler.class);
conf.setBoolean(YarnConfiguration.RM_HA_ENABLED, true);
conf.set(YarnConfiguration.RM_HA_IDS, rmIds);
conf.setBoolean(YarnConfiguration.RECOVERY_ENABLED, true);
conf.set(YarnConfiguration.SCHEDULER_CONFIGURATION_STORE_CLASS,
YarnConfiguration.ZK_CONFIGURATION_STORE);
conf.set(YarnConfiguration.RM_STORE, ZKRMStateStore.class.getName());
conf.set(YarnConfiguration.RM_ZK_ADDRESS,
curatorTestingServer.getConnectString());
conf.set(YarnConfiguration.RM_HA_ID, rmId);
conf.set(YarnConfiguration.RM_WEBAPP_ADDRESS, "localhost:0");
conf.setBoolean(YarnConfiguration.AUTO_FAILOVER_ENABLED, false);
for (String rpcAddress :
YarnConfiguration.getServiceAddressConfKeys(conf)) {
for (String id : HAUtil.getRMHAIds(conf)) {
conf.set(HAUtil.addSuffix(rpcAddress, id), "localhost:0");
}
}
conf.set(HAUtil.addSuffix(YarnConfiguration.RM_ADMIN_ADDRESS, rmId),
"localhost:" + adminPort);
return conf;
}
/**
 * When failing over, new active RM should read from current state of store,
 * including any updates when the new active RM was in standby.
 * @throws Exception
 */
@Test
public void testFailoverReadsFromUpdatedStore() throws Exception {
HAServiceProtocol.StateChangeRequestInfo req =
new HAServiceProtocol.StateChangeRequestInfo(
HAServiceProtocol.RequestSource.REQUEST_BY_USER);
Configuration conf1 = createRMHAConf("rm1,rm2", "rm1", 1234);
ResourceManager rm1 = new MockRM(conf1);
rm1.start();
rm1.getRMContext().getRMAdminService().transitionToActive(req);
assertEquals("RM with ZKStore didn't start",
Service.STATE.STARTED, rm1.getServiceState());
assertEquals("RM should be Active",
HAServiceProtocol.HAServiceState.ACTIVE,
rm1.getRMContext().getRMAdminService().getServiceStatus().getState());
assertNull(((MutableConfScheduler) rm1.getResourceScheduler())
.getConfiguration().get("key"));
Configuration conf2 = createRMHAConf("rm1,rm2", "rm2", 5678);
ResourceManager rm2 = new MockRM(conf2);
rm2.start();
assertEquals("RM should be Standby",
HAServiceProtocol.HAServiceState.STANDBY,
rm2.getRMContext().getRMAdminService().getServiceStatus().getState());
// Update configuration on RM1
SchedConfUpdateInfo schedConfUpdateInfo = new SchedConfUpdateInfo();
schedConfUpdateInfo.getGlobalParams().put("key", "val");
MutableConfigurationProvider confProvider = ((MutableConfScheduler)
rm1.getResourceScheduler()).getMutableConfProvider();
UserGroupInformation user = UserGroupInformation
.createUserForTesting(TEST_USER, new String[0]);
LogMutation log = confProvider.logAndApplyMutation(user,
schedConfUpdateInfo);
rm1.getResourceScheduler().reinitialize(conf1, rm1.getRMContext());
assertEquals("val", ((MutableConfScheduler) rm1.getResourceScheduler())
.getConfiguration().get("key"));
confProvider.confirmPendingMutation(log, true);
assertEquals("val", ((MutableCSConfigurationProvider) confProvider)
.getConfStore().retrieve().get("key"));
// Next update is not persisted, it should not be recovered
schedConfUpdateInfo.getGlobalParams().put("key", "badVal");
log = confProvider.logAndApplyMutation(user, schedConfUpdateInfo);
// Start RM2 and verifies it starts with updated configuration
rm2.getRMContext().getRMAdminService().transitionToActive(req);
assertEquals("RM with ZKStore didn't start",
Service.STATE.STARTED, rm2.getServiceState());
assertEquals("RM should be Active",
HAServiceProtocol.HAServiceState.ACTIVE,
rm2.getRMContext().getRMAdminService().getServiceStatus().getState());
// Poll for up to ~ZK_TIMEOUT_MS for rm1 to notice it has been fenced.
for (int i = 0; i < ZK_TIMEOUT_MS / 50; i++) {
if (HAServiceProtocol.HAServiceState.ACTIVE ==
rm1.getRMContext().getRMAdminService().getServiceStatus()
.getState()) {
Thread.sleep(100);
}
}
assertEquals("RM should have been fenced",
HAServiceProtocol.HAServiceState.STANDBY,
rm1.getRMContext().getRMAdminService().getServiceStatus().getState());
assertEquals("RM should be Active",
HAServiceProtocol.HAServiceState.ACTIVE,
rm2.getRMContext().getRMAdminService().getServiceStatus().getState());
assertEquals("val", ((MutableCSConfigurationProvider) (
(CapacityScheduler) rm2.getResourceScheduler())
.getMutableConfProvider()).getConfStore().retrieve().get("key"));
assertEquals("val", ((MutableConfScheduler) rm2.getResourceScheduler())
.getConfiguration().get("key"));
// Transition to standby will set RM's HA status and then reinitialize in
// a separate thread. Despite asserting for STANDBY state, it's
// possible for reinitialization to be unfinished. Wait here for it to
// finish, otherwise closing rm1 will close zkManager and the unfinished
// reinitialization will throw an exception.
Thread.sleep(10000);
rm1.close();
rm2.close();
}
/**
 * When failing over, if RM1 stopped and removed a queue that RM2 has in
 * memory, failing over to RM2 should not throw an exception.
 * @throws Exception
 */
@Test
public void testFailoverAfterRemoveQueue() throws Exception {
HAServiceProtocol.StateChangeRequestInfo req =
new HAServiceProtocol.StateChangeRequestInfo(
HAServiceProtocol.RequestSource.REQUEST_BY_USER);
Configuration conf1 = createRMHAConf("rm1,rm2", "rm1", 1234);
ResourceManager rm1 = new MockRM(conf1);
rm1.start();
rm1.getRMContext().getRMAdminService().transitionToActive(req);
assertEquals("RM with ZKStore didn't start",
Service.STATE.STARTED, rm1.getServiceState());
assertEquals("RM should be Active",
HAServiceProtocol.HAServiceState.ACTIVE,
rm1.getRMContext().getRMAdminService().getServiceStatus().getState());
Configuration conf2 = createRMHAConf("rm1,rm2", "rm2", 5678);
ResourceManager rm2 = new MockRM(conf2);
rm2.start();
assertEquals("RM should be Standby",
HAServiceProtocol.HAServiceState.STANDBY,
rm2.getRMContext().getRMAdminService().getServiceStatus().getState());
UserGroupInformation user = UserGroupInformation
.createUserForTesting(TEST_USER, new String[0]);
MutableConfigurationProvider confProvider = ((MutableConfScheduler)
rm1.getResourceScheduler()).getMutableConfProvider();
// Add root.a
SchedConfUpdateInfo schedConfUpdateInfo = new SchedConfUpdateInfo();
Map<String, String> addParams = new HashMap<>();
addParams.put("capacity", "100");
QueueConfigInfo addInfo = new QueueConfigInfo("root.a", addParams);
schedConfUpdateInfo.getAddQueueInfo().add(addInfo);
// Stop root.default
Map<String, String> stopParams = new HashMap<>();
stopParams.put("state", "STOPPED");
stopParams.put("capacity", "0");
QueueConfigInfo stopInfo = new QueueConfigInfo("root.default", stopParams);
schedConfUpdateInfo.getUpdateQueueInfo().add(stopInfo);
LogMutation log = confProvider.logAndApplyMutation(user,
schedConfUpdateInfo);
rm1.getResourceScheduler().reinitialize(conf1, rm1.getRMContext());
confProvider.confirmPendingMutation(log, true);
assertTrue(Arrays.asList(((MutableConfScheduler) rm1.getResourceScheduler())
.getConfiguration().get("yarn.scheduler.capacity.root.queues").split
(",")).contains("a"));
// Remove root.default
schedConfUpdateInfo.getUpdateQueueInfo().clear();
schedConfUpdateInfo.getAddQueueInfo().clear();
schedConfUpdateInfo.getRemoveQueueInfo().add("root.default");
log = confProvider.logAndApplyMutation(user, schedConfUpdateInfo);
rm1.getResourceScheduler().reinitialize(conf1, rm1.getRMContext());
confProvider.confirmPendingMutation(log, true);
assertEquals("a", ((MutableConfScheduler) rm1.getResourceScheduler())
.getConfiguration().get("yarn.scheduler.capacity.root.queues"));
// Start RM2 and verifies it starts with updated configuration
rm2.getRMContext().getRMAdminService().transitionToActive(req);
assertEquals("RM with ZKStore didn't start",
Service.STATE.STARTED, rm2.getServiceState());
assertEquals("RM should be Active",
HAServiceProtocol.HAServiceState.ACTIVE,
rm2.getRMContext().getRMAdminService().getServiceStatus().getState());
// Poll for up to ~ZK_TIMEOUT_MS for rm1 to notice it has been fenced.
for (int i = 0; i < ZK_TIMEOUT_MS / 50; i++) {
if (HAServiceProtocol.HAServiceState.ACTIVE ==
rm1.getRMContext().getRMAdminService().getServiceStatus()
.getState()) {
Thread.sleep(100);
}
}
assertEquals("RM should have been fenced",
HAServiceProtocol.HAServiceState.STANDBY,
rm1.getRMContext().getRMAdminService().getServiceStatus().getState());
assertEquals("RM should be Active",
HAServiceProtocol.HAServiceState.ACTIVE,
rm2.getRMContext().getRMAdminService().getServiceStatus().getState());
assertEquals("a", ((MutableCSConfigurationProvider) (
(CapacityScheduler) rm2.getResourceScheduler())
.getMutableConfProvider()).getConfStore().retrieve()
.get("yarn.scheduler.capacity.root.queues"));
assertEquals("a", ((MutableConfScheduler) rm2.getResourceScheduler())
.getConfiguration().get("yarn.scheduler.capacity.root.queues"));
// Transition to standby will set RM's HA status and then reinitialize in
// a separate thread. Despite asserting for STANDBY state, it's
// possible for reinitialization to be unfinished. Wait here for it to
// finish, otherwise closing rm1 will close zkManager and the unfinished
// reinitialization will throw an exception.
Thread.sleep(10000);
rm1.close();
rm2.close();
}
@Override
public YarnConfigurationStore createConfStore() {
return new ZKConfigurationStore();
}
// Resolves a child node name against the configured scheduler-conf parent
// znode path.
private String getZkPath(String nodeName) {
String znodeParentPath = conf.get(YarnConfiguration.
RM_SCHEDCONF_STORE_ZK_PARENT_PATH,
YarnConfiguration.DEFAULT_RM_SCHEDCONF_STORE_ZK_PARENT_PATH);
return ZKCuratorManager.getNodePath(znodeParentPath, nodeName);
}
@Override
Version getVersion() {
return ZKConfigurationStore.CURRENT_VERSION_INFO;
}
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.main;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.ImmutableList;
import com.streamsets.datacollector.http.WebServerTask;
import com.streamsets.datacollector.util.AuthzRole;
import com.streamsets.datacollector.util.Configuration;
import com.streamsets.lib.security.http.RemoteSSOService;
import com.streamsets.pipeline.api.impl.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.SSLContext;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
public abstract class RuntimeInfo {
private static final Logger LOG = LoggerFactory.getLogger(RuntimeInfo.class);
public static final String SPLITTER = "|";
public static final String CONFIG_DIR = ".conf.dir";
public static final String DATA_DIR = ".data.dir";
public static final String LOG_DIR = ".log.dir";
public static final String RESOURCES_DIR = ".resources.dir";
public static final String LIBEXEC_DIR = ".libexec.dir";
public static final String STATIC_WEB_DIR = ".static-web.dir";
public static final String TRANSIENT_ENVIRONMENT = "sdc.transient-env";
public static final String UNDEF = "UNDEF";
public static final String CALLBACK_URL = "/public-rest/v1/cluster/callback";
public static final String SECURITY_PREFIX = "java.security.";
public static final String DATA_COLLECTOR_BASE_HTTP_URL = "sdc.base.http.url";
public static final String PIPELINE_ACCESS_CONTROL_ENABLED = "pipeline.access.control.enabled";
public static final boolean PIPELINE_ACCESS_CONTROL_ENABLED_DEFAULT = false;
private boolean DPMEnabled;
private boolean aclEnabled;
private final static String USER_ROLE = "user";
public static final String LOG4J_CONFIGURATION_URL_ATTR = "log4j.configuration.url";
public static final String LOG4J_PROPERTIES = "-log4j.properties";
private static final String STREAMSETS_LIBRARIES_EXTRA_DIR_SYS_PROP = "STREAMSETS_LIBRARIES_EXTRA_DIR";
private final MetricRegistry metrics;
private final List<? extends ClassLoader> stageLibraryClassLoaders;
private String httpUrl;
private String appAuthToken;
private final Map<String, Object> attributes;
private ShutdownHandler shutdownRunnable;
private final Map<String, String> authenticationTokens;
private final String propertyPrefix;
private final UUID randomUUID;
private SSLContext sslContext;
private boolean remoteRegistrationSuccessful;
/**
 * @param propertyPrefix prefix used both for system-property lookups and for
 *     default directory names (see the various {@code get*Dir()} methods)
 * @param metrics shared metric registry for this runtime
 * @param stageLibraryClassLoaders class loaders of the installed stage
 *     libraries; defensively copied when non-null, kept as {@code null}
 *     otherwise (callers of the getter may observe {@code null})
 */
public RuntimeInfo(String propertyPrefix, MetricRegistry metrics,
List<? extends ClassLoader> stageLibraryClassLoaders) {
this.metrics = metrics;
if(stageLibraryClassLoaders != null) {
this.stageLibraryClassLoaders = ImmutableList.copyOf(stageLibraryClassLoaders);
} else {
this.stageLibraryClassLoaders = null;
}
this.propertyPrefix = propertyPrefix;
httpUrl = UNDEF;
this.attributes = new ConcurrentHashMap<>();
authenticationTokens = new HashMap<>();
reloadAuthenticationToken();
randomUUID = UUID.randomUUID();
}
/** Returns the random UUID generated once at construction time. */
protected UUID getRandomUUID() {
return randomUUID;
}
/** Implementation-specific initialization hook. */
public abstract void init();
/** Returns the unique id of this runtime instance. */
public abstract String getId();
// Presumably the id of the cluster master SDC -- TODO confirm in subclasses.
public abstract String getMasterSDCId();
/** Returns the base runtime directory under which the default dir layout lives. */
public abstract String getRuntimeDir();
// Presumably true when running as a cluster slave -- TODO confirm in subclasses.
public abstract boolean isClusterSlave();
/** Returns the shared metric registry passed at construction. */
public MetricRegistry getMetrics() {
return metrics;
}
/** Sets the externally reachable base HTTP URL (initially {@link #UNDEF}). */
public void setBaseHttpUrl(String url) {
this.httpUrl = url;
}
/** Returns the base HTTP URL, or {@link #UNDEF} if never set. */
public String getBaseHttpUrl() {
return httpUrl;
}
/**
 * Returns the static web content directory: the {@code <prefix>.static-web.dir}
 * system property if set, else {@code <runtimeDir>/<prefix>-static-web}.
 */
public String getStaticWebDir() {
return System.getProperty(propertyPrefix + STATIC_WEB_DIR, getRuntimeDir() + "/" + propertyPrefix + "-static-web");
}
/** Returns the config directory: system property override or {@code <runtimeDir>/etc}. */
public String getConfigDir() {
return System.getProperty(propertyPrefix + CONFIG_DIR, getRuntimeDir() + "/etc");
}
/** Returns the log directory: system property override or {@code <runtimeDir>/log}. */
public String getLogDir() {
return System.getProperty(propertyPrefix + LOG_DIR, getRuntimeDir() + "/log");
}
public String getLog4jPropertiesFileName() {
return propertyPrefix + LOG4J_PROPERTIES;
}
public String getDataDir() {
return System.getProperty(propertyPrefix + DATA_DIR, getRuntimeDir() + "/var");
}
public String getLibexecDir() {
return System.getProperty(propertyPrefix + LIBEXEC_DIR, getRuntimeDir() + "/libexec");
}
public String getResourcesDir() {
return System.getProperty(propertyPrefix + RESOURCES_DIR, getRuntimeDir() + "/resources");
}
public String getLibsExtraDir() {
return System.getProperty(STREAMSETS_LIBRARIES_EXTRA_DIR_SYS_PROP, null);
}
/** Returns true when an attribute is registered under the given non-null key. */
public boolean hasAttribute(String key) {
Utils.checkNotNull(key, "key");
return attributes.containsKey(key);
}
/** Stores an arbitrary attribute under the given non-null key. */
public <T> void setAttribute(String key, T value) {
Utils.checkNotNull(key, "key");
attributes.put(key, value);
}
/** Removes the attribute stored under the given non-null key, if any. */
public void removeAttribute(String key) {
Utils.checkNotNull(key, "key");
attributes.remove(key);
}
/**
 * Returns the attribute stored under the given non-null key, or null when absent.
 * The unchecked cast is part of the caller's contract: the caller must request
 * the same type that was stored.
 */
@SuppressWarnings("unchecked")
public <T> T getAttribute(String key) {
Utils.checkNotNull(key, "key");
return (T) attributes.get(key);
}
/** Returns the immutable stage-library class loader list (null when none were given). */
public List<? extends ClassLoader> getStageLibraryClassLoaders() {
return stageLibraryClassLoaders;
}
/** Logs a summary of the runtime environment at INFO level. */
public void log(Logger log) {
log.info("Runtime info:");
log.info(" Java version : {}", System.getProperty("java.runtime.version"));
log.info(" SDC ID : {}", getId());
log.info(" Runtime dir : {}", getRuntimeDir());
log.info(" Config dir : {}", getConfigDir());
log.info(" Data dir : {}", getDataDir());
log.info(" Log dir : {}", getLogDir());
}
/** Registers the handler invoked by {@link #shutdown(int)}. */
public void setShutdownHandler(ShutdownHandler runnable) {
shutdownRunnable = runnable;
}
/**
 * Triggers a shutdown with the given exit status; no-op when no handler is set.
 * (setExistStatus is the handler's API name — presumably a typo for "exit status";
 * cannot be fixed here without changing ShutdownHandler.)
 */
public void shutdown(int status) {
if (shutdownRunnable != null) {
shutdownRunnable.setExistStatus(status);
shutdownRunnable.run();
}
}
/** Exposes the internal (mutable) role-to-token map. */
public Map<String, String> getAuthenticationTokens() {
return authenticationTokens;
}
/**
 * Validates a comma-separated list of "&lt;uuid&gt;&lt;SPLITTER&gt;&lt;role&gt;" tokens
 * against the tokens currently cached per role.
 *
 * @param authToken comma-separated token list; may be null or empty.
 * @return true only when every token matches the cached token for its role;
 *         false for null/empty input or any malformed or stale token.
 */
public boolean isValidAuthenticationToken(String authToken) {
  // Fix: guard against null/empty input instead of throwing NullPointerException.
  if (authToken == null || authToken.isEmpty()) {
    return false;
  }
  for (String token : authToken.split(",")) {
    // SPLITTER is regex-escaped because it may be a regex metacharacter.
    String[] parts = token.split("\\" + SPLITTER);
    if (parts.length < 2) {
      return false;
    }
    String role = parts[1];
    // The whole token (uuid + splitter + role), not just the uuid, must match.
    if (!token.equals(authenticationTokens.get(role))) {
      return false;
    }
  }
  return true;
}
/**
 * Extracts the role names carried by a comma-separated authentication token list.
 * The USER role is always included as the first entry; malformed tokens (without a
 * role part) are skipped.
 *
 * @param authToken comma-separated "&lt;uuid&gt;&lt;SPLITTER&gt;&lt;role&gt;" tokens.
 * @return array with USER_ROLE followed by the role part of each well-formed token.
 */
public String [] getRolesFromAuthenticationToken(String authToken) {
  List<String> roleNames = new ArrayList<>();
  roleNames.add(USER_ROLE);
  for (String singleToken : authToken.split(",")) {
    // SPLITTER is regex-escaped because it may be a regex metacharacter.
    String[] parts = singleToken.split("\\" + SPLITTER);
    if (parts.length >= 2) {
      roleNames.add(parts[1]);
    }
  }
  return roleNames.toArray(new String[roleNames.size()]);
}
/** Regenerates a fresh "<uuid><SPLITTER><role>" token for every known role. */
public void reloadAuthenticationToken() {
for(String role: AuthzRole.ALL_ROLES) {
authenticationTokens.put(role, UUID.randomUUID().toString() + SPLITTER + role);
}
}
/** Returns the cluster callback URL, built from the base HTTP URL. */
public String getClusterCallbackURL() {
return getBaseHttpUrl() + CALLBACK_URL;
}
/** Records whether registration with the remote control hub succeeded. */
public void setRemoteRegistrationStatus(boolean remoteRegistrationSuccessful) {
this.remoteRegistrationSuccessful = remoteRegistrationSuccessful;
}
public boolean isRemoteRegistrationSuccessful() {
return this.remoteRegistrationSuccessful;
}
public void setSSLContext(SSLContext sslContext) {
this.sslContext = sslContext;
}
public SSLContext getSSLContext() {
return sslContext;
}
// Package-private: set by loadOrReloadConfigs() from the loaded configuration.
void setAppAuthToken(String appAuthToken) {
this.appAuthToken = appAuthToken;
}
public String getAppAuthToken() {
return appAuthToken;
}
// NOTE(review): the DPMEnabled and aclEnabled fields are declared elsewhere in this
// file (outside this view); both flags are driven by loadOrReloadConfigs().
public void setDPMEnabled(boolean DPMEnabled) {
this.DPMEnabled = DPMEnabled;
}
public boolean isDPMEnabled() {
return DPMEnabled;
}
public boolean isAclEnabled() {
return aclEnabled;
}
public void setAclEnabled(boolean aclEnabled) {
this.aclEnabled = aclEnabled;
}
/**
 * Loads (or reloads) sdc.properties from the config dir into {@code conf} and pushes
 * the derived settings (base HTTP URL, app auth token, DPM and ACL flags) into
 * {@code runtimeInfo}. Security-prefixed properties are always copied into the JVM's
 * {@code java.security.Security} configuration, even when the file is missing.
 *
 * @param runtimeInfo runtime to update.
 * @param conf configuration the properties are loaded into.
 */
public static void loadOrReloadConfigs(RuntimeInfo runtimeInfo, Configuration conf) {
File configFile = new File(runtimeInfo.getConfigDir(), "sdc.properties");
if (configFile.exists()) {
// NOTE(review): FileReader reads with the platform default charset — confirm
// sdc.properties is always written in a compatible encoding.
try(FileReader reader = new FileReader(configFile)) {
conf.load(reader);
runtimeInfo.setBaseHttpUrl(conf.get(DATA_COLLECTOR_BASE_HTTP_URL, runtimeInfo.getBaseHttpUrl()));
String appAuthToken = conf.get(RemoteSSOService.SECURITY_SERVICE_APP_AUTH_TOKEN_CONFIG, "").trim();
runtimeInfo.setAppAuthToken(appAuthToken);
boolean isDPMEnabled = conf.get(RemoteSSOService.DPM_ENABLED, RemoteSSOService.DPM_ENABLED_DEFAULT);
runtimeInfo.setDPMEnabled(isDPMEnabled);
boolean aclEnabled = conf.get(PIPELINE_ACCESS_CONTROL_ENABLED, PIPELINE_ACCESS_CONTROL_ENABLED_DEFAULT);
String auth = conf.get(WebServerTask.AUTHENTICATION_KEY, WebServerTask.AUTHENTICATION_DEFAULT);
// ACL is only effective when some authentication is configured, or DPM is on.
if (aclEnabled && (!"none".equals(auth) || isDPMEnabled)) {
runtimeInfo.setAclEnabled(true);
} else {
runtimeInfo.setAclEnabled(false);
}
} catch (IOException ex) {
throw new RuntimeException(ex);
}
} else {
// A missing file is logged but not fatal; previously loaded/default values remain.
LOG.error("Error did not find sdc.properties at expected location: {}", configFile);
}
// Transfer all security properties to the JVM configuration
for(Map.Entry<String, String> entry : conf.getSubSetConfiguration(SECURITY_PREFIX).getValues().entrySet()) {
java.security.Security.setProperty(
entry.getKey().substring(SECURITY_PREFIX.length()),
entry.getValue()
);
}
}
}
| |
package com.ofs.ofmc.toolbox;
import android.content.Context;
import android.content.SharedPreferences;
import java.util.HashMap;
/**
* Created by saravana.subramanian on 12/19/16.
*
* Shared preference class to store app information and settings
*/
public class SharedPref {
    private Context context;

    // Eagerly-created singleton. The instance returned by getInstance() was built
    // with the no-arg constructor, so its 'context' field is null; every public
    // method therefore takes the Context explicitly.
    private static SharedPref ourInstance = new SharedPref();

    public static SharedPref getInstance() {
        return ourInstance;
    }

    // Preference file name and keys.
    public static final String PREFS_OFMC = "OFMC";
    public static final String PREFS_USERNAME = "email";
    public static final String PREFS_USER = "user";
    public static final String PREFS_PASSWORD = "password";
    public static final String PREFS_USERID= "userId";
    public static final String PREFS_USER_IMAGE ="userImage";
    public static final String PREFS_IS_PROFILE_COMPLETE = "profile";
    public static final String PREFS_AUTOFILL = "autofill";
    public static final String PREFS_NOTIFICATION_INTERVAL = "notificationInterval";
    public static final String PREFS_COACHMARKS = "coachMarks";
    public static final String PREFS_FRESH_INSTALL = "freshInstall";
    public static final int PREFS_LIST = 1;
    public static final int PREFS_TIMELINE = 2;

    public SharedPref() {
        super();
    }

    public SharedPref(Context context) {
        this.context = context;
    }

    /** Single point of access to the app's shared preference file (was duplicated in every method). */
    private SharedPreferences prefs(Context context) {
        return context.getSharedPreferences(PREFS_OFMC, Context.MODE_PRIVATE);
    }

    /** Seeds first-run defaults: freshInstall = true, autofill = false. */
    public void init(Context context) {
        prefs(context).edit()
                .putBoolean(PREFS_FRESH_INSTALL, true)
                .putBoolean(PREFS_AUTOFILL, false)
                .apply();
    }

    /**
     * No-op kept for backward compatibility: the original implementation committed
     * an editor without putting any value, so nothing was ever stored. Prefer
     * {@link #save(Context, String, String)}.
     */
    public void save(Context context, String text) {
        // Intentionally does nothing (original behavior preserved).
    }

    /** Persists the notification interval from the map, when present. */
    public void saveProfile(Context context, HashMap<String, Integer> map) {
        SharedPreferences.Editor editor = prefs(context).edit();
        if (map.containsKey(PREFS_NOTIFICATION_INTERVAL)) {
            editor.putInt(PREFS_NOTIFICATION_INTERVAL, map.get(PREFS_NOTIFICATION_INTERVAL));
        }
        editor.apply();
    }

    /** Persists any of username/password/userId/user present in the map (stringified). */
    public void save(Context context, HashMap<String, ?> map) {
        SharedPreferences.Editor editor = prefs(context).edit();
        if (map.containsKey(PREFS_USERNAME)) {
            editor.putString(PREFS_USERNAME, String.valueOf(map.get(PREFS_USERNAME)));
        }
        if (map.containsKey(PREFS_PASSWORD)) {
            editor.putString(PREFS_PASSWORD, String.valueOf(map.get(PREFS_PASSWORD)));
        }
        if (map.containsKey(PREFS_USERID)) {
            editor.putString(PREFS_USERID, String.valueOf(map.get(PREFS_USERID)));
        }
        if (map.containsKey(PREFS_USER)) {
            editor.putString(PREFS_USER, String.valueOf(map.get(PREFS_USER)));
        }
        editor.apply();
    }

    /** Stores a single boolean under the given key. */
    public void save(Context context, String key, boolean value) {
        prefs(context).edit().putBoolean(key, value).apply();
    }

    /** Stores a single string under the given key. */
    public void save(Context context, String key, String value) {
        prefs(context).edit().putString(key, value).apply();
    }

    /** Records whether the coach marks for the given screen/class were shown. */
    public void coachmarksShown(Context context, String className, boolean shown) {
        prefs(context).edit().putBoolean(className, shown).apply();
    }

    /** Returns whether the coach marks for the given screen/class were already shown. */
    public boolean isCoachShown(Context context, String className) {
        // Fixed: removed the unused Editor local the original declared here.
        return prefs(context).getBoolean(className, false);
    }

    /** Returns the stored string for the key, or null when absent. */
    public String getString(Context context, String key) {
        return prefs(context).getString(key, null);
    }

    /** Returns the stored boolean for the key, defaulting to false. */
    public Boolean getBoolean(Context context, String key) {
        return prefs(context).getBoolean(key, false);
    }

    /** Returns the stored int for the key, defaulting to 0. */
    public Integer getInteger(Context context, String key) {
        return prefs(context).getInt(key, 0);
    }

    /**
     * Returns an empty map: the only population line was commented out in the
     * original, so this has never returned any entries. Kept for compatibility.
     */
    public HashMap<String, Integer> getMap(Context context) {
        return new HashMap<>();
    }

    /** Removes every value from the preference file. */
    public void clearSharedPreference(Context context) {
        prefs(context).edit().clear().apply();
    }

    /**
     * No-op kept for backward compatibility: the remove() call was commented out
     * in the original, so the synchronous commit persisted no change.
     */
    public void removeValue(Context context) {
        // Intentionally does nothing (original behavior preserved).
    }
}
| |
package polybuf.xml;
import static org.junit.Assert.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import javax.xml.XMLConstants;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import org.junit.Test;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import polybuf.core.config.SerializerConfig;
import polybuf.core.test.Coverage;
import polybuf.core.test.ExtensionsBase;
import polybuf.core.test.ExtensionsExt;
import polybuf.core.test.ExtensionsExtSamePackage;
import polybuf.core.test.Nested;
import polybuf.core.test.NoOptions;
import polybuf.core.test.PackagesPkg1;
import polybuf.core.test.PackagesPkg2;
import polybuf.core.test.Services;
import com.google.common.base.Charsets;
import com.google.common.primitives.UnsignedInteger;
import com.google.common.primitives.UnsignedLong;
public class SchemaWriterTest {
private final static boolean laxAnyExtensions = true;
/** Builds a schema for the config with strict (non-lax) extension handling. */
private Schema schema(SerializerConfig config) throws Exception {
  return schema(config, false);
}
/**
 * Generates the XSD for the config and parses it back with the JDK schema factory;
 * a malformed generated schema fails here with a SAXException before any test runs.
 */
private Schema schema(SerializerConfig config, boolean useLaxAnyForExtensions) throws Exception {
SchemaWriter writer = new SchemaWriter(config, useLaxAnyForExtensions);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
writer.writeSchema(stream);
//assertEquals("", stream.toString("UTF-8"));
ByteArrayInputStream input = new ByteArrayInputStream(stream.toByteArray());
SchemaFactory factory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
return factory.newSchema(new StreamSource(input));
}
/**
 * Validates the XML string against the schema. Backticks are substituted with double
 * quotes first (the test documents in this class use backticks for readability).
 * Throws on invalid input; returns true otherwise so it can be wrapped in assertTrue.
 */
private boolean validateXmlAgainstSchema(String xml, Schema schema) throws Exception {
Validator validator = schema.newValidator();
ByteArrayInputStream stream = new ByteArrayInputStream(xml.replace('`','"').getBytes(Charsets.UTF_8));
SAXSource source = new SAXSource(new InputSource(stream));
validator.validate(source);
return true;
}
// Test-document fragments: backticks stand in for double quotes and are substituted
// by validateXmlAgainstSchema() before parsing.
private final static String xml = "<?xml version=`1.0` encoding=`UTF-8`?>";
private final static String coverageNs = "xmlns=`http://www.example.org/polybuf-test/coverage`";
private final static String coveragePrefixedNs = "xmlns:c=`http://www.example.org/polybuf-test/coverage`";
private final static String extensionsBaseNs = "xmlns=`http://www.example.org/polybuf-test/extensions/base`";
private final static String extensionsExtNs = "xmlns=`http://www.example.org/polybuf-test/extensions/ext`";
private final static String extensionsExtSamePackageNs = "xmlns=`http://www.example.org/polybuf-test/extensions/ext_same_package`";
private final static String nestedNs = "xmlns=`http://www.example.org/polybuf-test/nested`";
private final static String packagesPkg2Ns = "xmlns=`http://www.example.org/polybuf-test/packages/pkg2`";
private final static String noOptionsNs = "xmlns=`http://example.com/NoOptions`";
private final static String servicesNs = "xmlns=`http://example.com/Services`";
// --- Coverage tests, bool/bytes/double/enum: each positive test validates a document
// --- with required, defaulted/optional, and repeated elements; each *BadValue test
// --- expects a SAXException for content that violates the generated simple type.
@Test
public void coverageBool() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Bool ").append(coverageNs).append(">")
.append( "<required>true</required>")
.append( "<defaulted>false</defaulted>")
.append( "<repeated>true</repeated>")
.append( "<repeated>false</repeated>")
.append("</coverage.Bool>").toString(), schema);
}
// Same document, but the schema is built without a target namespace.
@Test
public void coverageBoolNullSchema() throws Exception {
Schema schema = schema(SerializerConfig.builder(Coverage.class).setNamespaceUri(null).build());
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Bool>")
.append( "<required>true</required>")
.append( "<defaulted>false</defaulted>")
.append( "<repeated>true</repeated>")
.append( "<repeated>false</repeated>")
.append("</coverage.Bool>").toString(), schema);
}
// Same document using a "c:" namespace prefix instead of the default namespace.
@Test
public void coverageBoolPrefixedSchema() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<c:coverage.Bool ").append(coveragePrefixedNs).append(">")
.append( "<c:required>true</c:required>")
.append( "<c:defaulted>false</c:defaulted>")
.append( "<c:repeated>true</c:repeated>")
.append( "<c:repeated>false</c:repeated>")
.append("</c:coverage.Bool>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageBoolBadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Bool ").append(coverageNs).append(">")
.append( "<required>bad</required>")
.append("</coverage.Bool>").toString(), schema);
}
// Bytes fields are base64-encoded in the documents below.
@Test
public void coverageBytes() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Bytes ").append(coverageNs).append(">")
.append( "<required>abcd</required>")
.append( "<defaulted>1234</defaulted>")
.append( "<repeated>+/+/</repeated>")
.append( "<repeated>1g==</repeated>")
.append("</coverage.Bytes>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageBytesBadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Bytes ").append(coverageNs).append(">")
.append( "<required>bad</required>") // doesn't accept odd-length
.append("</coverage.Bytes>").toString(), schema);
}
@Test
public void coverageDouble() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Double ").append(coverageNs).append(">")
.append( "<required>1234</required>")
.append( "<defaulted>-5.84757</defaulted>")
.append( "<repeated>1.e-9</repeated>")
.append( "<repeated>INF</repeated>")
.append("</coverage.Double>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageDoubleBadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Double ").append(coverageNs).append(">")
.append( "<required>5 5</required>")
.append("</coverage.Double>").toString(), schema);
}
@Test
public void coverageEnum() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Enum ").append(coverageNs).append(">")
.append( "<required>A</required>")
.append( "<optional>B</optional>")
.append( "<repeated>A</repeated>")
.append( "<repeated>C</repeated>")
.append("</coverage.Enum>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageEnumBadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Enum ").append(coverageNs).append(">")
.append( "<required>D</required>")
.append("</coverage.Enum>").toString(), schema);
}
// --- Coverage tests, nested enum / fixed32 / fixed64 / float. The fixed types map to
// --- unsigned XSD types, so negative values are the bad-value cases.
@Test
public void coverageNestedEnum() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.NestedEnum ").append(coverageNs).append(">")
.append( "<required>X</required>")
.append( "<optional>Y</optional>")
.append( "<repeated>Z</repeated>")
.append( "<repeated>Y</repeated>")
.append("</coverage.NestedEnum>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageNestedEnumBadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.NestedEnum ").append(coverageNs).append(">")
.append( "<required>W</required>")
.append("</coverage.NestedEnum>").toString(), schema);
}
@Test
public void coverageFixed32() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Fixed32 ").append(coverageNs).append(">")
.append( "<required>1</required>")
.append( "<optional>0</optional>")
.append( "<repeated>3734646</repeated>")
.append( "<repeated>"+UnsignedInteger.MAX_VALUE+"</repeated>")
.append("</coverage.Fixed32>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageFixed32BadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Fixed32 ").append(coverageNs).append(">")
.append( "<required>-1</required>")
.append("</coverage.Fixed32>").toString(), schema);
}
@Test
public void coverageFixed64() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Fixed64 ").append(coverageNs).append(">")
.append( "<required>1</required>")
.append( "<optional>0</optional>")
.append( "<repeated>3734646857463</repeated>")
.append( "<repeated>"+UnsignedLong.MAX_VALUE+"</repeated>")
.append("</coverage.Fixed64>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageFixed64BadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Fixed64 ").append(coverageNs).append(">")
.append( "<required>-1</required>")
.append("</coverage.Fixed64>").toString(), schema);
}
@Test
public void coverageFloat() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Float ").append(coverageNs).append(">")
.append( "<required>1234</required>")
.append( "<defaulted>-5.84757</defaulted>")
.append( "<repeated>NaN</repeated>")
.append( "<repeated>-INF</repeated>")
.append("</coverage.Float>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageFloatBadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Float ").append(coverageNs).append(">")
.append( "<required>a</required>")
.append("</coverage.Float>").toString(), schema);
}
// --- Coverage tests, int32/int64 (bad value = out-of-range literal) and message /
// --- nested-message structures (bad value = wrong element structure).
@Test
public void coverageInt32() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Int32 ").append(coverageNs).append(">")
.append( "<required>1234</required>")
.append( "<defaulted>-3874</defaulted>")
.append( "<repeated>"+Integer.MIN_VALUE+"</repeated>")
.append( "<repeated>"+Integer.MAX_VALUE+"</repeated>")
.append("</coverage.Int32>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageInt32BadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Int32 ").append(coverageNs).append(">")
.append( "<required>1"+Integer.MAX_VALUE+"</required>")
.append("</coverage.Int32>").toString(), schema);
}
@Test
public void coverageInt64() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Int64 ").append(coverageNs).append(">")
.append( "<required>1234</required>")
.append( "<defaulted>-3874</defaulted>")
.append( "<repeated>"+Long.MIN_VALUE+"</repeated>")
.append( "<repeated>"+Long.MAX_VALUE+"</repeated>")
.append("</coverage.Int64>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageInt64BadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Int64 ").append(coverageNs).append(">")
.append( "<required>1"+Long.MAX_VALUE+"</required>")
.append("</coverage.Int64>").toString(), schema);
}
@Test
public void coverageMessage() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
assertTrue(validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Message ").append(coverageNs).append(">")
.append( "<required>")
.append( "<required>true</required>")
.append( "<defaulted>false</defaulted>")
.append( "<repeated>true</repeated>")
.append( "<repeated>false</repeated>")
.append( "</required>")
.append( "<optional>")
.append( "<required>true</required>")
.append( "</optional>")
.append( "<repeated>")
.append( "<required>false</required>")
.append( "</repeated>")
.append( "<repeated>")
.append( "<required>true</required>")
.append( "</repeated>")
.append("</coverage.Message>").toString(), schema));
}
// Bad value: a message-typed element with scalar content instead of child elements.
@Test(expected=SAXException.class)
public void coverageMessageBadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
assertTrue(validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Message ").append(coverageNs).append(">")
.append( "<required>true</required>")
.append("</coverage.Message>").toString(), schema));
}
@Test
public void coverageNestedMessage() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
assertTrue(validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.NestedMessage ").append(coverageNs).append(">")
.append( "<required>")
.append( "<s>a</s>")
.append( "</required>")
.append( "<optional>")
.append( "<s>b</s>")
.append( "</optional>")
.append( "<repeated>")
.append( "<s>c</s>")
.append( "</repeated>")
.append( "<repeated>")
.append( "<s>d</s>")
.append( "</repeated>")
.append("</coverage.NestedMessage>").toString(), schema));
}
@Test(expected=SAXException.class)
public void coverageNestedMessageBadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
assertTrue(validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Message ").append(coverageNs).append(">")
.append( "<required>")
.append( "<s>") // s is a string so any structure should cause exception
.append( "<required>a</required>")
.append( "</s>")
.append( "</required>")
.append("</coverage.Message>").toString(), schema));
}
// --- Coverage tests, sfixed32/sfixed64/sint32/sint64 (signed ranges; bad value =
// --- out-of-range literal) and string (bad value = structured content).
@Test
public void coverageSfixed32() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Sfixed32 ").append(coverageNs).append(">")
.append( "<required>1234</required>")
.append( "<defaulted>-3874</defaulted>")
.append( "<repeated>"+Integer.MIN_VALUE+"</repeated>")
.append( "<repeated>"+Integer.MAX_VALUE+"</repeated>")
.append("</coverage.Sfixed32>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageSfixed32BadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Sfixed32 ").append(coverageNs).append(">")
.append( "<required>1"+Integer.MAX_VALUE+"</required>")
.append("</coverage.Sfixed32>").toString(), schema);
}
@Test
public void coverageSfixed64() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Sfixed64 ").append(coverageNs).append(">")
.append( "<required>1234</required>")
.append( "<defaulted>-3874</defaulted>")
.append( "<repeated>"+Long.MIN_VALUE+"</repeated>")
.append( "<repeated>"+Long.MAX_VALUE+"</repeated>")
.append("</coverage.Sfixed64>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageSfixed64BadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Sfixed64 ").append(coverageNs).append(">")
.append( "<required>1"+Long.MAX_VALUE+"</required>")
.append("</coverage.Sfixed64>").toString(), schema);
}
@Test
public void coverageSint32() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Sint32 ").append(coverageNs).append(">")
.append( "<required>1234</required>")
.append( "<defaulted>-3874</defaulted>")
.append( "<repeated>"+Integer.MIN_VALUE+"</repeated>")
.append( "<repeated>"+Integer.MAX_VALUE+"</repeated>")
.append("</coverage.Sint32>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageSint32BadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Sint32 ").append(coverageNs).append(">")
.append( "<required>1"+Integer.MAX_VALUE+"</required>")
.append("</coverage.Sint32>").toString(), schema);
}
@Test
public void coverageSint64() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Sint64 ").append(coverageNs).append(">")
.append( "<required>1234</required>")
.append( "<defaulted>-3874</defaulted>")
.append( "<repeated>"+Long.MIN_VALUE+"</repeated>")
.append( "<repeated>"+Long.MAX_VALUE+"</repeated>")
.append("</coverage.Sint64>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageSint64BadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Sint64 ").append(coverageNs).append(">")
.append( "<required>1"+Long.MAX_VALUE+"</required>")
.append("</coverage.Sint64>").toString(), schema);
}
@Test
public void coverageString1() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.String1 ").append(coverageNs).append(">")
.append( "<required>1fd s f asdjfasdf asdjf</required>")
.append( "<defaulted>werwsd</defaulted>")
.append( "<repeated>asdsdf</repeated>")
.append( "<repeated>psdofasdk</repeated>")
.append("</coverage.String1>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageString1BadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.String1 ").append(coverageNs).append(">")
.append( "<required>")
.append( "<required>a</required>")
.append( "</required>")
.append("</coverage.String1>").toString(), schema);
}
// --- Coverage tests, uint32/uint64: unsigned ranges, so -1 is the bad-value case.
@Test
public void coverageUint32() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Uint32 ").append(coverageNs).append(">")
.append( "<required>1</required>")
.append( "<optional>0</optional>")
.append( "<repeated>3734646</repeated>")
.append( "<repeated>"+UnsignedInteger.MAX_VALUE+"</repeated>")
.append("</coverage.Uint32>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageUint32BadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Uint32 ").append(coverageNs).append(">")
.append( "<required>-1</required>")
.append("</coverage.Uint32>").toString(), schema);
}
@Test
public void coverageUint64() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Uint64 ").append(coverageNs).append(">")
.append( "<required>1</required>")
.append( "<optional>0</optional>")
.append( "<repeated>3734646857463</repeated>")
.append( "<repeated>"+UnsignedLong.MAX_VALUE+"</repeated>")
.append("</coverage.Uint64>").toString(), schema);
}
@Test(expected=SAXException.class)
public void coverageUint64BadValue() throws Exception {
Schema schema = schema(SerializerConfig.of(Coverage.class));
assertNotNull(schema);
validateXmlAgainstSchema(
new StringBuilder(xml)
.append("<coverage.Uint64 ").append(coverageNs).append(">")
.append( "<required>-1</required>")
.append("</coverage.Uint64>").toString(), schema);
}
// Strict mode: only fields declared in the proto are accepted.
@Test
public void extensionBaseStrict() throws Exception {
    Schema schema = schema(SerializerConfig.of(ExtensionsBase.class));
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<base.Message1 ").append(extensionsBaseNs).append(">")
        .append(  "<id>base id</id>")
        .append("</base.Message1>").toString(), schema);
}
// Strict mode must reject elements that are not in the extension registry.
@Test(expected=SAXException.class)
public void extensionBaseStrictBadValue() throws Exception {
    Schema schema = schema(SerializerConfig.of(ExtensionsBase.class));
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<base.Message1 ").append(extensionsBaseNs).append(">")
        .append(  "<id>base id</id>")
        .append(  "<blah>0</blah>") // only accepts items in extension registry
        .append("</base.Message1>").toString(), schema);
}
// Lax mode: arbitrary extension elements are accepted after the declared fields.
@Test
public void extensionBaseLax() throws Exception {
    Schema schema = schema(SerializerConfig.of(ExtensionsBase.class), laxAnyExtensions);
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<base.Message1 ").append(extensionsBaseNs).append(">")
        // defined in proto
        .append(  "<id>base id</id>")
        // accepted since Message1 defines lax any extensions
        .append(  "<blah>0</blah>")
        .append(  "<a.b.c>3.141</a.b.c>")
        .append(  "<a><b>c</b></a>")
        .append("</base.Message1>").toString(), schema);
}
// Even in lax mode, ordering matters: declared fields must precede extensions.
@Test(expected=SAXException.class)
public void extensionBaseLaxWrongOrder() throws Exception {
    Schema schema = schema(SerializerConfig.of(ExtensionsBase.class), laxAnyExtensions);
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<base.Message1 ").append(extensionsBaseNs).append(">")
        .append(  "<blah>0</blah>")
        .append(  "<id>base id</id>") // defined members need to come before lax extensions
        .append("</base.Message1>").toString(), schema);
}
// A registered extension field (ext.id) is accepted in strict mode.
@Test
public void extensionExtStrict() throws Exception {
    Schema schema = schema(SerializerConfig.builder(ExtensionsExt.class)
        .addRoot(ExtensionsBase.Message1.class).build());
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<base.Message1 ").append(extensionsExtNs).append(">")
        .append(  "<id>base id</id>")
        .append(  "<ext.id>1</ext.id>")
        .append("</base.Message1>").toString(), schema);
}
// An unregistered extension field (ext.bad) is rejected in strict mode.
@Test(expected=SAXException.class)
public void extensionExtStrictUnknownExtension() throws Exception {
    Schema schema = schema(SerializerConfig.builder(ExtensionsExt.class)
        .addRoot(ExtensionsBase.Message1.class).build());
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<base.Message1 ").append(extensionsExtNs).append(">")
        .append(  "<id>base id</id>")
        .append(  "<ext.bad>s</ext.bad>")
        .append("</base.Message1>").toString(), schema);
}
// Lax mode accepts unknown, even structured, extension content after known fields.
@Test
public void extensionExtLax() throws Exception {
    Schema schema = schema(SerializerConfig.builder(ExtensionsExt.class)
        .addRoot(ExtensionsBase.Message1.class).build(), laxAnyExtensions);
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<base.Message1 ").append(extensionsExtNs).append(">")
        .append(  "<id>base id</id>")
        .append(  "<ext.unknown><a>1</a></ext.unknown>")
        .append("</base.Message1>").toString(), schema);
}
// Lax mode still enforces ordering: extensions may not precede known fields.
@Test(expected=SAXException.class)
public void extensionExtLaxWrongOrder() throws Exception {
    Schema schema = schema(SerializerConfig.builder(ExtensionsExt.class)
        .addRoot(ExtensionsBase.Message1.class).build(), laxAnyExtensions);
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<base.Message1 ").append(extensionsExtNs).append(">")
        .append(  "<ext.unknown><a>1</a></ext.unknown>") // extensions must come after known fields
        .append(  "<id>base id</id>")
        .append("</base.Message1>").toString(), schema);
}
// Extensions declared in the same proto package use their scoped element name.
@Test
public void extensionExtSamePackageStrict() throws Exception {
    Schema schema = schema(SerializerConfig.builder(ExtensionsExtSamePackage.class)
        .addRoot(ExtensionsBase.Message1.class).build());
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<base.Message1 ").append(extensionsExtSamePackageNs).append(">")
        .append(  "<id>base id</id>")
        .append(  "<base.Scope.id>1</base.Scope.id>")
        .append("</base.Message1>").toString(), schema);
}
// A nested message marked as root is addressable by its fully nested element name.
@Test
public void extensionNested() throws Exception {
    Schema schema = schema(SerializerConfig.of(Nested.class));
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<nested.Message1.Message1A ").append(nestedNs).append(">")
        .append(  "<id>nested id</id>")
        .append("</nested.Message1.Message1A>").toString(), schema);
}
// The enclosing (non-root) message must not be usable as a document element.
@Test(expected=SAXException.class)
public void extensionNestedNonRoot() throws Exception {
    Schema schema = schema(SerializerConfig.of(Nested.class));
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        // enclosing message is not a root so should not be accepted element
        .append("<nested.Message1 ").append(nestedNs).append(">")
        .append(  "<parts>")
        .append(  "<id>nested id</id>")
        .append(  "</parts>")
        .append("</nested.Message1>").toString(), schema);
}
// Schema generation for pkg1 alone must succeed (no validation performed here).
@Test
public void packagesPkg1() throws Exception {
    Schema schema = schema(SerializerConfig.of(PackagesPkg1.class));
    assertNotNull(schema);
}
// Cross-package composition: Message2 embeds both a pkg1 and a pkg2 message
// and the combined document must validate.
@Test
public void packagesPkg2() throws Exception {
    Schema schema = schema(SerializerConfig.of(PackagesPkg2.class));
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<pkg2.Message2 ").append(packagesPkg2Ns).append(">")
        .append(  "<pkg1>")
        .append(  "<id>pkg1 id</id>")
        .append(  "</pkg1>")
        .append(  "<pkg2>")
        .append(  "<id>1</id>")
        .append(  "<type>A</type>")
        .append(  "</pkg2>")
        .append("</pkg2.Message2>").toString(), schema);
}
// A proto without options still produces a schema.
@Test
public void noOptions() throws Exception {
    Schema schema = schema(SerializerConfig.of(NoOptions.class));
    assertNotNull(schema);
}
// Without options, no message is a root, so no document element is accepted.
@Test(expected=SAXException.class)
public void noOptionsRoot() throws Exception {
    Schema schema = schema(SerializerConfig.of(NoOptions.class));
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        // enclosing message is not a root so should not be accepted element
        .append("<Message1 ").append(noOptionsNs).append(">")
        .append(  "<id>id</id>")
        .append("</Message1>").toString(), schema);
}
// Explicitly registering Message1 as a root makes its element acceptable even
// though the proto itself declares no root options.
@Test
public void noOptionsExplicitRoot() throws Exception {
    final SerializerConfig config = SerializerConfig.builder(NoOptions.class)
        .addRoot(NoOptions.Message1.class)
        .build();
    final Schema schema = schema(config);
    assertNotNull(schema);

    final String document = xml
        + "<Message1 " + noOptionsNs + ">"
        + "<id>id</id>"
        + "</Message1>";
    validateXmlAgainstSchema(document, schema);
}
// A proto containing service definitions still produces a schema.
@Test
public void services() throws Exception {
    Schema schema = schema(SerializerConfig.of(Services.class));
    assertNotNull(schema);
}
// With no declared roots, no document element is accepted.
@Test(expected=SAXException.class)
public void servicesRoot() throws Exception {
    Schema schema = schema(SerializerConfig.of(Services.class));
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        // enclosing message is not a root so should not be accepted element
        .append("<Message1 ").append(servicesNs).append(">")
        .append(  "<id>id</id>")
        .append("</Message1>").toString(), schema);
}
@Test
public void servicesExplicitRoot() throws Exception {
    // NOTE(review): this registers NoOptions.Message1 as the root while building
    // a Services config and validating against servicesNs — it looks like a
    // copy-paste from noOptionsExplicitRoot; confirm whether Services.Message1
    // was intended here.
    Schema schema = schema(SerializerConfig.builder(Services.class).addRoot(NoOptions.Message1.class).build());
    assertNotNull(schema);
    validateXmlAgainstSchema(
        new StringBuilder(xml)
        .append("<Message1 ").append(servicesNs).append(">")
        .append(  "<id>id</id>")
        .append("</Message1>").toString(), schema);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.lang3.builder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import org.junit.Test;
/**
* Unit tests {@link org.apache.commons.lang3.builder.HashCodeBuilder}.
*/
public class HashCodeBuilderTest {
    /**
     * A reflection test fixture.
     */
    static class ReflectionTestCycleA {
        ReflectionTestCycleB b;
        @Override
        public int hashCode() {
            return HashCodeBuilder.reflectionHashCode(this);
        }
    }
    /**
     * A reflection test fixture.
     */
    static class ReflectionTestCycleB {
        ReflectionTestCycleA a;
        @Override
        public int hashCode() {
            return HashCodeBuilder.reflectionHashCode(this);
        }
    }
    // -----------------------------------------------------------------------
    // Constructor contract: both numbers must be odd and non-zero.
    @Test(expected=IllegalArgumentException.class)
    public void testConstructorExZero() {
        new HashCodeBuilder(0, 0);
    }
    @Test(expected=IllegalArgumentException.class)
    public void testConstructorExEvenFirst() {
        new HashCodeBuilder(2, 3);
    }
    @Test(expected=IllegalArgumentException.class)
    public void testConstructorExEvenSecond() {
        new HashCodeBuilder(3, 2);
    }
    @Test(expected=IllegalArgumentException.class)
    public void testConstructorExEvenNegative() {
        new HashCodeBuilder(-2, -2);
    }
    // Simple fixture with equals/hashCode consistent on field 'a'.
    static class TestObject {
        private int a;
        TestObject(final int a) {
            this.a = a;
        }
        @Override
        public boolean equals(final Object o) {
            if (o == this) {
                return true;
            }
            if (!(o instanceof TestObject)) {
                return false;
            }
            final TestObject rhs = (TestObject) o;
            return a == rhs.a;
        }
        @Override
        public int hashCode() {
            return a;
        }
        public void setA(final int a) {
            this.a = a;
        }
        public int getA() {
            return a;
        }
    }
    // Subclass fixture adding field 'b' and a transient field 't' (transient
    // fields are skipped by reflectionHashCode by default).
    // NOTE(review): the instanceof checks make equals asymmetric between
    // TestObject and TestSubObject — acceptable for a test fixture only.
    static class TestSubObject extends TestObject {
        private int b;
        @SuppressWarnings("unused")
        private transient int t;
        TestSubObject() {
            super(0);
        }
        TestSubObject(final int a, final int b, final int t) {
            super(a);
            this.b = b;
            this.t = t;
        }
        @Override
        public boolean equals(final Object o) {
            if (o == this) {
                return true;
            }
            if (!(o instanceof TestSubObject)) {
                return false;
            }
            final TestSubObject rhs = (TestSubObject) o;
            return super.equals(o) && b == rhs.b;
        }
        @Override
        public int hashCode() {
            return b*17 + super.hashCode();
        }
    }
    @Test
    public void testReflectionHashCode() {
        assertEquals(17 * 37, HashCodeBuilder.reflectionHashCode(new TestObject(0)));
        assertEquals(17 * 37 + 123456, HashCodeBuilder.reflectionHashCode(new TestObject(123456)));
    }
    // Hierarchy: the boolean flag controls whether transient fields are included.
    @Test
    public void testReflectionHierarchyHashCode() {
        assertEquals(17 * 37 * 37, HashCodeBuilder.reflectionHashCode(new TestSubObject(0, 0, 0)));
        assertEquals(17 * 37 * 37 * 37, HashCodeBuilder.reflectionHashCode(new TestSubObject(0, 0, 0), true));
        assertEquals((17 * 37 + 7890) * 37 + 123456, HashCodeBuilder.reflectionHashCode(new TestSubObject(123456, 7890,
                0)));
        assertEquals(((17 * 37 + 7890) * 37 + 0) * 37 + 123456, HashCodeBuilder.reflectionHashCode(new TestSubObject(
                123456, 7890, 0), true));
    }
    @Test(expected=IllegalArgumentException.class)
    public void testReflectionHierarchyHashCodeEx1() {
        HashCodeBuilder.reflectionHashCode(0, 0, new TestSubObject(0, 0, 0), true);
    }
    @Test(expected=IllegalArgumentException.class)
    public void testReflectionHierarchyHashCodeEx2() {
        HashCodeBuilder.reflectionHashCode(2, 2, new TestSubObject(0, 0, 0), true);
    }
    @Test(expected=IllegalArgumentException.class)
    public void testReflectionHashCodeEx1() {
        HashCodeBuilder.reflectionHashCode(0, 0, new TestObject(0), true);
    }
    @Test(expected=IllegalArgumentException.class)
    public void testReflectionHashCodeEx2() {
        HashCodeBuilder.reflectionHashCode(2, 2, new TestObject(0), true);
    }
    @Test(expected=IllegalArgumentException.class)
    public void testReflectionHashCodeEx3() {
        HashCodeBuilder.reflectionHashCode(13, 19, null, true);
    }
    // appendSuper folds a superclass hash into the running total.
    @Test
    public void testSuper() {
        final Object obj = new Object();
        assertEquals(17 * 37 + 19 * 41 + obj.hashCode(), new HashCodeBuilder(17, 37).appendSuper(
                new HashCodeBuilder(19, 41).append(obj).toHashCode()).toHashCode());
    }
    @Test
    public void testObject() {
        Object obj = null;
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj = new Object();
        assertEquals(17 * 37 + obj.hashCode(), new HashCodeBuilder(17, 37).append(obj).toHashCode());
    }
    // build() returns the same value as toHashCode(), boxed as Integer.
    @Test
    public void testObjectBuild() {
        Object obj = null;
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append(obj).build().intValue());
        obj = new Object();
        assertEquals(17 * 37 + obj.hashCode(), new HashCodeBuilder(17, 37).append(obj).build().intValue());
    }
    @Test
    @SuppressWarnings("cast") // cast is not really needed, keep for consistency
    public void testLong() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append(0L).toHashCode());
        assertEquals(17 * 37 + (int) (123456789L ^ 123456789L >> 32), new HashCodeBuilder(17, 37).append(
                123456789L).toHashCode());
    }
    @Test
    @SuppressWarnings("cast") // cast is not really needed, keep for consistency
    public void testInt() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append(0).toHashCode());
        assertEquals(17 * 37 + 123456, new HashCodeBuilder(17, 37).append(123456).toHashCode());
    }
    @Test
    public void testShort() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((short) 0).toHashCode());
        assertEquals(17 * 37 + 12345, new HashCodeBuilder(17, 37).append((short) 12345).toHashCode());
    }
    @Test
    public void testChar() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((char) 0).toHashCode());
        assertEquals(17 * 37 + 1234, new HashCodeBuilder(17, 37).append((char) 1234).toHashCode());
    }
    @Test
    public void testByte() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((byte) 0).toHashCode());
        assertEquals(17 * 37 + 123, new HashCodeBuilder(17, 37).append((byte) 123).toHashCode());
    }
    // double is folded via Double.doubleToLongBits, then like a long.
    @Test
    @SuppressWarnings("cast") // cast is not really needed, keep for consistency
    public void testDouble() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append(0d).toHashCode());
        final double d = 1234567.89;
        final long l = Double.doubleToLongBits(d);
        assertEquals(17 * 37 + (int) (l ^ l >> 32), new HashCodeBuilder(17, 37).append(d).toHashCode());
    }
    @Test
    @SuppressWarnings("cast") // cast is not really needed, keep for consistency
    public void testFloat() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append(0f).toHashCode());
        final float f = 1234.89f;
        final int i = Float.floatToIntBits(f);
        assertEquals(17 * 37 + i, new HashCodeBuilder(17, 37).append(f).toHashCode());
    }
    // HashCodeBuilder maps true -> 0 and false -> 1 (the opposite of
    // Boolean.hashCode); these assertions pin that documented behavior.
    @Test
    public void testBoolean() {
        assertEquals(17 * 37 + 0, new HashCodeBuilder(17, 37).append(true).toHashCode());
        assertEquals(17 * 37 + 1, new HashCodeBuilder(17, 37).append(false).toHashCode());
    }
    // Array appends: null array hashes like a null object; otherwise each
    // element is appended in order.
    @Test
    public void testObjectArray() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((Object[]) null).toHashCode());
        final Object[] obj = new Object[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = new Object();
        assertEquals((17 * 37 + obj[0].hashCode()) * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[1] = new Object();
        assertEquals((17 * 37 + obj[0].hashCode()) * 37 + obj[1].hashCode(), new HashCodeBuilder(17, 37).append(obj)
                .toHashCode());
    }
    // Arrays passed as Object must be unwrapped and hash identically.
    @Test
    public void testObjectArrayAsObject() {
        final Object[] obj = new Object[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[0] = new Object();
        assertEquals((17 * 37 + obj[0].hashCode()) * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[1] = new Object();
        assertEquals((17 * 37 + obj[0].hashCode()) * 37 + obj[1].hashCode(), new HashCodeBuilder(17, 37).append(
                (Object) obj).toHashCode());
    }
    @Test
    public void testLongArray() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((long[]) null).toHashCode());
        final long[] obj = new long[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = 5L;
        final int h1 = (int) (5L ^ 5L >> 32);
        assertEquals((17 * 37 + h1) * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[1] = 6L;
        final int h2 = (int) (6L ^ 6L >> 32);
        assertEquals((17 * 37 + h1) * 37 + h2, new HashCodeBuilder(17, 37).append(obj).toHashCode());
    }
    @Test
    public void testLongArrayAsObject() {
        final long[] obj = new long[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[0] = 5L;
        final int h1 = (int) (5L ^ 5L >> 32);
        assertEquals((17 * 37 + h1) * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[1] = 6L;
        final int h2 = (int) (6L ^ 6L >> 32);
        assertEquals((17 * 37 + h1) * 37 + h2, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
    }
    @Test
    public void testIntArray() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((int[]) null).toHashCode());
        final int[] obj = new int[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = 5;
        assertEquals((17 * 37 + 5) * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[1] = 6;
        assertEquals((17 * 37 + 5) * 37 + 6, new HashCodeBuilder(17, 37).append(obj).toHashCode());
    }
    @Test
    public void testIntArrayAsObject() {
        final int[] obj = new int[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[0] = 5;
        assertEquals((17 * 37 + 5) * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[1] = 6;
        assertEquals((17 * 37 + 5) * 37 + 6, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
    }
    @Test
    public void testShortArray() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((short[]) null).toHashCode());
        final short[] obj = new short[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = (short) 5;
        assertEquals((17 * 37 + 5) * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[1] = (short) 6;
        assertEquals((17 * 37 + 5) * 37 + 6, new HashCodeBuilder(17, 37).append(obj).toHashCode());
    }
    @Test
    public void testShortArrayAsObject() {
        final short[] obj = new short[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[0] = (short) 5;
        assertEquals((17 * 37 + 5) * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[1] = (short) 6;
        assertEquals((17 * 37 + 5) * 37 + 6, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
    }
    @Test
    public void testCharArray() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((char[]) null).toHashCode());
        final char[] obj = new char[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = (char) 5;
        assertEquals((17 * 37 + 5) * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[1] = (char) 6;
        assertEquals((17 * 37 + 5) * 37 + 6, new HashCodeBuilder(17, 37).append(obj).toHashCode());
    }
    @Test
    public void testCharArrayAsObject() {
        final char[] obj = new char[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[0] = (char) 5;
        assertEquals((17 * 37 + 5) * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[1] = (char) 6;
        assertEquals((17 * 37 + 5) * 37 + 6, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
    }
    @Test
    public void testByteArray() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((byte[]) null).toHashCode());
        final byte[] obj = new byte[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = (byte) 5;
        assertEquals((17 * 37 + 5) * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[1] = (byte) 6;
        assertEquals((17 * 37 + 5) * 37 + 6, new HashCodeBuilder(17, 37).append(obj).toHashCode());
    }
    @Test
    public void testByteArrayAsObject() {
        final byte[] obj = new byte[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[0] = (byte) 5;
        assertEquals((17 * 37 + 5) * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[1] = (byte) 6;
        assertEquals((17 * 37 + 5) * 37 + 6, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
    }
    @Test
    public void testDoubleArray() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((double[]) null).toHashCode());
        final double[] obj = new double[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = 5.4d;
        final long l1 = Double.doubleToLongBits(5.4d);
        final int h1 = (int) (l1 ^ l1 >> 32);
        assertEquals((17 * 37 + h1) * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[1] = 6.3d;
        final long l2 = Double.doubleToLongBits(6.3d);
        final int h2 = (int) (l2 ^ l2 >> 32);
        assertEquals((17 * 37 + h1) * 37 + h2, new HashCodeBuilder(17, 37).append(obj).toHashCode());
    }
    @Test
    public void testDoubleArrayAsObject() {
        final double[] obj = new double[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[0] = 5.4d;
        final long l1 = Double.doubleToLongBits(5.4d);
        final int h1 = (int) (l1 ^ l1 >> 32);
        assertEquals((17 * 37 + h1) * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[1] = 6.3d;
        final long l2 = Double.doubleToLongBits(6.3d);
        final int h2 = (int) (l2 ^ l2 >> 32);
        assertEquals((17 * 37 + h1) * 37 + h2, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
    }
    @Test
    public void testFloatArray() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((float[]) null).toHashCode());
        final float[] obj = new float[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = 5.4f;
        final int h1 = Float.floatToIntBits(5.4f);
        assertEquals((17 * 37 + h1) * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[1] = 6.3f;
        final int h2 = Float.floatToIntBits(6.3f);
        assertEquals((17 * 37 + h1) * 37 + h2, new HashCodeBuilder(17, 37).append(obj).toHashCode());
    }
    @Test
    public void testFloatArrayAsObject() {
        final float[] obj = new float[2];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[0] = 5.4f;
        final int h1 = Float.floatToIntBits(5.4f);
        assertEquals((17 * 37 + h1) * 37, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[1] = 6.3f;
        final int h2 = Float.floatToIntBits(6.3f);
        assertEquals((17 * 37 + h1) * 37 + h2, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
    }
    // boolean elements use the same true -> 0, false -> 1 mapping as append(boolean).
    @Test
    public void testBooleanArray() {
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append((boolean[]) null).toHashCode());
        final boolean[] obj = new boolean[2];
        assertEquals((17 * 37 + 1) * 37 + 1, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = true;
        assertEquals((17 * 37 + 0) * 37 + 1, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[1] = false;
        assertEquals((17 * 37 + 0) * 37 + 1, new HashCodeBuilder(17, 37).append(obj).toHashCode());
    }
    @Test
    public void testBooleanArrayAsObject() {
        final boolean[] obj = new boolean[2];
        assertEquals((17 * 37 + 1) * 37 + 1, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[0] = true;
        assertEquals((17 * 37 + 0) * 37 + 1, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
        obj[1] = false;
        assertEquals((17 * 37 + 0) * 37 + 1, new HashCodeBuilder(17, 37).append((Object) obj).toHashCode());
    }
    // Nested arrays are hashed recursively, element by element.
    @Test
    public void testBooleanMultiArray() {
        final boolean[][] obj = new boolean[2][];
        assertEquals(17 * 37 * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = new boolean[0];
        assertEquals(17 * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = new boolean[1];
        assertEquals((17 * 37 + 1) * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0] = new boolean[2];
        assertEquals(((17 * 37 + 1) * 37 + 1) * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[0][0] = true;
        assertEquals(((17 * 37 + 0) * 37 + 1) * 37, new HashCodeBuilder(17, 37).append(obj).toHashCode());
        obj[1] = new boolean[1];
        assertEquals(((17 * 37 + 0) * 37 + 1) * 37 + 1, new HashCodeBuilder(17, 37).append(obj).toHashCode());
    }
    // Excluded field names are skipped; unknown names and null are ignored.
    @Test
    public void testReflectionHashCodeExcludeFields() throws Exception {
        final TestObjectWithMultipleFields x = new TestObjectWithMultipleFields(1, 2, 3);
        assertEquals(((17 * 37 + 1) * 37 + 2) * 37 + 3, HashCodeBuilder.reflectionHashCode(x));
        assertEquals(((17 * 37 + 1) * 37 + 2) * 37 + 3, HashCodeBuilder.reflectionHashCode(x, (String[]) null));
        assertEquals(((17 * 37 + 1) * 37 + 2) * 37 + 3, HashCodeBuilder.reflectionHashCode(x));
        assertEquals(((17 * 37 + 1) * 37 + 2) * 37 + 3, HashCodeBuilder.reflectionHashCode(x, "xxx"));
        assertEquals((17 * 37 + 1) * 37 + 3, HashCodeBuilder.reflectionHashCode(x, "two"));
        assertEquals((17 * 37 + 1) * 37 + 2, HashCodeBuilder.reflectionHashCode(x, "three"));
        assertEquals(17 * 37 + 1, HashCodeBuilder.reflectionHashCode(x, "two", "three"));
        assertEquals(17, HashCodeBuilder.reflectionHashCode(x, "one", "two", "three"));
        assertEquals(17, HashCodeBuilder.reflectionHashCode(x, "one", "two", "three", "xxx"));
    }
    static class TestObjectWithMultipleFields {
        @SuppressWarnings("unused")
        private int one = 0;
        @SuppressWarnings("unused")
        private int two = 0;
        @SuppressWarnings("unused")
        private int three = 0;
        TestObjectWithMultipleFields(final int one, final int two, final int three) {
            this.one = one;
            this.two = two;
            this.three = three;
        }
    }
    /**
     * Test Objects pointing to each other.
     */
    @Test
    public void testReflectionObjectCycle() {
        final ReflectionTestCycleA a = new ReflectionTestCycleA();
        final ReflectionTestCycleB b = new ReflectionTestCycleB();
        a.b = b;
        b.a = a;
        // Used to caused:
        // java.lang.StackOverflowError
        // at java.lang.ClassLoader.getCallerClassLoader(Native Method)
        // at java.lang.Class.getDeclaredFields(Class.java:992)
        // at org.apache.commons.lang.builder.HashCodeBuilder.reflectionAppend(HashCodeBuilder.java:373)
        // at org.apache.commons.lang.builder.HashCodeBuilder.reflectionHashCode(HashCodeBuilder.java:349)
        // at org.apache.commons.lang.builder.HashCodeBuilder.reflectionHashCode(HashCodeBuilder.java:155)
        // at
        // org.apache.commons.lang.builder.HashCodeBuilderTest$ReflectionTestCycleB.hashCode(HashCodeBuilderTest.java:53)
        // at org.apache.commons.lang.builder.HashCodeBuilder.append(HashCodeBuilder.java:422)
        // at org.apache.commons.lang.builder.HashCodeBuilder.reflectionAppend(HashCodeBuilder.java:383)
        // at org.apache.commons.lang.builder.HashCodeBuilder.reflectionHashCode(HashCodeBuilder.java:349)
        // at org.apache.commons.lang.builder.HashCodeBuilder.reflectionHashCode(HashCodeBuilder.java:155)
        // at
        // org.apache.commons.lang.builder.HashCodeBuilderTest$ReflectionTestCycleA.hashCode(HashCodeBuilderTest.java:42)
        // at org.apache.commons.lang.builder.HashCodeBuilder.append(HashCodeBuilder.java:422)
        a.hashCode();
        assertNull(HashCodeBuilder.getRegistry());
        b.hashCode();
        assertNull(HashCodeBuilder.getRegistry());
    }
    /**
     * Ensures LANG-520 remains true
     */
    @Test
    public void testToHashCodeEqualsHashCode() {
        final HashCodeBuilder hcb = new HashCodeBuilder(17, 37).append(new Object()).append('a');
        assertEquals("hashCode() is no longer returning the same value as toHashCode() - see LANG-520",
                hcb.toHashCode(), hcb.hashCode());
    }
    // Fixture: field 'a' is annotated @HashCodeExclude and must be skipped.
    static class TestObjectHashCodeExclude {
        @HashCodeExclude
        private final int a;
        private final int b;
        TestObjectHashCodeExclude(final int a, final int b) {
            this.a = a;
            this.b = b;
        }
        public int getA() {
            return a;
        }
        public int getB() {
            return b;
        }
    }
    // Fixture: both fields are excluded, so only the initial value remains.
    static class TestObjectHashCodeExclude2 {
        @HashCodeExclude
        private final int a;
        @HashCodeExclude
        private final int b;
        TestObjectHashCodeExclude2(final int a, final int b) {
            this.a = a;
            this.b = b;
        }
        public int getA() {
            return a;
        }
        public int getB() {
            return b;
        }
    }
    @Test
    public void testToHashCodeExclude() {
        final TestObjectHashCodeExclude one = new TestObjectHashCodeExclude(1, 2);
        final TestObjectHashCodeExclude2 two = new TestObjectHashCodeExclude2(1, 2);
        assertEquals(17 * 37 + 2, HashCodeBuilder.reflectionHashCode(one));
        assertEquals(17, HashCodeBuilder.reflectionHashCode(two));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.ignite.Ignite;
import org.apache.ignite.Ignition;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.marshaller.Marshaller;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonTest;
/**
* Test for {@link ClusterGroup}.
*/
@GridCommonTest(group = "Kernal Self")
public class ClusterGroupSelfTest extends ClusterGroupAbstractTest {
/** Nodes count. */
private static final int NODES_CNT = 4;
/** Projection node IDs. */
private static Collection<UUID> ids;
/** */
private static Ignite ignite;
/** {@inheritDoc} */
@SuppressWarnings({"ConstantConditions"})
@Override protected void beforeTestsStarted() throws Exception {
    assert NODES_CNT > 2;
    ids = new LinkedList<>();
    try {
        // Nodes 0 and 1 start as servers; the remaining nodes start in client mode.
        for (int i = 0; i < NODES_CNT; i++) {
            Ignition.setClientMode(i > 1);
            Ignite g = startGrid(i);
            ids.add(g.cluster().localNode().id());
            if (i == 0)
                ignite = g;
        }
        waitForTopology(NODES_CNT);
    }
    finally {
        // Reset client mode so nodes started by individual tests are servers.
        Ignition.setClientMode(false);
    }
}
/** {@inheritDoc} */
@Override protected ClusterGroup projection() {
    // Projection over exactly the nodes started in beforeTestsStarted().
    return grid(0).cluster().forPredicate(F.nodeForNodeIds(ids));
}
/** {@inheritDoc} */
@Override protected UUID localNodeId() {
    return grid(0).localNode().id();
}
/**
 * forRandom() must pick some node that is part of the cluster.
 *
 * @throws Exception If failed.
 */
public void testRandom() throws Exception {
    assertTrue(ignite.cluster().nodes().contains(ignite.cluster().forRandom().node()));
}
/**
 * forOldest() must resolve to the node with the smallest topology order,
 * and must stay empty for an empty projection.
 *
 * @throws Exception If failed.
 */
public void testOldest() throws Exception {
    ClusterGroup oldestGrp = ignite.cluster().forOldest();

    // Find the expected node by scanning for the lowest order manually.
    ClusterNode expected = null;

    long lowestOrder = Long.MAX_VALUE;

    for (ClusterNode candidate : ignite.cluster().nodes()) {
        long order = candidate.order();

        if (order < lowestOrder) {
            expected = candidate;
            lowestOrder = order;
        }
    }

    assertEquals(oldestGrp.node(), ignite.cluster().forNode(expected).node());

    // forOldest() over an empty projection yields an empty projection.
    ClusterGroup emptyGrp = ignite.cluster().forAttribute("nonExistent", "val");

    assertEquals(0, emptyGrp.forOldest().nodes().size());
}
/**
 * forYoungest() must resolve to the node with the largest topology order,
 * and must stay empty for an empty projection.
 *
 * @throws Exception If failed.
 */
public void testYoungest() throws Exception {
    ClusterGroup youngestGrp = ignite.cluster().forYoungest();

    // Find the expected node by scanning for the highest order manually.
    ClusterNode expected = null;

    long highestOrder = Long.MIN_VALUE;

    for (ClusterNode candidate : ignite.cluster().nodes()) {
        long order = candidate.order();

        if (order > highestOrder) {
            expected = candidate;
            highestOrder = order;
        }
    }

    assertEquals(youngestGrp.node(), ignite.cluster().forNode(expected).node());

    // forYoungest() over an empty projection yields an empty projection.
    ClusterGroup emptyGrp = ignite.cluster().forAttribute("nonExistent", "val");

    assertEquals(0, emptyGrp.forYoungest().nodes().size());
}
/**
 * forDaemons()/forServers() must track daemon and server nodes dynamically
 * as nodes join the topology.
 *
 * @throws Exception If failed.
 */
public void testForDaemons() throws Exception {
    assertEquals(4, ignite.cluster().nodes().size());
    ClusterGroup daemons = ignite.cluster().forDaemons();
    ClusterGroup srvs = ignite.cluster().forServers();
    assertEquals(0, daemons.nodes().size());
    assertEquals(2, srvs.nodes().size());
    Ignition.setDaemon(true);
    try (Ignite g = startGrid(NODES_CNT)) {
        Ignition.setDaemon(false);
        try (Ignite g1 = startGrid(NODES_CNT + 1)) {
            // One daemon and one extra server joined; the live cluster groups
            // must reflect both without being re-created.
            assertEquals(1, ignite.cluster().forDaemons().nodes().size());
            assertEquals(3, srvs.nodes().size());
            assertEquals(1, daemons.nodes().size());
        }
    }
}
/**
* @throws Exception If failed.
*/
public void testNewNodes() throws Exception {
ClusterGroup youngest = ignite.cluster().forYoungest();
ClusterGroup oldest = ignite.cluster().forOldest();
ClusterNode old = oldest.node();
ClusterNode last = youngest.node();
assertNotNull(last);
try (Ignite g = startGrid(NODES_CNT)) {
ClusterNode n = g.cluster().localNode();
ClusterNode latest = youngest.node();
assertNotNull(latest);
assertEquals(latest.id(), n.id());
assertEquals(oldest.node(), old);
}
}
/**
* @throws Exception If failed.
*/
public void testForPredicate() throws Exception {
IgnitePredicate<ClusterNode> evenP = new IgnitePredicate<ClusterNode>() {
@Override public boolean apply(ClusterNode node) {
return node.order() % 2 == 0;
}
};
IgnitePredicate<ClusterNode> oddP = new IgnitePredicate<ClusterNode>() {
@Override public boolean apply(ClusterNode node) {
return node.order() % 2 == 1;
}
};
ClusterGroup remotes = ignite.cluster().forRemotes();
ClusterGroup evenYoungest = remotes.forPredicate(evenP).forYoungest();
ClusterGroup evenOldest = remotes.forPredicate(evenP).forOldest();
ClusterGroup oddYoungest = remotes.forPredicate(oddP).forYoungest();
ClusterGroup oddOldest = remotes.forPredicate(oddP).forOldest();
int clusterSize = ignite.cluster().nodes().size();
assertEquals(grid(gridMaxOrder(clusterSize, true)).localNode().id(), evenYoungest.node().id());
assertEquals(grid(1).localNode().id(), evenOldest.node().id());
assertEquals(grid(gridMaxOrder(clusterSize, false)).localNode().id(), oddYoungest.node().id());
assertEquals(grid(2).localNode().id(), oddOldest.node().id());
try (Ignite g4 = startGrid(NODES_CNT); Ignite g5 = startGrid(NODES_CNT + 1)) {
clusterSize = g4.cluster().nodes().size();
assertEquals(grid(gridMaxOrder(clusterSize, true)).localNode().id(), evenYoungest.node().id());
assertEquals(grid(1).localNode().id(), evenOldest.node().id());
assertEquals(grid(gridMaxOrder(clusterSize, false)).localNode().id(), oddYoungest.node().id());
assertEquals(grid(2).localNode().id(), oddOldest.node().id());
}
}
/**
* @throws Exception If failed.
*/
public void testAgeClusterGroupSerialization() throws Exception {
Marshaller marshaller = ignite.configuration().getMarshaller();
ClusterGroup grp = ignite.cluster().forYoungest();
ClusterNode node = grp.node();
byte[] arr = marshaller.marshal(grp);
ClusterGroup obj = marshaller.unmarshal(arr, null);
assertEquals(node.id(), obj.node().id());
try (Ignite ignore = startGrid()) {
obj = marshaller.unmarshal(arr, null);
assertEquals(grp.node().id(), obj.node().id());
assertFalse(node.id().equals(obj.node().id()));
}
}
/**
* @throws Exception If failed.
*/
public void testClientServer() throws Exception {
ClusterGroup srv = ignite.cluster().forServers();
assertEquals(2, srv.nodes().size());
assertTrue(srv.nodes().contains(ignite(0).cluster().localNode()));
assertTrue(srv.nodes().contains(ignite(1).cluster().localNode()));
ClusterGroup cli = ignite.cluster().forClients();
assertEquals(2, srv.nodes().size());
assertTrue(cli.nodes().contains(ignite(2).cluster().localNode()));
assertTrue(cli.nodes().contains(ignite(3).cluster().localNode()));
}
/**
* @throws Exception If failed.
*/
public void testForCacheNodesOnDynamicCacheCreateDestroy() throws Exception {
Random rnd = ThreadLocalRandom.current();
final AtomicReference<Exception> ex = new AtomicReference<>();
IgniteInternalFuture fut = runCacheCreateDestroyTask(ex);
while (!fut.isDone())
ignite.cluster().forCacheNodes("cache" + rnd.nextInt(16)).nodes();
if (ex.get() != null)
throw ex.get();
}
/**
* @throws Exception If failed.
*/
public void testForClientNodesOnDynamicCacheCreateDestroy() throws Exception {
Random rnd = ThreadLocalRandom.current();
final AtomicReference<Exception> ex = new AtomicReference<>();
IgniteInternalFuture fut = runCacheCreateDestroyTask(ex);
while (!fut.isDone())
ignite.cluster().forClientNodes("cache" + rnd.nextInt(16)).nodes();
if (ex.get() != null)
throw ex.get();
}
    /**
     * Starts 4 worker threads that repeatedly create and destroy disjoint sets of
     * caches ("cacheN") for ~5 seconds. Each worker owns 4 cache names (based on a
     * shared counter), alternating between a create phase and a destroy phase, and
     * cleans up any caches it still owns on exit. Errors are accumulated into the
     * supplied holder rather than thrown.
     *
     * @param exHldr Exception holder.
     * @return Task future.
     */
    private IgniteInternalFuture runCacheCreateDestroyTask(final AtomicReference<Exception> exHldr) {
        final long deadline = System.currentTimeMillis() + 5000;
        final AtomicInteger cntr = new AtomicInteger();
        return GridTestUtils.runMultiThreadedAsync(new Runnable() {
            @Override public void run() {
                // Each worker claims a disjoint block of 4 cache indexes.
                int startIdx = cntr.getAndAdd(4);
                int idx = 0;
                boolean start = true;
                // Tracks caches this worker currently believes exist, for final cleanup.
                Set<String> caches = U.newHashSet(4);
                while (System.currentTimeMillis() < deadline) {
                    try {
                        if (start) {
                            caches.add("cache" + (startIdx + idx));
                            ignite.createCache("cache" + (startIdx + idx));
                        }
                        else {
                            ignite.destroyCache("cache" + (startIdx + idx));
                            caches.remove("cache" + (startIdx + idx));
                        }
                        // After cycling through all 4 indexes, flip between create/destroy phases.
                        if ((idx = (idx + 1) % 4) == 0)
                            start = !start;
                    }
                    catch (Exception e) {
                        addException(exHldr, e);
                        break;
                    }
                }
                // Best-effort cleanup of whatever this worker left behind.
                for (String cache : caches) {
                    try {
                        ignite.destroyCache(cache);
                    }
                    catch (Exception e) {
                        addException(exHldr, e);
                    }
                }
            }
        }, 4, "cache-start-destroy");
    }
/**
* @param exHldr Exception holder.
* @param ex Exception.
*/
private void addException(AtomicReference<Exception> exHldr, Exception ex) {
if (exHldr.get() != null || !exHldr.compareAndSet(null, ex))
exHldr.get().addSuppressed(ex);
}
/**
* @throws Exception If failed.
*/
public void testEmptyGroup() throws Exception {
ClusterGroup emptyGrp = ignite.cluster().forAttribute("nonExistent", "val");
assertEquals(0, emptyGrp.forOldest().nodes().size());
assertEquals(0, emptyGrp.forYoungest().nodes().size());
assertEquals(0, emptyGrp.forAttribute("nonExistent2", "val").nodes().size());
assertEquals(0, emptyGrp.forCacheNodes("cacheName").nodes().size());
assertEquals(0, emptyGrp.forClientNodes("cacheName").nodes().size());
assertEquals(0, emptyGrp.forClients().nodes().size());
assertEquals(0, emptyGrp.forDaemons().nodes().size());
assertEquals(0, emptyGrp.forDataNodes("cacheName").nodes().size());
assertEquals(0, emptyGrp.forRandom().nodes().size());
assertEquals(0, emptyGrp.forRemotes().nodes().size());
assertEquals(0, emptyGrp.forServers().nodes().size());
assertEquals(0, emptyGrp.forHost(ignite.cluster().localNode()).nodes().size());
assertEquals(0, emptyGrp.forHost("127.0.0.1").nodes().size());
}
/**
* @param cnt Count.
* @param even Even.
*/
private static int gridMaxOrder(int cnt, boolean even) {
assert cnt > 2;
cnt = cnt - (cnt % 2);
return even ? cnt - 1 : cnt - 2;
}
}
| |
/*
* Copyright 2001-2009 Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
package org.quartz.core;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
import org.quartz.JobPersistenceException;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.Trigger.CompletedExecutionInstruction;
import org.quartz.spi.OperableTrigger;
import org.quartz.spi.TriggerFiredBundle;
import org.quartz.spi.TriggerFiredResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p>
* The thread responsible for performing the work of firing <code>{@link Trigger}</code>
* s that are registered with the <code>{@link QuartzScheduler}</code>.
* </p>
*
* @see QuartzScheduler
* @see org.quartz.Job
* @see Trigger
*
* @author James House
*/
public class QuartzSchedulerThread extends Thread {
/*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* Data members.
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
private QuartzScheduler qs;
private QuartzSchedulerResources qsRsrcs;
private final Object sigLock = new Object();
private boolean signaled;
private long signaledNextFireTime;
private boolean paused;
private AtomicBoolean halted;
private Random random = new Random(System.currentTimeMillis());
// When the scheduler finds there is no current trigger to fire, how long
// it should wait until checking again...
private static long DEFAULT_IDLE_WAIT_TIME = 30L * 1000L;
private long idleWaitTime = DEFAULT_IDLE_WAIT_TIME;
private int idleWaitVariablness = 7 * 1000;
private final Logger log = LoggerFactory.getLogger(getClass());
/*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* Constructors.
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
/**
* <p>
* Construct a new <code>QuartzSchedulerThread</code> for the given
* <code>QuartzScheduler</code> as a non-daemon <code>Thread</code>
* with normal priority.
* </p>
*/
QuartzSchedulerThread(QuartzScheduler qs, QuartzSchedulerResources qsRsrcs) {
this(qs, qsRsrcs, qsRsrcs.getMakeSchedulerThreadDaemon(), Thread.NORM_PRIORITY);
}
/**
* <p>
* Construct a new <code>QuartzSchedulerThread</code> for the given
* <code>QuartzScheduler</code> as a <code>Thread</code> with the given
* attributes.
* </p>
*/
QuartzSchedulerThread(QuartzScheduler qs, QuartzSchedulerResources qsRsrcs, boolean setDaemon, int threadPrio) {
super(qs.getSchedulerThreadGroup(), qsRsrcs.getThreadName());
this.qs = qs;
this.qsRsrcs = qsRsrcs;
this.setDaemon(setDaemon);
if(qsRsrcs.isThreadsInheritInitializersClassLoadContext()) {
log.info("QuartzSchedulerThread Inheriting ContextClassLoader of thread: " + Thread.currentThread().getName());
this.setContextClassLoader(Thread.currentThread().getContextClassLoader());
}
this.setPriority(threadPrio);
// start the underlying thread, but put this object into the 'paused'
// state
// so processing doesn't start yet...
paused = true;
halted = new AtomicBoolean(false);
}
/*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*
* Interface.
*
* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
void setIdleWaitTime(long waitTime) {
idleWaitTime = waitTime;
idleWaitVariablness = (int) (waitTime * 0.2);
}
private long getRandomizedIdleWaitTime() {
return idleWaitTime - random.nextInt(idleWaitVariablness);
}
/**
* <p>
* Signals the main processing loop to pause at the next possible point.
* </p>
*/
void togglePause(boolean pause) {
synchronized (sigLock) {
paused = pause;
if (paused) {
signalSchedulingChange(0);
} else {
sigLock.notifyAll();
}
}
}
/**
* <p>
* Signals the main processing loop to pause at the next possible point.
* </p>
*/
void halt(boolean wait) {
synchronized (sigLock) {
halted.set(true);
if (paused) {
sigLock.notifyAll();
} else {
signalSchedulingChange(0);
}
}
if (wait) {
boolean interrupted = false;
try {
while (true) {
try {
join();
break;
} catch (InterruptedException _) {
interrupted = true;
}
}
} finally {
if (interrupted) {
Thread.currentThread().interrupt();
}
}
}
}
boolean isPaused() {
return paused;
}
/**
* <p>
* Signals the main processing loop that a change in scheduling has been
* made - in order to interrupt any sleeping that may be occuring while
* waiting for the fire time to arrive.
* </p>
*
* @param candidateNewNextFireTime the time (in millis) when the newly scheduled trigger
* will fire. If this method is being called do to some other even (rather
* than scheduling a trigger), the caller should pass zero (0).
*/
public void signalSchedulingChange(long candidateNewNextFireTime) {
synchronized(sigLock) {
signaled = true;
signaledNextFireTime = candidateNewNextFireTime;
sigLock.notifyAll();
}
}
public void clearSignaledSchedulingChange() {
synchronized(sigLock) {
signaled = false;
signaledNextFireTime = 0;
}
}
public boolean isScheduleChanged() {
synchronized(sigLock) {
return signaled;
}
}
public long getSignaledNextFireTime() {
synchronized(sigLock) {
return signaledNextFireTime;
}
}
/**
* <p>
* The main processing loop of the <code>QuartzSchedulerThread</code>.
* </p>
*/
@Override
public void run() {
boolean lastAcquireFailed = false;
while (!halted.get()) {
try {
// check if we're supposed to pause...
synchronized (sigLock) {
while (paused && !halted.get()) {
try {
// wait until togglePause(false) is called...
sigLock.wait(1000L);
} catch (InterruptedException ignore) {
}
}
if (halted.get()) {
break;
}
}
int availThreadCount = 1;
if(qsRsrcs.getTspProcessJob() == null){
availThreadCount = qsRsrcs.getThreadPool().blockForAvailableThreads();
}
if(availThreadCount > 0) { // will always be true, due to semantics of blockForAvailableThreads...
List<OperableTrigger> triggers = null;
long now = System.currentTimeMillis();
clearSignaledSchedulingChange();
try {
if(qsRsrcs.getTspProcessJob() == null){
triggers = qsRsrcs.getJobStore().acquireNextTriggers(
now + idleWaitTime, Math.min(availThreadCount, qsRsrcs.getMaxBatchSize()), qsRsrcs.getBatchTimeWindow());
}else{
triggers = qsRsrcs.getJobStore().acquireNextTriggers(
now + idleWaitTime, qsRsrcs.getMaxBatchSize(), qsRsrcs.getBatchTimeWindow());
}
lastAcquireFailed = false;
if (log.isDebugEnabled())
log.debug("batch acquisition of " + (triggers == null ? 0 : triggers.size()) + " triggers");
} catch (JobPersistenceException jpe) {
if(!lastAcquireFailed) {
qs.notifySchedulerListenersError(
"An error occurred while scanning for the next triggers to fire.",
jpe);
}
lastAcquireFailed = true;
continue;
} catch (RuntimeException e) {
if(!lastAcquireFailed) {
getLog().error("quartzSchedulerThreadLoop: RuntimeException "
+e.getMessage(), e);
}
lastAcquireFailed = true;
continue;
}
if (triggers != null && !triggers.isEmpty()) {
now = System.currentTimeMillis();
long triggerTime = triggers.get(0).getNextFireTime().getTime();
long timeUntilTrigger = triggerTime - now;
while(timeUntilTrigger > 2) {
synchronized (sigLock) {
if (halted.get()) {
break;
}
if (!isCandidateNewTimeEarlierWithinReason(triggerTime, false)) {
try {
// we could have blocked a long while
// on 'synchronize', so we must recompute
now = System.currentTimeMillis();
timeUntilTrigger = triggerTime - now;
if(timeUntilTrigger >= 1)
sigLock.wait(timeUntilTrigger);
} catch (InterruptedException ignore) {
}
}
}
if(releaseIfScheduleChangedSignificantly(triggers, triggerTime)) {
break;
}
now = System.currentTimeMillis();
timeUntilTrigger = triggerTime - now;
}
// this happens if releaseIfScheduleChangedSignificantly decided to release triggers
if(triggers.isEmpty())
continue;
// set triggers to 'executing'
List<TriggerFiredResult> bndles = new ArrayList<TriggerFiredResult>();
boolean goAhead = true;
synchronized(sigLock) {
goAhead = !halted.get();
}
if(goAhead) {
try {
List<TriggerFiredResult> res = qsRsrcs.getJobStore().triggersFired(triggers);
if(res != null)
bndles = res;
} catch (SchedulerException se) {
qs.notifySchedulerListenersError(
"An error occurred while firing triggers '"
+ triggers + "'", se);
//QTZ-179 : a problem occurred interacting with the triggers from the db
//we release them and loop again
for (int i = 0; i < triggers.size(); i++) {
qsRsrcs.getJobStore().releaseAcquiredTrigger(triggers.get(i));
}
continue;
}
}
for (int i = 0; i < bndles.size(); i++) {
TriggerFiredResult result = bndles.get(i);
TriggerFiredBundle bndle = result.getTriggerFiredBundle();
Exception exception = result.getException();
if (exception instanceof RuntimeException) {
getLog().error("RuntimeException while firing trigger " + triggers.get(i), exception);
qsRsrcs.getJobStore().releaseAcquiredTrigger(triggers.get(i));
continue;
}
// it's possible to get 'null' if the triggers was paused,
// blocked, or other similar occurrences that prevent it being
// fired at this time... or if the scheduler was shutdown (halted)
if (bndle == null) {
qsRsrcs.getJobStore().releaseAcquiredTrigger(triggers.get(i));
continue;
}
// transform source code begin
if(qsRsrcs.getTspProcessJob() != null){
JobRunShellTsp shell = null;
try {
shell = qsRsrcs.getTspJobRunShellFactory().createJobRunShell(bndle);
boolean prepareStatus = shell.initialize(qs, qsRsrcs.getTspProcessJob());
if(!prepareStatus){
continue;
}
} catch (SchedulerException e) {
qsRsrcs.getJobStore().triggeredJobComplete(triggers.get(i), bndle.getJobDetail(), CompletedExecutionInstruction.SET_ALL_JOB_TRIGGERS_ERROR);
continue;
}
qsRsrcs.getTspProcessJob().run(shell);
} else {
JobRunShell shell = null;
try {
shell = qsRsrcs.getJobRunShellFactory().createJobRunShell(bndle);
shell.initialize(qs);
} catch (SchedulerException se) {
qsRsrcs.getJobStore().triggeredJobComplete(triggers.get(i), bndle.getJobDetail(), CompletedExecutionInstruction.SET_ALL_JOB_TRIGGERS_ERROR);
continue;
}
if (qsRsrcs.getThreadPool().runInThread(shell) == false) {
getLog().error("ThreadPool.runInThread() return false!");
qsRsrcs.getJobStore().triggeredJobComplete(triggers.get(i), bndle.getJobDetail(), CompletedExecutionInstruction.SET_ALL_JOB_TRIGGERS_ERROR);
}
}
// transform source code end
}
continue; // while (!halted)
}
} else { // if(availThreadCount > 0)
// should never happen, if threadPool.blockForAvailableThreads() follows contract
continue; // while (!halted)
}
long now = System.currentTimeMillis();
long waitTime = now + getRandomizedIdleWaitTime();
long timeUntilContinue = waitTime - now;
synchronized(sigLock) {
try {
if(!halted.get()) {
// QTZ-336 A job might have been completed in the mean time and we might have
// missed the scheduled changed signal by not waiting for the notify() yet
// Check that before waiting for too long in case this very job needs to be
// scheduled very soon
if (!isScheduleChanged()) {
sigLock.wait(timeUntilContinue);
}
}
} catch (InterruptedException ignore) {
}
}
} catch(RuntimeException re) {
getLog().error("Runtime error occurred in main trigger firing loop.", re);
}
} // while (!halted)
// drop references to scheduler stuff to aid garbage collection...
qs = null;
qsRsrcs = null;
}
private boolean releaseIfScheduleChangedSignificantly(
List<OperableTrigger> triggers, long triggerTime) {
if (isCandidateNewTimeEarlierWithinReason(triggerTime, true)) {
// above call does a clearSignaledSchedulingChange()
for (OperableTrigger trigger : triggers) {
qsRsrcs.getJobStore().releaseAcquiredTrigger(trigger);
}
triggers.clear();
return true;
}
return false;
}
private boolean isCandidateNewTimeEarlierWithinReason(long oldTime, boolean clearSignal) {
// So here's the deal: We know due to being signaled that 'the schedule'
// has changed. We may know (if getSignaledNextFireTime() != 0) the
// new earliest fire time. We may not (in which case we will assume
// that the new time is earlier than the trigger we have acquired).
// In either case, we only want to abandon our acquired trigger and
// go looking for a new one if "it's worth it". It's only worth it if
// the time cost incurred to abandon the trigger and acquire a new one
// is less than the time until the currently acquired trigger will fire,
// otherwise we're just "thrashing" the job store (e.g. database).
//
// So the question becomes when is it "worth it"? This will depend on
// the job store implementation (and of course the particular database
// or whatever behind it). Ideally we would depend on the job store
// implementation to tell us the amount of time in which it "thinks"
// it can abandon the acquired trigger and acquire a new one. However
// we have no current facility for having it tell us that, so we make
// a somewhat educated but arbitrary guess ;-).
synchronized(sigLock) {
if (!isScheduleChanged())
return false;
boolean earlier = false;
if(getSignaledNextFireTime() == 0)
earlier = true;
else if(getSignaledNextFireTime() < oldTime )
earlier = true;
if(earlier) {
// so the new time is considered earlier, but is it enough earlier?
long diff = oldTime - System.currentTimeMillis();
if(diff < (qsRsrcs.getJobStore().supportsPersistence() ? 70L : 7L))
earlier = false;
}
if(clearSignal) {
clearSignaledSchedulingChange();
}
return earlier;
}
}
public Logger getLog() {
return log;
}
} // end of QuartzSchedulerThread
| |
package us.kbase.test.auth2.lib;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static us.kbase.test.auth2.TestCommon.assertClear;
import static us.kbase.test.auth2.TestCommon.set;
import static us.kbase.test.auth2.lib.AuthenticationTester.assertLogEventsCorrect;
import static us.kbase.test.auth2.lib.AuthenticationTester.initTestMocks;
import java.time.Clock;
import java.time.Instant;
import java.util.List;
import java.util.UUID;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.spi.ILoggingEvent;
import us.kbase.auth2.cryptutils.RandomDataGenerator;
import us.kbase.auth2.lib.Authentication;
import us.kbase.auth2.lib.DisplayName;
import us.kbase.auth2.lib.EmailAddress;
import us.kbase.auth2.lib.Password;
import us.kbase.auth2.lib.PasswordHashAndSalt;
import us.kbase.auth2.lib.Role;
import us.kbase.auth2.lib.UserName;
import us.kbase.auth2.lib.exceptions.ErrorType;
import us.kbase.auth2.lib.exceptions.NoSuchRoleException;
import us.kbase.auth2.lib.exceptions.UnauthorizedException;
import us.kbase.auth2.lib.exceptions.UserExistsException;
import us.kbase.auth2.lib.storage.AuthStorage;
import us.kbase.auth2.lib.token.IncomingToken;
import us.kbase.auth2.lib.token.StoredToken;
import us.kbase.auth2.lib.token.TokenType;
import us.kbase.auth2.lib.user.AuthUser;
import us.kbase.auth2.lib.user.LocalUser;
import us.kbase.test.auth2.TestCommon;
import us.kbase.test.auth2.lib.AuthenticationTester.AbstractAuthOperation;
import us.kbase.test.auth2.lib.AuthenticationTester.LocalUserAnswerMatcher;
import us.kbase.test.auth2.lib.AuthenticationTester.LogEvent;
import us.kbase.test.auth2.lib.AuthenticationTester.TestMocks;
/**
 * Unit tests for local user creation via {@code Authentication.createLocalUser()}:
 * success paths for the various admin roles, the expected failure modes, and the
 * associated log output.
 */
public class AuthenticationCreateLocalUserTest {

    /* Some of these tests are time sensitive and verify() won't work because the object is
     * changed after the mocked method is called. Instead use an Answer:
     *
     * http://stackoverflow.com/questions/9085738/can-mockito-verify-parameters-based-on-their-values-at-the-time-of-method-call
     *
     */

    // Captures SLF4J log output from the Authentication class so tests can assert on it.
    private static List<ILoggingEvent> logEvents;

    @BeforeClass
    public static void beforeClass() {
        logEvents = AuthenticationTester.setUpSLF4JTestLoggerAppender();
    }

    @Before
    public void before() {
        // Start each test with an empty log buffer.
        logEvents.clear();
    }

    private final static Instant NOW = Instant.now();

    /** User creation should succeed for an admin with the ADMIN role. */
    @Test
    public void createWithAdminUser() throws Exception {
        final AuthUser admin = AuthUser.getBuilder(
                new UserName("admin"), new DisplayName("foo"), NOW)
                .withEmailAddress(new EmailAddress("f@g.com"))
                .withRole(Role.ADMIN).build();
        create(admin);
    }

    /** User creation should succeed for an admin with the CREATE_ADMIN role. */
    @Test
    public void createWithCreateAdminUser() throws Exception {
        final AuthUser admin = AuthUser.getBuilder(
                new UserName("admin"), new DisplayName("foo"), NOW)
                .withEmailAddress(new EmailAddress("f@g.com"))
                .withRole(Role.CREATE_ADMIN).build();
        create(admin);
    }

    /** User creation should succeed for the root user. */
    @Test
    public void createWithRootUser() throws Exception {
        final AuthUser admin = AuthUser.getBuilder(
                UserName.ROOT, new DisplayName("foo"), NOW)
                .withEmailAddress(new EmailAddress("f@g.com")).build();
        create(admin);
    }

    /**
     * Happy-path helper: stubs token lookup, admin lookup, password/salt generation
     * and the clock, then creates the user and checks the returned temporary password,
     * that the password material was cleared, and the INFO log line.
     *
     * @param adminUser the (mocked) admin performing the creation.
     */
    private void create(final AuthUser adminUser) throws Exception {
        final TestMocks testauth = initTestMocks();
        final AuthStorage storage = testauth.storageMock;
        final Authentication auth = testauth.auth;
        final RandomDataGenerator rand = testauth.randGenMock;
        final Clock clock = testauth.clockMock;

        final IncomingToken token = new IncomingToken("foobar");
        final char[] pwdChar = new char [] {'a', 'a', 'a', 'a', 'a', 'b', 'a', 'a', 'a', 'a'};
        final byte[] salt = new byte[] {1, 2, 3, 4, 5, 6, 7, 8};
        // Precomputed hash of the temporary password above with the salt above.
        final byte[] hash = AuthenticationTester.fromBase64(
                "3TdeAz9GffU+pVH/yqNZrlL8e/nyPkM7VJiVmjzc0Cg=");
        final Instant create = Instant.ofEpochSecond(1000);

        when(storage.getToken(token.getHashedToken()))
                .thenReturn(StoredToken.getBuilder(
                        TokenType.LOGIN, UUID.randomUUID(), new UserName("admin"))
                        .withLifeTime(NOW, NOW).build());
        when(storage.getUser(new UserName("admin"))).thenReturn(adminUser);
        when(rand.getTemporaryPassword(10)).thenReturn(pwdChar);
        when(rand.generateSalt()).thenReturn(salt);
        when(clock.instant()).thenReturn(create);

        final LocalUser expected = LocalUser.getLocalUserBuilder(
                new UserName("foo"), new DisplayName("bar"), create)
                .withEmailAddress(new EmailAddress("f@g.com"))
                .withForceReset(true).build();

        // The matcher checks the user/credentials at the time createLocalUser is called,
        // since the hash/salt arrays are cleared afterwards.
        final LocalUserAnswerMatcher matcher = new LocalUserAnswerMatcher(
                expected, new PasswordHashAndSalt(hash, salt));

        doAnswer(matcher).when(storage).createLocalUser(
                any(LocalUser.class), any(PasswordHashAndSalt.class));

        final Password pwd = auth.createLocalUser(
                token, new UserName("foo"), new DisplayName("bar"), new EmailAddress("f@g.com"));

        assertThat("incorrect pwd", pwd.getPassword(), is(pwdChar));

        assertClear(matcher.savedSalt);
        assertClear(matcher.savedHash);

        /* ensure method was called at least once
         * Usually not necessary when mocking the call, but since createLU returns null
         * need to ensure the method was actually called and therefore the LocalUserAnswerMatcher
         * ran
         */
        verify(storage).createLocalUser(any(), any());

        assertLogEventsCorrect(logEvents,
                new LogEvent(Level.INFO, "Local user foo created by admin " +
                        adminUser.getUserName().getName(), Authentication.class));
    }

    /** Creation fails when the user name already exists in storage. */
    @Test
    public void createFailUserExists() throws Exception {
        // mostly for exercising the pwd, hash, and salt clears
        final TestMocks testauth = initTestMocks();
        final AuthStorage storage = testauth.storageMock;
        final Authentication auth = testauth.auth;
        final RandomDataGenerator rand = testauth.randGenMock;
        final Clock clock = testauth.clockMock;

        final IncomingToken token = new IncomingToken("foobar");
        final char[] pwdChar = new char [] {'a', 'a', 'a', 'a', 'a', 'b', 'a', 'a', 'a', 'a'};
        final byte[] salt = new byte[] {1, 2, 3, 4, 5, 6, 7, 8};
        final Instant create = Instant.ofEpochSecond(1000);

        when(storage.getToken(token.getHashedToken()))
                .thenReturn(StoredToken.getBuilder(
                        TokenType.LOGIN, UUID.randomUUID(), new UserName("admin"))
                        .withLifeTime(NOW, NOW).build());
        final AuthUser admin = AuthUser.getBuilder(
                new UserName("admin"), new DisplayName("foo"), NOW)
                .withEmailAddress(new EmailAddress("f@g.com"))
                .withRole(Role.ADMIN).build();
        when(storage.getUser(new UserName("admin"))).thenReturn(admin);
        when(rand.getTemporaryPassword(10)).thenReturn(pwdChar);
        when(rand.generateSalt()).thenReturn(salt);
        when(clock.instant()).thenReturn(create);

        doThrow(new UserExistsException("foo")).when(storage)
                .createLocalUser(any(LocalUser.class), any(PasswordHashAndSalt.class));

        failCreateLocalUser(auth, token, new UserName("foo"), new DisplayName("bar"),
                new EmailAddress("f@g.com"), new UserExistsException("foo"));
    }

    /**
     * A NoSuchRoleException from storage is not expected for local user creation (no
     * roles are supplied), so it surfaces as a RuntimeException.
     */
    @Test
    public void createFailIllegalRole() throws Exception {
        final TestMocks testauth = initTestMocks();
        final AuthStorage storage = testauth.storageMock;
        final Authentication auth = testauth.auth;
        final RandomDataGenerator rand = testauth.randGenMock;
        final Clock clock = testauth.clockMock;

        final IncomingToken token = new IncomingToken("foobar");
        final char[] pwdChar = new char [] {'a', 'a', 'a', 'a', 'a', 'b', 'a', 'a', 'a', 'a'};
        final byte[] salt = new byte[] {1, 2, 3, 4, 5, 6, 7, 8};
        final Instant create = Instant.ofEpochSecond(1000);

        when(storage.getToken(token.getHashedToken()))
                .thenReturn(StoredToken.getBuilder(
                        TokenType.LOGIN, UUID.randomUUID(), new UserName("admin"))
                        .withLifeTime(NOW, NOW).build());
        final AuthUser admin = AuthUser.getBuilder(
                new UserName("admin"), new DisplayName("foo"), NOW)
                .withEmailAddress(new EmailAddress("f@g.com"))
                .withRole(Role.ADMIN).build();
        when(storage.getUser(new UserName("admin"))).thenReturn(admin);
        when(rand.getTemporaryPassword(10)).thenReturn(pwdChar);
        when(rand.generateSalt()).thenReturn(salt);
        when(clock.instant()).thenReturn(create);

        doThrow(new NoSuchRoleException("foo")).when(storage)
                .createLocalUser(any(LocalUser.class), any(PasswordHashAndSalt.class));

        failCreateLocalUser(auth, token, new UserName("foo"), new DisplayName("bar"),
                new EmailAddress("f@g.com"), new RuntimeException("didn't supply any roles"));
    }

    /** A RuntimeException from the password generator propagates to the caller. */
    @Test
    public void createFailRuntimeOnGetPwd() throws Exception {
        // mostly for exercising the pwd, hash, and salt clears
        final TestMocks testauth = initTestMocks();
        final AuthStorage storage = testauth.storageMock;
        final Authentication auth = testauth.auth;
        final RandomDataGenerator rand = testauth.randGenMock;

        final IncomingToken token = new IncomingToken("foobar");

        when(storage.getToken(token.getHashedToken()))
                .thenReturn(StoredToken.getBuilder(
                        TokenType.LOGIN, UUID.randomUUID(), new UserName("admin"))
                        .withLifeTime(NOW, NOW).build());
        final AuthUser admin = AuthUser.getBuilder(
                new UserName("admin"), new DisplayName("foo"), NOW)
                .withEmailAddress(new EmailAddress("f@g.com"))
                .withRole(Role.ADMIN).build();
        when(storage.getUser(new UserName("admin"))).thenReturn(admin);
        when(rand.getTemporaryPassword(10)).thenThrow(new RuntimeException("booga"));

        failCreateLocalUser(auth, token, new UserName("foo"), new DisplayName("bar"),
                new EmailAddress("f@g.com"), new RuntimeException("booga"));
    }

    /** Runs the shared token/role checking test battery against createLocalUser. */
    @Test
    public void createUserExecuteStandardUserCheckingTests() throws Exception {
        final IncomingToken token = new IncomingToken("foo");
        AuthenticationTester.executeStandardUserCheckingTests(new AbstractAuthOperation() {

            @Override
            public IncomingToken getIncomingToken() {
                return token;
            }

            @Override
            public void execute(final Authentication auth) throws Exception {
                auth.createLocalUser(token, new UserName("whee"), new DisplayName("bar"),
                        new EmailAddress("f@g.com"));
            }

            @Override
            public List<ILoggingEvent> getLogAccumulator() {
                return logEvents;
            }

            @Override
            public String getOperationString() {
                return "create local user whee";
            }
        }, set(Role.DEV_TOKEN, Role.SERV_TOKEN));
    }

    /** Null arguments fail fast with an NPE naming the offending parameter. */
    @Test
    public void createUserFailNulls() throws Exception {
        final TestMocks testauth = initTestMocks();
        final Authentication auth = testauth.auth;

        failCreateLocalUser(auth, null, new UserName("foo"), new DisplayName("bar"),
                new EmailAddress("f@g.com"), new NullPointerException("token"));
        failCreateLocalUser(auth, new IncomingToken("whee"), null,
                new DisplayName("bar"), new EmailAddress("f@g.com"),
                new NullPointerException("userName"));
        failCreateLocalUser(auth, new IncomingToken("whee"), new UserName("foo"),
                null, new EmailAddress("f@g.com"),
                new NullPointerException("displayName"));
        failCreateLocalUser(auth, new IncomingToken("whee"), new UserName("foo"),
                new DisplayName("bar"), null,
                new NullPointerException("email"));
    }

    /** Even the root user may not create another ROOT user; the attempt is logged. */
    @Test
    public void createRootUserFail() throws Exception {
        final TestMocks testauth = initTestMocks();
        final AuthStorage storage = testauth.storageMock;
        final Authentication auth = testauth.auth;

        final IncomingToken token = new IncomingToken("foobar");

        when(storage.getToken(token.getHashedToken()))
                .thenReturn(StoredToken.getBuilder(
                        TokenType.LOGIN, UUID.randomUUID(), new UserName("admin"))
                        .withLifeTime(NOW, NOW).build());
        final AuthUser admin = AuthUser.getBuilder(
                UserName.ROOT, new DisplayName("foo"), NOW)
                .withEmailAddress(new EmailAddress("f@g.com"))
                .withRole(Role.ROOT).build();
        when(storage.getUser(new UserName("admin"))).thenReturn(admin);

        failCreateLocalUser(auth, token, UserName.ROOT, new DisplayName("bar"),
                new EmailAddress("f@g.com"), new UnauthorizedException(ErrorType.UNAUTHORIZED,
                        "Cannot create ROOT user"));

        assertLogEventsCorrect(logEvents,
                new LogEvent(Level.ERROR,
                        "User ***ROOT*** attempted to create ROOT user and was thwarted",
                        Authentication.class));
    }

    /**
     * Helper that expects createLocalUser to throw and compares the thrown exception
     * (type and message) to the expected one.
     */
    public void failCreateLocalUser(
            final Authentication auth,
            final IncomingToken token,
            final UserName userName,
            final DisplayName display,
            final EmailAddress email,
            final Exception e) {
        try {
            auth.createLocalUser(token, userName, display, email);
            fail("expected exception");
        } catch (Exception got) {
            TestCommon.assertExceptionCorrect(got, e);
        }
    }
}
| |
package name.valery1707.core.api;
import javaslang.Tuple;
import javaslang.Value;
import javaslang.collection.Seq;
import name.valery1707.core.app.AccountService;
import name.valery1707.core.db.SpecificationBuilder;
import name.valery1707.core.domain.Account;
import name.valery1707.core.domain.Event.EventType;
import name.valery1707.core.domain.LogicRemovableEntity;
import org.springframework.context.support.DefaultMessageSourceResolvable;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.web.PageableDefault;
import org.springframework.data.web.SortDefault;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.validation.BindingResult;
import org.springframework.validation.FieldError;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.validation.Valid;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;
import static name.valery1707.core.utils.ReflectionUtils.findConverter;
import static name.valery1707.core.utils.ReflectionUtils.getGenericType;
/**
* @param <D> Domain class
* @param <R> Repo class
* @param <F> Filter class
* @param <DTO> Dto class
*/
public abstract class BaseEntityController<D, R extends JpaRepository<D, Long> & JpaSpecificationExecutor<D>, F, DTO extends BaseDto> {
	@Inject
	@SuppressWarnings("SpringJavaAutowiringInspection")
	private R repo;
	@Inject
	private AccountService accountService;
	// Concrete type arguments, resolved reflectively from the subclass declaration.
	private final Class<D> domainClass;
	private final Class<DTO> dtoClass;
	// Instantiates the domain class via its no-arg constructor.
	private final Supplier<D> domainBuilder;
	// Role sets permitted for each CRUD operation, derived from rightPrefix.
	private final Seq<Account.Role> canCreate;
	private final Seq<Account.Role> canFind;
	private final Seq<Account.Role> canRead;
	private final Seq<Account.Role> canUpdate;
	private final Seq<Account.Role> canDelete;
	// Builds JPA Specifications from the filter object; created in init().
	private SpecificationBuilder<D, F> userFilter;
	// Converts domain entities to DTOs; discovered reflectively in the constructor.
	private Function<D, DTO> domain2dto;
	/**
	 * Resolves the generic type arguments, the domain-to-DTO converter and the
	 * role sets for each CRUD operation.
	 *
	 * @param rightPrefix prefix of the permission names, e.g. "user" yields
	 *                    "user.create", "user.list", "user.update", "user.delete"
	 */
	public BaseEntityController(String rightPrefix) {
		domainClass = getGenericType(this.getClass(), BaseEntityController.class, "D");
		dtoClass = getGenericType(this.getClass(), BaseEntityController.class, "DTO");
		domain2dto = findConverter(domainClass, dtoClass);
		domainBuilder = () -> {
			try {
				return domainClass.newInstance();
			} catch (InstantiationException | IllegalAccessException e) {
				// NOTE(review): the original exception is not propagated as the cause.
				throw new IllegalStateException(String.format("Could not create '%s'", domainClass.getName()));
			}
		};
		canCreate = Account.Role.hasRights(rightPrefix + ".create");
		canFind = Account.Role.hasRights(rightPrefix + ".list");
		// NOTE(review): read access is keyed to the ".update" right (same as
		// canUpdate below), not a ".read" right -- looks like a possible
		// copy-paste slip; confirm this is intentional.
		canRead = Account.Role.hasRights(rightPrefix + ".update");
		canUpdate = Account.Role.hasRights(rightPrefix + ".update");
		canDelete = Account.Role.hasRights(rightPrefix + ".delete");
	}
	@PostConstruct
	public void init() {
		// Deferred to @PostConstruct so subclass state injected by Spring is available.
		userFilter = buildUserFilter();
	}
	protected R repo() {
		return repo;
	}
	protected AccountService accountService() {
		return accountService;
	}
	// Subclass hooks: the filter-to-Specification builder and the audit event
	// type reported for each CRUD operation.
	protected abstract SpecificationBuilder<D, F> buildUserFilter();
	protected abstract EventType eventCreate();
	protected abstract EventType eventRead();
	protected abstract EventType eventUpdate();
	protected abstract EventType eventDelete();
	protected abstract EventType eventFind();
	// Each canX() delegates to AccountService, which enforces that the current
	// account holds at least one of the listed roles.
	protected void canCreate() {
		accountService.requireAnyRole(eventCreate(), canCreate);
	}
	protected void canFind() {
		accountService.requireAnyRole(eventFind(), canFind);
	}
	protected void canRead() {
		accountService.requireAnyRole(eventRead(), canRead);
	}
	protected void canUpdate() {
		accountService.requireAnyRole(eventUpdate(), canUpdate);
	}
	protected void canDelete() {
		accountService.requireAnyRole(eventDelete(), canDelete);
	}
	// Converts a domain entity to its DTO using the reflectively found converter.
	protected DTO domain2dto(D src) {
		return domain2dto.apply(src);
	}
	/**
	 * Fill default fields
	 *
	 * @param src freshly built domain instance
	 * @return the same instance; logically-removable entities are marked active
	 */
	protected D domainInit(D src) {
		if (src instanceof LogicRemovableEntity) {
			((LogicRemovableEntity) src).setActive(true);
		}
		return src;
	}
	// Copies DTO fields onto the (new or loaded) entity prior to saving.
	protected abstract void dto2domain(DTO dto, D entity);
	// Hook for subclasses to force filter constraints regardless of user input.
	protected void applyPermanentFilter(F filter) {
	}
	/**
	 * Paged search of entities matching the optional filter (POST body).
	 */
	@RequestMapping(method = RequestMethod.POST)
	public Page<DTO> findByFilter(
			@PageableDefault(size = 20) @SortDefault("id") Pageable pageable,
			@RequestBody(required = false) F filter
	) {
		canFind();
		accountService.logEventSuccess(eventFind());
		applyPermanentFilter(filter);
		Specification<D> spec = userFilter.build(filter);
		return repo.findAll(spec, pageable)
				.map(this::domain2dto);
	}
	/**
	 * Loads a single entity by id; a missing id is logged as a failed read and
	 * reported as AccessDeniedException.
	 */
	@RequestMapping
	public DTO get(@RequestParam long id) {
		canRead();
		D entity = repo.findOne(id);
		if (entity == null) {
			accountService.logEventFail(eventRead(), String.format("Entity '%s' with id %d not found", domainClass.getName(), id));
			throw new AccessDeniedException(String.format("Entity '%s' with id %d not found", domainClass.getName(), id));
		}
		accountService.logEventSuccess(eventRead());
		return domain2dto(entity);
	}
	/**
	 * Deletes an entity: logically (active = false) when it supports logical
	 * removal, physically otherwise. Deleting an already-inactive logical
	 * entity is a no-op (but is still logged as a successful delete).
	 */
	@RequestMapping(method = RequestMethod.DELETE)
	@Transactional
	public void delete(@RequestParam long id) {
		canDelete();
		D entity = repo.findOne(id);
		if (entity == null) {
			throw new AccessDeniedException(String.format("Entity '%s' with id %d not found", domainClass.getName(), id));
		}
		if (entity instanceof LogicRemovableEntity) {
			LogicRemovableEntity removableEntity = (LogicRemovableEntity) entity;
			if (removableEntity.isActive()) {
				removableEntity.setActive(false);
				repo.save(entity);
			}
		} else {
			repo.delete(entity);
		}
		accountService.logEventSuccess(eventDelete());
	}
	// Hook for extra subclass-specific validation; add errors to 'validation'.
	protected void validate(DTO dto, BindingResult validation) {
	}
	/**
	 * Creates (id == 0) or updates (id != 0) an entity from the DTO.
	 * Validation errors are returned as HTTP 400 with a field -> errors map.
	 */
	@RequestMapping(method = RequestMethod.PUT)
	@Transactional
	public ResponseEntity<Map<String, ?>> save(@RequestBody @Valid DTO dto, BindingResult validation) {
		validate(dto, validation);
		boolean isCreate = dto.getId() == 0;
		if (validation.getErrorCount() > 0) {
			// Group field errors by field name for the 400 response body.
			javaslang.collection.Map<String, List<FieldError>> fieldErrorMap = javaslang.collection.List
					.ofAll(validation.getFieldErrors())
					.groupBy(FieldError::getField)
					.mapValues(Value::toJavaList);
			String errorDescription = fieldErrorMap
					.map((name, errors) -> Tuple.of(name, javaslang.collection.List.ofAll(errors).map(DefaultMessageSourceResolvable::getDefaultMessage).mkString("[", ", ", "]")))
					.mkString("\n");
			accountService.logEventFail(isCreate ? eventCreate() : eventUpdate(), errorDescription);
			return ResponseEntity.badRequest().body(fieldErrorMap.toJavaMap());
		}
		D entity = null;
		if (dto.getId() != 0) {
			canUpdate();
			entity = repo.findOne(dto.getId());
		}
		// NOTE(review): when an id is given but no entity is found, a new one
		// is created while the audit event still reports an update (isCreate
		// stays false) -- confirm this fallback is the desired behavior.
		if (entity == null) {
			canCreate();
			entity = domainInit(domainBuilder.get());
		}
		dto2domain(dto, entity);
		repo.save(entity);
		accountService.logEventSuccess(isCreate ? eventCreate() : eventUpdate());
		return ResponseEntity.ok(Collections.emptyMap());
	}
}
| |
/*
* Copyright 2014 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.event;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import azkaban.execapp.EventCollectorListener;
import azkaban.execapp.FlowRunner;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableNode;
import azkaban.executor.ExecutionOptions;
import azkaban.executor.ExecutorLoader;
import azkaban.executor.JavaJob;
import azkaban.executor.MockExecutorLoader;
import azkaban.executor.Status;
import azkaban.flow.Flow;
import azkaban.jobtype.JobTypeManager;
import azkaban.project.Project;
import azkaban.project.ProjectLoader;
import azkaban.project.MockProjectLoader;
import azkaban.utils.JSONUtils;
import azkaban.utils.Props;
public class LocalFlowWatcherTest {
  private File workingDir;
  private JobTypeManager jobtypeManager;
  private ProjectLoader fakeProjectLoader;
  // Counter so each call to setupDirectory() yields a distinct directory name.
  private int dirVal = 0;

  @Before
  public void setUp() throws Exception {
    jobtypeManager =
        new JobTypeManager(null, null, this.getClass().getClassLoader());
    jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
    // NOTE(review): the workingDir field is never assigned anywhere in this
    // class (setupDirectory() uses its own local), so MockProjectLoader
    // receives null here -- confirm that is intended.
    fakeProjectLoader = new MockProjectLoader(workingDir);
  }

  @After
  public void tearDown() throws IOException {
  }

  /**
   * Creates a fresh scratch directory named after the current counter value,
   * deleting any leftover directory from a previous run.
   */
  public File setupDirectory() throws IOException {
    System.out.println("Create temp dir");
    File dir = new File("_AzkabanTestDir_" + dirVal);
    if (dir.exists()) {
      FileUtils.deleteDirectory(dir);
    }
    dir.mkdirs();
    dirVal++;
    return dir;
  }

  @Ignore @Test
  public void testBasicLocalFlowWatcher() throws Exception {
    ExecutableFlow[] flows = runWatchedFlows("exec1", 2);
    testPipelineLevel2(flows[0], flows[1]);
  }

  @Ignore @Test
  public void testLevel1LocalFlowWatcher() throws Exception {
    ExecutableFlow[] flows = runWatchedFlows("exec1", 1);
    testPipelineLevel1(flows[0], flows[1]);
  }

  @Ignore @Test
  public void testLevel2DiffLocalFlowWatcher() throws Exception {
    // The watching flow is a modified copy of the watched flow.
    ExecutableFlow[] flows = runWatchedFlows("exec1-mod", 1);
    testPipelineLevel1(flows[0], flows[1]);
  }

  /**
   * Shared scenario for all three tests (extracted to remove triplicated
   * code): runs flow "exec1" as execution 1, runs a second flow that watches
   * it via a LocalFlowWatcher at the given pipeline level as execution 2,
   * waits for the second flow, then deletes both scratch directories.
   *
   * @param secondFlowName flow to run as the watching execution
   * @param pipelineLevel pipeline level for the watching execution
   * @return the two flows: index 0 is the watched flow, index 1 the watcher
   */
  private ExecutableFlow[] runWatchedFlows(String secondFlowName,
      int pipelineLevel) throws Exception {
    MockExecutorLoader loader = new MockExecutorLoader();
    EventCollectorListener eventCollector = new EventCollectorListener();

    File workingDir1 = setupDirectory();
    FlowRunner runner1 =
        createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
            null);
    Thread runner1Thread = new Thread(runner1);

    File workingDir2 = setupDirectory();
    LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
    FlowRunner runner2 =
        createFlowRunner(workingDir2, loader, eventCollector, secondFlowName, 2,
            watcher, pipelineLevel);
    Thread runner2Thread = new Thread(runner2);

    runner1Thread.start();
    runner2Thread.start();
    // Waiting on the watching flow is sufficient; it cannot finish before the
    // watched flow's relevant jobs have completed.
    runner2Thread.join();

    FileUtils.deleteDirectory(workingDir1);
    FileUtils.deleteDirectory(workingDir2);
    return new ExecutableFlow[] {
        runner1.getExecutableFlow(), runner2.getExecutableFlow()};
  }

  /**
   * Asserts level-1 pipelining: every node of the second flow succeeded and
   * started only after the identically named node of the first flow ended.
   */
  private void testPipelineLevel1(ExecutableFlow first, ExecutableFlow second) {
    for (ExecutableNode node : second.getExecutableNodes()) {
      Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);

      // check it's start time is after the first's children.
      ExecutableNode watchedNode = first.getExecutableNode(node.getId());
      if (watchedNode == null) {
        continue;
      }
      Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);
      System.out.println("Node " + node.getId() + " start: "
          + node.getStartTime() + " dependent on " + watchedNode.getId() + " "
          + watchedNode.getEndTime() + " diff: "
          + (node.getStartTime() - watchedNode.getEndTime()));

      Assert.assertTrue(node.getStartTime() >= watchedNode.getEndTime());

      long minParentDiff = 0;
      if (node.getInNodes().size() > 0) {
        minParentDiff = Long.MAX_VALUE;
        for (String dependency : node.getInNodes()) {
          ExecutableNode parent = second.getExecutableNode(dependency);
          long diff = node.getStartTime() - parent.getEndTime();
          minParentDiff = Math.min(minParentDiff, diff);
        }
      }
      long diff = node.getStartTime() - watchedNode.getEndTime();
      System.out.println("  minPipelineTimeDiff:" + diff
          + " minDependencyTimeDiff:" + minParentDiff);
      // The node should have been released promptly by either its pipeline
      // dependency or its in-flow dependency.
      Assert.assertTrue(minParentDiff < 100 || diff < 100);
    }
  }

  /**
   * Asserts level-2 pipelining: every node of the second flow succeeded and
   * started only after the children of the corresponding node of the first
   * flow ended.
   */
  private void testPipelineLevel2(ExecutableFlow first, ExecutableFlow second) {
    for (ExecutableNode node : second.getExecutableNodes()) {
      Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);

      // check it's start time is after the first's children.
      ExecutableNode watchedNode = first.getExecutableNode(node.getId());
      if (watchedNode == null) {
        continue;
      }
      Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);

      long minDiff = Long.MAX_VALUE;
      for (String watchedChild : watchedNode.getOutNodes()) {
        ExecutableNode child = first.getExecutableNode(watchedChild);
        if (child == null) {
          continue;
        }
        Assert.assertEquals(child.getStatus(), Status.SUCCEEDED);
        long diff = node.getStartTime() - child.getEndTime();
        minDiff = Math.min(minDiff, diff);
        System.out.println("Node " + node.getId() + " start: "
            + node.getStartTime() + " dependent on " + watchedChild + " "
            + child.getEndTime() + " diff: " + diff);
        Assert.assertTrue(node.getStartTime() >= child.getEndTime());
      }

      long minParentDiff = Long.MAX_VALUE;
      for (String dependency : node.getInNodes()) {
        ExecutableNode parent = second.getExecutableNode(dependency);
        long diff = node.getStartTime() - parent.getEndTime();
        minParentDiff = Math.min(minParentDiff, diff);
      }
      System.out.println("  minPipelineTimeDiff:" + minDiff
          + " minDependencyTimeDiff:" + minParentDiff);
      Assert.assertTrue(minParentDiff < 100 || minDiff < 100);
    }
  }

  /** Convenience overload using default (empty) Azkaban props. */
  private FlowRunner createFlowRunner(File workingDir, ExecutorLoader loader,
      EventCollectorListener eventCollector, String flowName, int execId,
      FlowWatcher watcher, Integer pipeline) throws Exception {
    return createFlowRunner(workingDir, loader, eventCollector, flowName,
        execId, watcher, pipeline, new Props());
  }

  /**
   * Builds a FlowRunner for the given flow, wiring in the optional watcher
   * and pipeline level, and registering the event collector.
   */
  private FlowRunner createFlowRunner(File workingDir, ExecutorLoader loader,
      EventCollectorListener eventCollector, String flowName, int execId,
      FlowWatcher watcher, Integer pipeline, Props azkabanProps)
      throws Exception {
    File testDir = new File("unit/executions/exectest1");
    ExecutableFlow exFlow =
        prepareExecDir(workingDir, testDir, flowName, execId);
    ExecutionOptions option = exFlow.getExecutionOptions();
    if (watcher != null) {
      option.setPipelineLevel(pipeline);
      option.setPipelineExecutionId(watcher.getExecId());
    }

    loader.uploadExecutableFlow(exFlow);
    FlowRunner runner =
        new FlowRunner(exFlow, loader, fakeProjectLoader, jobtypeManager,
            azkabanProps);
    runner.setFlowWatcher(watcher);
    runner.addListener(eventCollector);

    return runner;
  }

  /**
   * Copies the flow definition directory into the working directory, parses
   * the named .flow JSON file and builds an ExecutableFlow for it.
   */
  private ExecutableFlow prepareExecDir(File workingDir, File execDir,
      String flowName, int execId) throws IOException {
    FileUtils.copyDirectory(execDir, workingDir);

    File jsonFlowFile = new File(workingDir, flowName + ".flow");
    @SuppressWarnings("unchecked")
    HashMap<String, Object> flowObj =
        (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);

    Project project = new Project(1, "test");
    Flow flow = Flow.flowFromObject(flowObj);
    ExecutableFlow execFlow = new ExecutableFlow(project, flow);
    execFlow.setExecutionId(execId);
    execFlow.setExecutionPath(workingDir.getPath());
    return execFlow;
  }
}
| |
package com.dhl.serv.web.rest;
import com.dhl.serv.ProyService1App;
import com.dhl.serv.domain.Article;
import com.dhl.serv.repository.ArticleRepository;
import com.dhl.serv.service.ArticleService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.web.PageableHandlerMethodArgumentResolver;
import org.springframework.http.MediaType;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.hasItem;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
* Test class for the ArticleResource REST controller.
*
* @see ArticleResource
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = ProyService1App.class)
public class ArticleResourceIntTest {

    // Formatter producing the JSON representation of ZonedDateTime values (UTC, millisecond precision).
    private static final DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").withZone(ZoneId.of("Z"));

    // Fixture values: DEFAULT_* are used when creating the entity, UPDATED_* when updating it.
    private static final String DEFAULT_ARTICLE_NAME = "AAAAA";
    private static final String UPDATED_ARTICLE_NAME = "BBBBB";
    private static final String DEFAULT_ARTICLE_DESCRIPTION = "AAAAA";
    private static final String UPDATED_ARTICLE_DESCRIPTION = "BBBBB";

    private static final ZonedDateTime DEFAULT_ARTICLE_DATE_TIME = ZonedDateTime.ofInstant(Instant.ofEpochMilli(0L), ZoneId.systemDefault());
    private static final ZonedDateTime UPDATED_ARTICLE_DATE_TIME = ZonedDateTime.now(ZoneId.systemDefault()).withNano(0);
    // Expected JSON string form of the default timestamp.
    private static final String DEFAULT_ARTICLE_DATE_TIME_STR = dateTimeFormatter.format(DEFAULT_ARTICLE_DATE_TIME);

    @Inject
    private ArticleRepository articleRepository;

    @Inject
    private ArticleService articleService;

    @Inject
    private MappingJackson2HttpMessageConverter jacksonMessageConverter;

    @Inject
    private PageableHandlerMethodArgumentResolver pageableArgumentResolver;

    @Inject
    private EntityManager em;

    // Standalone MockMvc wrapping ArticleResource; built once in setup().
    private MockMvc restArticleMockMvc;

    // Fresh, not-yet-persisted entity; re-created before every test by initTest().
    private Article article;

    @PostConstruct
    public void setup() {
        MockitoAnnotations.initMocks(this);
        ArticleResource articleResource = new ArticleResource();
        // Inject the service by reflection since the resource has no setter.
        ReflectionTestUtils.setField(articleResource, "articleService", articleService);
        this.restArticleMockMvc = MockMvcBuilders.standaloneSetup(articleResource)
            .setCustomArgumentResolvers(pageableArgumentResolver)
            .setMessageConverters(jacksonMessageConverter).build();
    }

    /**
     * Create an entity for this test.
     *
     * This is a static method, as tests for other entities might also need it,
     * if they test an entity which requires the current entity.
     */
    public static Article createEntity(EntityManager em) {
        // NOTE(review): the 'em' parameter is unused here; the entity is built
        // in memory only and persisted by the individual tests.
        Article article = new Article();
        article.setArticleName(DEFAULT_ARTICLE_NAME);
        article.setArticleDescription(DEFAULT_ARTICLE_DESCRIPTION);
        article.setArticleDateTime(DEFAULT_ARTICLE_DATE_TIME);
        return article;
    }

    @Before
    public void initTest() {
        article = createEntity(em);
    }

    @Test
    @Transactional
    public void createArticle() throws Exception {
        int databaseSizeBeforeCreate = articleRepository.findAll().size();

        // Create the Article
        restArticleMockMvc.perform(post("/api/articles")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(article)))
                .andExpect(status().isCreated());

        // Validate the Article in the database
        List<Article> articles = articleRepository.findAll();
        assertThat(articles).hasSize(databaseSizeBeforeCreate + 1);
        // The last row is assumed to be the newly created one.
        Article testArticle = articles.get(articles.size() - 1);
        assertThat(testArticle.getArticleName()).isEqualTo(DEFAULT_ARTICLE_NAME);
        assertThat(testArticle.getArticleDescription()).isEqualTo(DEFAULT_ARTICLE_DESCRIPTION);
        assertThat(testArticle.getArticleDateTime()).isEqualTo(DEFAULT_ARTICLE_DATE_TIME);
    }

    @Test
    @Transactional
    public void getAllArticles() throws Exception {
        // Initialize the database
        articleRepository.saveAndFlush(article);

        // Get all the articles; the saved one must be present in the JSON array.
        restArticleMockMvc.perform(get("/api/articles?sort=id,desc"))
                .andExpect(status().isOk())
                .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
                .andExpect(jsonPath("$.[*].id").value(hasItem(article.getId().intValue())))
                .andExpect(jsonPath("$.[*].articleName").value(hasItem(DEFAULT_ARTICLE_NAME.toString())))
                .andExpect(jsonPath("$.[*].articleDescription").value(hasItem(DEFAULT_ARTICLE_DESCRIPTION.toString())))
                .andExpect(jsonPath("$.[*].articleDateTime").value(hasItem(DEFAULT_ARTICLE_DATE_TIME_STR)));
    }

    @Test
    @Transactional
    public void getArticle() throws Exception {
        // Initialize the database
        articleRepository.saveAndFlush(article);

        // Get the article by id and verify every exposed field.
        restArticleMockMvc.perform(get("/api/articles/{id}", article.getId()))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
            .andExpect(jsonPath("$.id").value(article.getId().intValue()))
            .andExpect(jsonPath("$.articleName").value(DEFAULT_ARTICLE_NAME.toString()))
            .andExpect(jsonPath("$.articleDescription").value(DEFAULT_ARTICLE_DESCRIPTION.toString()))
            .andExpect(jsonPath("$.articleDateTime").value(DEFAULT_ARTICLE_DATE_TIME_STR));
    }

    @Test
    @Transactional
    public void getNonExistingArticle() throws Exception {
        // Get the article with an id that cannot exist; expect 404.
        restArticleMockMvc.perform(get("/api/articles/{id}", Long.MAX_VALUE))
                .andExpect(status().isNotFound());
    }

    @Test
    @Transactional
    public void updateArticle() throws Exception {
        // Initialize the database
        articleService.save(article);

        int databaseSizeBeforeUpdate = articleRepository.findAll().size();

        // Update the article
        // NOTE(review): findOne returns a managed entity, so these setters
        // mutate persistent state inside the test transaction in addition to
        // feeding the PUT body -- standard generated-test pattern.
        Article updatedArticle = articleRepository.findOne(article.getId());
        updatedArticle.setArticleName(UPDATED_ARTICLE_NAME);
        updatedArticle.setArticleDescription(UPDATED_ARTICLE_DESCRIPTION);
        updatedArticle.setArticleDateTime(UPDATED_ARTICLE_DATE_TIME);

        restArticleMockMvc.perform(put("/api/articles")
                .contentType(TestUtil.APPLICATION_JSON_UTF8)
                .content(TestUtil.convertObjectToJsonBytes(updatedArticle)))
                .andExpect(status().isOk());

        // Validate the Article in the database
        List<Article> articles = articleRepository.findAll();
        assertThat(articles).hasSize(databaseSizeBeforeUpdate);
        Article testArticle = articles.get(articles.size() - 1);
        assertThat(testArticle.getArticleName()).isEqualTo(UPDATED_ARTICLE_NAME);
        assertThat(testArticle.getArticleDescription()).isEqualTo(UPDATED_ARTICLE_DESCRIPTION);
        assertThat(testArticle.getArticleDateTime()).isEqualTo(UPDATED_ARTICLE_DATE_TIME);
    }

    @Test
    @Transactional
    public void deleteArticle() throws Exception {
        // Initialize the database
        articleService.save(article);

        int databaseSizeBeforeDelete = articleRepository.findAll().size();

        // Get the article
        restArticleMockMvc.perform(delete("/api/articles/{id}", article.getId())
                .accept(TestUtil.APPLICATION_JSON_UTF8))
                .andExpect(status().isOk());

        // Validate the database is empty
        List<Article> articles = articleRepository.findAll();
        assertThat(articles).hasSize(databaseSizeBeforeDelete - 1);
    }
}
| |
package org.apache.maven.plugin.version.internal;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import org.apache.maven.artifact.repository.metadata.Metadata;
import org.apache.maven.artifact.repository.metadata.Versioning;
import org.apache.maven.artifact.repository.metadata.io.MetadataReader;
import org.apache.maven.model.Build;
import org.apache.maven.model.Plugin;
import org.apache.maven.plugin.MavenPluginManager;
import org.apache.maven.plugin.PluginResolutionException;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.apache.maven.plugin.version.PluginVersionRequest;
import org.apache.maven.plugin.version.PluginVersionResolutionException;
import org.apache.maven.plugin.version.PluginVersionResolver;
import org.apache.maven.plugin.version.PluginVersionResult;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.component.annotations.Requirement;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.util.StringUtils;
import org.eclipse.aether.RepositoryEvent.EventType;
import org.eclipse.aether.RepositoryEvent;
import org.eclipse.aether.RepositoryListener;
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.RequestTrace;
import org.eclipse.aether.metadata.DefaultMetadata;
import org.eclipse.aether.repository.ArtifactRepository;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.resolution.MetadataRequest;
import org.eclipse.aether.resolution.MetadataResult;
import org.eclipse.aether.util.version.GenericVersionScheme;
import org.eclipse.aether.version.InvalidVersionSpecificationException;
import org.eclipse.aether.version.Version;
import org.eclipse.aether.version.VersionScheme;
/**
* Resolves a version for a plugin.
*
* @since 3.0
* @author Benjamin Bentmann
*/
@Component( role = PluginVersionResolver.class )
public class DefaultPluginVersionResolver
implements PluginVersionResolver
{
private static final String REPOSITORY_CONTEXT = "plugin";
@Requirement
private Logger logger;
@Requirement
private RepositorySystem repositorySystem;
@Requirement
private MetadataReader metadataReader;
@Requirement
private MavenPluginManager pluginManager;
public PluginVersionResult resolve( PluginVersionRequest request )
throws PluginVersionResolutionException
{
logger.debug( "Resolving plugin version for " + request.getGroupId() + ":" + request.getArtifactId() );
PluginVersionResult result = resolveFromProject( request );
if ( result == null )
{
result = resolveFromRepository( request );
if ( logger.isDebugEnabled() )
{
logger.debug( "Resolved plugin version for " + request.getGroupId() + ":" + request.getArtifactId()
+ " to " + result.getVersion() + " from repository " + result.getRepository() );
}
}
else if ( logger.isDebugEnabled() )
{
logger.debug( "Resolved plugin version for " + request.getGroupId() + ":" + request.getArtifactId()
+ " to " + result.getVersion() + " from POM " + request.getPom() );
}
return result;
}
private PluginVersionResult resolveFromRepository( PluginVersionRequest request )
throws PluginVersionResolutionException
{
RequestTrace trace = RequestTrace.newChild( null, request );
DefaultPluginVersionResult result = new DefaultPluginVersionResult();
org.eclipse.aether.metadata.Metadata metadata =
new DefaultMetadata( request.getGroupId(), request.getArtifactId(), "maven-metadata.xml",
DefaultMetadata.Nature.RELEASE_OR_SNAPSHOT );
List<MetadataRequest> requests = new ArrayList<MetadataRequest>();
requests.add( new MetadataRequest( metadata, null, REPOSITORY_CONTEXT ).setTrace( trace ) );
for ( RemoteRepository repository : request.getRepositories() )
{
requests.add( new MetadataRequest( metadata, repository, REPOSITORY_CONTEXT ).setTrace( trace ) );
}
List<MetadataResult> results = repositorySystem.resolveMetadata( request.getRepositorySession(), requests );
Versions versions = new Versions();
for ( MetadataResult res : results )
{
ArtifactRepository repository = res.getRequest().getRepository();
if ( repository == null )
{
repository = request.getRepositorySession().getLocalRepository();
}
mergeMetadata( request.getRepositorySession(), trace, versions, res.getMetadata(), repository );
}
selectVersion( result, request, versions );
return result;
}
private void selectVersion( DefaultPluginVersionResult result, PluginVersionRequest request, Versions versions )
throws PluginVersionResolutionException
{
String version = null;
ArtifactRepository repo = null;
if ( StringUtils.isNotEmpty( versions.releaseVersion ) )
{
version = versions.releaseVersion;
repo = versions.releaseRepository;
}
else if ( StringUtils.isNotEmpty( versions.latestVersion ) )
{
version = versions.latestVersion;
repo = versions.latestRepository;
}
if ( version != null && !isCompatible( request, version ) )
{
versions.versions.remove( version );
version = null;
}
if ( version == null )
{
VersionScheme versionScheme = new GenericVersionScheme();
TreeSet<Version> releases = new TreeSet<Version>( Collections.reverseOrder() );
TreeSet<Version> snapshots = new TreeSet<Version>( Collections.reverseOrder() );
for ( String ver : versions.versions.keySet() )
{
try
{
Version v = versionScheme.parseVersion( ver );
if ( ver.endsWith( "-SNAPSHOT" ) )
{
snapshots.add( v );
}
else
{
releases.add( v );
}
}
catch ( InvalidVersionSpecificationException e )
{
}
}
for ( Version v : releases )
{
String ver = v.toString();
if ( isCompatible( request, ver ) )
{
version = ver;
repo = versions.versions.get( version );
break;
}
}
if ( version == null )
{
for ( Version v : snapshots )
{
String ver = v.toString();
if ( isCompatible( request, ver ) )
{
version = ver;
repo = versions.versions.get( version );
break;
}
}
}
}
if ( version != null )
{
result.setVersion( version );
result.setRepository( repo );
}
else
{
throw new PluginVersionResolutionException( request.getGroupId(), request.getArtifactId(),
request.getRepositorySession().getLocalRepository(),
request.getRepositories(),
"Plugin not found in any plugin repository" );
}
}
private boolean isCompatible( PluginVersionRequest request, String version )
{
Plugin plugin = new Plugin();
plugin.setGroupId( request.getGroupId() );
plugin.setArtifactId( request.getArtifactId() );
plugin.setVersion( version );
PluginDescriptor pluginDescriptor;
try
{
pluginDescriptor =
pluginManager.getPluginDescriptor( plugin, request.getRepositories(), request.getRepositorySession() );
}
catch ( PluginResolutionException e )
{
logger.debug( "Ignoring unresolvable plugin version " + version, e );
return false;
}
catch ( Exception e )
{
// ignore for now and delay failure to higher level processing
return true;
}
try
{
pluginManager.checkRequiredMavenVersion( pluginDescriptor );
}
catch ( Exception e )
{
logger.debug( "Ignoring incompatible plugin version " + version + ": " + e.getMessage() );
return false;
}
return true;
}
/**
 * Reads repository metadata from the resolved file (if any) and merges its
 * version information into the accumulator. Unparsable metadata is reported
 * to the session's repository listener instead of failing resolution.
 */
private void mergeMetadata( RepositorySystemSession session, RequestTrace trace, Versions versions,
                            org.eclipse.aether.metadata.Metadata metadata, ArtifactRepository repository )
{
    // Nothing to merge unless the metadata was actually downloaded to a regular file.
    if ( metadata == null || metadata.getFile() == null || !metadata.getFile().isFile() )
    {
        return;
    }
    try
    {
        // Lenient parsing: tolerate minor schema violations in repo metadata.
        Map<String, ?> readerOptions = Collections.singletonMap( MetadataReader.IS_STRICT, Boolean.FALSE );
        Metadata repoMetadata = metadataReader.read( metadata.getFile(), readerOptions );
        mergeMetadata( versions, repoMetadata, repository );
    }
    catch ( IOException e )
    {
        invalidMetadata( session, trace, metadata, repository, e );
    }
}
/**
 * Notifies the session's repository listener (if one is registered) that a
 * piece of repository metadata could not be parsed.
 */
private void invalidMetadata( RepositorySystemSession session, RequestTrace trace,
                              org.eclipse.aether.metadata.Metadata metadata, ArtifactRepository repository,
                              Exception exception )
{
    RepositoryListener listener = session.getRepositoryListener();
    if ( listener == null )
    {
        // No listener registered, nobody to notify.
        return;
    }
    RepositoryEvent.Builder event = new RepositoryEvent.Builder( session, EventType.METADATA_INVALID );
    event.setTrace( trace );
    event.setMetadata( metadata );
    event.setException( exception );
    event.setRepository( repository );
    listener.metadataInvalid( event.build() );
}
/**
 * Folds the versioning block of one repository's metadata into the
 * accumulated {@link Versions}: RELEASE/LATEST win only when their metadata
 * carries a strictly newer lastUpdated timestamp, and each concrete version
 * is attributed to the first repository that advertised it.
 */
private void mergeMetadata( Versions versions, Metadata source, ArtifactRepository repository )
{
    Versioning versioning = source.getVersioning();
    if ( versioning == null )
    {
        return;
    }
    String lastUpdated = StringUtils.clean( versioning.getLastUpdated() );
    // Newer metadata overrides the currently recorded RELEASE pointer.
    if ( StringUtils.isNotEmpty( versioning.getRelease() )
        && lastUpdated.compareTo( versions.releaseTimestamp ) > 0 )
    {
        versions.releaseVersion = versioning.getRelease();
        versions.releaseTimestamp = lastUpdated;
        versions.releaseRepository = repository;
    }
    // ... and likewise for the LATEST pointer.
    if ( StringUtils.isNotEmpty( versioning.getLatest() )
        && lastUpdated.compareTo( versions.latestTimestamp ) > 0 )
    {
        versions.latestVersion = versioning.getLatest();
        versions.latestTimestamp = lastUpdated;
        versions.latestRepository = repository;
    }
    // First repository to list a version keeps the attribution.
    for ( String version : versioning.getVersions() )
    {
        if ( !versions.versions.containsKey( version ) )
        {
            versions.versions.put( version, repository );
        }
    }
}
/**
 * Tries to pick the plugin version from the requesting POM itself: first
 * from {@code build/plugins}, then from {@code build/pluginManagement}.
 *
 * @return the declared version, or {@code null} if the POM does not settle it
 */
private PluginVersionResult resolveFromProject( PluginVersionRequest request )
{
    if ( request.getPom() == null || request.getPom().getBuild() == null )
    {
        return null;
    }
    Build build = request.getPom().getBuild();
    PluginVersionResult result = resolveFromProject( request, build.getPlugins() );
    if ( result == null && build.getPluginManagement() != null )
    {
        // Fall back to the managed plugin set.
        result = resolveFromProject( request, build.getPluginManagement().getPlugins() );
    }
    return result;
}
/**
 * Scans a plugin list for the requested groupId:artifactId.
 *
 * @return the version result for the first matching declaration that carries
 *         a version, or {@code null} when there is no match or the matching
 *         declaration has no version
 */
private PluginVersionResult resolveFromProject( PluginVersionRequest request, List<Plugin> plugins )
{
    for ( Plugin plugin : plugins )
    {
        boolean groupMatches = request.getGroupId().equals( plugin.getGroupId() );
        boolean artifactMatches = request.getArtifactId().equals( plugin.getArtifactId() );
        if ( groupMatches && artifactMatches )
        {
            // A matching declaration ends the search even when it has no
            // version, mirroring the original first-match semantics.
            return plugin.getVersion() != null
                ? new DefaultPluginVersionResult( plugin.getVersion() )
                : null;
        }
    }
    return null;
}
/**
 * Accumulator for version information merged from the metadata of several
 * repositories. Timestamps are lastUpdated strings compared lexically;
 * empty string means "nothing recorded yet".
 */
static class Versions
{
    // Version advertised as RELEASE by the newest metadata seen so far.
    String releaseVersion = "";
    String releaseTimestamp = "";
    ArtifactRepository releaseRepository;
    // Version advertised as LATEST by the newest metadata seen so far.
    String latestVersion = "";
    String latestTimestamp = "";
    ArtifactRepository latestRepository;
    // Concrete version -> first repository that listed it (insertion order kept).
    Map<String, ArtifactRepository> versions = new LinkedHashMap<String, ArtifactRepository>();
}
}
| |
/*
* Copyright 2013 Cloudera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kitesdk.data;
import com.google.common.collect.Lists;
import java.net.URI;
import java.util.Collection;
import java.util.List;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.kitesdk.data.spi.URIBuilder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
@SuppressWarnings("unchecked")
/**
 * Unit tests for the static {@code Datasets} entry points, verifying that
 * each URI-based operation is translated into the expected calls on the
 * underlying {@code DatasetRepository} (mocked via {@code MockRepositories}),
 * for both {@link URI} and String-URI overloads, and for dataset vs. view URIs.
 */
public class TestDatasets {
  private DatasetRepository repo = null;
  private URI repoUri = null;

  @Before
  public void setupMock() {
    this.repo = MockRepositories.newMockRepository();
    this.repoUri = repo.getUri();
    verify(repo).getUri(); // verify the above getUri() call
  }

  @Test
  public void testCreate() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaLiteral("\"string\"")
        .build();
    Dataset<Object> expected = mock(Dataset.class);
    when(repo.create("test", descriptor, Object.class)).thenReturn(expected);
    Dataset<Object> ds = Datasets.<Object, Dataset<Object>>
        create(datasetUri, descriptor, Object.class);
    verify(repo).create("test", descriptor, Object.class);
    verifyNoMoreInteractions(repo);
    verifyNoMoreInteractions(expected);
    Assert.assertEquals(expected, ds);
  }

  @Test(expected=NullPointerException.class)
  public void testCreateNullType() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaLiteral("\"string\"")
        .build();
    Datasets.<Object, Dataset<Object>> create(datasetUri, descriptor, null);
  }

  @Test
  public void testCreateStringUri() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaLiteral("\"string\"")
        .build();
    Dataset<Object> expected = mock(Dataset.class);
    when(repo.create("test", descriptor, Object.class)).thenReturn(expected);
    Dataset<Object> ds = Datasets.<Object, Dataset<Object>>
        create(datasetUri.toString(), descriptor, Object.class);
    verify(repo).create("test", descriptor, Object.class);
    verifyNoMoreInteractions(repo);
    verifyNoMoreInteractions(expected);
    Assert.assertEquals(expected, ds);
  }

  @Test
  public void testCreateView() throws Exception {
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaUri("resource:schema/user.avsc")
        .build();
    Dataset<Object> ds = mock(Dataset.class);
    when(repo.create("test", descriptor, Object.class)).thenReturn(ds);
    when(ds.getDescriptor()).thenReturn(descriptor);
    RefinableView<Object> userView = mock(RefinableView.class);
    when(ds.with("username", "user1")).thenReturn(userView);
    RefinableView<Object> userAndEmailView = mock(RefinableView.class);
    when(userView.with("email", "user1@example.com"))
        .thenReturn(userAndEmailView);
    // query options become view refinements; unknown options are ignored
    URI datasetUri = new URIBuilder(repoUri, "test")
        .with("username", "user1")
        .with("email", "user1@example.com")
        .with("ignoredOption", "abc")
        .build();
    RefinableView<Object> view = Datasets.<Object, RefinableView<Object>>
        create(datasetUri, descriptor, Object.class);
    verify(repo).create("test", descriptor, Object.class);
    verifyNoMoreInteractions(repo);
    verify(ds).getDescriptor();
    verify(ds).with("username", "user1");
    verifyNoMoreInteractions(ds);
    verify(userView).with("email", "user1@example.com");
    verifyNoMoreInteractions(userView);
    verifyNoMoreInteractions(userAndEmailView);
    Assert.assertEquals(userAndEmailView, view);
  }

  @Test
  public void testCreateViewStringUri() throws Exception {
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaUri("resource:schema/user.avsc")
        .build();
    Dataset<Object> ds = mock(Dataset.class);
    when(repo.create("test", descriptor, Object.class)).thenReturn(ds);
    when(ds.getDescriptor()).thenReturn(descriptor);
    RefinableView<Object> userView = mock(RefinableView.class);
    when(ds.with("username", "user1")).thenReturn(userView);
    RefinableView<Object> userAndEmailView = mock(RefinableView.class);
    when(userView.with("email", "user1@example.com"))
        .thenReturn(userAndEmailView);
    URI datasetUri = new URIBuilder(repoUri, "test")
        .with("username", "user1")
        .with("email", "user1@example.com")
        .with("ignoredOption", "abc")
        .build();
    RefinableView<Object> view = Datasets.<Object, RefinableView<Object>>
        create(datasetUri.toString(), descriptor, Object.class);
    verify(repo).create("test", descriptor, Object.class);
    verifyNoMoreInteractions(repo);
    verify(ds).getDescriptor();
    verify(ds).with("username", "user1");
    verifyNoMoreInteractions(ds);
    verify(userView).with("email", "user1@example.com");
    verifyNoMoreInteractions(userView);
    verifyNoMoreInteractions(userAndEmailView);
    Assert.assertEquals(userAndEmailView, view);
  }

  @Test
  public void testLoad() {
    Dataset<Object> expected = mock(Dataset.class);
    when(repo.load("test", Object.class)).thenReturn(expected);
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    Dataset<Object> ds = Datasets.<Object, Dataset<Object>>
        load(datasetUri, Object.class);
    verify(repo).load("test", Object.class);
    verifyNoMoreInteractions(repo);
    verifyNoMoreInteractions(expected);
    Assert.assertEquals(expected, ds);
  }

  @Test(expected=NullPointerException.class)
  public void testLoadNullType() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    Datasets.<Object, Dataset<Object>> load(datasetUri, null);
  }

  @Test
  public void testLoadStringUri() {
    Dataset<Object> expected = mock(Dataset.class);
    when(repo.load("test", Object.class)).thenReturn(expected);
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    // FIX: this test exercises the String-URI overload, so pass the URI's
    // string form. It previously passed the URI object, making the test a
    // duplicate of testLoad and leaving the String overload uncovered
    // (compare testCreateStringUri / testUpdateStringUri).
    Dataset<Object> ds = Datasets.<Object, Dataset<Object>>
        load(datasetUri.toString(), Object.class);
    verify(repo).load("test", Object.class);
    verifyNoMoreInteractions(repo);
    verifyNoMoreInteractions(expected);
    Assert.assertEquals(expected, ds);
  }

  @Test
  public void testLoadView() throws Exception {
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaUri("resource:schema/user.avsc")
        .build();
    Dataset<Object> ds = mock(Dataset.class);
    when(repo.load("test", Object.class)).thenReturn(ds);
    when(ds.getDescriptor()).thenReturn(descriptor);
    RefinableView<Object> userView = mock(RefinableView.class);
    when(ds.with("username", "user1")).thenReturn(userView);
    RefinableView<Object> userAndEmailView = mock(RefinableView.class);
    when(userView.with("email", "user1@example.com"))
        .thenReturn(userAndEmailView);
    URI datasetUri = new URIBuilder(repoUri, "test")
        .with("username", "user1")
        .with("email", "user1@example.com")
        .with("ignoredOption", "abc")
        .build();
    RefinableView<Object> view = Datasets.<Object, RefinableView<Object>>
        load(datasetUri, Object.class);
    verify(repo).load("test", Object.class);
    verifyNoMoreInteractions(repo);
    verify(ds).getDescriptor();
    verify(ds).with("username", "user1");
    verifyNoMoreInteractions(ds);
    verify(userView).with("email", "user1@example.com");
    verifyNoMoreInteractions(userView);
    verifyNoMoreInteractions(userAndEmailView);
    Assert.assertEquals(userAndEmailView, view);
  }

  @Test
  public void testLoadViewStringUri() throws Exception {
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaUri("resource:schema/user.avsc")
        .build();
    Dataset<Object> ds = mock(Dataset.class);
    when(repo.load("test", Object.class)).thenReturn(ds);
    when(ds.getDescriptor()).thenReturn(descriptor);
    RefinableView<Object> userView = mock(RefinableView.class);
    when(ds.with("username", "user1")).thenReturn(userView);
    RefinableView<Object> userAndEmailView = mock(RefinableView.class);
    when(userView.with("email", "user1@example.com"))
        .thenReturn(userAndEmailView);
    URI datasetUri = new URIBuilder(repoUri, "test")
        .with("username", "user1")
        .with("email", "user1@example.com")
        .with("ignoredOption", "abc")
        .build();
    RefinableView<Object> view = Datasets.<Object, RefinableView<Object>>
        load(datasetUri.toString(), Object.class);
    verify(repo).load("test", Object.class);
    verifyNoMoreInteractions(repo);
    verify(ds).getDescriptor();
    verify(ds).with("username", "user1");
    verifyNoMoreInteractions(ds);
    verify(userView).with("email", "user1@example.com");
    verifyNoMoreInteractions(userView);
    verifyNoMoreInteractions(userAndEmailView);
    Assert.assertEquals(userAndEmailView, view);
  }

  @Test
  public void testDelete() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    Datasets.delete(datasetUri);
    verify(repo).delete("test");
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testDeleteStringUri() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    Datasets.delete(datasetUri.toString());
    verify(repo).delete("test");
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testDeleteRejectsViewUri() {
    // delete operates on whole datasets; a view URI must be rejected
    final URI datasetUri = new URIBuilder(repoUri, "test")
        .with("field", 34)
        .build();
    TestHelpers.assertThrows("Should reject view URI",
        IllegalArgumentException.class, new Runnable() {
          @Override
          public void run() {
            Datasets.delete(datasetUri);
          }
        });
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testExists() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    when(repo.exists("test")).thenReturn(true);
    Assert.assertTrue(Datasets.exists(datasetUri));
    verify(repo).exists("test");
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testExistsStringUri() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    when(repo.exists("test")).thenReturn(false);
    Assert.assertFalse(Datasets.exists(datasetUri.toString()));
    verify(repo).exists("test");
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testExistsRejectsViewUri() {
    final URI datasetUri = new URIBuilder(repoUri, "test")
        .with("field", 34)
        .build();
    TestHelpers.assertThrows("Should reject view URI",
        IllegalArgumentException.class, new Runnable() {
          @Override
          public void run() {
            Datasets.exists(datasetUri);
          }
        });
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testUpdate() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaLiteral("\"string\"")
        .build();
    Dataset<Object> expected = mock(Dataset.class);
    when(repo.update("test", descriptor, Object.class)).thenReturn(expected);
    Dataset<Object> ds = Datasets.<Object, Dataset<Object>>
        update(datasetUri, descriptor, Object.class);
    verify(repo).update("test", descriptor, Object.class);
    verifyNoMoreInteractions(repo);
    verifyNoMoreInteractions(expected);
    Assert.assertEquals(expected, ds);
  }

  @Test(expected=NullPointerException.class)
  public void testUpdateNullType() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaLiteral("\"string\"")
        .build();
    Datasets.<Object, Dataset<Object>> update(datasetUri, descriptor, null);
  }

  @Test
  public void testUpdateStringUri() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaLiteral("\"string\"")
        .build();
    Dataset<Object> expected = mock(Dataset.class);
    when(repo.update("test", descriptor, Object.class)).thenReturn(expected);
    Dataset<Object> ds = Datasets.<Object, Dataset<Object>>
        update(datasetUri.toString(), descriptor, Object.class);
    verify(repo).update("test", descriptor, Object.class);
    verifyNoMoreInteractions(repo);
    verifyNoMoreInteractions(expected);
    Assert.assertEquals(expected, ds);
  }

  @Test
  public void testUpdateRejectsViewUri() {
    final URI datasetUri = new URIBuilder(repoUri, "test")
        .with("field", 34)
        .build();
    final DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
        .schemaLiteral("\"string\"")
        .build();
    TestHelpers.assertThrows("Should reject view URI",
        IllegalArgumentException.class, new Runnable() {
          @Override
          public void run() {
            Datasets.<Object, Dataset<Object>>update(datasetUri, descriptor, Object.class);
          }
        });
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testListRejectsDatasetUri() {
    // list accepts only repository URIs
    TestHelpers.assertThrows("Should reject dataset URI",
        IllegalArgumentException.class, new Runnable() {
          @Override
          public void run() {
            Datasets.list(new URIBuilder(repoUri, "test").build());
          }
        });
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testListRejectsViewUri() {
    TestHelpers.assertThrows("Should reject dataset URI",
        IllegalArgumentException.class, new Runnable() {
          @Override
          public void run() {
            Datasets.list(new URIBuilder(repoUri, "test")
                .with("field", 34)
                .build());
          }
        });
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testListRepoUri() {
    when(repo.list()).thenReturn(Lists.newArrayList("a", "b", "c"));
    List<URI> expected = Lists.newArrayList(
        new URIBuilder(repoUri, "a").build(),
        new URIBuilder(repoUri, "b").build(),
        new URIBuilder(repoUri, "c").build()
    );
    Collection<URI> datasetUris = Datasets.list(repoUri);
    verify(repo, times(2)).getUri(); // called in @Before and Datasets.list
    verify(repo).list();
    verifyNoMoreInteractions(repo);
    Assert.assertEquals(expected, datasetUris);
  }

  @Test
  public void testListStringRepoUri() {
    when(repo.list()).thenReturn(Lists.newArrayList("a", "b", "c"));
    List<URI> expected = Lists.newArrayList(
        new URIBuilder(repoUri, "a").build(),
        new URIBuilder(repoUri, "b").build(),
        new URIBuilder(repoUri, "c").build()
    );
    Collection<URI> datasetUris = Datasets.list(repoUri.toString());
    verify(repo, times(2)).getUri(); // called in @Before and Datasets.list
    verify(repo).list();
    verifyNoMoreInteractions(repo);
    Assert.assertEquals(expected, datasetUris);
  }

  @Test
  public void testRepositoryFor() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    Assert.assertEquals(repo, Datasets.repositoryFor(datasetUri));
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testRepositoryForStringUri() {
    URI datasetUri = new URIBuilder(repoUri, "test").build();
    Assert.assertEquals(repo, Datasets.repositoryFor(datasetUri.toString()));
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testRepositoryForView() {
    URI datasetUri = new URIBuilder(repoUri, "test")
        .with("field", 34)
        .build();
    Assert.assertEquals(repo, Datasets.repositoryFor(datasetUri));
    verifyNoMoreInteractions(repo);
  }

  @Test
  public void testRepositoryForViewStringUri() {
    URI datasetUri = new URIBuilder(repoUri, "test")
        .with("field", 34)
        .build();
    Assert.assertEquals(repo, Datasets.repositoryFor(datasetUri.toString()));
    verifyNoMoreInteractions(repo);
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.io.BufferedOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilterOutputStream;
import java.io.OutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicLong;
import com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.protobuf.CodedOutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.server.common.FileRegion;
import org.apache.hadoop.hdfs.server.common.blockaliasmap.BlockAliasMap;
import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.CacheManagerSection;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FilesUnderConstructionSection;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeDirectorySection.DirEntry;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection.INode;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.NameSystemSection;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SecretManagerSection;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.StringTableSection;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressorStream;
import org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import static org.apache.hadoop.hdfs.server.namenode.FSImageUtil.MAGIC_HEADER;
/**
* Utility crawling an existing hierarchical FileSystem and emitting
* a valid FSImage/NN storage.
*/
// TODO: generalize to types beyond FileRegion
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class ImageWriter implements Closeable {
private static final int ONDISK_VERSION = 1;
private static final int LAYOUT_VERSION =
NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION;
private final Path outdir;
private final FileSystem outfs;
private final File dirsTmp;
private final OutputStream dirs;
private final File inodesTmp;
private final OutputStream inodes;
private final MessageDigest digest;
private final FSImageCompression compress;
private final long startBlock;
private final long startInode;
private final UGIResolver ugis;
private final BlockAliasMap.Writer<FileRegion> blocks;
private final BlockResolver blockIds;
private final Map<Long, DirEntry.Builder> dircache;
private final TrackedOutputStream<DigestOutputStream> raw;
private boolean closed = false;
private long curSec;
private long curBlock;
private final AtomicLong curInode;
private final FileSummary.Builder summary = FileSummary.newBuilder()
.setOndiskVersion(ONDISK_VERSION)
.setLayoutVersion(LAYOUT_VERSION);
private final String blockPoolID;
/** @return a fresh {@link Options} instance carrying the default settings. */
public static Options defaults() {
    return new Options();
}
@SuppressWarnings("unchecked")
@SuppressWarnings("unchecked")
public ImageWriter(Options opts) throws IOException {
    final OutputStream out;
    if (null == opts.outStream) {
      // No caller-provided stream: format an NNStorage directory and write
      // the image file into its "current" subdirectory.
      FileSystem fs = opts.outdir.getFileSystem(opts.getConf());
      // Use the raw local filesystem so we don't generate .crc sidecars.
      outfs = (fs instanceof LocalFileSystem)
          ? ((LocalFileSystem)fs).getRaw()
          : fs;
      Path tmp = opts.outdir;
      if (!outfs.mkdirs(tmp)) {
        throw new IOException("Failed to create output dir: " + tmp);
      }
      try (NNStorage stor = new NNStorage(opts.getConf(),
          Arrays.asList(tmp.toUri()), Arrays.asList(tmp.toUri()))) {
        NamespaceInfo info = NNStorage.newNamespaceInfo();
        if (info.getLayoutVersion() != LAYOUT_VERSION) {
          throw new IllegalStateException("Incompatible layout " +
              info.getLayoutVersion() + " (expected " + LAYOUT_VERSION + ")");
        }
        // set the cluster id, if given
        if (opts.clusterID.length() > 0) {
          info.setClusterID(opts.clusterID);
        }
        // if block pool id is given
        if (opts.blockPoolID.length() > 0) {
          info.setBlockPoolID(opts.blockPoolID);
        }
        stor.format(info);
        blockPoolID = info.getBlockPoolID();
      }
      outdir = new Path(tmp, "current");
      out = outfs.create(new Path(outdir, "fsimage_0000000000000000000"));
    } else {
      // Caller supplied the destination stream; no storage dir, no MD5 file.
      outdir = null;
      outfs = null;
      out = opts.outStream;
      blockPoolID = "";
    }
    // Track write position and running MD5 over everything written to out.
    digest = MD5Hash.getDigester();
    raw = new TrackedOutputStream<>(new DigestOutputStream(
        new BufferedOutputStream(out), digest));
    compress = opts.compress;
    CompressionCodec codec = compress.getImageCodec();
    if (codec != null) {
      summary.setCodec(codec.getClass().getCanonicalName());
    }
    startBlock = opts.startBlock;
    curBlock = startBlock;
    startInode = opts.startInode;
    curInode = new AtomicLong(startInode);
    dircache = Collections.synchronizedMap(new DirEntryCache(opts.maxdircache));
    // Pluggable resolvers may be injected directly or instantiated by class.
    ugis = null == opts.ugis
        ? ReflectionUtils.newInstance(opts.ugisClass, opts.getConf())
        : opts.ugis;
    BlockAliasMap<FileRegion> fmt = null == opts.blocks
        ? ReflectionUtils.newInstance(opts.aliasMap, opts.getConf())
        : opts.blocks;
    blocks = fmt.getWriter(null, blockPoolID);
    blockIds = null == opts.blockIds
        ? ReflectionUtils.newInstance(opts.blockIdsClass, opts.getConf())
        : opts.blockIds;

    // create directory and inode sections as side-files.
    // The details are written to files to avoid keeping them in memory.
    FileOutputStream dirsTmpStream = null;
    try {
      dirsTmp = File.createTempFile("fsimg_dir", null);
      dirsTmp.deleteOnExit();
      dirsTmpStream = new FileOutputStream(dirsTmp);
      dirs = beginSection(dirsTmpStream);
    } catch (IOException e) {
      // Close what was opened so far before propagating.
      IOUtils.cleanupWithLogger(null, raw, dirsTmpStream);
      throw e;
    }
    try {
      inodesTmp = File.createTempFile("fsimg_inode", null);
      inodesTmp.deleteOnExit();
      inodes = new FileOutputStream(inodesTmp);
    } catch (IOException e) {
      IOUtils.cleanupWithLogger(null, raw, dirsTmpStream, dirs);
      throw e;
    }
    raw.write(MAGIC_HEADER);
    // Sections start immediately after the magic header.
    curSec = raw.pos;
    assert raw.pos == MAGIC_HEADER.length;
}
/**
 * Consumes one path from the traversal: allocates the next inode id,
 * writes the corresponding INode record to the inode side-file, and (for
 * non-root entries) records a parent-&gt;child edge in the directory cache.
 * The asserts imply parents are visited before children (parent ids are
 * always lower than the current allocation counter).
 */
public void accept(TreePath e) throws IOException {
    assert e.getParentId() < curInode.get();
    // allocate ID
    long id = curInode.getAndIncrement();
    e.accept(id);
    assert e.getId() < curInode.get();
    INode n = e.toINode(ugis, blockIds, blocks);
    writeInode(n);

    if (e.getParentId() > 0) {
      // add DirEntry to map, which may page out entries
      DirEntry.Builder de = DirEntry.newBuilder()
          .setParent(e.getParentId())
          .addChildren(e.getId());
      dircache.put(e.getParentId(), de);
    }
}
@SuppressWarnings("serial")
/**
 * Bounded, insertion-ordered cache of per-directory {@link DirEntry}
 * builders. When capacity is exceeded the eldest entry is built and
 * flushed to the directory side-file; repeated puts for the same parent
 * merge their child lists instead of replacing the builder.
 */
class DirEntryCache extends LinkedHashMap<Long, DirEntry.Builder> {
    // should cache path to root, not evict LRCached
    private final int nEntries;  // maximum number of builders kept in memory
    DirEntryCache(int nEntries) {
      this.nEntries = nEntries;
    }

    @Override
    public DirEntry.Builder put(Long p, DirEntry.Builder b) {
      DirEntry.Builder e = get(p);
      if (null == e) {
        return super.put(p, b);
      }
      // merge
      e.addAllChildren(b.getChildrenList());
      // not strictly conforming
      return e;
    }

    @Override
    protected boolean removeEldestEntry(Entry<Long, DirEntry.Builder> be) {
      if (size() > nEntries) {
        // Over capacity: persist the eldest builder before evicting it.
        DirEntry d = be.getValue().build();
        try {
          writeDirEntry(d);
        } catch (IOException e) {
          // removeEldestEntry cannot throw a checked exception.
          throw new RuntimeException(e);
        }
        return true;
      }
      return false;
    }
}
/** Appends one length-delimited INode record to the inode side-file. */
synchronized void writeInode(INode n) throws IOException {
    n.writeDelimitedTo(inodes);
}
/** Appends one length-delimited DirEntry record to the directory side-file. */
synchronized void writeDirEntry(DirEntry e) throws IOException {
    e.writeDelimitedTo(dirs);
}
/**
 * Returns the number of bytes {@code s} occupies when written delimited:
 * the varint length prefix plus the serialized message itself.
 */
private static int getOndiskSize(GeneratedMessageV3 s) {
    // Hoist the duplicated getSerializedSize() call into a local.
    final int serialized = s.getSerializedSize();
    return CodedOutputStream.computeUInt32SizeNoTag(serialized) + serialized;
}
@Override
public synchronized void close() throws IOException {
if (closed) {
return;
}
for (DirEntry.Builder b : dircache.values()) {
DirEntry e = b.build();
writeDirEntry(e);
}
dircache.clear();
// close side files
IOUtils.cleanupWithLogger(null, dirs, inodes, blocks);
if (null == dirs || null == inodes) {
// init failed
if (raw != null) {
raw.close();
}
return;
}
try {
writeNameSystemSection();
writeINodeSection();
writeDirSection();
writeStringTableSection();
// write summary directly to raw
FileSummary s = summary.build();
s.writeDelimitedTo(raw);
int length = getOndiskSize(s);
byte[] lengthBytes = new byte[4];
ByteBuffer.wrap(lengthBytes).asIntBuffer().put(length);
raw.write(lengthBytes);
} finally {
raw.close();
}
writeMD5("fsimage_0000000000000000000");
closed = true;
}
/**
* Write checksum for image file. Pulled from MD5Utils/internals. Awkward to
* reuse existing tools/utils.
*/
/**
 * Write checksum for image file. Pulled from MD5Utils/internals. Awkward to
 * reuse existing tools/utils. Skipped entirely when writing to a
 * caller-provided stream (no output directory).
 */
void writeMD5(String imagename) throws IOException {
    if (null == outdir) {
      return;
    }
    MD5Hash md5 = new MD5Hash(digest.digest());
    String hex = StringUtils.byteToHexString(md5.getDigest());
    Path md5File = new Path(outdir, imagename + ".md5");
    try (OutputStream md5Out = outfs.create(md5File)) {
      // Same "<digest> *<filename>\n" line format that md5sum produces.
      md5Out.write((hex + " *" + imagename + "\n").getBytes(Charsets.UTF_8));
    }
}
/**
 * Starts a new image section on {@code out}, wrapping it in the configured
 * compression codec's stream when compression is enabled.
 */
OutputStream beginSection(OutputStream out) throws IOException {
    CompressionCodec codec = compress.getImageCodec();
    return (codec == null) ? out : codec.createOutputStream(out);
}
/**
 * Finishes a section started with {@link #beginSection(OutputStream)}:
 * finalizes the compressor (if any), flushes, records the section's
 * name/offset/length in the file summary, and advances the section offset.
 */
void endSection(OutputStream out, SectionName name) throws IOException {
    CompressionCodec codec = compress.getImageCodec();
    if (codec != null) {
      // beginSection wrapped the stream in a CompressorStream, so the cast
      // is safe whenever a codec is configured.
      ((CompressorStream)out).finish();
    }
    out.flush();
    // Length is measured on the raw stream, i.e. post-compression bytes.
    long length = raw.pos - curSec;
    summary.addSections(FileSummary.Section.newBuilder()
        .setName(name.toString()) // not strictly correct, but name not visible
        .setOffset(curSec).setLength(length));
    curSec += length;
}
/** Emits the NS_INFO section with fixed genstamps and a zero transaction id. */
void writeNameSystemSection() throws IOException {
    NameSystemSection s = NameSystemSection.newBuilder()
        .setGenstampV1(1000)
        .setGenstampV1Limit(0)
        .setGenstampV2(1001)
        .setLastAllocatedBlockId(blockIds.lastId())
        .setTransactionId(0)
        .build();
    OutputStream sec = beginSection(raw);
    s.writeDelimitedTo(sec);
    endSection(sec, SectionName.NS_INFO);
}
/**
 * Emits the INODE section header followed by the inode records previously
 * staged in the inode side-file.
 */
void writeINodeSection() throws IOException {
    // could reset dict to avoid compression cost in close
    INodeSection header = INodeSection.newBuilder()
        .setNumInodes(curInode.get() - startInode)
        .setLastInodeId(curInode.get())
        .build();
    OutputStream sec = beginSection(raw);
    header.writeDelimitedTo(sec);
    // splice in the staged inode records
    try (FileInputStream staged = new FileInputStream(inodesTmp)) {
      IOUtils.copyBytes(staged, sec, 4096, false);
    }
    endSection(sec, SectionName.INODE);
}
/**
 * Emits the INODE_DIR section by splicing in the staged directory entries.
 * No header, so dirs can be written/compressed independently.
 */
void writeDirSection() throws IOException {
    OutputStream sec = raw;
    try (FileInputStream staged = new FileInputStream(dirsTmp)) {
      IOUtils.copyBytes(staged, sec, 4096, false);
    }
    endSection(sec, SectionName.INODE_DIR);
}
/** Emits an empty FILES_UNDERCONSTRUCTION section. */
void writeFilesUCSection() throws IOException {
    FilesUnderConstructionSection s =
        FilesUnderConstructionSection.newBuilder().build();
    OutputStream sec = beginSection(raw);
    s.writeDelimitedTo(sec);
    endSection(sec, SectionName.FILES_UNDERCONSTRUCTION);
}
/** Emits an empty SNAPSHOT_DIFF section. */
void writeSnapshotDiffSection() throws IOException {
    SnapshotDiffSection s = SnapshotDiffSection.newBuilder().build();
    OutputStream sec = beginSection(raw);
    s.writeDelimitedTo(sec);
    endSection(sec, SectionName.SNAPSHOT_DIFF);
}
/** Emits a SECRET_MANAGER section with zeroed id and sequence number. */
void writeSecretManagerSection() throws IOException {
    SecretManagerSection s = SecretManagerSection.newBuilder()
        .setCurrentId(0)
        .setTokenSequenceNumber(0)
        .build();
    OutputStream sec = beginSection(raw);
    s.writeDelimitedTo(sec);
    endSection(sec, SectionName.SECRET_MANAGER);
}
/** Emits a CACHE_MANAGER section with no pools or directives. */
void writeCacheManagerSection() throws IOException {
    CacheManagerSection s = CacheManagerSection.newBuilder()
        .setNumPools(0)
        .setNumDirectives(0)
        .setNextDirectiveId(1)
        .build();
    OutputStream sec = beginSection(raw);
    s.writeDelimitedTo(sec);
    endSection(sec, SectionName.CACHE_MANAGER);
}
/**
 * Emits the STRING_TABLE section: a header carrying the entry count,
 * followed by one delimited (id, string) record per resolved UGI name.
 */
void writeStringTableSection() throws IOException {
    Map<Integer, String> ugiMap = ugis.ugiMap();
    StringTableSection header = StringTableSection.newBuilder()
        .setNumEntry(ugiMap.size())
        .build();
    OutputStream sec = beginSection(raw);
    header.writeDelimitedTo(sec);
    for (Map.Entry<Integer, String> e : ugiMap.entrySet()) {
      StringTableSection.Entry.newBuilder()
          .setId(e.getKey())
          .setStr(e.getValue())
          .build()
          .writeDelimitedTo(sec);
    }
    endSection(sec, SectionName.STRING_TABLE);
}
/** Debug summary of the writer's configuration and current write position. */
@Override
public synchronized String toString() {
    return "{ codec=\"" + compress.getImageCodec()
        + "\", startBlock=" + startBlock
        + ", curBlock=" + curBlock
        + ", startInode=" + startInode
        + ", curInode=" + curInode
        + ", ugi=" + ugis
        + ", blockIds=" + blockIds
        + ", offset=" + raw.pos
        + " }";
}
/**
 * OutputStream decorator that counts the bytes written through it. The
 * running count ({@code pos}) is read by the enclosing writer to report the
 * current stream offset (see the outer {@code toString()}'s use of
 * {@code raw.pos}).
 */
static class TrackedOutputStream<T extends OutputStream>
    extends FilterOutputStream {

  /** Bytes forwarded to the wrapped stream so far. */
  private long pos = 0L;

  TrackedOutputStream(T out) {
    super(out);
  }

  /** Returns the wrapped stream, typed as it was supplied at construction. */
  @SuppressWarnings("unchecked")
  public T getInner() {
    return (T) out;
  }

  @Override
  public void write(int b) throws IOException {
    out.write(b);
    ++pos;
  }

  @Override
  public void write(byte[] b) throws IOException {
    // Route through the ranged overload so the count is updated exactly once.
    write(b, 0, b.length);
  }

  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    // Write the whole range directly instead of inheriting
    // FilterOutputStream's byte-at-a-time loop, and count the span in one step.
    out.write(b, off, len);
    pos += len;
  }

  // The former flush()/close() overrides only delegated to super and were
  // removed; the inherited FilterOutputStream behavior is identical.
}
/**
 * Configurable options for image generation mapping pluggable components.
 * Values may be populated from a Hadoop Configuration via {@link #setConf}
 * and/or overridden through the fluent setters; the most recent assignment
 * wins.
 */
public static class Options implements Configurable {
// Configuration keys understood by setConf().
public static final String START_INODE = "hdfs.image.writer.start.inode";
public static final String CACHE_ENTRY = "hdfs.image.writer.cache.entries";
public static final String UGI_CLASS = "hdfs.image.writer.ugi.class";
public static final String BLOCK_RESOLVER_CLASS =
"hdfs.image.writer.blockresolver.class";
// Output destination: a directory path, or an explicit stream if set.
private Path outdir;
private Configuration conf;
private OutputStream outStream;
// Maximum number of directory-cache entries.
private int maxdircache;
// First block id / inode id to assign.
private long startBlock;
private long startInode;
// Pluggable components, held either as instances or as classes to instantiate.
private UGIResolver ugis;
private Class<? extends UGIResolver> ugisClass;
private BlockAliasMap<FileRegion> blocks;
private String clusterID;
private String blockPoolID;
@SuppressWarnings("rawtypes")
private Class<? extends BlockAliasMap> aliasMap;
private BlockResolver blockIds;
private Class<? extends BlockResolver> blockIdsClass;
// Defaults to no compression.
private FSImageCompression compress =
FSImageCompression.createNoopCompression();
protected Options() {
}
/**
 * Populates defaults from the configuration: output directory, starting
 * block/inode ids, directory-cache size, and the pluggable UGI /
 * alias-map / block-resolver classes. Resets clusterID and blockPoolID
 * to empty strings.
 */
@Override
public void setConf(Configuration conf) {
this.conf = conf;
String def = new File("hdfs/name").toURI().toString();
outdir = new Path(conf.get(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, def));
startBlock = conf.getLong(FixedBlockResolver.START_BLOCK, (1L << 30) + 1);
startInode = conf.getLong(START_INODE, (1L << 14) + 1);
maxdircache = conf.getInt(CACHE_ENTRY, 100);
ugisClass = conf.getClass(UGI_CLASS,
SingleUGIResolver.class, UGIResolver.class);
aliasMap = conf.getClass(
DFSConfigKeys.DFS_PROVIDED_ALIASMAP_CLASS,
NullBlockAliasMap.class, BlockAliasMap.class);
blockIdsClass = conf.getClass(BLOCK_RESOLVER_CLASS,
FixedBlockResolver.class, BlockResolver.class);
clusterID = "";
blockPoolID = "";
}
@Override
public Configuration getConf() {
return conf;
}
/** Sets the output directory from a path string. */
public Options output(String out) {
this.outdir = new Path(out);
return this;
}
/** Supplies an explicit output stream for the image. */
public Options outStream(OutputStream outStream) {
this.outStream = outStream;
return this;
}
/** Selects the image compression codec by name. */
public Options codec(String codec) throws IOException {
this.compress = FSImageCompression.createCompression(getConf(), codec);
return this;
}
/** Sets the maximum number of cached directory entries. */
public Options cache(int nDirEntries) {
this.maxdircache = nDirEntries;
return this;
}
/** Supplies a UGI resolver instance. */
public Options ugi(UGIResolver ugis) {
this.ugis = ugis;
return this;
}
/** Supplies a UGI resolver class to be instantiated. */
public Options ugi(Class<? extends UGIResolver> ugisClass) {
this.ugisClass = ugisClass;
return this;
}
/** Supplies a block resolver instance. */
public Options blockIds(BlockResolver blockIds) {
this.blockIds = blockIds;
return this;
}
/** Supplies a block resolver class to be instantiated. */
public Options blockIds(Class<? extends BlockResolver> blockIdsClass) {
this.blockIdsClass = blockIdsClass;
return this;
}
/** Supplies a block alias map instance. */
public Options blocks(BlockAliasMap<FileRegion> blocks) {
this.blocks = blocks;
return this;
}
/** Supplies a block alias map class to be instantiated. */
@SuppressWarnings("rawtypes")
public Options blocks(Class<? extends BlockAliasMap> blocksClass) {
this.aliasMap = blocksClass;
return this;
}
/** Sets the cluster id recorded in the image. */
public Options clusterID(String clusterID) {
this.clusterID = clusterID;
return this;
}
/** Sets the block pool id recorded in the image. */
public Options blockPoolID(String blockPoolID) {
this.blockPoolID = blockPoolID;
return this;
}
}
}
| |
/*
* Copyright 2017 Robert Winkler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.swagger2markup.internal.component;
import io.github.swagger2markup.Swagger2MarkupConfig;
import io.github.swagger2markup.Swagger2MarkupConverter;
import io.github.swagger2markup.assertions.DiffUtils;
import io.github.swagger2markup.builder.Swagger2MarkupConfigBuilder;
import io.github.swagger2markup.internal.resolver.DefinitionDocumentResolverFromOperation;
import io.github.swagger2markup.internal.resolver.SecurityDocumentResolver;
import io.github.swagger2markup.internal.utils.PathUtils;
import io.github.swagger2markup.markup.builder.MarkupDocBuilder;
import io.github.swagger2markup.model.SwaggerPathOperation;
import io.swagger.models.Swagger;
import org.apache.commons.io.FileUtils;
import org.junit.Test;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class PathOperationComponentTest extends AbstractComponentTest {

    /**
     * Runs the PathOperationComponent end-to-end for one swagger path and
     * diffs the generated markup against the expected file.
     *
     * The five @Test methods below were near-verbatim copies of each other;
     * they now delegate here and differ only in their inputs.
     *
     * @param componentName name used for the output/expected/report files
     * @param yamlResource  classpath resource of the swagger spec to load
     * @param apiPath       swagger path (e.g. "/pets") whose first operation is rendered
     * @param configMap     extra converter properties, or null for defaults
     */
    private void assertComponentMatchesExpected(String componentName, String yamlResource,
                                                String apiPath, Map<String, String> configMap)
            throws URISyntaxException {
        Path outputDirectory = getOutputFile(componentName);
        FileUtils.deleteQuietly(outputDirectory.toFile());

        //Given
        Path file = Paths.get(PathOperationComponentTest.class.getResource(yamlResource).toURI());
        Swagger2MarkupConverter converter;
        if (configMap == null) {
            converter = Swagger2MarkupConverter.from(file).build();
        } else {
            Swagger2MarkupConfig config = new Swagger2MarkupConfigBuilder(configMap).build();
            converter = Swagger2MarkupConverter.from(file).withConfig(config).build();
        }
        Swagger swagger = converter.getContext().getSchema();
        io.swagger.models.Path path = swagger.getPaths().get(apiPath);
        List<SwaggerPathOperation> pathOperations = PathUtils.toPathOperationsList(apiPath, path);
        Swagger2MarkupConverter.SwaggerContext context = converter.getContext();
        MarkupDocBuilder markupDocBuilder = context.createMarkupDocBuilder();

        //When
        markupDocBuilder = new PathOperationComponent(context,
                new DefinitionDocumentResolverFromOperation(context),
                new SecurityDocumentResolver(context)).
                apply(markupDocBuilder, PathOperationComponent.parameters(pathOperations.get(0)));
        markupDocBuilder.writeToFileWithoutExtension(outputDirectory, StandardCharsets.UTF_8);

        //Then
        Path expectedFile = getExpectedFile(componentName);
        DiffUtils.assertThatFileIsEqual(expectedFile, outputDirectory, getReportName(componentName));
    }

    /** Converter properties that enable generated examples. */
    private static Map<String, String> generatedExamplesEnabled() {
        Map<String, String> configMap = new HashMap<>();
        configMap.put("swagger2markup.generatedExamplesEnabled", "true");
        return configMap;
    }

    @Test
    public void testPathOperationComponent() throws URISyntaxException {
        assertComponentMatchesExpected("path_operation",
                "/yaml/swagger_petstore.yaml", "/pets", null);
    }

    @Test
    public void testInlineSchema() throws URISyntaxException {
        assertComponentMatchesExpected("path_operation_inline_schema",
                "/yaml/swagger_inlineSchema.yaml", "/LaunchCommand", null);
    }

    @Test
    public void testWithPathParamExample() throws URISyntaxException {
        assertComponentMatchesExpected("path_operation_with_path_param_example",
                "/yaml/swagger_petstore.yaml", "/pets/{petId}", generatedExamplesEnabled());
    }

    @Test
    public void testWithQueryParamExample() throws URISyntaxException {
        assertComponentMatchesExpected("path_operation_with_query_param_example",
                "/yaml/swagger_petstore.yaml", "/pets/findByTags", generatedExamplesEnabled());
    }

    @Test
    public void testWithBodyParamExample() throws URISyntaxException {
        assertComponentMatchesExpected("path_operation_with_body_param_example",
                "/yaml/swagger_petstore_body_examples.yaml", "/users", generatedExamplesEnabled());
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jbpm;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.impl.DefaultProducer;
import org.apache.camel.util.ExchangeHelper;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.manager.RuntimeEngine;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.task.TaskService;
import org.kie.api.task.model.Attachment;
import org.kie.api.task.model.Content;
import org.kie.api.task.model.OrganizationalEntity;
import org.kie.api.task.model.Status;
import org.kie.api.task.model.Task;
import org.kie.api.task.model.TaskSummary;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Camel producer for the jBPM component. Each incoming exchange is mapped to
 * one {@link Operation} constant (see {@link #getOperation}) which is then
 * executed against the {@link KieSession} and {@link TaskService} of the
 * runtime engine supplied by the endpoint.
 */
public class JBPMProducer extends DefaultProducer {
private static final transient Logger LOGGER = LoggerFactory.getLogger(JBPMProducer.class);
// Session/task handles are resolved from the runtime engine in doStart().
private KieSession kieSession;
private TaskService taskService;
private JBPMConfiguration configuration;
private RuntimeEngine runtimeEngine;
public JBPMProducer(JBPMEndpoint endpoint, RuntimeEngine runtimeEngine) {
super(endpoint);
this.configuration = endpoint.getConfiguration();
this.runtimeEngine = runtimeEngine;
}
/** Caches the KieSession and TaskService handles from the runtime engine. */
@Override
protected void doStart() throws Exception {
LOGGER.trace("starting producer");
kieSession = runtimeEngine.getKieSession();
taskService = runtimeEngine.getTaskService();
super.doStart();
LOGGER.trace("started producer");
}
/** Drops the cached handles; the runtime engine itself is managed elsewhere. */
@Override
protected void doStop() throws Exception {
super.doStop();
if (kieSession != null) {
kieSession = null;
}
if (taskService != null) {
taskService = null;
}
}
public void process(Exchange exchange) throws Exception {
// Resolve the requested operation and delegate execution to it.
getOperation(exchange).execute(kieSession, taskService, configuration, exchange);
}
/**
 * Resolves the Operation for this exchange. Precedence: the OPERATION
 * header, then the configured operation, then {@code startProcess}. The
 * resolved string carries the {@code JBPMConstants.OPERATION} prefix, which
 * is stripped before {@code Enum.valueOf} — so a header value is expected
 * to include that prefix as well (NOTE(review): confirm against the
 * component documentation).
 */
Operation getOperation(Exchange exchange) {
String operation = exchange.getIn().getHeader(JBPMConstants.OPERATION, String.class);
if (operation == null && configuration.getOperation() != null) {
operation = JBPMConstants.OPERATION + configuration.getOperation();
}
if (operation == null) {
operation = JBPMConstants.OPERATION + Operation.startProcess;
}
LOGGER.trace("Operation: [{}]", operation);
return Operation.valueOf(operation.substring(JBPMConstants.OPERATION.length()));
}
/**
 * One constant per supported jBPM operation. Each constant implements
 * {@link #execute}; arguments are fetched through the shared accessors
 * below, which read the message header first and fall back to the endpoint
 * configuration. Operations that produce a value publish it on the
 * exchange via {@link #setResult}.
 */
enum Operation {
//PROCESS OPERATIONS
startProcess {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
ProcessInstance processInstance = kieSession.startProcess(getProcessId(configuration, exchange), getParameters(configuration, exchange));
setResult(exchange, processInstance);
}
}, abortProcessInstance {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
kieSession.abortProcessInstance(safe(getProcessInstanceId(configuration, exchange)));
}
}, signalEvent {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
// With a process-instance id the signal is targeted; otherwise it is session-wide.
Long processInstanceId = getProcessInstanceId(configuration, exchange);
if (processInstanceId != null) {
kieSession.signalEvent(getEventType(configuration, exchange), getEvent(configuration, exchange), processInstanceId);
} else {
kieSession.signalEvent(getEventType(configuration, exchange), getEvent(configuration, exchange));
}
}
}, getProcessInstance {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
ProcessInstance processInstance = kieSession.getProcessInstance(safe(getProcessInstanceId(configuration, exchange)));
setResult(exchange, processInstance);
}
}, getProcessInstances {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
Collection<ProcessInstance> processInstances = kieSession.getProcessInstances();
setResult(exchange, processInstances);
}
},
//RULE OPERATIONS
fireAllRules {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
// Optional cap on the number of rules fired in this call.
Integer max = getMaxNumber(configuration, exchange);
int rulesFired;
if (max != null) {
rulesFired = kieSession.fireAllRules(max);
} else {
rulesFired = kieSession.fireAllRules();
}
setResult(exchange, rulesFired);
}
}, getFactCount {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
long factCount = kieSession.getFactCount();
setResult(exchange, factCount);
}
}, getGlobal {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
Object global = kieSession.getGlobal(getIdentifier(configuration, exchange));
setResult(exchange, global);
}
}, setGlobal {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
kieSession.setGlobal(getIdentifier(configuration, exchange), getValue(configuration, exchange));
}
},
//WORK ITEM OPERATIONS
abortWorkItem {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
kieSession.getWorkItemManager().abortWorkItem(safe(getWorkItemId(configuration, exchange)));
}
}, completeWorkItem {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
kieSession.getWorkItemManager().completeWorkItem(safe(getWorkItemId(configuration, exchange)), getParameters(configuration, exchange));
}
},
//TASK OPERATIONS
activateTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.activate(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange));
}
}, addTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
long taskId = taskService.addTask(getTask(configuration, exchange), getParameters(configuration, exchange));
setResult(exchange, taskId);
}
}, claimNextAvailableTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.claimNextAvailable(getUserId(configuration, exchange), getLanguage(configuration, exchange));
}
}, claimTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.claim(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange));
}
}, completeTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.complete(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange), getParameters(configuration, exchange));
}
}, delegateTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.delegate(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange), getTargetUserId(configuration, exchange));
}
}, exitTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.exit(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange));
}
}, failTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.fail(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange), getParameters(configuration, exchange));
}
}, getAttachment {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
Attachment attachment = taskService.getAttachmentById(safe(getAttachmentId(configuration, exchange)));
setResult(exchange, attachment);
}
}, getContent {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
Content content = taskService.getContentById(safe(getContentId(configuration, exchange)));
setResult(exchange, content);
}
}, getTasksAssignedAsBusinessAdministrator {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
List<TaskSummary> taskSummaries = taskService.getTasksAssignedAsBusinessAdministrator(getUserId(configuration, exchange), getLanguage(configuration, exchange));
setResult(exchange, taskSummaries);
}
}, getTasksAssignedAsPotentialOwnerByStatus {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
// NOTE(review): the returned list is discarded here (no setResult), unlike
// the sibling query operations — confirm whether that is intentional.
taskService.getTasksAssignedAsPotentialOwnerByStatus(getUserId(configuration, exchange), getStatuses(configuration, exchange), getLanguage(configuration, exchange));
}
}, getTaskByWorkItem {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
Task task = taskService.getTaskByWorkItemId(safe(getWorkItemId(configuration, exchange)));
setResult(exchange, task);
}
}, getTaskBy {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
Task task = taskService.getTaskById(safe(getTaskId(configuration, exchange)));
setResult(exchange, task);
}
}, getTaskContent {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
Map<String, Object> taskContent = taskService.getTaskContent(safe(getTaskId(configuration, exchange)));
setResult(exchange, taskContent);
}
}, getTasksByProcessInstance {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
List<Long> processInstanceIds = taskService.getTasksByProcessInstanceId(safe(getProcessInstanceId(configuration, exchange)));
setResult(exchange, processInstanceIds);
}
}, getTasksByStatusByProcessInstance {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
List<TaskSummary> taskSummaryList = taskService.getTasksByStatusByProcessInstanceId(
safe(getProcessInstanceId(configuration, exchange)), getStatuses(configuration, exchange),
getLanguage(configuration, exchange));
setResult(exchange, taskSummaryList);
}
}, getTasksOwned {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
List<TaskSummary> summaryList = taskService.getTasksOwned(getUserId(configuration, exchange), getLanguage(configuration, exchange));
setResult(exchange, summaryList);
}
}, nominateTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.nominate(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange), getEntities(configuration, exchange));
}
}, releaseTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.release(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange));
}
}, resumeTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.resume(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange));
}
}, skipTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.skip(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange));
}
}, startTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.start(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange));
}
}, stopTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.stop(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange));
}
}, suspendTask {
@Override
void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange) {
taskService.suspend(safe(getTaskId(configuration, exchange)), getUserId(configuration, exchange));
}
};
// ---- Argument accessors: message header first, endpoint configuration as fallback. ----
List<Status> getStatuses(JBPMConfiguration configuration, Exchange exchange) {
List<Status> statusList = exchange.getIn().getHeader(JBPMConstants.STATUS_LIST, List.class);
if (statusList == null) {
statusList = configuration.getStatuses();
}
return statusList;
}
List<OrganizationalEntity> getEntities(JBPMConfiguration configuration, Exchange exchange) {
List<OrganizationalEntity> entityList = exchange.getIn().getHeader(JBPMConstants.ENTITY_LIST, List.class);
if (entityList == null) {
entityList = configuration.getEntities();
}
return entityList;
}
Long getAttachmentId(JBPMConfiguration configuration, Exchange exchange) {
Long attachmentId = exchange.getIn().getHeader(JBPMConstants.ATTACHMENT_ID, Long.class);
if (attachmentId == null) {
attachmentId = configuration.getAttachmentId();
}
return attachmentId;
}
Long getContentId(JBPMConfiguration configuration, Exchange exchange) {
Long contentId = exchange.getIn().getHeader(JBPMConstants.CONTENT_ID, Long.class);
if (contentId == null) {
contentId = configuration.getContentId();
}
return contentId;
}
String getTargetUserId(JBPMConfiguration configuration, Exchange exchange) {
String userId = exchange.getIn().getHeader(JBPMConstants.TARGET_USER_ID, String.class);
if (userId == null) {
userId = configuration.getTargetUserId();
}
return userId;
}
String getLanguage(JBPMConfiguration configuration, Exchange exchange) {
String language = exchange.getIn().getHeader(JBPMConstants.LANGUAGE, String.class);
if (language == null) {
language = configuration.getLanguage();
}
return language;
}
Task getTask(JBPMConfiguration configuration, Exchange exchange) {
Task task = exchange.getIn().getHeader(JBPMConstants.TASK, Task.class);
if (task == null) {
task = configuration.getTask();
}
return task;
}
String getUserId(JBPMConfiguration configuration, Exchange exchange) {
String userId = exchange.getIn().getHeader(JBPMConstants.USER_ID, String.class);
if (userId == null) {
userId = configuration.getUserId();
}
return userId;
}
Long getTaskId(JBPMConfiguration configuration, Exchange exchange) {
Long taskId = exchange.getIn().getHeader(JBPMConstants.TASK_ID, Long.class);
if (taskId == null) {
taskId = configuration.getTaskId();
}
return taskId;
}
Long getWorkItemId(JBPMConfiguration configuration, Exchange exchange) {
Long workItemId = exchange.getIn().getHeader(JBPMConstants.WORK_ITEM_ID, Long.class);
if (workItemId == null) {
workItemId = configuration.getWorkItemId();
}
return workItemId;
}
String getIdentifier(JBPMConfiguration configuration, Exchange exchange) {
String identifier = exchange.getIn().getHeader(JBPMConstants.IDENTIFIER, String.class);
if (identifier == null) {
identifier = configuration.getIdentifier();
}
return identifier;
}
Integer getMaxNumber(JBPMConfiguration configuration, Exchange exchange) {
Integer max = exchange.getIn().getHeader(JBPMConstants.MAX_NUMBER, Integer.class);
if (max == null) {
max = configuration.getMaxNumber();
}
return max;
}
Object getEvent(JBPMConfiguration configuration, Exchange exchange) {
String event = exchange.getIn().getHeader(JBPMConstants.EVENT, String.class);
if (event == null) {
event = configuration.getEvent();
}
return event;
}
String getEventType(JBPMConfiguration configuration, Exchange exchange) {
String eventType = exchange.getIn().getHeader(JBPMConstants.EVENT_TYPE, String.class);
if (eventType == null) {
eventType = configuration.getEventType();
}
return eventType;
}
String getProcessId(JBPMConfiguration configuration, Exchange exchange) {
String processId = exchange.getIn().getHeader(JBPMConstants.PROCESS_ID, String.class);
if (processId == null) {
processId = configuration.getProcessId();
}
return processId;
}
Long getProcessInstanceId(JBPMConfiguration configuration, Exchange exchange) {
Long processInstanceId = exchange.getIn().getHeader(JBPMConstants.PROCESS_INSTANCE_ID, Long.class);
if (processInstanceId == null) {
processInstanceId = configuration.getProcessInstanceId();
}
return processInstanceId;
}
Map<String, Object> getParameters(JBPMConfiguration configuration, Exchange exchange) {
Map<String, Object> parameters = exchange.getIn().getHeader(JBPMConstants.PARAMETERS, Map.class);
if (parameters == null) {
parameters = configuration.getParameters();
}
return parameters;
}
Object getValue(JBPMConfiguration configuration, Exchange exchange) {
Object value = exchange.getIn().getHeader(JBPMConstants.VALUE);
if (value == null) {
value = configuration.getValue();
}
return value;
}
// Picks OUT for out-capable exchanges so the result does not overwrite the request body.
Message getResultMessage(Exchange exchange) {
return ExchangeHelper.isOutCapable(exchange) ? exchange.getOut() : exchange.getIn();
}
// Maps a null id to 0 for APIs taking a primitive long; 0 is presumably an
// unused/sentinel id — NOTE(review): verify.
long safe(Long aLong) {
return aLong != null ? aLong : 0;
}
void setResult(Exchange exchange, Object result) {
getResultMessage(exchange).setBody(result);
}
abstract void execute(KieSession kieSession, TaskService taskService, JBPMConfiguration configuration, Exchange exchange);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.river.container.examples.browser;
import java.lang.reflect.Field;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.Icon;
/**
 * Tree node representing one object (service, entry, field, or array
 * element) in the browser's attribute tree.
 *
 * @author Sun Microsystems, Inc.
 *
 * @version 0.2 06/04/98
 *
 */
class ObjectNode extends DefaultMutableTreeNode implements java.io.Serializable {
// "classname", "classname fieldName" or "classname fieldName=value"
/**
 * Display name of the node, in one of the formats noted above.
 * @serial
 */
private String name;
/**
 * The object this node displays.
 * @serial
 */
private Object obj;
/**
 * Declared type used for the node label.
 * @serial
 */
private Class clazz;
/**
 * Field name, or null when the node is not a field.
 * @serial
 */
private String fieldName;
/**
 * Index within the parent array; -1 when not an array element.
 * @serial
 */
private int arrayIndex = -1;
/**
 * @serial
 */
private boolean editable = false;
/**
 * True when the node may not have children.
 * @serial
 */
private boolean isLeaf;
/**
 * @serial
 */
private boolean isAdministrable; // root level
/**
 * @serial
 */
private boolean isControllable; // entry level
/**
 * @serial
 */
private boolean isRoot = false;
/**
 * @serial
 */
private boolean isEntryTop = false;
// icons
private static Icon[] icons = new Icon[6];
static {
icons[0] = MetalIcons.getBlueFolderIcon(); // Administrable Service, Controllable Attribute
icons[1] = MetalIcons.getGrayFolderIcon(); // Non-administrable Service
icons[2] = MetalIcons.getOrangeFolderIcon(); // Uncontrollable Attribute
icons[3] = MetalIcons.getBlueFileIcon(); // Administrable Service, Controllable Attribute
icons[4] = MetalIcons.getGrayFileIcon(); // Non-administrable Service
icons[5] = MetalIcons.getOrangeFileIcon(); // Uncontrollable Attribute
}
/**
Constructor for a root node.
*/
public ObjectNode(boolean isAdministrable) {
this("Root node", "".getClass(), null, -1, false);
this.isAdministrable = isAdministrable;
this.isRoot = true;
this.isEntryTop = true;
}
/**
Constructor for an entry (attribute) top nodes.
*/
public ObjectNode(Object obj, boolean isControllable) {
this(obj, obj.getClass(), null, -1, false);
this.isControllable = isControllable;
this.isEntryTop = true;
}
/**
Constructor for an ordinary field.
*/
public ObjectNode(Object obj, Class clazz, String fieldName, boolean isLeaf) {
this(obj, clazz, fieldName, -1, isLeaf);
}
/**
Constructor for an array element.
*/
public ObjectNode(Object obj, Class clazz, String fieldName, int arrayIndex, boolean isLeaf) {
this.obj = obj;
this.clazz = clazz;
this.fieldName = fieldName;
this.arrayIndex = arrayIndex;
this.isLeaf = isLeaf;
super.setAllowsChildren(! isLeaf);
setNodeName();
}
private void setNodeName() {
name = Introspector.getTypename(clazz, false);
if(fieldName != null)
name += " " + fieldName;
if(isLeaf) {
//Class clazz = obj.getClass();
String value = "";
if(arrayIndex >= 0)
value += ("[" + arrayIndex + "]");
value += "=";
if(clazz.isPrimitive()){
value += "" + obj;
editable = false;
} else if(Introspector.isWrapper(clazz)){
// Wrapper objects
value += "" + obj;
editable = true;
} else if(Introspector.isString(clazz)) {
value += "\"" + obj + "\"";
editable = true;
} else {
value += (obj == null ? "null" : obj.toString());
}
name += value;
} else if(obj == null) {
name += "=null";
}
super.setUserObject(name);
}
public void add(ObjectNode child){
child.setAdministrable(isAdministrable);
if(! isRoot){
child.setControllable(isControllable);
}
super.add(child);
}
public Object getEntryTop() {
ObjectNode snode = this;
do {
snode = (ObjectNode) snode.getParent();
} while(! snode.isEntryTop());
return snode.getObject();
}
protected boolean isEntryTop() {
return isEntryTop;
}
public void setObjectRecursive() throws NoSuchFieldException, IllegalAccessException {
ObjectNode pnode = this;
do {
pnode = (ObjectNode) pnode.getParent();
Object pobj = pnode.getObject();
// Needs to think about array modifications
Field f = pobj.getClass().getField(fieldName);
f.set(pobj, obj);
} while(! pnode.isEntryTop());
}
public String getTitle() {
return name;
}
public Icon getIcon() {
/*
if(isAdministrable){
if(isControllable){
if(isLeaf) return icons[3];
else return icons[0];
} else {
if(isLeaf) return icons[5];
else return icons[2];
}
} else {
if(isLeaf) return icons[4];
else return icons[1];
}
*/
if(isAdministrable && isControllable)
if(isLeaf) return icons[3];
else return icons[0];
else
if(isLeaf) return icons[4];
else return icons[1];
}
// Overwrite
public void setUserObject(Object obj){
if(obj instanceof String)
name = (String) obj;
super.setUserObject(obj);
}
public Object getUserObject(){
return name;
}
public String toString() {
return name;
}
public String getFieldName() {
return fieldName;
}
public Object getObject() {
return obj;
}
public boolean isEditable() {
return editable;
}
public boolean isAdministrable() {
return isAdministrable;
}
public void setAdministrable(boolean val) {
isAdministrable = val;
}
public boolean isControllable() {
return isControllable;
}
public void setControllable(boolean val) {
isControllable = val;
}
public Object setValue(Object val) throws NumberFormatException {
String clazzName = clazz.getName();
Object newObj = null;
if(val instanceof String || val == null) {
String sval = (String) val;
if(clazzName.equals("java.lang.Integer"))
newObj = new Integer(sval);
else if(clazzName.equals("java.lang.Boolean"))
newObj = new Boolean(sval);
else if(clazzName.equals("java.lang.Byte"))
newObj = new Byte(sval);
else if(clazzName.equals("java.lang.Character"))
newObj = new Character(sval.charAt(0));
else if(clazzName.equals("java.lang.Double"))
newObj = new Double(sval);
else if(clazzName.equals("java.lang.Float"))
newObj = new Float(sval);
else if(clazzName.equals("java.lang.Long"))
newObj = new Long(sval);
else if(clazzName.equals("java.lang.String"))
newObj = new String(sval); // clone
} else if(val.getClass().equals(obj.getClass())) {
// same class type
newObj = val;
}
Object oldObj = obj;
obj = newObj;
setNodeName();
return oldObj;
}
}
| |
/*
* Copyright (c) www.bugull.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bugull.mongo.fs;
import com.bugull.mongo.annotations.Default;
import com.bugull.mongo.utils.StreamUtil;
import com.bugull.mongo.utils.StringUtil;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.gridfs.GridFS;
import com.mongodb.gridfs.GridFSDBFile;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Get a file from GridFS by HTTP.
 *
 * <p>Supports conditional requests (If-Modified-Since) for cacheable file
 * types, optional Content-MD5 headers, and byte-range ("Range") requests
 * including suffix ranges ("bytes=-N").
 *
 * @author Frank Wen(xbwen@hotmail.com)
 */
public class HttpFileGetter {
    private final static long ONE_YEAR_SECONDS = 365L * 24L * 60L * 60L;
    private final static long ONE_YEAR_MILLISECONDS = ONE_YEAR_SECONDS * 1000L;
    private HttpServletRequest request;
    private HttpServletResponse response;
    private String connection;
    private String bucket;
    private boolean contentMD5;
    private final DBObject query = new BasicDBObject(ImageUploader.DIMENSION, null);
    public HttpFileGetter(HttpServletRequest request, HttpServletResponse response){
        this.request = request;
        this.response = response;
    }
    /**
     * Looks up the named file in GridFS and writes it to the HTTP response.
     * Sends 404 if the file is not found; otherwise delegates to a full or
     * partial (Range) response. Streams are always closed on exit.
     *
     * @param filename the GridFS filename to serve
     */
    public void response(String filename) throws ServletException, IOException {
        query.put(BuguFS.FILENAME, filename);
        if(StringUtil.isEmpty(connection)){
            connection = Default.NAME;
        }
        if(StringUtil.isEmpty(bucket)){
            bucket = GridFS.DEFAULT_BUCKET;
        }
        BuguFS fs = BuguFSFactory.getInstance().create(connection, bucket);
        GridFSDBFile f = fs.findOne(query);
        if(f == null){
            response.setStatus(HttpServletResponse.SC_NOT_FOUND); //404
            return;
        }
        InputStream is = f.getInputStream();
        OutputStream os = response.getOutputStream();
        try{
            int fileLength = (int)f.getLength();
            String ext = FileTypeUtil.getExtention(filename);
            response.setContentType(FileTypeUtil.getContentType(ext));
            String range = request.getHeader("Range");
            if(StringUtil.isEmpty(range)){
                //normal http request, no "range" in header.
                responseWholeFile(f, os, fileLength, ext);
            }else{
                //has "range" in header
                responseRange(f, is, os, fileLength, range);
            }
        }finally{
            StreamUtil.safeClose(is);
            StreamUtil.safeClose(os);
        }
    }
    /**
     * Serves the complete file (200), honouring If-Modified-Since and
     * emitting one-year cache headers for cacheable file types.
     */
    private void responseWholeFile(GridFSDBFile f, OutputStream os, int fileLength, String ext)
            throws IOException {
        response.setStatus(HttpServletResponse.SC_OK);
        response.setContentLength(fileLength);
        if(contentMD5){
            response.setHeader("Content-MD5", f.getMD5());
        }
        if(FileTypeUtil.needCache(ext)){
            String modifiedSince = request.getHeader("If-Modified-Since");
            //SimpleDateFormat is not thread-safe, so create it per request
            DateFormat df = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z", Locale.ENGLISH);
            df.setTimeZone(TimeZone.getTimeZone("GMT"));
            Date uploadDate = f.getUploadDate();
            String lastModified = df.format(uploadDate);
            if(modifiedSince != null){
                Date modifiedDate = null;
                Date sinceDate = null;
                try{
                    modifiedDate = df.parse(lastModified);
                    sinceDate = df.parse(modifiedSince);
                }catch(ParseException ex){
                    //just ignore the exception: fall through and send the full file
                }
                if(modifiedDate!=null && sinceDate!=null && modifiedDate.compareTo(sinceDate) <= 0){
                    response.setStatus(HttpServletResponse.SC_NOT_MODIFIED); //Not Modified
                    return;
                }
            }
            response.setHeader("Cache-Control", "max-age=" + ONE_YEAR_SECONDS);
            response.setHeader("Last-Modified", lastModified);
            response.setDateHeader("Expires", uploadDate.getTime() + ONE_YEAR_MILLISECONDS);
        }else{
            response.setHeader("Pragma","no-cache");
            response.setHeader("Cache-Control","no-cache");
            response.setDateHeader("Expires", 0);
        }
        f.writeTo(os);
    }
    /**
     * Serves a partial response (206) for the given Range header value,
     * e.g. "bytes=0-499", "bytes=500-" or "bytes=-500".
     */
    private void responseRange(GridFSDBFile f, InputStream is, OutputStream os,
            int fileLength, String range) throws IOException {
        range = range.substring("bytes=".length());
        if(StringUtil.isEmpty(range)){
            return;
        }
        int begin = 0;
        int end = fileLength - 1;
        if(range.startsWith("-")){
            //suffix range: "-N" means the last N bytes.
            //BUGFIX: "-N".split("-") yields {"", "N"}, so the suffix case must
            //be handled before splitting, otherwise Integer.parseInt("") throws.
            begin = fileLength - Integer.parseInt(range.substring(1));
        }else{
            String[] rangeArray = range.split("-");
            begin = Integer.parseInt(rangeArray[0]);
            if(rangeArray.length == 2){
                end = Integer.parseInt(rangeArray[1]);
            }
        }
        response.setStatus(HttpServletResponse.SC_PARTIAL_CONTENT);
        int contentLength = end - begin + 1;
        response.setContentLength(contentLength);
        response.setHeader("Content-Range", "bytes " + begin + "-" + end + "/" + fileLength);
        //InputStream.skip() may skip fewer bytes than requested, so loop
        long toSkip = begin;
        while(toSkip > 0){
            long skipped = is.skip(toSkip);
            if(skipped <= 0){
                break;
            }
            toSkip -= skipped;
        }
        int read = -1;
        int bufferSize = (int)f.getChunkSize();
        byte[] buffer = new byte[bufferSize];
        int remain = contentLength;
        int readSize = Math.min(bufferSize, remain);
        if(contentMD5){
            //use ByteArrayOutputStream to compute MD5 over the whole range
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            while( (read = is.read(buffer, 0, readSize)) != -1 ){
                baos.write(buffer, 0, read);
                remain -= read;
                if(remain <= 0){
                    break;
                }
                readSize = Math.min(bufferSize, remain);
            }
            byte[] bytes = baos.toByteArray();
            String md5 = StringUtil.encodeMD5(bytes);
            if(! StringUtil.isEmpty(md5)){
                response.setHeader("Content-MD5", md5.toLowerCase());
            }
            os.write(bytes);
            os.flush();
        }
        else{
            //MD5 is no need, just output the data
            while( (read = is.read(buffer, 0, readSize)) != -1 ){
                os.write(buffer, 0, read);
                os.flush();
                remain -= read;
                if(remain <= 0){
                    break;
                }
                readSize = Math.min(bufferSize, remain);
            }
        }
    }
    public void setDimension(String dimension) {
        query.put(ImageUploader.DIMENSION, dimension);
    }
    public void setAttribute(String attribute, Object value) {
        query.put(attribute, value);
    }
    public void setConnection(String connection) {
        this.connection = connection;
    }
    public void setBucket(String bucket) {
        this.bucket = bucket;
    }
    public void setContentMD5(boolean contentMD5) {
        this.contentMD5 = contentMD5;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.flink.translation;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.beam.runners.flink.FlinkRunner;
import org.apache.beam.runners.flink.translation.functions.FlinkAssignWindows;
import org.apache.beam.runners.flink.translation.functions.FlinkDoFnFunction;
import org.apache.beam.runners.flink.translation.functions.FlinkMergingNonShuffleReduceFunction;
import org.apache.beam.runners.flink.translation.functions.FlinkMergingPartialReduceFunction;
import org.apache.beam.runners.flink.translation.functions.FlinkMergingReduceFunction;
import org.apache.beam.runners.flink.translation.functions.FlinkMultiOutputDoFnFunction;
import org.apache.beam.runners.flink.translation.functions.FlinkMultiOutputPruningFunction;
import org.apache.beam.runners.flink.translation.functions.FlinkPartialReduceFunction;
import org.apache.beam.runners.flink.translation.functions.FlinkReduceFunction;
import org.apache.beam.runners.flink.translation.types.CoderTypeInformation;
import org.apache.beam.runners.flink.translation.types.KvKeySelector;
import org.apache.beam.runners.flink.translation.wrappers.SourceInputFormat;
import org.apache.beam.sdk.coders.CannotProvideCoderException;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.CoderRegistry;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.ListCoder;
import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.io.BoundedSource;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.CombineFnBase;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.Flatten;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.View;
import org.apache.beam.sdk.transforms.join.RawUnionValue;
import org.apache.beam.sdk.transforms.join.UnionCoder;
import org.apache.beam.sdk.transforms.reflect.DoFnSignature;
import org.apache.beam.sdk.transforms.reflect.DoFnSignatures;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.apache.beam.sdk.transforms.windowing.Window;
import org.apache.beam.sdk.transforms.windowing.WindowFn;
import org.apache.beam.sdk.util.Reshuffle;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.util.WindowingStrategy;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.PValue;
import org.apache.beam.sdk.values.TaggedPValue;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.operators.GroupCombineOperator;
import org.apache.flink.api.java.operators.GroupReduceOperator;
import org.apache.flink.api.java.operators.Grouping;
import org.apache.flink.api.java.operators.MapPartitionOperator;
import org.apache.flink.api.java.operators.SingleInputUdfOperator;
import org.apache.flink.util.Collector;
/**
* Translators for transforming {@link PTransform PTransforms} to
* Flink {@link DataSet DataSets}.
*/
class FlinkBatchTransformTranslators {
// --------------------------------------------------------------------------------------------
// Transform Translator Registry
// --------------------------------------------------------------------------------------------
  // Maps each supported PTransform subclass to the translator that converts
  // it into the corresponding Flink batch DataSet operation. Looked up by
  // exact transform class in getTranslator().
  @SuppressWarnings("rawtypes")
  private static final Map<
      Class<? extends PTransform>,
      FlinkBatchPipelineTranslator.BatchTransformTranslator> TRANSLATORS = new HashMap<>();
  // Registry of all batch translators, keyed by transform class.
  static {
    TRANSLATORS.put(View.CreatePCollectionView.class, new CreatePCollectionViewTranslatorBatch());
    TRANSLATORS.put(Combine.PerKey.class, new CombinePerKeyTranslatorBatch());
    TRANSLATORS.put(GroupByKey.class, new GroupByKeyTranslatorBatch());
    TRANSLATORS.put(Reshuffle.class, new ReshuffleTranslatorBatch());
    TRANSLATORS.put(Flatten.FlattenPCollectionList.class, new FlattenPCollectionTranslatorBatch());
    TRANSLATORS.put(Window.Bound.class, new WindowBoundTranslatorBatch());
    TRANSLATORS.put(ParDo.Bound.class, new ParDoBoundTranslatorBatch());
    TRANSLATORS.put(ParDo.BoundMulti.class, new ParDoBoundMultiTranslatorBatch());
    TRANSLATORS.put(Read.Bounded.class, new ReadSourceTranslatorBatch());
  }
static FlinkBatchPipelineTranslator.BatchTransformTranslator<?> getTranslator(
PTransform<?, ?> transform) {
return TRANSLATORS.get(transform.getClass());
}
private static class ReadSourceTranslatorBatch<T>
implements FlinkBatchPipelineTranslator.BatchTransformTranslator<Read.Bounded<T>> {
@Override
public void translateNode(Read.Bounded<T> transform, FlinkBatchTranslationContext context) {
String name = transform.getName();
BoundedSource<T> source = transform.getSource();
PCollection<T> output = context.getOutput(transform);
TypeInformation<WindowedValue<T>> typeInformation = context.getTypeInfo(output);
DataSource<WindowedValue<T>> dataSource = new DataSource<>(
context.getExecutionEnvironment(),
new SourceInputFormat<>(source, context.getPipelineOptions()),
typeInformation,
name);
context.setOutputDataSet(output, dataSource);
}
}
  /**
   * Translates a {@link Window.Bound} transform by flat-mapping the input
   * data set through a {@link FlinkAssignWindows} function that applies the
   * output collection's {@link WindowFn} to every element.
   */
  private static class WindowBoundTranslatorBatch<T>
      implements FlinkBatchPipelineTranslator.BatchTransformTranslator<Window.Bound<T>> {
    @Override
    public void translateNode(Window.Bound<T> transform, FlinkBatchTranslationContext context) {
      PValue input = context.getInput(transform);
      TypeInformation<WindowedValue<T>> resultTypeInfo =
          context.getTypeInfo(context.getOutput(transform));
      DataSet<WindowedValue<T>> inputDataSet = context.getInputDataSet(input);
      // The new windowing strategy is taken from the OUTPUT collection; its
      // WindowFn performs the actual window assignment.
      @SuppressWarnings("unchecked")
      final WindowingStrategy<T, ? extends BoundedWindow> windowingStrategy =
          (WindowingStrategy<T, ? extends BoundedWindow>)
              context.getOutput(transform).getWindowingStrategy();
      WindowFn<T, ? extends BoundedWindow> windowFn = windowingStrategy.getWindowFn();
      FlinkAssignWindows<T, ? extends BoundedWindow> assignWindowsFunction =
          new FlinkAssignWindows<>(windowFn);
      DataSet<WindowedValue<T>> resultDataSet = inputDataSet
          .flatMap(assignWindowsFunction)
          .name(context.getOutput(transform).getName())
          .returns(resultTypeInfo);
      context.setOutputDataSet(context.getOutput(transform), resultDataSet);
    }
  }
  /**
   * Translates {@link GroupByKey} as a combine whose {@link Concatenate}
   * CombineFn gathers all values for a key into a single {@code List}:
   * a pre-shuffle partial combine followed by a full group-reduce (or the
   * merging-window variants when the WindowFn merges windows).
   */
  private static class GroupByKeyTranslatorBatch<K, InputT>
      implements FlinkBatchPipelineTranslator.BatchTransformTranslator<GroupByKey<K, InputT>> {
    @Override
    public void translateNode(
        GroupByKey<K, InputT> transform,
        FlinkBatchTranslationContext context) {
      // for now, this is copied from the Combine.PerKey translater. Once we have the new runner API
      // we can replace GroupByKey by a Combine.PerKey with the Concatenate CombineFn
      DataSet<WindowedValue<KV<K, InputT>>> inputDataSet =
          context.getInputDataSet(context.getInput(transform));
      Combine.KeyedCombineFn<K, InputT, List<InputT>, List<InputT>> combineFn =
          new Concatenate<InputT>().asKeyedFn();
      KvCoder<K, InputT> inputCoder =
          (KvCoder<K, InputT>) context.getInput(transform).getCoder();
      // The accumulator is the concatenated List<InputT>; its coder is
      // derived from the input value coder.
      Coder<List<InputT>> accumulatorCoder;
      try {
        accumulatorCoder =
            combineFn.getAccumulatorCoder(
                context.getInput(transform).getPipeline().getCoderRegistry(),
                inputCoder.getKeyCoder(),
                inputCoder.getValueCoder());
      } catch (CannotProvideCoderException e) {
        throw new RuntimeException(e);
      }
      WindowingStrategy<?, ?> windowingStrategy =
          context.getInput(transform).getWindowingStrategy();
      TypeInformation<WindowedValue<KV<K, List<InputT>>>> partialReduceTypeInfo =
          new CoderTypeInformation<>(
              WindowedValue.getFullCoder(
                  KvCoder.of(inputCoder.getKeyCoder(), accumulatorCoder),
                  windowingStrategy.getWindowFn().windowCoder()));
      Grouping<WindowedValue<KV<K, InputT>>> inputGrouping =
          inputDataSet.groupBy(new KvKeySelector<InputT, K>(inputCoder.getKeyCoder()));
      FlinkPartialReduceFunction<K, InputT, List<InputT>, ?> partialReduceFunction;
      FlinkReduceFunction<K, List<InputT>, List<InputT>, ?> reduceFunction;
      // Merging WindowFns need the merging reduce variants; only
      // IntervalWindow-coded merging windows are supported.
      if (windowingStrategy.getWindowFn().isNonMerging()) {
        @SuppressWarnings("unchecked")
        WindowingStrategy<?, BoundedWindow> boundedStrategy =
            (WindowingStrategy<?, BoundedWindow>) windowingStrategy;
        partialReduceFunction = new FlinkPartialReduceFunction<>(
            combineFn,
            boundedStrategy,
            Collections.<PCollectionView<?>, WindowingStrategy<?, ?>>emptyMap(),
            context.getPipelineOptions());
        reduceFunction = new FlinkReduceFunction<>(
            combineFn,
            boundedStrategy,
            Collections.<PCollectionView<?>, WindowingStrategy<?, ?>>emptyMap(),
            context.getPipelineOptions());
      } else {
        if (!windowingStrategy.getWindowFn().windowCoder().equals(IntervalWindow.getCoder())) {
          throw new UnsupportedOperationException(
              "Merging WindowFn with windows other than IntervalWindow are not supported.");
        }
        @SuppressWarnings("unchecked")
        WindowingStrategy<?, IntervalWindow> intervalStrategy =
            (WindowingStrategy<?, IntervalWindow>) windowingStrategy;
        partialReduceFunction = new FlinkMergingPartialReduceFunction<>(
            combineFn,
            intervalStrategy,
            Collections.<PCollectionView<?>, WindowingStrategy<?, ?>>emptyMap(),
            context.getPipelineOptions());
        reduceFunction = new FlinkMergingReduceFunction<>(
            combineFn,
            intervalStrategy,
            Collections.<PCollectionView<?>, WindowingStrategy<?, ?>>emptyMap(),
            context.getPipelineOptions());
      }
      // Partially GroupReduce the values into the intermediate format AccumT (combine)
      GroupCombineOperator<
          WindowedValue<KV<K, InputT>>,
          WindowedValue<KV<K, List<InputT>>>> groupCombine =
          new GroupCombineOperator<>(
              inputGrouping,
              partialReduceTypeInfo,
              partialReduceFunction,
              "GroupCombine: " + transform.getName());
      Grouping<WindowedValue<KV<K, List<InputT>>>> intermediateGrouping =
          groupCombine.groupBy(new KvKeySelector<List<InputT>, K>(inputCoder.getKeyCoder()));
      // Fully reduce the values and create output format VO
      // (accumulator and output types coincide here — both List<InputT> —
      // so partialReduceTypeInfo doubles as the output type information)
      GroupReduceOperator<
          WindowedValue<KV<K, List<InputT>>>, WindowedValue<KV<K, List<InputT>>>> outputDataSet =
          new GroupReduceOperator<>(
              intermediateGrouping, partialReduceTypeInfo, reduceFunction, transform.getName());
      context.setOutputDataSet(context.getOutput(transform), outputDataSet);
    }
  }
private static class ReshuffleTranslatorBatch<K, InputT>
implements FlinkBatchPipelineTranslator.BatchTransformTranslator<Reshuffle<K, InputT>> {
@Override
public void translateNode(
Reshuffle<K, InputT> transform,
FlinkBatchTranslationContext context) {
DataSet<WindowedValue<KV<K, InputT>>> inputDataSet =
context.getInputDataSet(context.getInput(transform));
context.setOutputDataSet(context.getOutput(transform), inputDataSet.rebalance());
}
}
/**
* Combiner that combines {@code T}s into a single {@code List<T>} containing all inputs.
*
* <p>For internal use to translate {@link GroupByKey}. For a large {@link PCollection} this
* is expected to crash!
*
* <p>This is copied from the dataflow runner code.
*
* @param <T> the type of elements to concatenate.
*/
private static class Concatenate<T> extends Combine.CombineFn<T, List<T>, List<T>> {
@Override
public List<T> createAccumulator() {
return new ArrayList<>();
}
@Override
public List<T> addInput(List<T> accumulator, T input) {
accumulator.add(input);
return accumulator;
}
@Override
public List<T> mergeAccumulators(Iterable<List<T>> accumulators) {
List<T> result = createAccumulator();
for (List<T> accumulator : accumulators) {
result.addAll(accumulator);
}
return result;
}
@Override
public List<T> extractOutput(List<T> accumulator) {
return accumulator;
}
@Override
public Coder<List<T>> getAccumulatorCoder(CoderRegistry registry, Coder<T> inputCoder) {
return ListCoder.of(inputCoder);
}
@Override
public Coder<List<T>> getDefaultOutputCoder(CoderRegistry registry, Coder<T> inputCoder) {
return ListCoder.of(inputCoder);
}
}
  /**
   * Translates {@link Combine.PerKey}: for non-merging windows, a
   * pre-shuffle partial combine to the accumulator type followed by a full
   * group-reduce; for merging (IntervalWindow) windows, a single non-shuffle
   * reduce, since pre-combined elements would not be in their correct
   * windows for side-input access.
   */
  private static class CombinePerKeyTranslatorBatch<K, InputT, AccumT, OutputT>
      implements FlinkBatchPipelineTranslator.BatchTransformTranslator<
          Combine.PerKey<K, InputT, OutputT>> {
    @Override
    @SuppressWarnings("unchecked")
    public void translateNode(
        Combine.PerKey<K, InputT, OutputT> transform,
        FlinkBatchTranslationContext context) {
      DataSet<WindowedValue<KV<K, InputT>>> inputDataSet =
          context.getInputDataSet(context.getInput(transform));
      CombineFnBase.PerKeyCombineFn<K, InputT, AccumT, OutputT> combineFn =
          (CombineFnBase.PerKeyCombineFn<K, InputT, AccumT, OutputT>) transform.getFn();
      KvCoder<K, InputT> inputCoder =
          (KvCoder<K, InputT>) context.getInput(transform).getCoder();
      // Ask the CombineFn for the accumulator coder; failure is fatal.
      Coder<AccumT> accumulatorCoder;
      try {
        accumulatorCoder =
            combineFn.getAccumulatorCoder(
                context.getInput(transform).getPipeline().getCoderRegistry(),
                inputCoder.getKeyCoder(),
                inputCoder.getValueCoder());
      } catch (CannotProvideCoderException e) {
        throw new RuntimeException(e);
      }
      WindowingStrategy<?, ?> windowingStrategy =
          context.getInput(transform).getWindowingStrategy();
      TypeInformation<WindowedValue<KV<K, AccumT>>> partialReduceTypeInfo =
          context.getTypeInfo(
              KvCoder.of(inputCoder.getKeyCoder(), accumulatorCoder),
              windowingStrategy);
      Grouping<WindowedValue<KV<K, InputT>>> inputGrouping =
          inputDataSet.groupBy(new KvKeySelector<InputT, K>(inputCoder.getKeyCoder()));
      // construct a map from side input to WindowingStrategy so that
      // the OldDoFn runner can map main-input windows to side input windows
      Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputStrategies = new HashMap<>();
      for (PCollectionView<?> sideInput: transform.getSideInputs()) {
        sideInputStrategies.put(sideInput, sideInput.getWindowingStrategyInternal());
      }
      if (windowingStrategy.getWindowFn().isNonMerging()) {
        WindowingStrategy<?, BoundedWindow> boundedStrategy =
            (WindowingStrategy<?, BoundedWindow>) windowingStrategy;
        FlinkPartialReduceFunction<K, InputT, AccumT, ?> partialReduceFunction =
            new FlinkPartialReduceFunction<>(
                combineFn,
                boundedStrategy,
                sideInputStrategies,
                context.getPipelineOptions());
        FlinkReduceFunction<K, AccumT, OutputT, ?> reduceFunction =
            new FlinkReduceFunction<>(
                combineFn,
                boundedStrategy,
                sideInputStrategies,
                context.getPipelineOptions());
        // Partially GroupReduce the values into the intermediate format AccumT (combine)
        GroupCombineOperator<
            WindowedValue<KV<K, InputT>>,
            WindowedValue<KV<K, AccumT>>> groupCombine =
            new GroupCombineOperator<>(
                inputGrouping,
                partialReduceTypeInfo,
                partialReduceFunction,
                "GroupCombine: " + transform.getName());
        transformSideInputs(transform.getSideInputs(), groupCombine, context);
        TypeInformation<WindowedValue<KV<K, OutputT>>> reduceTypeInfo =
            context.getTypeInfo(context.getOutput(transform));
        Grouping<WindowedValue<KV<K, AccumT>>> intermediateGrouping =
            groupCombine.groupBy(new KvKeySelector<AccumT, K>(inputCoder.getKeyCoder()));
        // Fully reduce the values and create output format OutputT
        GroupReduceOperator<
            WindowedValue<KV<K, AccumT>>, WindowedValue<KV<K, OutputT>>> outputDataSet =
            new GroupReduceOperator<>(
                intermediateGrouping, reduceTypeInfo, reduceFunction, transform.getName());
        transformSideInputs(transform.getSideInputs(), outputDataSet, context);
        context.setOutputDataSet(context.getOutput(transform), outputDataSet);
      } else {
        if (!windowingStrategy.getWindowFn().windowCoder().equals(IntervalWindow.getCoder())) {
          throw new UnsupportedOperationException(
              "Merging WindowFn with windows other than IntervalWindow are not supported.");
        }
        // for merging windows we can't to a pre-shuffle combine step since
        // elements would not be in their correct windows for side-input access
        WindowingStrategy<?, IntervalWindow> intervalStrategy =
            (WindowingStrategy<?, IntervalWindow>) windowingStrategy;
        FlinkMergingNonShuffleReduceFunction<K, InputT, AccumT, OutputT, ?> reduceFunction =
            new FlinkMergingNonShuffleReduceFunction<>(
                combineFn,
                intervalStrategy,
                sideInputStrategies,
                context.getPipelineOptions());
        TypeInformation<WindowedValue<KV<K, OutputT>>> reduceTypeInfo =
            context.getTypeInfo(context.getOutput(transform));
        Grouping<WindowedValue<KV<K, InputT>>> grouping =
            inputDataSet.groupBy(new KvKeySelector<InputT, K>(inputCoder.getKeyCoder()));
        // Fully reduce the values and create output format OutputT
        GroupReduceOperator<
            WindowedValue<KV<K, InputT>>, WindowedValue<KV<K, OutputT>>> outputDataSet =
            new GroupReduceOperator<>(
                grouping, reduceTypeInfo, reduceFunction, transform.getName());
        transformSideInputs(transform.getSideInputs(), outputDataSet, context);
        context.setOutputDataSet(context.getOutput(transform), outputDataSet);
      }
    }
  }
private static void rejectSplittable(DoFn<?, ?> doFn) {
DoFnSignature signature = DoFnSignatures.getSignature(doFn.getClass());
if (signature.processElement().isSplittable()) {
throw new UnsupportedOperationException(
String.format(
"%s does not currently support splittable DoFn: %s",
FlinkRunner.class.getSimpleName(), doFn));
}
}
private static void rejectStateAndTimers(DoFn<?, ?> doFn) {
DoFnSignature signature = DoFnSignatures.getSignature(doFn.getClass());
if (signature.stateDeclarations().size() > 0) {
throw new UnsupportedOperationException(
String.format(
"Found %s annotations on %s, but %s cannot yet be used with state in the %s.",
DoFn.StateId.class.getSimpleName(),
doFn.getClass().getName(),
DoFn.class.getSimpleName(),
FlinkRunner.class.getSimpleName()));
}
if (signature.timerDeclarations().size() > 0) {
throw new UnsupportedOperationException(
String.format(
"Found %s annotations on %s, but %s cannot yet be used with timers in the %s.",
DoFn.TimerId.class.getSimpleName(),
doFn.getClass().getName(),
DoFn.class.getSimpleName(),
FlinkRunner.class.getSimpleName()));
}
}
  /**
   * Translates a single-output {@link ParDo.Bound} into a Flink
   * {@link MapPartitionOperator} wrapping the {@link DoFn} (splittable DoFns
   * and state/timers are rejected up front).
   */
  private static class ParDoBoundTranslatorBatch<InputT, OutputT>
      implements FlinkBatchPipelineTranslator.BatchTransformTranslator<
          ParDo.Bound<InputT, OutputT>> {
    @Override
    public void translateNode(
        ParDo.Bound<InputT, OutputT> transform,
        FlinkBatchTranslationContext context) {
      DoFn<InputT, OutputT> doFn = transform.getFn();
      // Fail fast on DoFn features this runner does not support yet.
      rejectSplittable(doFn);
      rejectStateAndTimers(doFn);
      DataSet<WindowedValue<InputT>> inputDataSet =
          context.getInputDataSet(context.getInput(transform));
      TypeInformation<WindowedValue<OutputT>> typeInformation =
          context.getTypeInfo(context.getOutput(transform));
      List<PCollectionView<?>> sideInputs = transform.getSideInputs();
      // construct a map from side input to WindowingStrategy so that
      // the OldDoFn runner can map main-input windows to side input windows
      Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputStrategies = new HashMap<>();
      for (PCollectionView<?> sideInput: sideInputs) {
        sideInputStrategies.put(sideInput, sideInput.getWindowingStrategyInternal());
      }
      FlinkDoFnFunction<InputT, OutputT> doFnWrapper =
          new FlinkDoFnFunction<>(
              doFn,
              context.getOutput(transform).getWindowingStrategy(),
              sideInputStrategies,
              context.getPipelineOptions());
      MapPartitionOperator<WindowedValue<InputT>, WindowedValue<OutputT>> outputDataSet =
          new MapPartitionOperator<>(
              inputDataSet,
              typeInformation,
              doFnWrapper,
              transform.getName());
      transformSideInputs(sideInputs, outputDataSet, context);
      context.setOutputDataSet(context.getOutput(transform), outputDataSet);
    }
  }
// Translates a multi-output ParDo: the DoFn runs once producing a tagged union
// stream, which is then pruned into one DataSet per output tag.
private static class ParDoBoundMultiTranslatorBatch<InputT, OutputT>
implements FlinkBatchPipelineTranslator.BatchTransformTranslator<
ParDo.BoundMulti<InputT, OutputT>> {
@Override
public void translateNode(
ParDo.BoundMulti<InputT, OutputT> transform,
FlinkBatchTranslationContext context) {
DoFn<InputT, OutputT> doFn = transform.getFn();
// Splittable DoFns and state/timers are rejected by this translation path.
rejectSplittable(doFn);
rejectStateAndTimers(doFn);
DataSet<WindowedValue<InputT>> inputDataSet =
context.getInputDataSet(context.getInput(transform));
List<TaggedPValue> outputs = context.getOutputs(transform);
Map<TupleTag<?>, Integer> outputMap = Maps.newHashMap();
// put the main output at index 0, FlinkMultiOutputDoFnFunction expects this
outputMap.put(transform.getMainOutputTag(), 0);
int count = 1;
// Assign the remaining tags consecutive union indices, skipping duplicates.
for (TaggedPValue taggedValue : outputs) {
if (!outputMap.containsKey(taggedValue.getTag())) {
outputMap.put(taggedValue.getTag(), count++);
}
}
// assume that the windowing strategy is the same for all outputs
WindowingStrategy<?, ?> windowingStrategy = null;
// collect all output Coders and create a UnionCoder for our tagged outputs
List<Coder<?>> outputCoders = Lists.newArrayList();
for (TaggedPValue taggedValue : outputs) {
checkState(
taggedValue.getValue() instanceof PCollection,
"Within ParDo, got a non-PCollection output %s of type %s",
taggedValue.getValue(),
taggedValue.getValue().getClass().getSimpleName());
PCollection<?> coll = (PCollection<?>) taggedValue.getValue();
outputCoders.add(coll.getCoder());
windowingStrategy = coll.getWindowingStrategy();
}
if (windowingStrategy == null) {
throw new IllegalStateException("No outputs defined.");
}
UnionCoder unionCoder = UnionCoder.of(outputCoders);
TypeInformation<WindowedValue<RawUnionValue>> typeInformation =
new CoderTypeInformation<>(
WindowedValue.getFullCoder(
unionCoder,
windowingStrategy.getWindowFn().windowCoder()));
List<PCollectionView<?>> sideInputs = transform.getSideInputs();
// construct a map from side input to WindowingStrategy so that
// the OldDoFn runner can map main-input windows to side input windows
Map<PCollectionView<?>, WindowingStrategy<?, ?>> sideInputStrategies = new HashMap<>();
for (PCollectionView<?> sideInput: sideInputs) {
sideInputStrategies.put(sideInput, sideInput.getWindowingStrategyInternal());
}
// NOTE(review): raw-type construction with suppressed warning — presumably the
// wrapper's type arguments cannot be expressed at this call site; confirm before
// changing to the diamond form.
@SuppressWarnings("unchecked")
FlinkMultiOutputDoFnFunction<InputT, OutputT> doFnWrapper =
new FlinkMultiOutputDoFnFunction(
doFn,
windowingStrategy,
sideInputStrategies,
context.getPipelineOptions(),
outputMap);
MapPartitionOperator<WindowedValue<InputT>, WindowedValue<RawUnionValue>> taggedDataSet =
new MapPartitionOperator<>(
inputDataSet,
typeInformation,
doFnWrapper,
transform.getName());
// Side inputs are shipped to workers as Flink broadcast sets.
transformSideInputs(sideInputs, taggedDataSet, context);
// Extract each tagged output from the union stream into its own DataSet.
for (TaggedPValue output : outputs) {
pruneOutput(
taggedDataSet,
context,
outputMap.get(output.getTag()),
(PCollection) output.getValue());
}
}
// Filters the tagged union DataSet down to the values carrying the given union
// index and registers the result as the translation of the given PCollection.
private <T> void pruneOutput(
MapPartitionOperator<WindowedValue<InputT>, WindowedValue<RawUnionValue>> taggedDataSet,
FlinkBatchTranslationContext context,
int integerTag,
PCollection<T> collection) {
TypeInformation<WindowedValue<T>> outputType = context.getTypeInfo(collection);
FlinkMultiOutputPruningFunction<T> pruningFunction =
new FlinkMultiOutputPruningFunction<>(integerTag);
FlatMapOperator<WindowedValue<RawUnionValue>, WindowedValue<T>> pruningOperator =
new FlatMapOperator<>(
taggedDataSet,
outputType,
pruningFunction,
collection.getName());
context.setOutputDataSet(collection, pruningOperator);
}
}
// Translates Flatten by unioning the input DataSets. With no inputs, a dummy
// one-element source whose flatMap emits nothing stands in for an empty source.
private static class FlattenPCollectionTranslatorBatch<T>
implements FlinkBatchPipelineTranslator.BatchTransformTranslator<
Flatten.FlattenPCollectionList<T>> {
@Override
@SuppressWarnings("unchecked")
public void translateNode(
Flatten.FlattenPCollectionList<T> transform,
FlinkBatchTranslationContext context) {
List<TaggedPValue> allInputs = context.getInputs(transform);
DataSet<WindowedValue<T>> result = null;
if (allInputs.isEmpty()) {
// create an empty dummy source to satisfy downstream operations
// we cannot create an empty source in Flink, therefore we have to
// add the flatMap that simply never forwards the single element
DataSource<String> dummySource =
context.getExecutionEnvironment().fromElements("dummy");
result = dummySource.flatMap(new FlatMapFunction<String, WindowedValue<T>>() {
@Override
public void flatMap(String s, Collector<WindowedValue<T>> collector) throws Exception {
// never return anything
}
}).returns(
new CoderTypeInformation<>(
WindowedValue.getFullCoder(
(Coder<T>) VoidCoder.of(),
GlobalWindow.Coder.INSTANCE)));
} else {
// Non-empty case: fold all input DataSets together with union.
for (TaggedPValue taggedPc : allInputs) {
checkArgument(
taggedPc.getValue() instanceof PCollection,
"Got non-PCollection input to flatten: %s of type %s",
taggedPc.getValue(),
taggedPc.getValue().getClass().getSimpleName());
PCollection<T> collection = (PCollection<T>) taggedPc.getValue();
DataSet<WindowedValue<T>> current = context.getInputDataSet(collection);
if (result == null) {
result = current;
} else {
result = result.union(current);
}
}
}
// insert a dummy filter, there seems to be a bug in Flink
// that produces duplicate elements after the union in some cases
// if we don't
result = result.filter(new FilterFunction<WindowedValue<T>>() {
@Override
public boolean filter(WindowedValue<T> tWindowedValue) throws Exception {
// Accept everything: the filter exists only to break the union pipeline.
return true;
}
}).name("UnionFixFilter");
context.setOutputDataSet(context.getOutput(transform), result);
}
}
// Translates View.CreatePCollectionView by recording the input DataSet as the
// backing data of the created side-input view; no Flink operator is added.
private static class CreatePCollectionViewTranslatorBatch<ElemT, ViewT>
implements FlinkBatchPipelineTranslator.BatchTransformTranslator<
View.CreatePCollectionView<ElemT, ViewT>> {
@Override
public void translateNode(
View.CreatePCollectionView<ElemT, ViewT> transform,
FlinkBatchTranslationContext context) {
PCollectionView<ViewT> view = transform.getView();
DataSet<WindowedValue<ElemT>> elements =
context.getInputDataSet(context.getInput(transform));
context.setSideInputDataSet(view, elements);
}
}
// Wires every side input's Flink DataSet into the operator as a broadcast set,
// keyed by the side input's internal tag id.
private static void transformSideInputs(
List<PCollectionView<?>> sideInputs,
SingleInputUdfOperator<?, ?, ?> outputDataSet,
FlinkBatchTranslationContext context) {
for (PCollectionView<?> view : sideInputs) {
outputDataSet.withBroadcastSet(
context.getSideInputDataSet(view), view.getTagInternal().getId());
}
}
// Static utility holder for the translator classes; never instantiated.
private FlinkBatchTransformTranslators() {}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.io.compress;
import java.io.DataInput;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.AbstractSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ParameterizedClass;
import org.apache.cassandra.db.TypeSizes;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.IVersionedSerializer;
import org.apache.cassandra.io.util.DataOutputPlus;
/**
 * Compression settings for a table's SSTables: which {@link ICompressor} to use, the
 * uncompressed chunk length in bytes, and the probability ({@code crc_check_chance})
 * with which block checksums are verified on read.
 */
public class CompressionParameters
{
    public final static int DEFAULT_CHUNK_LENGTH = 65536;
    public final static double DEFAULT_CRC_CHECK_CHANCE = 1.0;
    public final static IVersionedSerializer<CompressionParameters> serializer = new Serializer();

    public static final String SSTABLE_COMPRESSION = "sstable_compression";
    public static final String CHUNK_LENGTH_KB = "chunk_length_kb";
    public static final String CRC_CHECK_CHANCE = "crc_check_chance";
    // Options recognized for every compressor, in addition to compressor-specific ones.
    public static final Set<String> GLOBAL_OPTIONS = ImmutableSet.of(CRC_CHECK_CHANCE);

    public final ICompressor sstableCompressor;
    private final Integer chunkLength; // null means "use DEFAULT_CHUNK_LENGTH"
    private volatile double crcCheckChance;
    public final Map<String, String> otherOptions; // Unrecognized options, can be used by the compressor
    // Live table metadata, if known; crc check chance reads/writes are forwarded to it
    // so runtime updates stay consistent across copies of these parameters.
    private CFMetaData liveMetadata;

    /**
     * Builds validated parameters from a raw option map (as found in a table schema).
     *
     * @param opts raw schema options; may contain compressor-specific entries
     * @return validated compression parameters
     * @throws ConfigurationException if an option is unknown or has an invalid value
     */
    public static CompressionParameters create(Map<? extends CharSequence, ? extends CharSequence> opts) throws ConfigurationException
    {
        Map<String, String> options = copyOptions(opts);
        String sstableCompressionClass = options.get(SSTABLE_COMPRESSION);
        String chunkLength = options.get(CHUNK_LENGTH_KB);
        // Remove the generic options; what remains is handed to the compressor.
        options.remove(SSTABLE_COMPRESSION);
        options.remove(CHUNK_LENGTH_KB);
        CompressionParameters cp = new CompressionParameters(sstableCompressionClass, parseChunkLength(chunkLength), options);
        cp.validate();
        return cp;
    }

    public CompressionParameters(String sstableCompressorClass, Integer chunkLength, Map<String, String> otherOptions) throws ConfigurationException
    {
        this(createCompressor(parseCompressorClass(sstableCompressorClass), otherOptions), chunkLength, otherOptions);
    }

    public CompressionParameters(ICompressor sstableCompressor)
    {
        // can't try/catch as first statement in the constructor, thus repeating constructor code here.
        this.sstableCompressor = sstableCompressor;
        chunkLength = null;
        otherOptions = Collections.emptyMap();
        crcCheckChance = DEFAULT_CRC_CHECK_CHANCE;
    }

    public CompressionParameters(ICompressor sstableCompressor, Integer chunkLength, Map<String, String> otherOptions) throws ConfigurationException
    {
        this.sstableCompressor = sstableCompressor;
        this.chunkLength = chunkLength;
        this.otherOptions = otherOptions;
        String chance = otherOptions.get(CRC_CHECK_CHANCE);
        this.crcCheckChance = (chance == null) ? DEFAULT_CRC_CHECK_CHANCE : parseCrcCheckChance(chance);
    }

    /** Returns a copy sharing the compressor but with an independent option map. */
    public CompressionParameters copy()
    {
        return new CompressionParameters(sstableCompressor, chunkLength, new HashMap<>(otherOptions));
    }

    public void setLiveMetadata(final CFMetaData liveMetadata)
    {
        if (liveMetadata == null)
            return;
        this.liveMetadata = liveMetadata;
    }

    /**
     * Updates the crc check chance, propagating the change to the live metadata's
     * parameters when this instance is a detached copy.
     */
    public void setCrcCheckChance(double crcCheckChance) throws ConfigurationException
    {
        validateCrcCheckChance(crcCheckChance);
        this.crcCheckChance = crcCheckChance;
        // Forward to the canonical (live) parameters, guarding against self-recursion.
        if (liveMetadata != null && this != liveMetadata.compressionParameters)
            liveMetadata.compressionParameters.setCrcCheckChance(crcCheckChance);
    }

    public double getCrcCheckChance()
    {
        // Prefer the live metadata's value so runtime updates are always observed.
        return liveMetadata == null ? this.crcCheckChance : liveMetadata.compressionParameters.crcCheckChance;
    }

    private static double parseCrcCheckChance(String crcCheckChance) throws ConfigurationException
    {
        try
        {
            double chance = Double.parseDouble(crcCheckChance);
            validateCrcCheckChance(chance);
            return chance;
        }
        catch (NumberFormatException e)
        {
            throw new ConfigurationException("crc_check_chance should be a double");
        }
    }

    private static void validateCrcCheckChance(double crcCheckChance) throws ConfigurationException
    {
        if (crcCheckChance < 0.0d || crcCheckChance > 1.0d)
            throw new ConfigurationException("crc_check_chance should be between 0.0 and 1.0");
    }

    /** Chunk length in bytes, falling back to {@link #DEFAULT_CHUNK_LENGTH} when unset. */
    public int chunkLength()
    {
        return chunkLength == null ? DEFAULT_CHUNK_LENGTH : chunkLength;
    }

    private static Class<?> parseCompressorClass(String className) throws ConfigurationException
    {
        if (className == null || className.isEmpty())
            return null;
        // Unqualified names resolve within this package.
        className = className.contains(".") ? className : "org.apache.cassandra.io.compress." + className;
        try
        {
            return Class.forName(className);
        }
        catch (Exception e)
        {
            throw new ConfigurationException("Could not create Compression for type " + className, e);
        }
    }

    /**
     * Instantiates a compressor via its static {@code create(Map)} factory and rejects
     * any option that neither the compressor nor the global option set recognizes.
     */
    private static ICompressor createCompressor(Class<?> compressorClass, Map<String, String> compressionOptions) throws ConfigurationException
    {
        if (compressorClass == null)
        {
            if (!compressionOptions.isEmpty())
                throw new ConfigurationException("Unknown compression options (" + compressionOptions.keySet() + ") since no compression class found");
            return null;
        }
        try
        {
            Method method = compressorClass.getMethod("create", Map.class);
            ICompressor compressor = (ICompressor)method.invoke(null, compressionOptions);
            // Check for unknown options
            AbstractSet<String> supportedOpts = Sets.union(compressor.supportedOptions(), GLOBAL_OPTIONS);
            for (String provided : compressionOptions.keySet())
                if (!supportedOpts.contains(provided))
                    throw new ConfigurationException("Unknown compression options " + provided);
            return compressor;
        }
        catch (NoSuchMethodException e)
        {
            throw new ConfigurationException("create method not found", e);
        }
        catch (SecurityException e)
        {
            // Fixed typo in user-facing message ("forbiden" -> "forbidden").
            throw new ConfigurationException("Access forbidden", e);
        }
        catch (IllegalAccessException e)
        {
            throw new ConfigurationException("Cannot access method create in " + compressorClass.getName(), e);
        }
        catch (InvocationTargetException e)
        {
            Throwable cause = e.getCause();
            throw new ConfigurationException(String.format("%s.create() threw an error: %s",
                                             compressorClass.getSimpleName(),
                                             cause == null ? e.getClass().getName() + " " + e.getMessage() : cause.getClass().getName() + " " + cause.getMessage()),
                                             e);
        }
        catch (ExceptionInInitializerError e)
        {
            throw new ConfigurationException("Cannot initialize class " + compressorClass.getName());
        }
    }

    public static ICompressor createCompressor(ParameterizedClass compression) throws ConfigurationException
    {
        return createCompressor(parseCompressorClass(compression.class_name), copyOptions(compression.parameters));
    }

    // Copies the raw (CharSequence-keyed) map into a plain mutable String map.
    private static Map<String, String> copyOptions(Map<? extends CharSequence, ? extends CharSequence> co)
    {
        if (co == null || co.isEmpty())
            return Collections.<String, String>emptyMap();

        Map<String, String> compressionOptions = new HashMap<String, String>();
        for (Map.Entry<? extends CharSequence, ? extends CharSequence> entry : co.entrySet())
        {
            compressionOptions.put(entry.getKey().toString(), entry.getValue().toString());
        }
        return compressionOptions;
    }

    /**
     * Parse the chunk length (in KB) and returns it as bytes.
     *
     * @param chLengthKB the length of the chunk to parse
     * @return the chunk length in bytes, or null if the input is null
     * @throws ConfigurationException if the chunk size is too large or not a number
     */
    public static Integer parseChunkLength(String chLengthKB) throws ConfigurationException
    {
        if (chLengthKB == null)
            return null;

        try
        {
            int parsed = Integer.parseInt(chLengthKB);
            // Guard the KB -> bytes conversion against int overflow.
            if (parsed > Integer.MAX_VALUE / 1024)
                throw new ConfigurationException("Value of " + CHUNK_LENGTH_KB + " is too large (" + parsed + ")");
            return 1024 * parsed;
        }
        catch (NumberFormatException e)
        {
            throw new ConfigurationException("Invalid value for " + CHUNK_LENGTH_KB, e);
        }
    }

    // chunkLength must be a power of 2 because we assume so when
    // computing the chunk number from an uncompressed file offset (see
    // CompressedRandomAccessReader.decompressChunk())
    public void validate() throws ConfigurationException
    {
        // if chunk length was not set (chunkLength == null), this is fine, default will be used
        if (chunkLength != null)
        {
            if (chunkLength <= 0)
                throw new ConfigurationException("Invalid negative or null " + CHUNK_LENGTH_KB);

            // A positive int is a power of 2 iff exactly one bit is set.
            if (Integer.bitCount(chunkLength) != 1)
                throw new ConfigurationException(CHUNK_LENGTH_KB + " must be a power of 2");
        }
        validateCrcCheckChance(crcCheckChance);
    }

    /** Renders these parameters back into the schema option-map form. */
    public Map<String, String> asThriftOptions()
    {
        Map<String, String> options = new HashMap<String, String>(otherOptions);
        if (sstableCompressor == null)
            return options;

        options.put(SSTABLE_COMPRESSION, sstableCompressor.getClass().getName());
        if (chunkLength != null)
            options.put(CHUNK_LENGTH_KB, chunkLengthInKB());
        return options;
    }

    private String chunkLengthInKB()
    {
        return String.valueOf(chunkLength() / 1024);
    }

    @Override
    public boolean equals(Object obj)
    {
        if (obj == this)
        {
            return true;
        }
        else if (obj == null || obj.getClass() != getClass())
        {
            return false;
        }

        CompressionParameters cp = (CompressionParameters) obj;
        return new EqualsBuilder()
            .append(sstableCompressor, cp.sstableCompressor)
            .append(chunkLength, cp.chunkLength)
            .append(otherOptions, cp.otherOptions)
            .isEquals();
    }

    @Override
    public int hashCode()
    {
        return new HashCodeBuilder(29, 1597)
            .append(sstableCompressor)
            .append(chunkLength)
            .append(otherOptions)
            .toHashCode();
    }

    // Wire format: compressor simple name, option count, key/value pairs, chunk length.
    // NOTE(review): serialize assumes sstableCompressor is non-null — confirm callers
    // never serialize parameters with compression disabled.
    static class Serializer implements IVersionedSerializer<CompressionParameters>
    {
        public void serialize(CompressionParameters parameters, DataOutputPlus out, int version) throws IOException
        {
            out.writeUTF(parameters.sstableCompressor.getClass().getSimpleName());
            out.writeInt(parameters.otherOptions.size());
            for (Map.Entry<String, String> entry : parameters.otherOptions.entrySet())
            {
                out.writeUTF(entry.getKey());
                out.writeUTF(entry.getValue());
            }
            out.writeInt(parameters.chunkLength());
        }

        public CompressionParameters deserialize(DataInput in, int version) throws IOException
        {
            String compressorName = in.readUTF();
            int optionCount = in.readInt();
            Map<String, String> options = new HashMap<String, String>();
            for (int i = 0; i < optionCount; ++i)
            {
                String key = in.readUTF();
                String value = in.readUTF();
                options.put(key, value);
            }
            int chunkLength = in.readInt();
            CompressionParameters parameters;
            try
            {
                parameters = new CompressionParameters(compressorName, chunkLength, options);
            }
            catch (ConfigurationException e)
            {
                throw new RuntimeException("Cannot create CompressionParameters for parameters", e);
            }
            return parameters;
        }

        public long serializedSize(CompressionParameters parameters, int version)
        {
            long size = TypeSizes.NATIVE.sizeof(parameters.sstableCompressor.getClass().getSimpleName());
            size += TypeSizes.NATIVE.sizeof(parameters.otherOptions.size());
            for (Map.Entry<String, String> entry : parameters.otherOptions.entrySet())
            {
                size += TypeSizes.NATIVE.sizeof(entry.getKey());
                size += TypeSizes.NATIVE.sizeof(entry.getValue());
            }
            size += TypeSizes.NATIVE.sizeof(parameters.chunkLength());
            return size;
        }
    }
}
| |
/**
* Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.product.fxopt;
import java.io.Serializable;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.ImmutableDefaults;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.opengamma.strata.basics.ReferenceData;
import com.opengamma.strata.basics.currency.AdjustablePayment;
import com.opengamma.strata.product.ProductTrade;
import com.opengamma.strata.product.ResolvableTrade;
import com.opengamma.strata.product.TradeInfo;
/**
* A trade in a vanilla FX option.
* <p>
* An Over-The-Counter (OTC) trade in an {@link FxVanillaOption}.
* <p>
* An FX option is a financial instrument that provides an option based on the future value of
* a foreign exchange. The option is European, exercised only on the exercise date.
*/
@BeanDefinition
public final class FxVanillaOptionTrade
implements ProductTrade, ResolvableTrade<ResolvedFxVanillaOptionTrade>, ImmutableBean, Serializable {
/**
* The additional trade information, defaulted to an empty instance.
* <p>
* This allows additional information to be attached to the trade.
*/
@PropertyDefinition(overrideGet = true)
private final TradeInfo info;
/**
* The FX option product that was agreed when the trade occurred.
* <p>
* The product captures the contracted financial details of the trade.
*/
@PropertyDefinition(validate = "notNull", overrideGet = true)
private final FxVanillaOption product;
/**
* The premium of the FX option.
* <p>
* The premium sign should be compatible with the product Long/Short flag.
* This means that the premium is negative for long and positive for short.
*/
@PropertyDefinition(validate = "notNull")
private final AdjustablePayment premium;
//-------------------------------------------------------------------------
// Builder defaults: an absent TradeInfo becomes the empty instance rather than null.
@ImmutableDefaults
private static void applyDefaults(Builder builder) {
builder.info = TradeInfo.empty();
}
// Resolves the product and premium against reference data to produce the
// fully-resolved trade form used for pricing.
@Override
public ResolvedFxVanillaOptionTrade resolve(ReferenceData refData) {
return ResolvedFxVanillaOptionTrade.builder()
.info(info)
.product(product.resolve(refData))
.premium(premium.resolve(refData))
.build();
}
// NOTE: everything between the AUTOGENERATED markers is produced by the
// Joda-Beans code generator — regenerate rather than hand-editing.
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code FxVanillaOptionTrade}.
* @return the meta-bean, not null
*/
public static FxVanillaOptionTrade.Meta meta() {
return FxVanillaOptionTrade.Meta.INSTANCE;
}
// Registers the meta-bean with the Joda-Beans registry when the class loads.
static {
JodaBeanUtils.registerMetaBean(FxVanillaOptionTrade.Meta.INSTANCE);
}
/**
* The serialization version id.
*/
private static final long serialVersionUID = 1L;
/**
* Returns a builder used to create an instance of the bean.
* @return the builder, not null
*/
public static FxVanillaOptionTrade.Builder builder() {
return new FxVanillaOptionTrade.Builder();
}
// Instances are created via the builder; the constructor validates required properties.
private FxVanillaOptionTrade(
TradeInfo info,
FxVanillaOption product,
AdjustablePayment premium) {
JodaBeanUtils.notNull(product, "product");
JodaBeanUtils.notNull(premium, "premium");
this.info = info;
this.product = product;
this.premium = premium;
}
@Override
public FxVanillaOptionTrade.Meta metaBean() {
return FxVanillaOptionTrade.Meta.INSTANCE;
}
@Override
public <R> Property<R> property(String propertyName) {
return metaBean().<R>metaProperty(propertyName).createProperty(this);
}
@Override
public Set<String> propertyNames() {
return metaBean().metaPropertyMap().keySet();
}
//-----------------------------------------------------------------------
/**
* Gets the additional trade information, defaulted to an empty instance.
* <p>
* This allows additional information to be attached to the trade.
* @return the value of the property
*/
@Override
public TradeInfo getInfo() {
return info;
}
//-----------------------------------------------------------------------
/**
* Gets the FX option product that was agreed when the trade occurred.
* <p>
* The product captures the contracted financial details of the trade.
* @return the value of the property, not null
*/
@Override
public FxVanillaOption getProduct() {
return product;
}
//-----------------------------------------------------------------------
/**
* Gets the premium of the FX option.
* <p>
* The premium sign should be compatible with the product Long/Short flag.
* This means that the premium is negative for long and positive for short.
* @return the value of the property, not null
*/
public AdjustablePayment getPremium() {
return premium;
}
//-----------------------------------------------------------------------
/**
* Returns a builder that allows this bean to be mutated.
* @return the mutable builder, not null
*/
public Builder toBuilder() {
return new Builder(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
FxVanillaOptionTrade other = (FxVanillaOptionTrade) obj;
return JodaBeanUtils.equal(info, other.info) &&
JodaBeanUtils.equal(product, other.product) &&
JodaBeanUtils.equal(premium, other.premium);
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(info);
hash = hash * 31 + JodaBeanUtils.hashCode(product);
hash = hash * 31 + JodaBeanUtils.hashCode(premium);
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(128);
buf.append("FxVanillaOptionTrade{");
buf.append("info").append('=').append(info).append(',').append(' ');
buf.append("product").append('=').append(product).append(',').append(' ');
buf.append("premium").append('=').append(JodaBeanUtils.toString(premium));
buf.append('}');
return buf.toString();
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code FxVanillaOptionTrade}.
*/
public static final class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code info} property.
*/
private final MetaProperty<TradeInfo> info = DirectMetaProperty.ofImmutable(
this, "info", FxVanillaOptionTrade.class, TradeInfo.class);
/**
* The meta-property for the {@code product} property.
*/
private final MetaProperty<FxVanillaOption> product = DirectMetaProperty.ofImmutable(
this, "product", FxVanillaOptionTrade.class, FxVanillaOption.class);
/**
* The meta-property for the {@code premium} property.
*/
private final MetaProperty<AdjustablePayment> premium = DirectMetaProperty.ofImmutable(
this, "premium", FxVanillaOptionTrade.class, AdjustablePayment.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"info",
"product",
"premium");
/**
* Restricted constructor.
*/
private Meta() {
}
// Property lookup is dispatched on the precomputed name hash codes.
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case 3237038:  // info
return info;
case -309474065:  // product
return product;
case -318452137:  // premium
return premium;
}
return super.metaPropertyGet(propertyName);
}
@Override
public FxVanillaOptionTrade.Builder builder() {
return new FxVanillaOptionTrade.Builder();
}
@Override
public Class<? extends FxVanillaOptionTrade> beanType() {
return FxVanillaOptionTrade.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code info} property.
* @return the meta-property, not null
*/
public MetaProperty<TradeInfo> info() {
return info;
}
/**
* The meta-property for the {@code product} property.
* @return the meta-property, not null
*/
public MetaProperty<FxVanillaOption> product() {
return product;
}
/**
* The meta-property for the {@code premium} property.
* @return the meta-property, not null
*/
public MetaProperty<AdjustablePayment> premium() {
return premium;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case 3237038:  // info
return ((FxVanillaOptionTrade) bean).getInfo();
case -309474065:  // product
return ((FxVanillaOptionTrade) bean).getProduct();
case -318452137:  // premium
return ((FxVanillaOptionTrade) bean).getPremium();
}
return super.propertyGet(bean, propertyName, quiet);
}
// Immutable bean: writes are rejected (or silently ignored when quiet is set).
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
metaProperty(propertyName);
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
}
}
//-----------------------------------------------------------------------
/**
* The bean-builder for {@code FxVanillaOptionTrade}.
*/
public static final class Builder extends DirectFieldsBeanBuilder<FxVanillaOptionTrade> {
private TradeInfo info;
private FxVanillaOption product;
private AdjustablePayment premium;
/**
* Restricted constructor.
*/
private Builder() {
applyDefaults(this);
}
/**
* Restricted copy constructor.
* @param beanToCopy the bean to copy from, not null
*/
private Builder(FxVanillaOptionTrade beanToCopy) {
this.info = beanToCopy.getInfo();
this.product = beanToCopy.getProduct();
this.premium = beanToCopy.getPremium();
}
//-----------------------------------------------------------------------
@Override
public Object get(String propertyName) {
switch (propertyName.hashCode()) {
case 3237038:  // info
return info;
case -309474065:  // product
return product;
case -318452137:  // premium
return premium;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
}
@Override
public Builder set(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case 3237038:  // info
this.info = (TradeInfo) newValue;
break;
case -309474065:  // product
this.product = (FxVanillaOption) newValue;
break;
case -318452137:  // premium
this.premium = (AdjustablePayment) newValue;
break;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
return this;
}
@Override
public Builder set(MetaProperty<?> property, Object value) {
super.set(property, value);
return this;
}
@Override
public Builder setString(String propertyName, String value) {
setString(meta().metaProperty(propertyName), value);
return this;
}
@Override
public Builder setString(MetaProperty<?> property, String value) {
super.setString(property, value);
return this;
}
@Override
public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
super.setAll(propertyValueMap);
return this;
}
@Override
public FxVanillaOptionTrade build() {
return new FxVanillaOptionTrade(
info,
product,
premium);
}
//-----------------------------------------------------------------------
/**
* Sets the additional trade information, defaulted to an empty instance.
* <p>
* This allows additional information to be attached to the trade.
* @param info the new value
* @return this, for chaining, not null
*/
public Builder info(TradeInfo info) {
this.info = info;
return this;
}
/**
* Sets the FX option product that was agreed when the trade occurred.
* <p>
* The product captures the contracted financial details of the trade.
* @param product the new value, not null
* @return this, for chaining, not null
*/
public Builder product(FxVanillaOption product) {
JodaBeanUtils.notNull(product, "product");
this.product = product;
return this;
}
/**
* Sets the premium of the FX option.
* <p>
* The premium sign should be compatible with the product Long/Short flag.
* This means that the premium is negative for long and positive for short.
* @param premium the new value, not null
* @return this, for chaining, not null
*/
public Builder premium(AdjustablePayment premium) {
JodaBeanUtils.notNull(premium, "premium");
this.premium = premium;
return this;
}
//-----------------------------------------------------------------------
@Override
public String toString() {
StringBuilder buf = new StringBuilder(128);
buf.append("FxVanillaOptionTrade.Builder{");
buf.append("info").append('=').append(JodaBeanUtils.toString(info)).append(',').append(' ');
buf.append("product").append('=').append(JodaBeanUtils.toString(product)).append(',').append(' ');
buf.append("premium").append('=').append(JodaBeanUtils.toString(premium));
buf.append('}');
return buf.toString();
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
package hex.tree;
import hex.Distribution;
import water.MRTask;
import water.H2O.H2OCountedCompleter;
import water.fvec.C0DChunk;
import water.fvec.Chunk;
import water.util.AtomicUtils;
/** Score and Build Histogram
*
* <p>Fuse 2 conceptual passes into one:
*
* <dl>
*
* <dt>Pass 1:</dt><dd>Score a prior partially-built tree model, and make new Node assignments to
* every row. This involves pulling out the current assigned DecidedNode,
* "scoring" the row against that Node's decision criteria, and assigning the
* row to a new child UndecidedNode (and giving it an improved prediction).</dd>
*
* <dt>Pass 2:</dt><dd>Build new summary DHistograms on the new child UndecidedNodes
* every row got assigned into. Collect counts, mean, variance, min,
* max per bin, per column.</dd>
* </dl>
*
* <p>The result is a set of DHistogram arrays; one DHistogram array for each
* unique 'leaf' in the tree being histogramed in parallel. These have node
* ID's (nids) from 'leaf' to 'tree._len'. Each DHistogram array is for all
* the columns in that 'leaf'.
*
* <p>The other result is a prediction "score" for the whole dataset, based on
* the previous passes' DHistograms.
*/
public class ScoreBuildHistogram extends MRTask<ScoreBuildHistogram> {
final int _k; // Which tree (e.g. class index when several trees are built per iteration -- TODO confirm against caller)
final int _ncols;// Active feature columns
final int _nbins;// Numerical columns: Number of bins in each histogram
final int _nbins_cats;// Categorical columns: Number of bins in each histogram
final DTree _tree; // Read-only, shared (except at the histograms in the Nodes)
final int _leaf; // Number of active leaves (per tree); node-ids below this are already decided
// Histograms for every tree, split & active column; first index is the
// tree-relative node id (nid - _leaf), second is the column number.
final DHistogram _hcs[/*tree-relative node-id*/][/*column*/];
final boolean _subset; // True if working a subset of cols
final Distribution.Family _family; // Loss-distribution family of the model being built
/**
 * Builds a score-and-histogram pass for one tree.
 *
 * @param cc         completer to notify when the MRTask finishes
 * @param k          which tree is being worked on
 * @param ncols      number of active feature columns
 * @param nbins      bins per histogram for numerical columns
 * @param nbins_cats bins per histogram for categorical columns
 * @param tree       the (read-only) tree being extended
 * @param leaf       number of already-decided leaves in {@code tree}
 * @param hcs        shared histogram storage, indexed [nid - leaf][column]
 * @param subset     true when only a subset of the columns is scored
 * @param family     loss-distribution family of the model
 */
public ScoreBuildHistogram(H2OCountedCompleter cc, int k, int ncols, int nbins, int nbins_cats, DTree tree, int leaf, DHistogram hcs[][], boolean subset, Distribution.Family family) {
  super(cc);
  _k          = k;
  _ncols      = ncols;
  _nbins      = nbins;
  _nbins_cats = nbins_cats;
  _tree       = tree;
  _leaf       = leaf;
  _hcs        = hcs;
  _subset     = subset;
  _family     = family;
  _modifiesInputs = true; // this pass rewrites the nids column in-place
}
/** Sentinel NID for a row whose placement in the tree is final. */
static public final int DECIDED_ROW = -1;
/** Base sentinel NID for rows sampled out of the bag for this tree. */
static public final int OUT_OF_BAG = -2;
/** True when {@code nid} carries the out-of-bag encoding (OUT_OF_BAG or below). */
static public boolean isOOBRow(int nid) { return nid <= OUT_OF_BAG; }
/** True when {@code nid} is the decided-row sentinel. */
static public boolean isDecidedRow(int nid) { return nid == DECIDED_ROW; }
/** Decode an OOB-encoded NID back into a real node id; inverse of {@link #nid2Oob}. */
static public int oob2Nid(int oobNid) { return OUT_OF_BAG - oobNid; }
/** Encode a real node id as out-of-bag; inverse of {@link #oob2Nid}. */
static public int nid2Oob(int nid) { return OUT_OF_BAG - nid; }
// Once-per-node shared init
/** Once-per-node init: restore tree internals after the wire trip and
 *  eagerly initialize every histogram this pass will fill. */
@Override public void setupLocal( ) {
  // Re-create the transient tree fields that did not survive serialization.
  _tree.init_tree();
  // Walk every still-undecided node and prime its per-column histograms.
  for( int nid = _leaf; nid < _tree._len; nid++ ) {
    DTree.UndecidedNode node = _tree.undecided(nid);
    DHistogram[] hs = _hcs[nid - _leaf];
    int[] cols = node._scoreCols;
    if( cols == null ) {            // Tracking all columns
      for( int c = 0; c < _ncols; c++ )
        if( hs[c] != null )         // Only the columns actually tracked
          hs[c].init();
    } else {                        // Only a chosen subset of columns
      for( int c : cols )
        hs[c].init();
    }
  }
}
/** Per-chunk work: re-score rows into their new nodes (pass 1), then
 *  accumulate the rows into the per-node histograms (pass 2). */
@Override public void map( Chunk[] chks ) {
// Column layout: chks[0.._ncols-1] are the features, followed by the
// work/response column and the node-id column.
// NOTE(review): assumes exactly 2 extra columns sit between the features
// and wrks -- confirm against the frame built by the caller.
final Chunk wrks = chks[_ncols+2];
final Chunk nids = chks[_ncols+3];
// Optional per-row observation weight; a constant-1 chunk when absent.
final Chunk weight = chks.length >= _ncols+5 ? chks[_ncols+4] : new C0DChunk(1, chks[0].len());
// Pass 1: Score a prior partially-built tree model, and make new Node
// assignments to every row. This involves pulling out the current
// assigned DecidedNode, "scoring" the row against that Node's decision
// criteria, and assigning the row to a new child UndecidedNode (and
// giving it an improved prediction).
// nnids holds the tree-relative (nid - _leaf) assignment per row;
// negative values flag rows that need no histogramming.
int nnids[] = new int[nids._len];
if( _leaf > 0) // Prior pass exists?
score_decide(chks,nids,nnids);
else // Just flag all the NA rows
for( int row=0; row<nids._len; row++ )
if( isDecidedRow((int)nids.atd(row)) ) nnids[row] = -1;
// Pass 2: accumulate all rows, cols into histograms
if( _subset ) accum_subset(chks,wrks,weight,nnids);
else accum_all (chks,wrks,weight,nnids);
}
/** Merge histograms from another (remote) copy of this task into ours. */
@Override public void reduce( ScoreBuildHistogram sbh ) {
  // Tasks on the same node share one histogram array; nothing to do.
  if( sbh._hcs == _hcs ) return;
  // Distributed case: fold the remote per-node histogram arrays into ours.
  for( int i = 0; i < _hcs.length; i++ ) {
    DHistogram[] mine   = _hcs[i];
    DHistogram[] theirs = sbh._hcs[i];
    if( theirs == null ) continue;          // Nothing on the remote side
    if( mine == null ) { _hcs[i] = theirs; continue; } // Adopt remote wholesale
    for( int j = 0; j < mine.length; j++ ) {
      if( mine[j] == null ) mine[j] = theirs[j];       // Adopt remote column
      else if( theirs[j] != null )
        mine[j].add(theirs[j]);                        // Merge both sides
    }
  }
}
// Pass 1: Score a prior partially-built tree model, and make new Node
// assignments to every row. This involves pulling out the current
// assigned DecidedNode, "scoring" the row against that Node's decision
// criteria, and assigning the row to a new child UndecidedNode (and
// giving it an improved prediction).
// Mutates both the nids chunk (absolute node ids, with OOB encoding) and
// the nnids array (tree-relative ids; negative = done).
private void score_decide(Chunk chks[], Chunk nids, int nnids[]) {
for( int row=0; row<nids._len; row++ ) { // Over all rows
int nid = (int)nids.at8(row); // Get Node to decide from
if( isDecidedRow(nid)) { // already done
nnids[row] = nid-_leaf; // will be negative, flagging a completed row
continue;
}
// Score row against current decisions & assign new split
boolean oob = isOOBRow(nid);
if( oob ) nid = oob2Nid(nid); // sampled away - we track the position in the tree
DTree.DecidedNode dn = _tree.decided(nid);
if( dn._split._col == -1 ) { // Might have a leftover non-split
if( DTree.isRootNode(dn) ) { nnids[row] = nid-_leaf; continue; }
// Non-root, non-split node: defer to the parent's decision instead.
nid = dn._pid; // Use the parent split decision then
int xnid = oob ? nid2Oob(nid) : nid;
nids.set(row, xnid);
nnids[row] = xnid-_leaf;
dn = _tree.decided(nid); // Parent steers us
}
assert !isDecidedRow(nid);
nid = dn.ns(chks,row); // Move down the tree 1 level
if( !isDecidedRow(nid) ) {
if( oob ) nid = nid2Oob(nid); // Re-apply OOB encoding
nids.set(row, nid);
}
nnids[row] = nid-_leaf;
}
}
// All rows, some cols, accumulate histograms.
// Used when _subset is set; histograms are updated one row at a time.
// NOTE(review): iterates sCols without a null check -- presumably every
// undecided node has non-null _scoreCols when _subset is true; confirm.
private void accum_subset(Chunk chks[], Chunk wrks, Chunk weight, int nnids[]) {
for( int row=0; row<nnids.length; row++ ) { // Over all rows
int nid = nnids[row]; // Get Node to decide from
if( nid >= 0 ) { // row already predicts perfectly or OOB
assert !Double.isNaN(wrks.atd(row)); // Already marked as sampled-away
DHistogram nhs[] = _hcs[nid];
int sCols[] = _tree.undecided(nid+_leaf)._scoreCols; // Columns to score (null, or a list of selected cols)
//FIXME/TODO: sum into local variables, do atomic increment once at the end, similar to accum_all
for( int col : sCols ) { // For tracked cols
double w = weight.atd(row);
if (w == 0) continue; // Zero-weight rows contribute nothing
nhs[col].incr((float) chks[col].atd(row), wrks.atd(row), w); // Histogram row/col
}
}
}
}
// All rows, all cols, accumulate histograms. This is the hot hot inner
// loop of GBM, so we do some non-standard optimizations. The rows in this
// chunk are spread out amongst a modest set of NodeIDs/splits. Normally
// we would visit the rows in row-order, but this visits the NIDs in random
// order. The hot-part of this code updates the histograms racily (via
// atomic updates) - once-per-row. This optimized version updates the
// histograms once-per-NID, but requires pre-sorting the rows by NID.
private void accum_all(Chunk chks[], Chunk wrks, Chunk weight, int nnids[]) {
// Sort the rows by NID, so we visit all the same NIDs in a row
// (a classic counting sort: count, prefix-sum, scatter).
// Find the count of unique NIDs in this chunk
int nh[] = new int[_hcs.length+1];
for( int i : nnids ) if( i >= 0 ) nh[i+1]++;
// Rollup the histogram of rows-per-NID in this chunk (prefix sums)
for( int i=0; i<_hcs.length; i++ ) nh[i+1] += nh[i];
// Splat the rows into NID-groups
int rows[] = new int[nnids.length];
for( int row=0; row<nnids.length; row++ )
if( nnids[row] >= 0 )
rows[nh[nnids[row]]++] = row;
// rows[] has Chunk-local ROW-numbers now, in-order, grouped by NID.
// nh[] lists the start of each new NID, and is indexed by NID+1.
accum_all2(chks,wrks,weight,nh,rows);
}
// For all columns, for all NIDs, for all ROWS...
// Gathers counts/sums/sum-squares into thread-local scratch arrays, then
// publishes each bin into the shared histograms with a single atomic add.
private void accum_all2(Chunk chks[], Chunk wrks, Chunk weight, int nh[], int[] rows) {
final DHistogram hcs[][] = _hcs;
if( hcs.length==0 ) return; // Unlikely fast cutout
// Local temp arrays, no atomic updates.
double bins[] = new double[Math.max(_nbins, _nbins_cats)];
double sums[] = new double[Math.max(_nbins, _nbins_cats)];
double ssqs[] = new double[Math.max(_nbins, _nbins_cats)];
// For All Columns
for( int c=0; c<_ncols; c++) { // for all columns
Chunk chk = chks[c];
// For All NIDs
for( int n=0; n<hcs.length; n++ ) {
final DRealHistogram rh = ((DRealHistogram)hcs[n][c]);
if( rh==null ) continue; // Ignore untracked columns in this split
// nh[] holds prefix sums, so [lo,hi) is exactly this NID's row group.
final int lo = n==0 ? 0 : nh[n-1];
final int hi = nh[n];
float min = rh._min2;
float max = rh._maxIn;
// While most of the time we are limited to nbins, we allow more bins
// in a few cases (top-level splits have few total bins across all
// the (few) splits) so it's safe to bin more; also categoricals want
// to split one bin-per-level no matter how many levels).
if( rh._bins.length >= bins.length ) { // Grow bins if needed
bins = new double[rh._bins.length];
sums = new double[rh._bins.length];
ssqs = new double[rh._bins.length];
}
// Gather all the data for this set of rows, for 1 column and 1 split/NID
// Gather min/max, sums and sum-squares.
for( int xrow=lo; xrow<hi; xrow++ ) {
int row = rows[xrow];
double w = weight.atd(row);
if (w == 0) continue; // Zero-weight rows contribute nothing
float col_data = (float)chk.atd(row);
if( col_data < min ) min = col_data;
if( col_data > max ) max = col_data;
int b = rh.bin(col_data); // Compute bin# via linear interpolation
double resp = wrks.atd(row);
bins[b] += w; // Bump count in bin
sums[b] += w*resp;
ssqs[b] += w*resp*resp;
}
// Add all the data into the Histogram (atomically add)
rh.setMin(min); // Track actual lower/upper bound per-bin
rh.setMax(max);
// Scratch slots are zeroed as they are drained so the arrays can be
// reused for the next NID without a full clear.
for( int b=0; b<rh._bins.length; b++ ) { // Bump counts in bins
if( bins[b] != 0 ) { AtomicUtils.DoubleArray.add(rh._bins,b,bins[b]); bins[b]=0; }
if( sums[b] != 0 ) { rh.incr1(b,sums[b],ssqs[b]); sums[b]=ssqs[b]=0; }
}
}
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gateway;
import com.carrotsearch.hppc.ObjectLongHashMap;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.*;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.StoreFileMetaData;
import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
/**
 * Allocates unassigned shards using on-disk state fetched asynchronously from
 * the data nodes: primaries are placed on nodes holding the freshest shard
 * state, replicas on nodes whose existing store files best match the primary.
 */
public class GatewayAllocator extends AbstractComponent {
/** Per-index override: how many shard copies must be found before allocating a primary. */
public static final String INDEX_RECOVERY_INITIAL_SHARDS = "index.recovery.initial_shards";
// Node-level default for the setting above (falls back to "quorum").
private final String initialShards;
// Transport actions used to list started-shard state / store files on data nodes.
private final TransportNodesListGatewayStartedShards startedAction;
private final TransportNodesListShardStoreMetaData storeAction;
// Injected later via setReallocation(); used by InternalAsyncFetch to trigger reroutes.
private RoutingService routingService;
// Cached async fetches, keyed by shard id; cleared when this node stops being master.
private final ConcurrentMap<ShardId, AsyncShardFetch<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards>> asyncFetchStarted = ConcurrentCollections.newConcurrentMap();
private final ConcurrentMap<ShardId, AsyncShardFetch<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData>> asyncFetchStore = ConcurrentCollections.newConcurrentMap();
@Inject
public GatewayAllocator(Settings settings, TransportNodesListGatewayStartedShards startedAction, TransportNodesListShardStoreMetaData storeAction) {
super(settings);
this.startedAction = startedAction;
this.storeAction = storeAction;
this.initialShards = settings.get("gateway.initial_shards", settings.get("gateway.local.initial_shards", "quorum"));
logger.debug("using initial_shards [{}]", initialShards);
}
/** Wires in the routing service and registers a listener that drops the
 *  cached fetch results whenever this node is not the acting master. */
public void setReallocation(final ClusterService clusterService, final RoutingService routingService) {
    this.routingService = routingService;
    clusterService.add(new ClusterStateListener() {
        @Override
        public void clusterChanged(ClusterChangedEvent event) {
            final DiscoveryNode localNode = event.state().nodes().localNode();
            // Clear when we cannot tell who we are, or when we are master-eligible
            // but no longer the elected master: cached results are stale then.
            final boolean cleanCache =
                    localNode == null
                    || (localNode.masterNode() && event.localNodeMaster() == false);
            if (cleanCache) {
                Releasables.close(asyncFetchStarted.values());
                asyncFetchStarted.clear();
                Releasables.close(asyncFetchStore.values());
                asyncFetchStore.clear();
            }
        }
    });
}
/** @return total number of node-level fetches currently in flight, across
 *          both the started-shard and the store-metadata caches. */
public int getNumberOfInFlightFetch() {
    int inFlight = 0;
    for (AsyncShardFetch<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> startedFetch : asyncFetchStarted.values()) {
        inFlight += startedFetch.getNumberOfInFlightFetches();
    }
    for (AsyncShardFetch<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> storeFetch : asyncFetchStore.values()) {
        inFlight += storeFetch.getNumberOfInFlightFetches();
    }
    return inFlight;
}
/** Releases cached fetch state for shards that have successfully started. */
public void applyStartedShards(StartedRerouteAllocation allocation) {
    for (ShardRouting startedShard : allocation.startedShards()) {
        final ShardId shardId = startedShard.shardId();
        Releasables.close(asyncFetchStarted.remove(shardId));
        Releasables.close(asyncFetchStore.remove(shardId));
    }
}
/** Releases cached fetch state for shards whose allocation failed. */
public void applyFailedShards(FailedRerouteAllocation allocation) {
    for (FailedRerouteAllocation.FailedShard failed : allocation.failedShards()) {
        final ShardId shardId = failed.shard.shardId();
        Releasables.close(asyncFetchStarted.remove(shardId));
        Releasables.close(asyncFetchStore.remove(shardId));
    }
}
/**
 * Returns {@code true} when the index lives on a shared filesystem and is
 * explicitly configured to allow its shards to be recovered on any node.
 */
private boolean recoverOnAnyNode(@IndexSettings Settings idxSettings) {
    if (IndexMetaData.isOnSharedFilesystem(idxSettings) == false) {
        return false;
    }
    return idxSettings.getAsBoolean(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false);
}
/**
 * Tries to place every unassigned shard in {@code allocation}.
 * Phase 1 handles primaries: each goes to a node reporting the highest
 * on-disk shard state version (once enough copies are found). Phase 2
 * handles replicas: each prefers the node whose existing store files best
 * match the current primary's. Shards that cannot be decided yet are moved
 * to the ignored-unassigned list.
 *
 * @return true when the routing table was changed by this pass
 */
public boolean allocateUnassigned(RoutingAllocation allocation) {
boolean changed = false;
DiscoveryNodes nodes = allocation.nodes();
RoutingNodes routingNodes = allocation.routingNodes();
// First, handle primaries, they must find a place to be allocated on here
final MetaData metaData = routingNodes.metaData();
RoutingNodes.UnassignedShards unassigned = routingNodes.unassigned();
unassigned.sort(new PriorityComparator() {
@Override
protected Settings getIndexSettings(String index) {
IndexMetaData indexMetaData = metaData.index(index);
return indexMetaData.getSettings();
}
}); // sort for priority ordering
Iterator<ShardRouting> unassignedIterator = unassigned.iterator();
while (unassignedIterator.hasNext()) {
ShardRouting shard = unassignedIterator.next();
if (!shard.primary()) {
continue;
}
// this is an API allocation, ignore since we know there is no data...
if (!routingNodes.routingTable().index(shard.index()).shard(shard.id()).primaryAllocatedPostApi()) {
continue;
}
// Lazily start (or reuse) the async fetch of per-node shard state.
AsyncShardFetch<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> fetch = asyncFetchStarted.get(shard.shardId());
if (fetch == null) {
fetch = new InternalAsyncFetch<>(logger, "shard_started", shard.shardId(), startedAction);
asyncFetchStarted.put(shard.shardId(), fetch);
}
AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> shardState = fetch.fetchData(nodes, metaData, allocation.getIgnoreNodes(shard.shardId()));
if (shardState.hasData() == false) {
logger.trace("{}: ignoring allocation, still fetching shard started state", shard);
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
continue;
}
shardState.processAllocation(allocation);
IndexMetaData indexMetaData = metaData.index(shard.getIndex());
/**
 * Build a map of DiscoveryNodes to shard state number for the given shard.
 * A state of -1 means the shard does not exist on the node, where any
 * shard state >= 0 is the state version of the shard on that node's disk.
 *
 * A shard on shared storage will return at least shard state 0 for all
 * nodes, indicating that the shard can be allocated to any node.
 */
ObjectLongHashMap<DiscoveryNode> nodesState = new ObjectLongHashMap<>();
for (TransportNodesListGatewayStartedShards.NodeGatewayStartedShards nodeShardState : shardState.getData().values()) {
long version = nodeShardState.version();
// -1 version means it does not exists, which is what the API returns, and what we expect to
logger.trace("[{}] on node [{}] has version [{}] of shard", shard, nodeShardState.getNode(), version);
nodesState.put(nodeShardState.getNode(), version);
}
int numberOfAllocationsFound = 0;
long highestVersion = -1;
final Map<DiscoveryNode, Long> nodesWithVersion = Maps.newHashMap();
assert !nodesState.containsKey(null);
// Iterate the hash map's backing arrays directly; null keys are empty slots.
final Object[] keys = nodesState.keys;
final long[] values = nodesState.values;
Settings idxSettings = indexMetaData.settings();
for (int i = 0; i < keys.length; i++) {
if (keys[i] == null) {
continue;
}
DiscoveryNode node = (DiscoveryNode) keys[i];
long version = values[i];
// since we don't check in NO allocation, we need to double check here
if (allocation.shouldIgnoreShardForNode(shard.shardId(), node.id())) {
continue;
}
if (recoverOnAnyNode(idxSettings)) {
numberOfAllocationsFound++;
if (version > highestVersion) {
highestVersion = version;
}
// We always put the node without clearing the map
nodesWithVersion.put(node, version);
} else if (version != -1) {
numberOfAllocationsFound++;
// If we've found a new "best" candidate, clear the
// current candidates and add it
if (version > highestVersion) {
highestVersion = version;
nodesWithVersion.clear();
nodesWithVersion.put(node, version);
} else if (version == highestVersion) {
// If the candidate is the same, add it to the
// list, but keep the current candidate
nodesWithVersion.put(node, version);
}
}
}
// Now that we have a map of nodes to versions along with the
// number of allocations found (and not ignored), we need to sort
// it so the node with the highest version is at the beginning
List<DiscoveryNode> nodesWithHighestVersion = Lists.newArrayList();
nodesWithHighestVersion.addAll(nodesWithVersion.keySet());
CollectionUtil.timSort(nodesWithHighestVersion, new Comparator<DiscoveryNode>() {
@Override
public int compare(DiscoveryNode o1, DiscoveryNode o2) {
return Long.compare(nodesWithVersion.get(o2), nodesWithVersion.get(o1));
}
});
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}] found {} allocations of {}, highest version: [{}]",
shard.index(), shard.id(), numberOfAllocationsFound, shard, highestVersion);
}
if (logger.isTraceEnabled()) {
StringBuilder sb = new StringBuilder("[");
for (DiscoveryNode n : nodesWithHighestVersion) {
sb.append("[");
sb.append(n.getName());
sb.append("]");
sb.append(" -> ");
sb.append(nodesWithVersion.get(n));
sb.append(", ");
}
sb.append("]");
logger.trace("{} candidates for allocation: {}", shard, sb.toString());
}
// check if the counts meets the minimum set
int requiredAllocation = 1;
// if we restore from a repository one copy is more then enough
if (shard.restoreSource() == null) {
try {
String initialShards = indexMetaData.settings().get(INDEX_RECOVERY_INITIAL_SHARDS, settings.get(INDEX_RECOVERY_INITIAL_SHARDS, this.initialShards));
if ("quorum".equals(initialShards)) {
if (indexMetaData.numberOfReplicas() > 1) {
requiredAllocation = ((1 + indexMetaData.numberOfReplicas()) / 2) + 1;
}
} else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) {
if (indexMetaData.numberOfReplicas() > 2) {
requiredAllocation = ((1 + indexMetaData.numberOfReplicas()) / 2);
}
} else if ("one".equals(initialShards)) {
requiredAllocation = 1;
} else if ("full".equals(initialShards) || "all".equals(initialShards)) {
requiredAllocation = indexMetaData.numberOfReplicas() + 1;
} else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) {
if (indexMetaData.numberOfReplicas() > 1) {
requiredAllocation = indexMetaData.numberOfReplicas();
}
} else {
requiredAllocation = Integer.parseInt(initialShards);
}
} catch (Exception e) {
logger.warn("[{}][{}] failed to derived initial_shards from value {}, ignore allocation for {}", shard.index(), shard.id(), initialShards, shard);
}
}
// not enough found for this shard, continue...
if (numberOfAllocationsFound < requiredAllocation) {
// if we are restoring this shard we still can allocate
if (shard.restoreSource() == null) {
// we can't really allocate, so ignore it and continue
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: not allocating, number_of_allocated_shards_found [{}], required_number [{}]", shard.index(), shard.id(), numberOfAllocationsFound, requiredAllocation);
}
} else if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: missing local data, will restore from [{}]", shard.index(), shard.id(), shard.restoreSource());
}
continue;
}
// Walk the candidates best-first; stop at the first YES decision.
Set<DiscoveryNode> throttledNodes = Sets.newHashSet();
Set<DiscoveryNode> noNodes = Sets.newHashSet();
for (DiscoveryNode discoNode : nodesWithHighestVersion) {
RoutingNode node = routingNodes.node(discoNode.id());
if (node == null) {
continue;
}
Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
if (decision.type() == Decision.Type.THROTTLE) {
throttledNodes.add(discoNode);
} else if (decision.type() == Decision.Type.NO) {
noNodes.add(discoNode);
} else {
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, discoNode);
}
// we found a match
changed = true;
// make sure we create one with the version from the recovered state
routingNodes.initialize(new ShardRouting(shard, highestVersion), node.nodeId());
unassignedIterator.remove();
// found a node, so no throttling, no "no", and break out of the loop
throttledNodes.clear();
noNodes.clear();
break;
}
}
if (throttledNodes.isEmpty()) {
// if we have a node that we "can't" allocate to, force allocation, since this is our master data!
if (!noNodes.isEmpty()) {
DiscoveryNode discoNode = noNodes.iterator().next();
RoutingNode node = routingNodes.node(discoNode.id());
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: forcing allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, discoNode);
}
// we found a match
changed = true;
// make sure we create one with the version from the recovered state
routingNodes.initialize(new ShardRouting(shard, highestVersion), node.nodeId());
unassignedIterator.remove();
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: throttling allocation [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, throttledNodes);
}
// we are throttling this, but we have enough to allocate to this node, ignore it for now
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
}
}
if (!routingNodes.hasUnassigned()) {
return changed;
}
// Now, handle replicas, try to assign them to nodes that are similar to the one the primary was allocated on
unassignedIterator = unassigned.iterator();
while (unassignedIterator.hasNext()) {
ShardRouting shard = unassignedIterator.next();
if (shard.primary()) {
continue;
}
// pre-check if it can be allocated to any node that currently exists, so we won't list the store for it for nothing
boolean canBeAllocatedToAtLeastOneNode = false;
for (ObjectCursor<DiscoveryNode> cursor : nodes.dataNodes().values()) {
RoutingNode node = routingNodes.node(cursor.value.id());
if (node == null) {
continue;
}
// if we can't allocate it on a node, ignore it, for example, this handles
// cases for only allocating a replica after a primary
Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
if (decision.type() == Decision.Type.YES) {
canBeAllocatedToAtLeastOneNode = true;
break;
}
}
if (!canBeAllocatedToAtLeastOneNode) {
logger.trace("{}: ignoring allocation, can't be allocated on any node", shard);
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
continue;
}
// Lazily start (or reuse) the async fetch of per-node store metadata.
AsyncShardFetch<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> fetch = asyncFetchStore.get(shard.shardId());
if (fetch == null) {
fetch = new InternalAsyncFetch<>(logger, "shard_store", shard.shardId(), storeAction);
asyncFetchStore.put(shard.shardId(), fetch);
}
AsyncShardFetch.FetchResult<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> shardStores = fetch.fetchData(nodes, metaData, allocation.getIgnoreNodes(shard.shardId()));
if (shardStores.hasData() == false) {
logger.trace("{}: ignoring allocation, still fetching shard stores", shard);
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
continue; // still fetching
}
shardStores.processAllocation(allocation);
long lastSizeMatched = 0;
DiscoveryNode lastDiscoNodeMatched = null;
RoutingNode lastNodeMatched = null;
boolean hasReplicaData = false;
IndexMetaData indexMetaData = metaData.index(shard.getIndex());
for (Map.Entry<DiscoveryNode, TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> nodeStoreEntry : shardStores.getData().entrySet()) {
DiscoveryNode discoNode = nodeStoreEntry.getKey();
TransportNodesListShardStoreMetaData.StoreFilesMetaData storeFilesMetaData = nodeStoreEntry.getValue().storeFilesMetaData();
logger.trace("{}: checking node [{}]", shard, discoNode);
if (storeFilesMetaData == null) {
// already allocated on that node...
continue;
}
RoutingNode node = routingNodes.node(discoNode.id());
if (node == null) {
continue;
}
// check if we can allocate on that node...
// we only check for NO, since if this node is THROTTLING and it has enough "same data"
// then we will try and assign it next time
Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
if (decision.type() == Decision.Type.NO) {
continue;
}
// if it is already allocated, we can't assign to it...
if (storeFilesMetaData.allocated()) {
continue;
}
if (!shard.primary()) {
hasReplicaData |= storeFilesMetaData.iterator().hasNext();
ShardRouting primaryShard = routingNodes.activePrimary(shard);
if (primaryShard != null) {
assert primaryShard.active();
DiscoveryNode primaryNode = nodes.get(primaryShard.currentNodeId());
if (primaryNode != null) {
TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData primaryNodeFilesStore = shardStores.getData().get(primaryNode);
if (primaryNodeFilesStore != null) {
TransportNodesListShardStoreMetaData.StoreFilesMetaData primaryNodeStore = primaryNodeFilesStore.storeFilesMetaData();
if (primaryNodeStore != null && primaryNodeStore.allocated()) {
long sizeMatched = 0;
String primarySyncId = primaryNodeStore.syncId();
String replicaSyncId = storeFilesMetaData.syncId();
// see if we have a sync id we can make use of
if (replicaSyncId != null && replicaSyncId.equals(primarySyncId)) {
logger.trace("{}: node [{}] has same sync id {} as primary", shard, discoNode.name(), replicaSyncId);
lastNodeMatched = node;
// A matching sync id beats any byte-count comparison.
lastSizeMatched = Long.MAX_VALUE;
lastDiscoNodeMatched = discoNode;
} else {
for (StoreFileMetaData storeFileMetaData : storeFilesMetaData) {
String metaDataFileName = storeFileMetaData.name();
if (primaryNodeStore.fileExists(metaDataFileName) && primaryNodeStore.file(metaDataFileName).isSame(storeFileMetaData)) {
sizeMatched += storeFileMetaData.length();
}
}
logger.trace("{}: node [{}] has [{}/{}] bytes of re-usable data",
shard, discoNode.name(), new ByteSizeValue(sizeMatched), sizeMatched);
if (sizeMatched > lastSizeMatched) {
lastSizeMatched = sizeMatched;
lastDiscoNodeMatched = discoNode;
lastNodeMatched = node;
}
}
}
}
}
}
}
}
if (lastNodeMatched != null) {
// we only check on THROTTLE since we checked before on NO
Decision decision = allocation.deciders().canAllocate(shard, lastNodeMatched, allocation);
if (decision.type() == Decision.Type.THROTTLE) {
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: throttling allocation [{}] to [{}] in order to reuse its unallocated persistent store with total_size [{}]", shard.index(), shard.id(), shard, lastDiscoNodeMatched, new ByteSizeValue(lastSizeMatched));
}
// we are throttling this, but we have enough to allocate to this node, ignore it for now
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
} else {
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: allocating [{}] to [{}] in order to reuse its unallocated persistent store with total_size [{}]", shard.index(), shard.id(), shard, lastDiscoNodeMatched, new ByteSizeValue(lastSizeMatched));
}
// we found a match
changed = true;
routingNodes.initialize(shard, lastNodeMatched.nodeId());
unassignedIterator.remove();
}
} else if (hasReplicaData == false) {
// if we didn't manage to find *any* data (regardless of matching sizes), check if the allocation
// of the replica shard needs to be delayed, and if so, add it to the ignore unassigned list
// note: we only care about replica in delayed allocation, since if we have an unassigned primary it
// will anyhow wait to find an existing copy of the shard to be allocated
// note: the other side of the equation is scheduling a reroute in a timely manner, which happens in the RoutingService
long delay = shard.unassignedInfo().getDelayAllocationExpirationIn(settings, indexMetaData.getSettings());
if (delay > 0) {
logger.debug("[{}][{}]: delaying allocation of [{}] for [{}]", shard.index(), shard.id(), shard, TimeValue.timeValueMillis(delay));
/**
 * mark it as changed, since we want to kick a publishing to schedule future allocation,
 * see {@link org.elasticsearch.cluster.routing.RoutingService#clusterChanged(ClusterChangedEvent)}).
 */
changed = true;
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
}
}
}
return changed;
}
class InternalAsyncFetch<T extends BaseNodeResponse> extends AsyncShardFetch<T> {
public InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, List<? extends BaseNodesResponse<T>, T> action) {
super(logger, type, shardId, action);
}
@Override
protected void reroute(ShardId shardId, String reason) {
logger.trace("{} scheduling reroute for {}", shardId, reason);
routingService.reroute("async_shard_fetch");
}
}
}
| |
package com.kodcu.other;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.fop.apps.FopFactory;
import org.joox.JOOX;
import org.joox.Match;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import javax.imageio.ImageIO;
import javax.json.JsonReader;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import java.awt.image.BufferedImage;
import java.io.*;
import java.net.MalformedURLException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.function.BiPredicate;
import java.util.stream.Stream;
/**
* Created by usta on 09.05.2014.
*/
// Static helper methods around file/stream I/O; failures are logged and
// signalled with null/Optional returns rather than thrown.
public class IOHelper {
private static final Logger logger = LoggerFactory.getLogger(IOHelper.class);
/** Convenience overload: writes {@code content} to {@code file} as UTF-8,
 *  delegating to the {@link java.nio.file.Path} variant. */
public static void writeToFile(File file, String content, StandardOpenOption... openOption) {
    final Path target = file.toPath();
    writeToFile(target, content, openOption);
}
/**
 * Writes {@code content} to {@code path}, encoded as UTF-8.
 *
 * @return the failure wrapped in an {@link Optional} when writing threw an
 *         {@link IOException}, otherwise {@link Optional#empty()}
 */
public static Optional<IOException> writeToFile(Path path, String content, StandardOpenOption... openOption) {
    try {
        // StandardCharsets.UTF_8 avoids the by-name charset lookup of
        // Charset.forName("UTF-8") and can never fail at runtime.
        Files.write(path, content.getBytes(StandardCharsets.UTF_8), openOption);
    } catch (IOException e) {
        logger.error("Problem occured while writing to {}", path, e);
        return Optional.of(e);
    }
    return Optional.empty();
}
public static void writeToFile(Path path, byte[] content, StandardOpenOption... openOption) {
try {
Files.write(path, content, openOption);
} catch (IOException e) {
logger.error("Problem occured while writing {}", path, e);
}
}
public static String readFile(InputStream inputStream) {
String content = "";
try {
content = IOUtils.toString(inputStream, "UTF-8");
IOUtils.closeQuietly(inputStream);
} catch (IOException e) {
logger.error("Problem occured while reading inputstream", e);
}
return content;
}
public static String readFile(Path path) {
String content = "";
try (InputStream is = Files.newInputStream(path, StandardOpenOption.READ)) {
content = IOUtils.toString(is, "UTF-8");
} catch (IOException e) {
logger.error("Problem occured while reading file {}", path, e);
}
return content;
}
public static void createDirectories(Path path) {
try {
Files.createDirectories(path);
} catch (IOException e) {
logger.error("Problem occured while creating directories {}", path, e);
}
}
public static Path createTempFile(String suffix) {
try {
return Files.createTempFile("asciidoc-temp", suffix);
} catch (IOException e) {
logger.error("Problem occured while creating temp file", e);
}
return null;
}
public static Path createTempFile(Path path, String suffix) {
if (Objects.isNull(path)) {
return createTempFile(suffix);
}
try {
return Files.createTempFile(path, "asciidoc-temp", suffix);
} catch (IOException e) {
logger.error("Problem occured while creating temp file {}", path, e);
}
return null;
}
public static void copy(Path source, Path target, CopyOption... copyOptions) {
try {
Files.copy(source, target, copyOptions);
} catch (IOException e) {
logger.error("Problem occured while copying {} to {}", source, target, e);
}
}
public static String pathToUrl(Path path) {
try {
return path.toUri().toURL().toString();
} catch (MalformedURLException e) {
logger.error("Problem occured while getting URL of {}", path, e);
}
return null;
}
public static Stream<Path> list(Path path) {
try {
return Files.list(path);
} catch (IOException e) {
logger.error("Problem occured while listing {}", path, e);
}
return Stream.empty();
}
public static void imageWrite(BufferedImage bufferedImage, String format, File output) {
try {
ImageIO.write(bufferedImage, format, output);
} catch (IOException e) {
logger.error("Problem occured while writing buff image to {}", output, e);
}
}
public static byte[] readAllBytes(Path path) {
try {
return Files.readAllBytes(path);
} catch (IOException e) {
logger.error("Problem occured while reading {}", path, e);
}
return new byte[]{};
}
public static void move(Path source, Path target, StandardCopyOption... option) {
try {
Files.move(source, target, option);
} catch (IOException e) {
logger.error("Problem occured while moving {} to {}", source, target, e);
}
}
public static Match $(InputSource inputSource) {
try {
return JOOX.$(inputSource);
} catch (SAXException | IOException e) {
logger.error("Problem occured while selecting Match", e);
}
return null;
}
public static Match $(File file) {
try {
return JOOX.$(file);
} catch (SAXException | IOException e) {
logger.error("Problem occured while selecting Match for {}", file, e);
}
return JOOX.$();
}
public static void transform(Transformer transformer, StreamSource xmlSource, StreamResult streamResult) {
try {
transformer.transform(xmlSource, streamResult);
} catch (TransformerException e) {
logger.error("Problem occured while transforming XML Source to Stream result", e);
}
}
public static void matchWrite(Match root, File file) {
try {
root.write(file);
} catch (IOException e) {
logger.error("Problem occured while writing XML Match to {}", file, e);
}
}
public static void copyDirectoryToDirectory(File source, File target) {
try {
FileUtils.copyDirectoryToDirectory(source, target);
} catch (IOException e) {
logger.error("Problem occured while copying {} to {}", source, target, e);
}
}
public static void setUserConfig(FopFactory fopFactory, String configUri) {
try {
fopFactory.setUserConfig(configUri);
} catch (SAXException | IOException e) {
logger.error("Problem occured while setting {} as UserConfig", configUri, e);
}
}
public static void deleteIfExists(Path path) {
try {
Files.deleteIfExists(path);
} catch (IOException e) {
logger.error("Problem occured while deleting {}", path, e);
}
}
public static void copyDirectory(Path sourceDir, Path targetDir) {
try {
FileUtils.copyDirectory(sourceDir.toFile(), targetDir.toFile());
} catch (IOException e) {
logger.error("Problem occured while copying {} to {}", sourceDir, targetDir, e);
}
}
public static Stream<Path> find(Path start, int maxDepth, BiPredicate<Path, BasicFileAttributes> matcher, FileVisitOption... options) {
try {
return Files.find(start, Integer.MAX_VALUE, matcher, options);
} catch (IOException e) {
logger.error("Problem occured while finding in path {}", start, e);
}
return Stream.empty();
}
public static boolean isHidden(Path path) {
try {
return Files.isHidden(path) || path.getFileName().toString().startsWith(".");
} catch (IOException e) {
logger.error("Problem occured while detecting hidden path {}", path, e);
}
return false;
}
public static void copyFileToDirectory(File file, File directory) {
try {
FileUtils.copyFileToDirectory(file, directory);
} catch (IOException e) {
logger.error("Problem occured while copying {} to {}", file, directory, e);
}
}
public static void copyFile(File file, File dest) {
try {
FileUtils.copyFile(file, dest);
} catch (IOException e) {
logger.error("Problem occured while copying {} to {}", file, dest, e);
}
}
public static void createDirectory(Path path) {
try {
Files.createDirectory(path);
} catch (IOException e) {
logger.error("Problem occured while creating {} path", path, e);
}
}
public static void deleteDirectory(Path path) {
try {
FileUtils.deleteDirectory(path.toFile());
} catch (IOException e) {
logger.error("Problem occured while deleting {} path", path, e);
}
}
public static List<String> readAllLines(Path path) {
try {
return Files.readAllLines(path);
} catch (IOException e) {
logger.error("Problem occured while reading {} path", path, e);
}
return new ArrayList<>();
}
public static FileReader fileReader(Path path) {
try {
return new FileReader(path.toFile());
} catch (FileNotFoundException e) {
logger.error("Problem occured while creating FileReader for {} path", path, e);
}
return null;
}
public static void close(Closeable... closeables) {
for (Closeable closeable : closeables) {
try {
closeable.close();
} catch (IOException e) {
logger.error("Problem occured while closing resource");
}
}
}
}
| |
/**********************************************************************
Copyright (c) 2004 Andy Jefferson and others. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors:
2005 Andy Jefferson - added capability to have String or long type discriminators
...
**********************************************************************/
package org.datanucleus.store.mapped.mapping;
import org.datanucleus.ClassNameConstants;
import org.datanucleus.ExecutionContext;
import org.datanucleus.metadata.ColumnMetaData;
import org.datanucleus.metadata.DiscriminatorMetaData;
import org.datanucleus.metadata.DiscriminatorStrategy;
import org.datanucleus.store.mapped.DatastoreAdapter;
import org.datanucleus.store.mapped.DatastoreContainerObject;
import org.datanucleus.store.mapped.DatastoreField;
import org.datanucleus.store.mapped.DatastoreIdentifier;
import org.datanucleus.store.mapped.IdentifierFactory;
import org.datanucleus.store.mapped.MappedStoreManager;
/**
* Mapping for a discriminator column in a table used in inheritance.
* The discriminator column is, by default, a String type, typically VARCHAR.
* It can however be "long" based if the user specifies INTEGER, BIGINT, or NUMERIC as the jdbc-type.
* In the latter case we make the necessary conversions between value types in this mapping class.
*
* This class is for internal use only. It should not be used in user mappings nor extended.
*/
public class DiscriminatorMapping extends SingleFieldMapping
{
    /** Mapping that performs the actual datastore storage for the discriminator column. */
    private final JavaTypeMapping delegate;

    /**
     * Constructor.
     * @param dba Datastore Adapter
     * @param table Datastore table
     * @param delegate The JavaTypeMapping to delegate storage
     * @param dismd Metadata for the discriminator
     */
    public DiscriminatorMapping(DatastoreAdapter dba, DatastoreContainerObject table,
        JavaTypeMapping delegate, DiscriminatorMetaData dismd)
    {
        initialize(table.getStoreManager(), delegate.getType());
        this.datastoreContainer = table;
        this.delegate = delegate;

        IdentifierFactory idFactory = table.getStoreManager().getIdentifierFactory();
        DatastoreIdentifier id;
        ColumnMetaData colmd = dismd.getColumnMetaData();
        if (colmd == null)
        {
            // No column metadata at all, so generate a default name and register new metadata
            id = idFactory.newDiscriminatorFieldIdentifier();
            colmd = new ColumnMetaData();
            colmd.setName(id.getIdentifierName());
            dismd.setColumnMetaData(colmd);
        }
        else if (colmd.getName() == null)
        {
            // Column metadata defined but without a name, so create one and set it
            id = idFactory.newDiscriminatorFieldIdentifier();
            colmd.setName(id.getIdentifierName());
        }
        else
        {
            // Name defined so just generate identifier
            id = idFactory.newDatastoreFieldIdentifier(colmd.getName());
        }

        DatastoreField column = table.addDatastoreField(getType(), id, this, dismd.getColumnMetaData());
        table.getStoreManager().getMappingManager().createDatastoreMapping(delegate, column,
            getType());
    }

    /**
     * Accessor for the type represented here, returning the class itself
     * @return This class.
     */
    public Class getJavaType()
    {
        return DiscriminatorMapping.class;
    }

    /**
     * Converts a String discriminator value to a Long when this mapping is long-based
     * (i.e. the user specified jdbc-type INTEGER, BIGINT, or NUMERIC). Otherwise the
     * value is returned unchanged. Shared by {@link #setObject} and {@link #getObject},
     * which previously duplicated this conversion.
     * @param value The raw value
     * @return The value converted for the column type
     */
    private Object convertForColumnType(Object value)
    {
        if (value instanceof java.lang.String &&
            (getType().equals(ClassNameConstants.LONG) || getType().equals(ClassNameConstants.JAVA_LANG_LONG)))
        {
            return Long.valueOf((String)value);
        }
        return value;
    }

    /**
     * Mutator for the object in this column
     * @param ec ExecutionContext
     * @param preparedStatement The statement
     * @param exprIndex The indexes
     * @param value The value to set it to
     */
    public void setObject(ExecutionContext ec, Object preparedStatement, int[] exprIndex, Object value)
    {
        delegate.setObject(ec, preparedStatement, exprIndex, convertForColumnType(value));
    }

    /**
     * Accessor for the object in this column
     * @param ec ExecutionContext
     * @param resultSet The ResultSet to get the value from
     * @param exprIndex The indexes
     * @return The object
     */
    public Object getObject(ExecutionContext ec, Object resultSet, int[] exprIndex)
    {
        return convertForColumnType(delegate.getObject(ec, resultSet, exprIndex));
    }

    /**
     * Accessor for the number of datastore fields.
     * @return Number of datastore fields
     */
    public int getNumberOfDatastoreMappings()
    {
        return delegate.getNumberOfDatastoreMappings();
    }

    /**
     * Accessor for a datastore mapping
     * @param index Index of the mapping
     * @return The datastore mapping.
     */
    public DatastoreMapping getDatastoreMapping(int index)
    {
        return delegate.getDatastoreMapping(index);
    }

    /**
     * Accessor for the datastore mappings for this java type.
     * @return The datastore mapping(s)
     */
    public DatastoreMapping[] getDatastoreMappings()
    {
        return delegate.getDatastoreMappings();
    }

    /**
     * Mutator to add a datastore mapping
     * @param datastoreMapping Datastore mapping
     */
    public void addDatastoreMapping(DatastoreMapping datastoreMapping)
    {
        delegate.addDatastoreMapping(datastoreMapping);
    }

    /**
     * Convenience method to create a discriminator mapping in the specified table, using the provided
     * discriminator metadata. CLASS_NAME strategy always yields a String mapping; VALUE_MAP yields a
     * long-based mapping when the jdbc-type is INTEGER/BIGINT/NUMERIC and a String mapping otherwise.
     * @param table The table
     * @param dismd The discriminator metadata
     * @return Discriminator mapping, or null when no (supported) strategy is set
     */
    public static DiscriminatorMapping createDiscriminatorMapping(DatastoreContainerObject table,
        DiscriminatorMetaData dismd)
    {
        MappedStoreManager storeMgr = table.getStoreManager();
        MappingManager mapMgr = storeMgr.getMappingManager();

        if (dismd.getStrategy() == DiscriminatorStrategy.CLASS_NAME)
        {
            return new DiscriminatorStringMapping(storeMgr.getDatastoreAdapter(), table,
                mapMgr.getMapping(String.class), dismd);
        }
        else if (dismd.getStrategy() == DiscriminatorStrategy.VALUE_MAP)
        {
            // Long-based discriminator only when the user explicitly chose a numeric jdbc-type
            ColumnMetaData disColmd = dismd.getColumnMetaData();
            String jdbcType = (disColmd != null ? disColmd.getJdbcType() : null);
            if (jdbcType != null &&
                (jdbcType.equalsIgnoreCase("INTEGER") ||
                 jdbcType.equalsIgnoreCase("BIGINT") ||
                 jdbcType.equalsIgnoreCase("NUMERIC")))
            {
                return new DiscriminatorLongMapping(storeMgr.getDatastoreAdapter(), table,
                    mapMgr.getMapping(Long.class), dismd);
            }
            return new DiscriminatorStringMapping(storeMgr.getDatastoreAdapter(), table,
                mapMgr.getMapping(String.class), dismd);
        }
        // No supported strategy (e.g. NONE), so no discriminator mapping
        return null;
    }
}
| |
package com.gtm.cpims.model.base;
import java.io.Serializable;
/**
* This is an object that contains data related to the BS_FINANCIALICCHECKYR_FINAL table.
* Do not modify this class because it will be overwritten if the configuration file
* related to this class is modified.
*
* @hibernate.class
* table="BS_FINANCIALICCHECKYR_FINAL"
*/
public abstract class BaseBsFinancialiccheckyrFinal implements Serializable {

    // Property-name constants; declared final since they are compile-time constants
    // that must never be reassigned (they were previously mutable public statics).
    public static final String REF = "BsFinancialiccheckyrFinal";
    public static final String PROP_BANKTYPENAME = "Banktypename";
    public static final String PROP_B1 = "B1";
    public static final String PROP_BANKAREA = "Bankarea";
    public static final String PROP_F1 = "F1";
    public static final String PROP_A1 = "A1";
    public static final String PROP_BANKNAME = "Bankname";
    public static final String PROP_E1 = "E1";
    public static final String PROP_BANKNO = "Bankno";
    public static final String PROP_BANKAREAINDEX = "Bankareaindex";
    public static final String PROP_BANKTYPENO = "Banktypeno";
    public static final String PROP_D1 = "D1";
    public static final String PROP_SUBMITNO = "Submitno";
    public static final String PROP_DURING = "During";
    public static final String PROP_BANKTYPEINDEX = "Banktypeindex";
    public static final String PROP_C1 = "C1";
    public static final String PROP_ID = "Id";
    public static final String PROP_G1 = "G1";

    // constructors
    public BaseBsFinancialiccheckyrFinal () {
        initialize();
    }

    /**
     * Constructor for primary key
     */
    public BaseBsFinancialiccheckyrFinal (java.lang.String id) {
        this.setId(id);
        initialize();
    }

    /** Hook for subclasses to perform extra initialisation; default does nothing. */
    protected void initialize () {}

    // Cached hash code; Integer.MIN_VALUE marks "not yet computed" (reset when the id changes).
    private int hashCode = Integer.MIN_VALUE;

    // primary key
    private java.lang.String id;

    // fields
    private java.lang.String bankno;
    private java.lang.String bankname;
    private java.lang.String banktypeno;
    private java.lang.String banktypename;
    private java.lang.String during;
    private java.lang.Integer banktypeindex;
    private java.lang.Integer bankareaindex;
    private java.lang.String bankarea;
    private java.lang.String submitno;
    private java.lang.String a1;
    private java.lang.String b1;
    private java.lang.String c1;
    private java.lang.String d1;
    private java.lang.String e1;
    private java.lang.String f1;
    private java.lang.String g1;

    /**
     * Return the unique identifier of this class
     * @hibernate.id
     *  generator-class="org.hibernate.id.UUIDHexGenerator"
     *  column="ID"
     */
    public java.lang.String getId () {
        return id;
    }

    /**
     * Set the unique identifier of this class
     * @param id the new ID
     */
    public void setId (java.lang.String id) {
        this.id = id;
        // Invalidate the cached hash code since identity is based on the id
        this.hashCode = Integer.MIN_VALUE;
    }

    /** Return the value associated with the column: BANKNO */
    public java.lang.String getBankno () {
        return bankno;
    }

    /**
     * Set the value related to the column: BANKNO
     * @param bankno the BANKNO value
     */
    public void setBankno (java.lang.String bankno) {
        this.bankno = bankno;
    }

    /** Return the value associated with the column: BANKNAME */
    public java.lang.String getBankname () {
        return bankname;
    }

    /**
     * Set the value related to the column: BANKNAME
     * @param bankname the BANKNAME value
     */
    public void setBankname (java.lang.String bankname) {
        this.bankname = bankname;
    }

    /** Return the value associated with the column: BANKTYPENO */
    public java.lang.String getBanktypeno () {
        return banktypeno;
    }

    /**
     * Set the value related to the column: BANKTYPENO
     * @param banktypeno the BANKTYPENO value
     */
    public void setBanktypeno (java.lang.String banktypeno) {
        this.banktypeno = banktypeno;
    }

    /** Return the value associated with the column: BANKTYPENAME */
    public java.lang.String getBanktypename () {
        return banktypename;
    }

    /**
     * Set the value related to the column: BANKTYPENAME
     * @param banktypename the BANKTYPENAME value
     */
    public void setBanktypename (java.lang.String banktypename) {
        this.banktypename = banktypename;
    }

    /** Return the value associated with the column: DURING */
    public java.lang.String getDuring () {
        return during;
    }

    /**
     * Set the value related to the column: DURING
     * @param during the DURING value
     */
    public void setDuring (java.lang.String during) {
        this.during = during;
    }

    /** Return the value associated with the column: BANKTYPEINDEX */
    public java.lang.Integer getBanktypeindex () {
        return banktypeindex;
    }

    /**
     * Set the value related to the column: BANKTYPEINDEX
     * @param banktypeindex the BANKTYPEINDEX value
     */
    public void setBanktypeindex (java.lang.Integer banktypeindex) {
        this.banktypeindex = banktypeindex;
    }

    /** Return the value associated with the column: BANKAREAINDEX */
    public java.lang.Integer getBankareaindex () {
        return bankareaindex;
    }

    /**
     * Set the value related to the column: BANKAREAINDEX
     * @param bankareaindex the BANKAREAINDEX value
     */
    public void setBankareaindex (java.lang.Integer bankareaindex) {
        this.bankareaindex = bankareaindex;
    }

    /** Return the value associated with the column: BANKAREA */
    public java.lang.String getBankarea () {
        return bankarea;
    }

    /**
     * Set the value related to the column: BANKAREA
     * @param bankarea the BANKAREA value
     */
    public void setBankarea (java.lang.String bankarea) {
        this.bankarea = bankarea;
    }

    /** Return the value associated with the column: SUBMITNO */
    public java.lang.String getSubmitno () {
        return submitno;
    }

    /**
     * Set the value related to the column: SUBMITNO
     * @param submitno the SUBMITNO value
     */
    public void setSubmitno (java.lang.String submitno) {
        this.submitno = submitno;
    }

    /** Return the value associated with the column: A1 */
    public java.lang.String getA1 () {
        return a1;
    }

    /**
     * Set the value related to the column: A1
     * @param a1 the A1 value
     */
    public void setA1 (java.lang.String a1) {
        this.a1 = a1;
    }

    /** Return the value associated with the column: B1 */
    public java.lang.String getB1 () {
        return b1;
    }

    /**
     * Set the value related to the column: B1
     * @param b1 the B1 value
     */
    public void setB1 (java.lang.String b1) {
        this.b1 = b1;
    }

    /** Return the value associated with the column: C1 */
    public java.lang.String getC1 () {
        return c1;
    }

    /**
     * Set the value related to the column: C1
     * @param c1 the C1 value
     */
    public void setC1 (java.lang.String c1) {
        this.c1 = c1;
    }

    /** Return the value associated with the column: D1 */
    public java.lang.String getD1 () {
        return d1;
    }

    /**
     * Set the value related to the column: D1
     * @param d1 the D1 value
     */
    public void setD1 (java.lang.String d1) {
        this.d1 = d1;
    }

    /** Return the value associated with the column: E1 */
    public java.lang.String getE1 () {
        return e1;
    }

    /**
     * Set the value related to the column: E1
     * @param e1 the E1 value
     */
    public void setE1 (java.lang.String e1) {
        this.e1 = e1;
    }

    /** Return the value associated with the column: F1 */
    public java.lang.String getF1 () {
        return f1;
    }

    /**
     * Set the value related to the column: F1
     * @param f1 the F1 value
     */
    public void setF1 (java.lang.String f1) {
        this.f1 = f1;
    }

    /** Return the value associated with the column: G1 */
    public java.lang.String getG1 () {
        return g1;
    }

    /**
     * Set the value related to the column: G1
     * @param g1 the G1 value
     */
    public void setG1 (java.lang.String g1) {
        this.g1 = g1;
    }

    /**
     * Equality is based solely on the persistent id; two instances without ids
     * are never equal (standard generated-Hibernate-base-class semantics).
     */
    public boolean equals (Object obj) {
        if (null == obj) return false;
        if (!(obj instanceof com.gtm.cpims.model.BsFinancialiccheckyrFinal)) return false;
        else {
            com.gtm.cpims.model.BsFinancialiccheckyrFinal bsFinancialiccheckyrFinal = (com.gtm.cpims.model.BsFinancialiccheckyrFinal) obj;
            if (null == this.getId() || null == bsFinancialiccheckyrFinal.getId()) return false;
            else return (this.getId().equals(bsFinancialiccheckyrFinal.getId()));
        }
    }

    /**
     * Hash code derived from the class name and the id, cached until the id changes.
     * Falls back to identity hashing while the id is unset.
     */
    public int hashCode () {
        if (Integer.MIN_VALUE == this.hashCode) {
            if (null == this.getId()) return super.hashCode();
            else {
                String hashStr = this.getClass().getName() + ":" + this.getId().hashCode();
                this.hashCode = hashStr.hashCode();
            }
        }
        return this.hashCode;
    }

    public String toString () {
        return super.toString();
    }
}
| |
package me.calebjones.spacelaunchnow.widgets.wordtimer;
import android.app.PendingIntent;
import android.appwidget.AppWidgetManager;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.View;
import android.widget.RemoteViews;
import java.util.Calendar;
import java.util.Date;
import io.realm.Realm;
import io.realm.RealmQuery;
import io.realm.RealmResults;
import io.realm.Sort;
import me.calebjones.spacelaunchnow.R;
import me.calebjones.spacelaunchnow.common.utils.UniqueIdentifier;
import me.calebjones.spacelaunchnow.common.utils.Utils;
import me.calebjones.spacelaunchnow.common.prefs.SwitchPreferences;
import me.calebjones.spacelaunchnow.common.content.util.QueryBuilder;
import me.calebjones.spacelaunchnow.data.models.main.Launch;
import me.calebjones.spacelaunchnow.common.ui.launchdetail.activity.LaunchDetailActivity;
import me.calebjones.spacelaunchnow.widgets.WidgetBroadcastReceiver;
import timber.log.Timber;
public class LaunchWordTimerManager {

    private Context context;
    private AppWidgetManager appWidgetManager;
    private RemoteViews remoteViews;
    private SwitchPreferences switchPreferences;

    public LaunchWordTimerManager(Context context){
        this.context = context;
        appWidgetManager = AppWidgetManager.getInstance(context);
    }

    /**
     * Rebuilds and publishes the word-timer widget identified by {@code appWidgetId},
     * choosing a small/normal/large layout from the widget's current dimensions.
     */
    public void updateAppWidget(int appWidgetId) {
        Timber.v("UpdateAppWidget %s", appWidgetId);

        Bundle options = appWidgetManager.getAppWidgetOptions(appWidgetId);
        int minWidth = options.getInt(AppWidgetManager.OPTION_APPWIDGET_MIN_WIDTH);
        int maxWidth = options.getInt(AppWidgetManager.OPTION_APPWIDGET_MAX_WIDTH);
        int minHeight = options.getInt(AppWidgetManager.OPTION_APPWIDGET_MIN_HEIGHT);
        int maxHeight = options.getInt(AppWidgetManager.OPTION_APPWIDGET_MAX_HEIGHT);
        Timber.v("Size: [%s-%s] x [%s-%s]", minWidth, maxWidth, minHeight, maxHeight);

        Launch launch = getLaunch(context);

        // Pick a layout by the minimum width/height buckets
        if (minWidth <= 200 || minHeight <= 100) {
            remoteViews = new RemoteViews(context.getPackageName(),
                    R.layout.widget_launch_word_timer_small_dark
            );
        } else if (minWidth <= 320) {
            remoteViews = new RemoteViews(context.getPackageName(),
                    R.layout.widget_launch_word_timer_dark
            );
        } else {
            remoteViews = new RemoteViews(context.getPackageName(),
                    R.layout.widget_launch_word_timer_large_dark
            );
        }

        if (launch != null) {
            setLaunchName(launch);
            setMissionName(launch);
            setRefreshIntent(launch);
            setWidgetStyle();
            setLaunchTimer(launch);
        } else {
            // No upcoming launch found, show placeholders
            remoteViews.setTextViewText(R.id.widget_launch_name, "Unknown Launch");
            remoteViews.setTextViewText(R.id.widget_mission_name, "Unknown Mission");
        }

        Timber.v("Publishing widget update.");
        appWidgetManager.updateAppWidget(appWidgetId, remoteViews);
    }

    /**
     * Returns the next upcoming launch with a known NET date, honouring the user's
     * filter switches, or null when none exists.
     * NOTE(review): the Realm instance obtained here is never closed; closing it would
     * invalidate the managed Launch being returned — confirm the Realm lifecycle is
     * managed by the caller/process.
     */
    private Launch getLaunch(Context context) {
        Date date = new Date();
        switchPreferences = SwitchPreferences.getInstance(context);
        Realm mRealm = Realm.getDefaultInstance();
        RealmResults<Launch> launchRealms;
        if (switchPreferences.getAllSwitch()) {
            RealmQuery<Launch> query = mRealm.where(Launch.class)
                    .greaterThanOrEqualTo("net", date);
            if (switchPreferences.getTBDSwitch()) {
                query.equalTo("status.id", 1);
            }
            launchRealms = query.findAll().sort("net", Sort.ASCENDING);
            Timber.v("loadLaunches - Realm query created.");
        } else {
            launchRealms = QueryBuilder.buildUpcomingSwitchQuery(context, mRealm, false);
            Timber.v("loadLaunches - Filtered Realm query created.");
        }

        // First result with a non-null NET is the next launch (results sorted ascending)
        for (Launch launch : launchRealms) {
            if (launch.getNet() != null) {
                return launch;
            }
        }
        return null;
    }

    /** Wires the refresh button and the countdown frame to their pending intents. */
    private void setRefreshIntent(Launch launch) {
        Intent nextIntent = new Intent(context, WidgetBroadcastReceiver.class);
        PendingIntent refreshPending = PendingIntent.getBroadcast(context, 0, nextIntent, 0);
        remoteViews.setOnClickPendingIntent(R.id.widget_refresh_button, refreshPending);

        Intent exploreIntent = new Intent(context, LaunchDetailActivity.class);
        exploreIntent.putExtra("TYPE", "launch");
        exploreIntent.putExtra("launchID", launch.getId());
        // Unique data URI so PendingIntents for different launches don't collide
        exploreIntent.setData(Uri.parse(exploreIntent.toUri(Intent.URI_INTENT_SCHEME)));
        exploreIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
        PendingIntent actionPendingIntent = PendingIntent.getActivity(context, UniqueIdentifier.getID(), exploreIntent, PendingIntent.FLAG_UPDATE_CURRENT);
        remoteViews.setOnClickPendingIntent(R.id.widget_countdown_timer_frame, actionPendingIntent);
    }

    /** Shows the mission name, or a placeholder when unavailable. */
    private void setMissionName(Launch launch) {
        String missionName = getMissionName(launch);
        if (missionName != null) {
            remoteViews.setTextViewText(R.id.widget_mission_name, missionName);
        } else {
            remoteViews.setTextViewText(R.id.widget_mission_name, "Unknown Mission");
        }
    }

    /** Renders the remaining days/hours until the launch NET. */
    private void setLaunchTimer(Launch launch) {
        long millisUntilFinished = getFutureMilli(launch) - System.currentTimeMillis();

        // Calculate the Days/Hours numerically.
        long longDays = millisUntilFinished / 86400000;
        long longHours = (millisUntilFinished / 3600000) % 24;

        // Update the views
        remoteViews.setTextViewText(R.id.countdown_days, String.valueOf(longDays));
        remoteViews.setTextViewText(R.id.countdown_hours, String.valueOf(longHours));
    }

    /** Applies the user-configured colors, corner style, and refresh-button visibility. */
    private void setWidgetStyle() {
        SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(context);
        int colorWhite = 0xFFFFFFFF;
        int colorSecondaryWhite = 0xB3FFFFFF;
        int colorBackground = 0xFF303030;

        boolean widgetRounderCorners = sharedPref.getBoolean("widget_theme_round_corner", true);
        int widgetTextColor = sharedPref.getInt("widget_text_color", colorWhite);
        int widgetBackgroundColor = sharedPref.getInt("widget_background_color", colorBackground);
        int widgetSecondaryTextColor = sharedPref.getInt("widget_secondary_text_color", colorSecondaryWhite);
        int widgetIconColor = sharedPref.getInt("widget_icon_color", colorWhite);

        if (widgetRounderCorners) {
            remoteViews.setImageViewResource(R.id.bgcolor, R.drawable.rounded);
        } else {
            remoteViews.setImageViewResource(R.id.bgcolor, R.drawable.squared);
        }

        Timber.v("Configuring widget");
        // RemoteViews cannot take an ARGB color filter directly, so split the
        // configured background color into an RGB filter plus a separate alpha.
        int widgetAlpha = Color.alpha(widgetBackgroundColor);
        int red = Color.red(widgetBackgroundColor);
        int green = Color.green(widgetBackgroundColor);
        int blue = Color.blue(widgetBackgroundColor);
        remoteViews.setInt(R.id.bgcolor, "setColorFilter", Color.rgb(red, green, blue));
        remoteViews.setInt(R.id.bgcolor, "setAlpha", widgetAlpha);
        remoteViews.setTextColor(R.id.widget_launch_name, widgetTextColor);
        remoteViews.setTextColor(R.id.widget_mission_name, widgetSecondaryTextColor);
        remoteViews.setTextColor(R.id.countdown_days, widgetTextColor);
        remoteViews.setTextColor(R.id.countdown_days_label, widgetSecondaryTextColor);
        remoteViews.setTextColor(R.id.countdown_hours, widgetTextColor);
        remoteViews.setTextColor(R.id.countdown_hours_label, widgetSecondaryTextColor);
        remoteViews.setInt(R.id.widget_refresh_button, "setColorFilter", widgetIconColor);

        // Simplified from a redundant if / else-if on the same flag (behavior unchanged).
        // NOTE(review): "widget_refresh_enabled" == true hides the button, which looks
        // inverted relative to the preference name — confirm the intended semantics.
        if (sharedPref.getBoolean("widget_refresh_enabled", false)) {
            remoteViews.setViewVisibility(R.id.widget_refresh_button, View.GONE);
        } else {
            remoteViews.setViewVisibility(R.id.widget_refresh_button, View.VISIBLE);
        }
    }

    /** Shows the launch (rocket) name, or a placeholder when unavailable. */
    private void setLaunchName(Launch launchRealm) {
        String launchName = getLaunchName(launchRealm);
        if (launchName != null) {
            remoteViews.setTextViewText(R.id.widget_launch_name, launchName);
        } else {
            remoteViews.setTextViewText(R.id.widget_launch_name, "Unknown Launch");
        }
    }

    /** Returns the rocket configuration name, or null when no configuration is set. */
    private String getLaunchName(Launch launchRealm) {
        if (launchRealm.getRocket().getConfiguration() != null) {
            return launchRealm.getRocket().getConfiguration().getName();
        } else {
            return null;
        }
    }

    /** Returns the mission name, or null when no mission is attached. */
    private String getMissionName(Launch launchRealm) {
        if (launchRealm.getMission() != null) {
            return launchRealm.getMission().getName();
        } else {
            return null;
        }
    }

    /** Epoch millis of the launch NET. */
    private long getFutureMilli(Launch launchRealm) {
        return getLaunchDate(launchRealm).getTimeInMillis();
    }

    private Calendar getLaunchDate(Launch launchRealm) {
        return Utils.DateToCalendar(launchRealm.getNet());
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.kafka.clients.producer;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.clients.NetworkClient;
import org.apache.kafka.clients.producer.internals.Metadata;
import org.apache.kafka.clients.producer.internals.Partitioner;
import org.apache.kafka.clients.producer.internals.RecordAccumulator;
import org.apache.kafka.clients.producer.internals.Sender;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.errors.ApiException;
import org.apache.kafka.common.errors.RecordTooLargeException;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.metrics.JmxReporter;
import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.MetricsReporter;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.network.Selector;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.record.Record;
import org.apache.kafka.common.record.Records;
import org.apache.kafka.common.utils.ClientUtils;
import org.apache.kafka.common.utils.KafkaThread;
import org.apache.kafka.common.utils.SystemTime;
import org.apache.kafka.common.utils.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A Kafka client that publishes records to the Kafka cluster.
* <P>
* The producer is <i>thread safe</i> and should generally be shared among all threads for best performance.
* <p>
* The producer manages a single background thread that does I/O as well as a TCP connection to each of the brokers it
* needs to communicate with. Failure to close the producer after use will leak these resources.
*/
public class KafkaProducer implements Producer {
    private static final Logger log = LoggerFactory.getLogger(KafkaProducer.class);
    // Maps each record to a partition of its topic.
    private final Partitioner partitioner;
    // Hard cap on the serialized size of a single request (max.request.size).
    private final int maxRequestSize;
    // How long send() may block waiting for topic metadata.
    private final long metadataFetchTimeoutMs;
    // Total bytes of buffer memory available to the accumulator (buffer.memory).
    private final long totalMemorySize;
    private final Metadata metadata;
    // Per-partition record buffer drained by the background sender thread.
    private final RecordAccumulator accumulator;
    private final Sender sender;
    private final Metrics metrics;
    // Single daemon thread running the sender's I/O loop; joined in close().
    private final Thread ioThread;
    private final CompressionType compressionType;
    // Sensor recording every failed send for metrics reporting.
    private final Sensor errors;
    private final Time time;
    /**
     * A producer is instantiated by providing a set of key-value pairs as configuration. Valid configuration strings
     * are documented <a href="http://kafka.apache.org/documentation.html#producerconfigs">here</a>. Values can be
     * either strings or Objects of the appropriate type (for example a numeric configuration would accept either the
     * string "42" or the integer 42).
     */
    public KafkaProducer(Map<String, Object> configs) {
        this(new ProducerConfig(configs));
    }
    /**
     * A producer is instantiated by providing a set of key-value pairs as configuration. Valid configuration strings
     * are documented <a href="http://kafka.apache.org/documentation.html#producerconfigs">here</a>.
     */
    public KafkaProducer(Properties properties) {
        this(new ProducerConfig(properties));
    }
    private KafkaProducer(ProducerConfig config) {
        log.trace("Starting the Kafka producer");
        this.time = new SystemTime();
        // Metrics: sample windows and a JMX reporter named after the client id.
        MetricConfig metricConfig = new MetricConfig().samples(config.getInt(ProducerConfig.METRICS_NUM_SAMPLES_CONFIG))
                                                      .timeWindow(config.getLong(ProducerConfig.METRICS_SAMPLE_WINDOW_MS_CONFIG),
                                                                  TimeUnit.MILLISECONDS);
        String clientId = config.getString(ProducerConfig.CLIENT_ID_CONFIG);
        String jmxPrefix = "kafka.producer." + (clientId.length() > 0 ? clientId + "." : "");
        List<MetricsReporter> reporters = config.getConfiguredInstances(ProducerConfig.METRIC_REPORTER_CLASSES_CONFIG,
                                                                        MetricsReporter.class);
        reporters.add(new JmxReporter(jmxPrefix));
        this.metrics = new Metrics(metricConfig, reporters, time);
        this.partitioner = new Partitioner();
        long retryBackoffMs = config.getLong(ProducerConfig.RETRY_BACKOFF_MS_CONFIG);
        this.metadataFetchTimeoutMs = config.getLong(ProducerConfig.METADATA_FETCH_TIMEOUT_CONFIG);
        this.metadata = new Metadata(retryBackoffMs, config.getLong(ProducerConfig.METADATA_MAX_AGE_CONFIG));
        this.maxRequestSize = config.getInt(ProducerConfig.MAX_REQUEST_SIZE_CONFIG);
        this.totalMemorySize = config.getLong(ProducerConfig.BUFFER_MEMORY_CONFIG);
        this.compressionType = CompressionType.forName(config.getString(ProducerConfig.COMPRESSION_TYPE_CONFIG));
        this.accumulator = new RecordAccumulator(config.getInt(ProducerConfig.BATCH_SIZE_CONFIG),
                                                 this.totalMemorySize,
                                                 config.getLong(ProducerConfig.LINGER_MS_CONFIG),
                                                 retryBackoffMs,
                                                 config.getBoolean(ProducerConfig.BLOCK_ON_BUFFER_FULL_CONFIG),
                                                 metrics,
                                                 time);
        // Seed the metadata with the bootstrap brokers; real cluster info comes from the first update.
        List<InetSocketAddress> addresses = ClientUtils.parseAndValidateAddresses(config.getList(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG));
        this.metadata.update(Cluster.bootstrap(addresses), time.milliseconds());
        NetworkClient client = new NetworkClient(new Selector(this.metrics, time),
                                                 this.metadata,
                                                 clientId,
                                                 config.getInt(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION),
                                                 config.getLong(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG),
                                                 config.getInt(ProducerConfig.SEND_BUFFER_CONFIG),
                                                 config.getInt(ProducerConfig.RECEIVE_BUFFER_CONFIG));
        this.sender = new Sender(client,
                                 this.metadata,
                                 this.accumulator,
                                 config.getInt(ProducerConfig.MAX_REQUEST_SIZE_CONFIG),
                                 (short) parseAcks(config.getString(ProducerConfig.ACKS_CONFIG)),
                                 config.getInt(ProducerConfig.RETRIES_CONFIG),
                                 config.getInt(ProducerConfig.TIMEOUT_CONFIG),
                                 this.metrics,
                                 // reuse the producer's clock instead of allocating a second SystemTime
                                 time);
        this.ioThread = new KafkaThread("kafka-producer-network-thread", this.sender, true);
        this.ioThread.start();
        this.errors = this.metrics.sensor("errors");
        config.logUnused();
        log.debug("Kafka producer started");
    }
    /**
     * Translates the 'acks' configuration string to its numeric wire value:
     * "all" (case-insensitive) means -1 (wait for the full ISR), otherwise the
     * string must parse as an integer.
     *
     * @throws ConfigException if the value is neither "all" nor a valid integer
     */
    private static int parseAcks(String acksString) {
        String trimmed = acksString.trim();
        try {
            return trimmed.equalsIgnoreCase("all") ? -1 : Integer.parseInt(trimmed);
        } catch (NumberFormatException e) {
            throw new ConfigException("Invalid configuration value for 'acks': " + acksString);
        }
    }
    /**
     * Asynchronously send a record to a topic. Equivalent to {@link #send(ProducerRecord, Callback) send(record, null)}
     */
    @Override
    public Future<RecordMetadata> send(ProducerRecord record) {
        return send(record, null);
    }
    /**
     * Asynchronously send a record to a topic and invoke the provided callback when the send has been acknowledged.
     * <p>
     * The send is asynchronous and this method will return immediately once the record has been stored in the buffer of
     * records waiting to be sent. This allows sending many records in parallel without blocking to wait for the
     * response after each one.
     * <p>
     * The result of the send is a {@link RecordMetadata} specifying the partition the record was sent to and the offset
     * it was assigned.
     * <p>
     * Since the send call is asynchronous it returns a {@link java.util.concurrent.Future Future} for the
     * {@link RecordMetadata} that will be assigned to this record. Invoking {@link java.util.concurrent.Future#get()
     * get()} on this future will result in the metadata for the record or throw any exception that occurred while
     * sending the record.
     * <p>
     * If you want to simulate a simple blocking call you can do the following:
     *
     * <pre>
     * producer.send(new ProducerRecord("the-topic", "key", "value")).get();
     * </pre>
     * <p>
     * Those desiring fully non-blocking usage can make use of the {@link Callback} parameter to provide a callback that
     * will be invoked when the request is complete.
     *
     * <pre>
     * ProducerRecord record = new ProducerRecord("the-topic", "key", "value");
     * producer.send(myRecord,
     *               new Callback() {
     *                   public void onCompletion(RecordMetadata metadata, Exception e) {
     *                       if(e != null)
     *                           e.printStackTrace();
     *                       System.out.println("The offset of the record we just sent is: " + metadata.offset());
     *                   }
     *               });
     * </pre>
     *
     * Callbacks for records being sent to the same partition are guaranteed to execute in order. That is, in the
     * following example <code>callback1</code> is guaranteed to execute before <code>callback2</code>:
     *
     * <pre>
     * producer.send(new ProducerRecord(topic, partition, key, value), callback1);
     * producer.send(new ProducerRecord(topic, partition, key2, value2), callback2);
     * </pre>
     * <p>
     * Note that callbacks will generally execute in the I/O thread of the producer and so should be reasonably fast or
     * they will delay the sending of messages from other threads. If you want to execute blocking or computationally
     * expensive callbacks it is recommended to use your own {@link java.util.concurrent.Executor} in the callback body
     * to parallelize processing.
     * <p>
     * The producer manages a buffer of records waiting to be sent. This buffer has a hard limit on it's size, which is
     * controlled by the configuration <code>total.memory.bytes</code>. If <code>send()</code> is called faster than the
     * I/O thread can transfer data to the brokers the buffer will eventually run out of space. The default behavior in
     * this case is to block the send call until the I/O thread catches up and more buffer space is available. However
     * in cases where non-blocking usage is desired the setting <code>block.on.buffer.full=false</code> will cause the
     * producer to instead throw an exception when buffer memory is exhausted.
     *
     * @param record The record to send
     * @param callback A user-supplied callback to execute when the record has been acknowledged by the server (null
     *        indicates no callback)
     */
    @Override
    public Future<RecordMetadata> send(ProducerRecord record, Callback callback) {
        try {
            // first make sure the metadata for the topic is available
            waitOnMetadata(record.topic(), this.metadataFetchTimeoutMs);
            int partition = partitioner.partition(record, metadata.fetch());
            // Reject records that could never fit in a request or in the buffer pool.
            int serializedSize = Records.LOG_OVERHEAD + Record.recordSize(record.key(), record.value());
            ensureValidRecordSize(serializedSize);
            TopicPartition tp = new TopicPartition(record.topic(), partition);
            log.trace("Sending record {} with callback {} to topic {} partition {}", record, callback, record.topic(), partition);
            RecordAccumulator.RecordAppendResult result = accumulator.append(tp, record.key(), record.value(), compressionType, callback);
            if (result.batchIsFull || result.newBatchCreated) {
                log.trace("Waking up the sender since topic {} partition {} is either full or getting a new batch", record.topic(), partition);
                this.sender.wakeup();
            }
            return result.future;
            // Handling exceptions and record the errors;
            // For API exceptions return them in the future,
            // for other exceptions throw directly
        } catch (ApiException e) {
            log.debug("Exception occurred during message send:", e);
            if (callback != null)
                callback.onCompletion(null, e);
            this.errors.record();
            return new FutureFailure(e);
        } catch (InterruptedException e) {
            this.errors.record();
            // restore the interrupt status before translating to an unchecked exception
            Thread.currentThread().interrupt();
            throw new KafkaException(e);
        } catch (KafkaException e) {
            this.errors.record();
            throw e;
        }
    }
    /**
     * Wait for cluster metadata including partitions for the given topic to be available.
     * @param topic The topic we want metadata for
     * @param maxWaitMs The maximum time in ms for waiting on the metadata
     * @throws TimeoutException if no metadata for the topic arrives within maxWaitMs
     */
    private void waitOnMetadata(String topic, long maxWaitMs) {
        if (metadata.fetch().partitionsForTopic(topic) != null) {
            return;
        } else {
            long begin = time.milliseconds();
            long remainingWaitMs = maxWaitMs;
            while (metadata.fetch().partitionsForTopic(topic) == null) {
                log.trace("Requesting metadata update for topic {}.", topic);
                int version = metadata.requestUpdate();
                metadata.add(topic);
                // kick the I/O thread so the metadata request actually goes out
                sender.wakeup();
                metadata.awaitUpdate(version, remainingWaitMs);
                long elapsed = time.milliseconds() - begin;
                if (elapsed >= maxWaitMs)
                    throw new TimeoutException("Failed to update metadata after " + maxWaitMs + " ms.");
                remainingWaitMs = maxWaitMs - elapsed;
            }
        }
    }
    /**
     * Validate that the record size isn't too large for either a single request
     * or the total buffer memory.
     *
     * @throws RecordTooLargeException if the serialized size exceeds either limit
     */
    private void ensureValidRecordSize(int size) {
        if (size > this.maxRequestSize)
            throw new RecordTooLargeException("The message is " + size +
                                              " bytes when serialized which is larger than the maximum request size you have configured with the " +
                                              ProducerConfig.MAX_REQUEST_SIZE_CONFIG +
                                              " configuration.");
        if (size > this.totalMemorySize)
            throw new RecordTooLargeException("The message is " + size +
                                              " bytes when serialized which is larger than the total memory buffer you have configured with the " +
                                              ProducerConfig.BUFFER_MEMORY_CONFIG +
                                              " configuration.");
    }
    @Override
    public List<PartitionInfo> partitionsFor(String topic) {
        waitOnMetadata(topic, this.metadataFetchTimeoutMs);
        return this.metadata.fetch().partitionsForTopic(topic);
    }
    @Override
    public Map<String, ? extends Metric> metrics() {
        return Collections.unmodifiableMap(this.metrics.metrics());
    }
    /**
     * Close this producer. This method blocks until all in-flight requests complete.
     */
    @Override
    public void close() {
        log.trace("Closing the Kafka producer.");
        this.sender.initiateClose();
        try {
            this.ioThread.join();
        } catch (InterruptedException e) {
            // restore the interrupt status before translating to an unchecked exception
            Thread.currentThread().interrupt();
            throw new KafkaException(e);
        }
        this.metrics.close();
        log.debug("The Kafka producer has closed.");
    }
    /**
     * A pre-failed Future used to surface an ApiException from send() without
     * blocking: get() always throws the wrapped cause.
     */
    private static class FutureFailure implements Future<RecordMetadata> {
        private final ExecutionException exception;
        public FutureFailure(Exception exception) {
            this.exception = new ExecutionException(exception);
        }
        @Override
        public boolean cancel(boolean interrupt) {
            return false;
        }
        @Override
        public RecordMetadata get() throws ExecutionException {
            throw this.exception;
        }
        @Override
        public RecordMetadata get(long timeout, TimeUnit unit) throws ExecutionException {
            throw this.exception;
        }
        @Override
        public boolean isCancelled() {
            return false;
        }
        @Override
        public boolean isDone() {
            return true;
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.nodelabels;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
import org.apache.hadoop.yarn.nodelabels.NodeLabel;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeLabelsUpdateSchedulerEvent;
import org.apache.hadoop.yarn.util.resource.Resources;
import com.google.common.collect.ImmutableSet;
public class RMNodeLabelsManager extends CommonNodeLabelsManager {
  /**
   * Per-queue bookkeeping: the labels the queue may access and the total
   * resource of running nodes usable by the queue.
   */
  protected static class Queue {
    protected Set<String> acccessibleNodeLabels;
    protected Resource resource;
    protected Queue() {
      acccessibleNodeLabels =
          Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
      resource = Resource.newInstance(0, 0);
    }
  }
  ConcurrentMap<String, Queue> queueCollections =
      new ConcurrentHashMap<String, Queue>();
  protected AccessControlList adminAcl;
  private RMContext rmContext = null;
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    super.serviceInit(conf);
    // only users matching this ACL may call admin operations (see checkAccess)
    adminAcl =
        new AccessControlList(conf.get(YarnConfiguration.YARN_ADMIN_ACL,
            YarnConfiguration.DEFAULT_YARN_ADMIN_ACL));
  }
  @Override
  public void addLabelsToNode(Map<NodeId, Set<String>> addedLabelsToNode)
      throws IOException {
    try {
      writeLock.lock();
      // get nodesCollection before edition
      Map<String, Host> before = cloneNodeMap(addedLabelsToNode.keySet());
      super.addLabelsToNode(addedLabelsToNode);
      // get nodesCollection after edition
      Map<String, Host> after = cloneNodeMap(addedLabelsToNode.keySet());
      // update running nodes resources
      updateResourceMappings(before, after);
    } finally {
      writeLock.unlock();
    }
  }
  /**
   * Reject removal of any cluster label that is still referenced by a queue's
   * accessible-labels set.
   *
   * @throws IOException if any label in labelsToRemove is in use by a queue
   */
  protected void checkRemoveFromClusterNodeLabelsOfQueue(
      Collection<String> labelsToRemove) throws IOException {
    // Will throw an exception if any of the labels to remove is still
    // referenced by a queue
    for (String label : labelsToRemove) {
      label = normalizeLabel(label);
      // check if any queue contains this label
      for (Entry<String, Queue> entry : queueCollections.entrySet()) {
        String queueName = entry.getKey();
        Set<String> queueLabels = entry.getValue().acccessibleNodeLabels;
        if (queueLabels.contains(label)) {
          throw new IOException("Cannot remove label=" + label
              + ", because queue=" + queueName + " is using this label. "
              + "Please remove label on queue before remove the label");
        }
      }
    }
  }
  @Override
  public void removeFromClusterNodeLabels(Collection<String> labelsToRemove)
      throws IOException {
    try {
      writeLock.lock();
      checkRemoveFromClusterNodeLabelsOfQueue(labelsToRemove);
      // copy before NMs
      Map<String, Host> before = cloneNodeMap();
      super.removeFromClusterNodeLabels(labelsToRemove);
      updateResourceMappings(before, nodeCollections);
    } finally {
      writeLock.unlock();
    }
  }
  @Override
  public void
      removeLabelsFromNode(Map<NodeId, Set<String>> removeLabelsFromNode)
          throws IOException {
    try {
      writeLock.lock();
      // get nodesCollection before edition
      Map<String, Host> before =
          cloneNodeMap(removeLabelsFromNode.keySet());
      super.removeLabelsFromNode(removeLabelsFromNode);
      // get nodesCollection after edition
      Map<String, Host> after = cloneNodeMap(removeLabelsFromNode.keySet());
      // update running nodes resources
      updateResourceMappings(before, after);
    } finally {
      writeLock.unlock();
    }
  }
  @Override
  public void replaceLabelsOnNode(Map<NodeId, Set<String>> replaceLabelsToNode)
      throws IOException {
    try {
      writeLock.lock();
      // get nodesCollection before edition
      Map<String, Host> before = cloneNodeMap(replaceLabelsToNode.keySet());
      super.replaceLabelsOnNode(replaceLabelsToNode);
      // get nodesCollection after edition
      Map<String, Host> after = cloneNodeMap(replaceLabelsToNode.keySet());
      // update running nodes resources
      updateResourceMappings(before, after);
    } finally {
      writeLock.unlock();
    }
  }
  /*
   * Following methods are used for setting if a node is up and running, and it
   * will update running nodes resource
   */
  public void activateNode(NodeId nodeId, Resource resource) {
    try {
      writeLock.lock();
      // save if we have a node before
      Map<String, Host> before = cloneNodeMap(ImmutableSet.of(nodeId));
      createHostIfNonExisted(nodeId.getHost());
      try {
        createNodeIfNonExisted(nodeId);
      } catch (IOException e) {
        LOG.error("This shouldn't happen, cannot get host in nodeCollection"
            + " associated to the node being activated");
        return;
      }
      Node nm = getNMInNodeSet(nodeId);
      nm.resource = resource;
      nm.running = true;
      // Add node in labelsCollection
      Set<String> labelsForNode = getLabelsByNode(nodeId);
      if (labelsForNode != null) {
        for (String label : labelsForNode) {
          NodeLabel labelInfo = labelCollections.get(label);
          if(labelInfo != null) {
            labelInfo.addNodeId(nodeId);
          }
        }
      }
      // get the node after edition
      Map<String, Host> after = cloneNodeMap(ImmutableSet.of(nodeId));
      updateResourceMappings(before, after);
    } finally {
      writeLock.unlock();
    }
  }
  /*
   * Following methods are used for setting if a node unregistered to RM
   */
  public void deactivateNode(NodeId nodeId) {
    try {
      writeLock.lock();
      // save if we have a node before
      Map<String, Host> before = cloneNodeMap(ImmutableSet.of(nodeId));
      Node nm = getNMInNodeSet(nodeId);
      if (null != nm) {
        // set nm is not running, and its resource = 0
        nm.running = false;
        nm.resource = Resource.newInstance(0, 0);
      }
      // get the node after edition
      Map<String, Host> after = cloneNodeMap(ImmutableSet.of(nodeId));
      updateResourceMappings(before, after);
    } finally {
      writeLock.unlock();
    }
  }
  public void updateNodeResource(NodeId node, Resource newResource) throws IOException {
    // NOTE(review): the deactivate/activate pair is not done under a single
    // write lock; each call locks independently — confirm callers tolerate
    // the transient zero-resource state between the two.
    deactivateNode(node);
    activateNode(node, newResource);
  }
  /**
   * Rebuild the queue -> (labels, resource) mapping from scratch, e.g. after a
   * scheduler configuration refresh.
   */
  public void reinitializeQueueLabels(Map<String, Set<String>> queueToLabels) {
    try {
      writeLock.lock();
      // clear before set
      this.queueCollections.clear();
      for (Entry<String, Set<String>> entry : queueToLabels.entrySet()) {
        String queue = entry.getKey();
        Queue q = new Queue();
        this.queueCollections.put(queue, q);
        Set<String> labels = entry.getValue();
        if (labels.contains(ANY)) {
          // a queue with ANY tracks no explicit labels; its resource is the
          // cluster resource (see getQueueResource)
          continue;
        }
        q.acccessibleNodeLabels.addAll(labels);
        for (Host host : nodeCollections.values()) {
          for (Entry<NodeId, Node> nentry : host.nms.entrySet()) {
            NodeId nodeId = nentry.getKey();
            Node nm = nentry.getValue();
            if (nm.running && isNodeUsableByQueue(getLabelsByNode(nodeId), q)) {
              Resources.addTo(q.resource, nm.resource);
            }
          }
        }
      }
    } finally {
      writeLock.unlock();
    }
  }
  public Resource getQueueResource(String queueName, Set<String> queueLabels,
      Resource clusterResource) {
    try {
      readLock.lock();
      if (queueLabels.contains(ANY)) {
        return clusterResource;
      }
      Queue q = queueCollections.get(queueName);
      if (null == q) {
        return Resources.none();
      }
      return q.resource;
    } finally {
      readLock.unlock();
    }
  }
  public Set<String> getLabelsOnNode(NodeId nodeId) {
    try {
      readLock.lock();
      Set<String> nodeLabels = getLabelsByNode(nodeId);
      return Collections.unmodifiableSet(nodeLabels);
    } finally {
      readLock.unlock();
    }
  }
  public boolean containsNodeLabel(String label) {
    try {
      readLock.lock();
      // the empty label (NO_LABEL) is always considered present
      return label != null
          && (label.isEmpty() || labelCollections.containsKey(label));
    } finally {
      readLock.unlock();
    }
  }
  /**
   * Deep-copy the Host/Node entries for the given nodes. A wildcard port
   * copies every NM on the host; a concrete port copies just that NM.
   */
  private Map<String, Host> cloneNodeMap(Set<NodeId> nodesToCopy) {
    Map<String, Host> map = new HashMap<String, Host>();
    for (NodeId nodeId : nodesToCopy) {
      if (!map.containsKey(nodeId.getHost())) {
        Host originalN = nodeCollections.get(nodeId.getHost());
        if (null == originalN) {
          continue;
        }
        Host n = originalN.copy();
        n.nms.clear();
        map.put(nodeId.getHost(), n);
      }
      Host n = map.get(nodeId.getHost());
      if (WILDCARD_PORT == nodeId.getPort()) {
        for (Entry<NodeId, Node> entry : nodeCollections
            .get(nodeId.getHost()).nms.entrySet()) {
          n.nms.put(entry.getKey(), entry.getValue().copy());
        }
      } else {
        Node nm = getNMInNodeSet(nodeId);
        if (null != nm) {
          n.nms.put(nodeId, nm.copy());
        }
      }
    }
    return map;
  }
  /**
   * Given before/after snapshots of the affected hosts, subtract each NM's
   * resource from the labels/queues it used to count toward and add it to
   * those it now counts toward, then notify the scheduler of the new
   * node-to-labels mapping.
   */
  @SuppressWarnings("unchecked")
  private void updateResourceMappings(Map<String, Host> before,
      Map<String, Host> after) {
    // Collect every NM present in either snapshot
    Set<NodeId> allNMs = new HashSet<NodeId>();
    for (Entry<String, Host> entry : before.entrySet()) {
      allNMs.addAll(entry.getValue().nms.keySet());
    }
    for (Entry<String, Host> entry : after.entrySet()) {
      allNMs.addAll(entry.getValue().nms.keySet());
    }
    // Map used to notify RM
    Map<NodeId, Set<String>> newNodeToLabelsMap =
        new HashMap<NodeId, Set<String>>();
    // traverse all nms
    for (NodeId nodeId : allNMs) {
      Node oldNM;
      if ((oldNM = getNMInNodeSet(nodeId, before, true)) != null) {
        Set<String> oldLabels = getLabelsByNode(nodeId, before);
        // no label in the past
        if (oldLabels.isEmpty()) {
          // update labels
          NodeLabel label = labelCollections.get(NO_LABEL);
          label.removeNode(oldNM.resource);
          // update queues, every queue can access an unlabeled node
          for (Queue q : queueCollections.values()) {
            Resources.subtractFrom(q.resource, oldNM.resource);
          }
        } else {
          // update labels
          for (String labelName : oldLabels) {
            NodeLabel label = labelCollections.get(labelName);
            if (null == label) {
              continue;
            }
            label.removeNode(oldNM.resource);
          }
          // update queues, only queues that can access this node are subtracted
          for (Queue q : queueCollections.values()) {
            if (isNodeUsableByQueue(oldLabels, q)) {
              Resources.subtractFrom(q.resource, oldNM.resource);
            }
          }
        }
      }
      Node newNM;
      if ((newNM = getNMInNodeSet(nodeId, after, true)) != null) {
        Set<String> newLabels = getLabelsByNode(nodeId, after);
        newNodeToLabelsMap.put(nodeId, ImmutableSet.copyOf(newLabels));
        // node has no labels now
        if (newLabels.isEmpty()) {
          // update labels
          NodeLabel label = labelCollections.get(NO_LABEL);
          label.addNode(newNM.resource);
          // update queues, every queue can access an unlabeled node
          for (Queue q : queueCollections.values()) {
            Resources.addTo(q.resource, newNM.resource);
          }
        } else {
          // update labels; skip labels no longer in labelCollections to stay
          // consistent with the removal branch above (avoids NPE when a label
          // was removed from the cluster concurrently with this update)
          for (String labelName : newLabels) {
            NodeLabel label = labelCollections.get(labelName);
            if (null == label) {
              continue;
            }
            label.addNode(newNM.resource);
          }
          // update queues, only queues that can access this node are added
          for (Queue q : queueCollections.values()) {
            if (isNodeUsableByQueue(newLabels, q)) {
              Resources.addTo(q.resource, newNM.resource);
            }
          }
        }
      }
    }
    // Notify RM
    if (rmContext != null && rmContext.getDispatcher() != null) {
      rmContext.getDispatcher().getEventHandler().handle(
          new NodeLabelsUpdateSchedulerEvent(newNodeToLabelsMap));
    }
  }
  public Resource getResourceByLabel(String label, Resource clusterResource) {
    label = normalizeLabel(label);
    try {
      readLock.lock();
      if (null == labelCollections.get(label)) {
        return Resources.none();
      }
      return labelCollections.get(label).getResource();
    } finally {
      readLock.unlock();
    }
  }
  private boolean isNodeUsableByQueue(Set<String> nodeLabels, Queue q) {
    // node without any labels can be accessed by any queue
    if (nodeLabels == null || nodeLabels.isEmpty()
        || (nodeLabels.size() == 1 && nodeLabels.contains(NO_LABEL))) {
      return true;
    }
    // otherwise the queue needs access to at least one of the node's labels
    for (String label : nodeLabels) {
      if (q.acccessibleNodeLabels.contains(label)) {
        return true;
      }
    }
    return false;
  }
  private Map<String, Host> cloneNodeMap() {
    // wildcard port means "every NM on the host" in cloneNodeMap(Set)
    Set<NodeId> nodesToCopy = new HashSet<NodeId>();
    for (String nodeName : nodeCollections.keySet()) {
      nodesToCopy.add(NodeId.newInstance(nodeName, WILDCARD_PORT));
    }
    return cloneNodeMap(nodesToCopy);
  }
  /**
   * @return true iff the user is allowed by the YARN admin ACL
   */
  public boolean checkAccess(UserGroupInformation user) {
    // make sure only admin can invoke this method
    return adminAcl.isUserAllowed(user);
  }
  public void setRMContext(RMContext rmContext) {
    this.rmContext = rmContext;
  }
  /**
   * @return a sorted, copied snapshot of all known node labels
   */
  public List<NodeLabel> pullRMNodeLabelsInfo() {
    try {
      readLock.lock();
      List<NodeLabel> infos = new ArrayList<NodeLabel>();
      for (Entry<String, NodeLabel> entry : labelCollections.entrySet()) {
        NodeLabel label = entry.getValue();
        infos.add(label.getCopy());
      }
      Collections.sort(infos);
      return infos;
    } finally {
      readLock.unlock();
    }
  }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.impl.analysis;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.*;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
public class JavaHighlightUtil {
public static boolean isSerializable(@NotNull PsiClass aClass) {
return isSerializable(aClass, "java.io.Serializable");
}
public static boolean isSerializable(@NotNull PsiClass aClass,
String serializableClassName) {
Project project = aClass.getManager().getProject();
PsiClass serializableClass = JavaPsiFacade.getInstance(project).findClass(serializableClassName, aClass.getResolveScope());
return serializableClass != null && aClass.isInheritor(serializableClass, true);
}
public static boolean isSerializationRelatedMethod(PsiMethod method, PsiClass containingClass) {
if (containingClass == null) return false;
if (method.isConstructor()) {
if (isSerializable(containingClass, "java.io.Externalizable") &&
method.getParameterList().isEmpty() &&
method.hasModifierProperty(PsiModifier.PUBLIC)) {
return true;
}
return false;
}
if (method.hasModifierProperty(PsiModifier.STATIC)) return false;
@NonNls String name = method.getName();
PsiParameter[] parameters = method.getParameterList().getParameters();
PsiType returnType = method.getReturnType();
if ("readObjectNoData".equals(name)) {
return parameters.length == 0 && TypeConversionUtil.isVoidType(returnType) && isSerializable(containingClass);
}
if ("readObject".equals(name)) {
return parameters.length == 1
&& parameters[0].getType().equalsToText("java.io.ObjectInputStream")
&& TypeConversionUtil.isVoidType(returnType) && method.hasModifierProperty(PsiModifier.PRIVATE)
&& isSerializable(containingClass);
}
if ("readResolve".equals(name)) {
return parameters.length == 0
&& returnType != null
&& returnType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)
&& (containingClass.hasModifierProperty(PsiModifier.ABSTRACT) || isSerializable(containingClass));
}
if ("writeReplace".equals(name)) {
return parameters.length == 0
&& returnType != null
&& returnType.equalsToText(CommonClassNames.JAVA_LANG_OBJECT)
&& (containingClass.hasModifierProperty(PsiModifier.ABSTRACT) || isSerializable(containingClass));
}
if ("writeObject".equals(name)) {
return parameters.length == 1
&& TypeConversionUtil.isVoidType(returnType)
&& parameters[0].getType().equalsToText("java.io.ObjectOutputStream")
&& method.hasModifierProperty(PsiModifier.PRIVATE)
&& isSerializable(containingClass);
}
return false;
}
@NotNull
public static String formatType(@Nullable PsiType type) {
return type == null ? PsiKeyword.NULL : type.getInternalCanonicalText();
}
@Nullable
private static PsiType getArrayInitializerType(@NotNull PsiArrayInitializerExpression element) {
PsiType typeCheckResult = sameType(element.getInitializers());
return typeCheckResult != null ? typeCheckResult.createArrayType() : null;
}
@Nullable
public static PsiType sameType(@NotNull PsiExpression[] expressions) {
PsiType type = null;
for (PsiExpression expression : expressions) {
final PsiType currentType;
if (expression instanceof PsiArrayInitializerExpression) {
currentType = getArrayInitializerType((PsiArrayInitializerExpression)expression);
}
else {
currentType = expression.getType();
}
if (type == null) {
type = currentType;
}
else if (!type.equals(currentType)) {
return null;
}
}
return type;
}
@NotNull
public static String formatMethod(@NotNull PsiMethod method) {
return PsiFormatUtil.formatMethod(method, PsiSubstitutor.EMPTY, PsiFormatUtilBase.SHOW_NAME | PsiFormatUtilBase.SHOW_PARAMETERS,
PsiFormatUtilBase.SHOW_TYPE);
}
public static boolean isSuperOrThisCall(PsiStatement statement, boolean testForSuper, boolean testForThis) {
if (!(statement instanceof PsiExpressionStatement)) return false;
PsiExpression expression = ((PsiExpressionStatement)statement).getExpression();
if (!(expression instanceof PsiMethodCallExpression)) return false;
final PsiReferenceExpression methodExpression = ((PsiMethodCallExpression)expression).getMethodExpression();
if (testForSuper) {
if ("super".equals(methodExpression.getText())) return true;
}
if (testForThis) {
if ("this".equals(methodExpression.getText())) return true;
}
return false;
}
/**
* return all constructors which are referred from this constructor by
* this (...) at the beginning of the constructor body
* @return referring constructor
*/
@Nullable public static List<PsiMethod> getChainedConstructors(PsiMethod constructor) {
final ConstructorVisitorInfo info = new ConstructorVisitorInfo();
visitConstructorChain(constructor, info);
if (info.visitedConstructors != null) info.visitedConstructors.remove(constructor);
return info.visitedConstructors;
}
static void visitConstructorChain(PsiMethod constructor, @NotNull ConstructorVisitorInfo info) {
while (true) {
if (constructor == null) return;
final PsiCodeBlock body = constructor.getBody();
if (body == null) return;
final PsiStatement[] statements = body.getStatements();
if (statements.length == 0) return;
final PsiStatement statement = statements[0];
final PsiElement element = new PsiMatcherImpl(statement)
.dot(PsiMatchers.hasClass(PsiExpressionStatement.class))
.firstChild(PsiMatchers.hasClass(PsiMethodCallExpression.class))
.firstChild(PsiMatchers.hasClass(PsiReferenceExpression.class))
.firstChild(PsiMatchers.hasClass(PsiKeyword.class))
.dot(PsiMatchers.hasText(PsiKeyword.THIS))
.parent(null)
.parent(null)
.getElement();
if (element == null) return;
PsiMethodCallExpression methodCall = (PsiMethodCallExpression)element;
PsiMethod method = methodCall.resolveMethod();
if (method == null) return;
if (info.visitedConstructors != null && info.visitedConstructors.contains(method)) {
info.recursivelyCalledConstructor = method;
return;
}
if (info.visitedConstructors == null) info.visitedConstructors = new ArrayList<>(5);
info.visitedConstructors.add(method);
constructor = method;
}
}
@Nullable
public static String checkPsiTypeUseInContext(@Nullable PsiType type, @NotNull PsiElement context) {
if (type instanceof PsiPrimitiveType) return null;
if (type instanceof PsiArrayType) return checkPsiTypeUseInContext(((PsiArrayType) type).getComponentType(), context);
if (PsiUtil.resolveClassInType(type) != null) return null;
if (type instanceof PsiClassType) return checkClassType((PsiClassType)type, context);
return "Invalid Java type";
}
// Reports why an unresolved class type cannot be used at the given context:
// either the class exists but is inaccessible from here, or the type is invalid.
private static String checkClassType(@NotNull PsiClassType type, @NotNull PsiElement context) {
  final String className = PsiNameHelper.getQualifiedClassName(type.getCanonicalText(false), true);
  return classExists(context, className)
         ? getClassInaccessibleMessage(context, className)
         : "Invalid Java type";
}
// Returns true when a class with the given qualified name exists anywhere in the
// project's all-scope, regardless of whether it is accessible from the context.
private static boolean classExists(@NotNull PsiElement context, String className) {
  final GlobalSearchScope everything = GlobalSearchScope.allScope(context.getProject());
  return JavaPsiFacade.getInstance(context.getProject()).findClass(className, everything) != null;
}
// Builds the "not accessible" diagnostic, naming the context's module when one exists.
@NotNull
private static String getClassInaccessibleMessage(@NotNull PsiElement context, String className) {
  final Module module = ModuleUtilCore.findModuleForPsiElement(context);
  final String where = module != null ? "from module '" + module.getName() + "'" : "here";
  return "Class '" + className + "' is not accessible " + where;
}
/** Mutable result holder filled in by {@code visitConstructorChain}. */
static class ConstructorVisitorInfo {
// Constructors reached via this(...) delegation; null until the first hop is recorded.
List<PsiMethod> visitedConstructors;
// First constructor encountered twice, i.e. evidence of a recursive this(...) cycle.
PsiMethod recursivelyCalledConstructor;
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/services/google_ads_field_service.proto
package com.google.ads.googleads.v9.services;
/**
* <pre>
* Response message for [GoogleAdsFieldService.SearchGoogleAdsFields][google.ads.googleads.v9.services.GoogleAdsFieldService.SearchGoogleAdsFields].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse}
*/
public final class SearchGoogleAdsFieldsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse)
SearchGoogleAdsFieldsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchGoogleAdsFieldsResponse.newBuilder() to construct.
private SearchGoogleAdsFieldsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default state: empty results list and empty next_page_token (proto3 defaults).
private SearchGoogleAdsFieldsResponse() {
results_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
// Generated-runtime instantiation hook; the parameter is unused by design.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new SearchGoogleAdsFieldsResponse();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
/**
 * Parses a serialized SearchGoogleAdsFieldsResponse from the input stream.
 * Handles field 1 (results), field 2 (next_page_token) and field 3
 * (total_results_count); any other field is preserved in unknownFields.
 */
private SearchGoogleAdsFieldsResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 marks end of input.
done = true;
break;
case 10: {
// Tag 10 = field 1 (results), length-delimited: lazily switch to a mutable list.
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
results_ = new java.util.ArrayList<com.google.ads.googleads.v9.resources.GoogleAdsField>();
mutable_bitField0_ |= 0x00000001;
}
results_.add(
input.readMessage(com.google.ads.googleads.v9.resources.GoogleAdsField.parser(), extensionRegistry));
break;
}
case 18: {
// Tag 18 = field 2 (next_page_token), length-delimited UTF-8 string.
java.lang.String s = input.readStringRequireUtf8();
nextPageToken_ = s;
break;
}
case 24: {
// Tag 24 = field 3 (total_results_count), varint.
totalResultsCount_ = input.readInt64();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
// Freeze the results list whether parsing succeeded or failed.
if (((mutable_bitField0_ & 0x00000001) != 0)) {
results_ = java.util.Collections.unmodifiableList(results_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing wired to GoogleAdsFieldServiceProto; generated, do not hand-edit.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.services.GoogleAdsFieldServiceProto.internal_static_google_ads_googleads_v9_services_SearchGoogleAdsFieldsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.services.GoogleAdsFieldServiceProto.internal_static_google_ads_googleads_v9_services_SearchGoogleAdsFieldsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse.class, com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse.Builder.class);
}
public static final int RESULTS_FIELD_NUMBER = 1;
// Immutable after construction (see parsing ctor / buildPartial).
private java.util.List<com.google.ads.googleads.v9.resources.GoogleAdsField> results_;
/**
 * <pre>
 * The list of fields that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
 * @return the unmodifiable list of results
 */
@java.lang.Override
public java.util.List<com.google.ads.googleads.v9.resources.GoogleAdsField> getResultsList() {
return results_;
}
/**
 * <pre>
 * The list of fields that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
 * @return the results viewed through their OrBuilder interface
 */
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v9.resources.GoogleAdsFieldOrBuilder>
getResultsOrBuilderList() {
return results_;
}
/**
 * <pre>
 * The list of fields that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
 * @return the number of results
 */
@java.lang.Override
public int getResultsCount() {
return results_.size();
}
/**
 * <pre>
 * The list of fields that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
 * @param index zero-based position in the results list
 */
@java.lang.Override
public com.google.ads.googleads.v9.resources.GoogleAdsField getResults(int index) {
return results_.get(index);
}
/**
 * <pre>
 * The list of fields that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
 * @param index zero-based position in the results list
 */
@java.lang.Override
public com.google.ads.googleads.v9.resources.GoogleAdsFieldOrBuilder getResultsOrBuilder(
int index) {
return results_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; lazily converted and cached by the getters.
private volatile java.lang.Object nextPageToken_;
/**
 * <pre>
 * Pagination token used to retrieve the next page of results. Pass the
 * content of this string as the `page_token` attribute of the next request.
 * `next_page_token` is not returned for the last page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode the ByteString once and cache the String form.
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
 * <pre>
 * Pagination token used to retrieve the next page of results. Pass the
 * content of this string as the `page_token` attribute of the next request.
 * `next_page_token` is not returned for the last page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString
getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
// Encode the String once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TOTAL_RESULTS_COUNT_FIELD_NUMBER = 3;
private long totalResultsCount_;
/**
 * <pre>
 * Total number of results that match the query ignoring the LIMIT clause.
 * </pre>
 *
 * <code>int64 total_results_count = 3;</code>
 * @return The totalResultsCount.
 */
@java.lang.Override
public long getTotalResultsCount() {
return totalResultsCount_;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so initialization always succeeds.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Field 1: repeated results.
for (int i = 0; i < results_.size(); i++) {
output.writeMessage(1, results_.get(i));
}
// Field 2: next_page_token, omitted when empty (proto3 default).
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
// Field 3: total_results_count, omitted when 0 (proto3 default).
if (totalResultsCount_ != 0L) {
output.writeInt64(3, totalResultsCount_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Size is memoized; -1 means not yet computed.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
// Mirrors writeTo: only fields that would actually be written contribute.
for (int i = 0; i < results_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, results_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
if (totalResultsCount_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(3, totalResultsCount_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse)) {
return super.equals(obj);
}
com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse other = (com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse) obj;
// Field-by-field comparison of all three declared fields plus unknown fields.
if (!getResultsList()
.equals(other.getResultsList())) return false;
if (!getNextPageToken()
.equals(other.getNextPageToken())) return false;
if (getTotalResultsCount()
!= other.getTotalResultsCount()) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Memoized; 0 doubles as the "not yet computed" sentinel.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// The repeated field only contributes when non-empty.
if (getResultsCount() > 0) {
hash = (37 * hash) + RESULTS_FIELD_NUMBER;
hash = (53 * hash) + getResultsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (37 * hash) + TOTAL_RESULTS_COUNT_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getTotalResultsCount());
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads; all delegate to PARSER
// or to the GeneratedMessageV3 IO helpers.
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods; toBuilder() on the default instance skips the mergeFrom.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Response message for [GoogleAdsFieldService.SearchGoogleAdsFields][google.ads.googleads.v9.services.GoogleAdsFieldService.SearchGoogleAdsFields].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse)
com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.services.GoogleAdsFieldServiceProto.internal_static_google_ads_googleads_v9_services_SearchGoogleAdsFieldsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.services.GoogleAdsFieldServiceProto.internal_static_google_ads_googleads_v9_services_SearchGoogleAdsFieldsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse.class, com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse.Builder.class);
}
// Construct using com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getResultsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (resultsBuilder_ == null) {
results_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
resultsBuilder_.clear();
}
nextPageToken_ = "";
totalResultsCount_ = 0L;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v9.services.GoogleAdsFieldServiceProto.internal_static_google_ads_googleads_v9_services_SearchGoogleAdsFieldsResponse_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse getDefaultInstanceForType() {
return com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse build() {
com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse buildPartial() {
com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse result = new com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse(this);
int from_bitField0_ = bitField0_;
if (resultsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
results_ = java.util.Collections.unmodifiableList(results_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.results_ = results_;
} else {
result.results_ = resultsBuilder_.build();
}
result.nextPageToken_ = nextPageToken_;
result.totalResultsCount_ = totalResultsCount_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse) {
return mergeFrom((com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse other) {
if (other == com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse.getDefaultInstance()) return this;
if (resultsBuilder_ == null) {
if (!other.results_.isEmpty()) {
if (results_.isEmpty()) {
results_ = other.results_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureResultsIsMutable();
results_.addAll(other.results_);
}
onChanged();
}
} else {
if (!other.results_.isEmpty()) {
if (resultsBuilder_.isEmpty()) {
resultsBuilder_.dispose();
resultsBuilder_ = null;
results_ = other.results_;
bitField0_ = (bitField0_ & ~0x00000001);
resultsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getResultsFieldBuilder() : null;
} else {
resultsBuilder_.addAllMessages(other.results_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
onChanged();
}
if (other.getTotalResultsCount() != 0L) {
setTotalResultsCount(other.getTotalResultsCount());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.util.List<com.google.ads.googleads.v9.resources.GoogleAdsField> results_ =
java.util.Collections.emptyList();
private void ensureResultsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
results_ = new java.util.ArrayList<com.google.ads.googleads.v9.resources.GoogleAdsField>(results_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v9.resources.GoogleAdsField, com.google.ads.googleads.v9.resources.GoogleAdsField.Builder, com.google.ads.googleads.v9.resources.GoogleAdsFieldOrBuilder> resultsBuilder_;
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public java.util.List<com.google.ads.googleads.v9.resources.GoogleAdsField> getResultsList() {
if (resultsBuilder_ == null) {
return java.util.Collections.unmodifiableList(results_);
} else {
return resultsBuilder_.getMessageList();
}
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public int getResultsCount() {
if (resultsBuilder_ == null) {
return results_.size();
} else {
return resultsBuilder_.getCount();
}
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public com.google.ads.googleads.v9.resources.GoogleAdsField getResults(int index) {
if (resultsBuilder_ == null) {
return results_.get(index);
} else {
return resultsBuilder_.getMessage(index);
}
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public Builder setResults(
int index, com.google.ads.googleads.v9.resources.GoogleAdsField value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.set(index, value);
onChanged();
} else {
resultsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public Builder setResults(
int index, com.google.ads.googleads.v9.resources.GoogleAdsField.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.set(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public Builder addResults(com.google.ads.googleads.v9.resources.GoogleAdsField value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(value);
onChanged();
} else {
resultsBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public Builder addResults(
int index, com.google.ads.googleads.v9.resources.GoogleAdsField value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(index, value);
onChanged();
} else {
resultsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public Builder addResults(
com.google.ads.googleads.v9.resources.GoogleAdsField.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public Builder addResults(
int index, com.google.ads.googleads.v9.resources.GoogleAdsField.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public Builder addAllResults(
java.lang.Iterable<? extends com.google.ads.googleads.v9.resources.GoogleAdsField> values) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, results_);
onChanged();
} else {
resultsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public Builder clearResults() {
if (resultsBuilder_ == null) {
results_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
resultsBuilder_.clear();
}
return this;
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public Builder removeResults(int index) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.remove(index);
onChanged();
} else {
resultsBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public com.google.ads.googleads.v9.resources.GoogleAdsField.Builder getResultsBuilder(
int index) {
return getResultsFieldBuilder().getBuilder(index);
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public com.google.ads.googleads.v9.resources.GoogleAdsFieldOrBuilder getResultsOrBuilder(
int index) {
if (resultsBuilder_ == null) {
return results_.get(index); } else {
return resultsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public java.util.List<? extends com.google.ads.googleads.v9.resources.GoogleAdsFieldOrBuilder>
getResultsOrBuilderList() {
if (resultsBuilder_ != null) {
return resultsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(results_);
}
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public com.google.ads.googleads.v9.resources.GoogleAdsField.Builder addResultsBuilder() {
return getResultsFieldBuilder().addBuilder(
com.google.ads.googleads.v9.resources.GoogleAdsField.getDefaultInstance());
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public com.google.ads.googleads.v9.resources.GoogleAdsField.Builder addResultsBuilder(
int index) {
return getResultsFieldBuilder().addBuilder(
index, com.google.ads.googleads.v9.resources.GoogleAdsField.getDefaultInstance());
}
/**
* <pre>
* The list of fields that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v9.resources.GoogleAdsField results = 1;</code>
*/
public java.util.List<com.google.ads.googleads.v9.resources.GoogleAdsField.Builder>
getResultsBuilderList() {
return getResultsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v9.resources.GoogleAdsField, com.google.ads.googleads.v9.resources.GoogleAdsField.Builder, com.google.ads.googleads.v9.resources.GoogleAdsFieldOrBuilder>
getResultsFieldBuilder() {
if (resultsBuilder_ == null) {
resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v9.resources.GoogleAdsField, com.google.ads.googleads.v9.resources.GoogleAdsField.Builder, com.google.ads.googleads.v9.resources.GoogleAdsFieldOrBuilder>(
results_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
results_ = null;
}
return resultsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
* <pre>
* Pagination token used to retrieve the next page of results. Pass the
* content of this string as the `page_token` attribute of the next request.
* `next_page_token` is not returned for the last page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results. Pass the
* content of this string as the `page_token` attribute of the next request.
* `next_page_token` is not returned for the last page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString
getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results. Pass the
* content of this string as the `page_token` attribute of the next request.
* `next_page_token` is not returned for the last page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
onChanged();
return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results. Pass the
* content of this string as the `page_token` attribute of the next request.
* `next_page_token` is not returned for the last page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
onChanged();
return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results. Pass the
* content of this string as the `page_token` attribute of the next request.
* `next_page_token` is not returned for the last page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
onChanged();
return this;
}
private long totalResultsCount_ ;
/**
* <pre>
* Total number of results that match the query ignoring the LIMIT clause.
* </pre>
*
* <code>int64 total_results_count = 3;</code>
* @return The totalResultsCount.
*/
@java.lang.Override
public long getTotalResultsCount() {
return totalResultsCount_;
}
/**
* <pre>
* Total number of results that match the query ignoring the LIMIT clause.
* </pre>
*
* <code>int64 total_results_count = 3;</code>
* @param value The totalResultsCount to set.
* @return This builder for chaining.
*/
public Builder setTotalResultsCount(long value) {
totalResultsCount_ = value;
onChanged();
return this;
}
/**
* <pre>
* Total number of results that match the query ignoring the LIMIT clause.
* </pre>
*
* <code>int64 total_results_count = 3;</code>
* @return This builder for chaining.
*/
public Builder clearTotalResultsCount() {
totalResultsCount_ = 0L;
onChanged();
return this;
}
    // Replaces this builder's unknown-field set; delegates to the generated base builder.
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Merges the given unknown fields into this builder's existing set; delegates to the base builder.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse)
private static final com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse();
}
public static com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Parser used by the protobuf runtime to decode this message from wire format.
  private static final com.google.protobuf.Parser<SearchGoogleAdsFieldsResponse>
      PARSER = new com.google.protobuf.AbstractParser<SearchGoogleAdsFieldsResponse>() {
    @java.lang.Override
    public SearchGoogleAdsFieldsResponse parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      // Delegates to the parsing constructor generated for this message.
      return new SearchGoogleAdsFieldsResponse(input, extensionRegistry);
    }
  };
  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<SearchGoogleAdsFieldsResponse> parser() {
    return PARSER;
  }
  // Instance-level accessor for the shared parser (required by MessageLite).
  @java.lang.Override
  public com.google.protobuf.Parser<SearchGoogleAdsFieldsResponse> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor for the shared default instance (required by MessageLite).
  @java.lang.Override
  public com.google.ads.googleads.v9.services.SearchGoogleAdsFieldsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
package com.example.imageconsumerdemo;
import kafka.api.FetchRequest;
import kafka.api.FetchRequestBuilder;
import kafka.api.PartitionOffsetRequestInfo;
import kafka.common.ErrorMapping;
import kafka.common.TopicAndPartition;
import kafka.javaapi.*;
import kafka.javaapi.consumer.SimpleConsumer;
import kafka.javaapi.message.ByteBufferMessageSet;
import kafka.message.MessageAndOffset;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.dos.pubsub.document.BinaryDocument;
import com.dos.pubsub.document.Document;
import com.dos.pubsub.document.DocumentDecoder;
import com.dos.pubsub.document.StringDocument;
import android.os.AsyncTask;
import android.util.Log;
public class KafkaSimpleConsumer extends AsyncTask<String, Void, ArrayList<Byte[]>> {
private Long byte_offset;
private Long maxReads;
private String topic;
private int partition;
private List<String> seeds = new ArrayList<String>();
private int port;
private List<String> m_replicaBrokers = new ArrayList<String>();
ArrayList<Byte[]> bytes_list = new ArrayList<Byte[]>();
protected ArrayList<Byte[]> doInBackground(String... params) {
// Maximum number of times the message is fetched from the server
this.maxReads = Long.parseLong(params[0]);
this.topic = params[1]; // Topic is the second param
// Partition of the topic from which we are going to fetch
this.partition = Integer.parseInt(params[2]);
this.seeds.add(params[3]); // Broker IP
this.port = Integer.parseInt(params[4]); // Port no
// We should remember the last offset of message we fetched.
// This is prevent us from fetched the old data always
this.byte_offset = Long.parseLong(params[5]);
try {
run(maxReads, topic, partition, seeds, port);
} catch (Exception e) {
System.out.println("Oops:" + e);
e.printStackTrace();
}
return bytes_list;
}
public KafkaSimpleConsumer() {
m_replicaBrokers = new ArrayList<String>();
}
public void run(long a_maxReads, String a_topic, int a_partition, List<String> a_seedBrokers, int a_port) throws Exception {
// find the meta data about the topic and partition we are interested in
//
PartitionMetadata metadata = findLeader(a_seedBrokers, a_port, a_topic, a_partition);
if (metadata == null) {
Log.d("metadata", "Can't find metadata for Topic and Partition. Exiting" );
return;
}
if (metadata.leader() == null) {
Log.d("metadata", "Can't find Leader for Topic and Partition. Exiting" );
return;
}
String leadBroker = metadata.leader().host();
String clientName = "Client_" + a_topic + "_" + a_partition;
SimpleConsumer consumer = new SimpleConsumer(leadBroker, a_port, 100000, 3000000, clientName);
// The last offset the client used
long readOffset = getLastOffset(consumer,a_topic, a_partition, kafka.api.OffsetRequest.EarliestTime(), clientName);
//long readOffset = byte_offset;
int numErrors = 0;
// List of chunks the publisher sent
ArrayList<BinaryDocument> docs = new ArrayList<BinaryDocument>();
Byte[] byte_array;
int byte_array_length = 0;
// This decoder is in pubsub.jar
DocumentDecoder mydecoder = new DocumentDecoder(null);
while (a_maxReads > 0) {
if (consumer == null) {
consumer = new SimpleConsumer(leadBroker, a_port, 100000, 3000000, clientName);
}
FetchRequest req = new FetchRequestBuilder()
.clientId(clientName)
.addFetch(a_topic, a_partition, readOffset, 3000000)
.build();
FetchResponse fetchResponse = consumer.fetch(req);
if (fetchResponse.hasError()) {
numErrors++;
// Something went wrong!
short code = fetchResponse.errorCode(a_topic, a_partition);
Log.e("fetch_error","Error fetching data from the Broker:" + leadBroker + " Reason: " + code);
if (numErrors > 5) break;
if (code == ErrorMapping.OffsetOutOfRangeCode()) {
// We asked for an invalid offset. For simple case ask for the last element to reset
readOffset = getLastOffset(consumer,a_topic, a_partition, kafka.api.OffsetRequest.LatestTime(), clientName);
continue;
}
consumer.close();
consumer = null;
leadBroker = findNewLeader(leadBroker, a_topic, a_partition, a_port);
continue;
}
numErrors = 0;
long numRead = 0;
for (MessageAndOffset messageAndOffset : fetchResponse.messageSet(a_topic, a_partition)) {
long currentOffset = messageAndOffset.offset();
if (currentOffset < readOffset) {
continue;
}
readOffset = messageAndOffset.nextOffset();
byte_offset = readOffset;
ByteBuffer payload = messageAndOffset.message().payload();
byte[] bytes = new byte[payload.limit()];
payload.get(bytes);
BinaryDocument bin_doc = (BinaryDocument)mydecoder.fromBytes(bytes);
// if it is the first chunk, we start collecting the chunks
if (bin_doc.IsFirst()) {
docs = new ArrayList<BinaryDocument>();
byte_array_length = 0;
}
byte_array_length += bin_doc.data.length;
docs.add(bin_doc);
// If it is the last chunk, then we collect it and add it to the list
// which is returned to the activity calling the KafkaSimpleConsumer
if (bin_doc.IsLast()) {
byte_array = new Byte[byte_array_length];
int i=0;
int j;
for (BinaryDocument doc : docs) {
for (j=0; j<doc.data.length; j++) {
byte_array[i] = doc.data[j];
i++;
}
}
bytes_list.add(byte_array);
}
numRead++;
a_maxReads--;
}
return;
/* if (numRead == 0) {
try {
Thread.sleep(1000);
} catch (InterruptedException ie) {
ie.printStackTrace();
}
}
a_maxReads--;*/
}
/*
System.out.println("its over");
if (consumer != null) consumer.close();*/
}
public static long getLastOffset(SimpleConsumer consumer, String topic, int partition,
long whichTime, String clientName) {
TopicAndPartition topicAndPartition = new TopicAndPartition(topic, partition);
Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo = new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
requestInfo.put(topicAndPartition, new PartitionOffsetRequestInfo(whichTime, 1));
kafka.javaapi.OffsetRequest request = new kafka.javaapi.OffsetRequest(
requestInfo, kafka.api.OffsetRequest.CurrentVersion(), clientName);
OffsetResponse response = consumer.getOffsetsBefore(request);
if (response.hasError()) {
Log.e("response_error","Error fetching data Offset Data the Broker. Reason: " + response.errorCode(topic, partition) );
return 0;
}
long[] offsets = response.offsets(topic, partition);
return offsets[0];
}
private String findNewLeader(String a_oldLeader, String a_topic, int a_partition, int a_port) throws Exception {
for (int i = 0; i < 3; i++) {
boolean goToSleep = false;
PartitionMetadata metadata = findLeader(m_replicaBrokers, a_port, a_topic, a_partition);
if (metadata == null) {
goToSleep = true;
} else if (metadata.leader() == null) {
goToSleep = true;
} else if (a_oldLeader.equalsIgnoreCase(metadata.leader().host()) && i == 0) {
// first time through if the leader hasn't changed give ZooKeeper a second to recover
// second time, assume the broker did recover before failover, or it was a non-Broker issue
//
goToSleep = true;
} else {
return metadata.leader().host();
}
if (goToSleep) {
try {
Thread.sleep(1000);
} catch (InterruptedException ie) {
}
}
}
Log.e("learder_error","Unable to find new leader after Broker failure. Exiting");
throw new Exception("Unable to find new leader after Broker failure. Exiting");
}
private PartitionMetadata findLeader(List<String> a_seedBrokers, int a_port, String a_topic, int a_partition) {
PartitionMetadata returnMetaData = null;
loop:
for (String seed : a_seedBrokers) {
SimpleConsumer consumer = null;
try {
consumer = new SimpleConsumer(seed, a_port, 100000, 64 * 1024, "leaderLookup");
List<String> topics = Collections.singletonList(a_topic);
TopicMetadataRequest req = new TopicMetadataRequest(topics);
kafka.javaapi.TopicMetadataResponse resp = consumer.send(req);
List<TopicMetadata> metaData = resp.topicsMetadata();
for (TopicMetadata item : metaData) {
for (PartitionMetadata part : item.partitionsMetadata()) {
if (part.partitionId() == a_partition) {
returnMetaData = part;
break loop;
}
}
}
} catch (Exception e) {
Log.e("partition_error","Error communicating with Broker [" + seed + "] to find Leader for [" + a_topic
+ ", " + a_partition + "] Reason: " + e);
} finally {
if (consumer != null) consumer.close();
}
}
if (returnMetaData != null) {
m_replicaBrokers.clear();
for (kafka.cluster.Broker replica : returnMetaData.replicas()) {
m_replicaBrokers.add(replica.host());
}
}
return returnMetaData;
}
protected Long GetOffset() {
return byte_offset;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.formats.json;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.util.DataFormatConverters;
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.types.Row;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Assert;
import org.junit.Test;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import static org.apache.flink.table.api.DataTypes.ARRAY;
import static org.apache.flink.table.api.DataTypes.BIGINT;
import static org.apache.flink.table.api.DataTypes.BOOLEAN;
import static org.apache.flink.table.api.DataTypes.BYTES;
import static org.apache.flink.table.api.DataTypes.DATE;
import static org.apache.flink.table.api.DataTypes.DECIMAL;
import static org.apache.flink.table.api.DataTypes.DOUBLE;
import static org.apache.flink.table.api.DataTypes.FIELD;
import static org.apache.flink.table.api.DataTypes.FLOAT;
import static org.apache.flink.table.api.DataTypes.INT;
import static org.apache.flink.table.api.DataTypes.MAP;
import static org.apache.flink.table.api.DataTypes.ROW;
import static org.apache.flink.table.api.DataTypes.SMALLINT;
import static org.apache.flink.table.api.DataTypes.STRING;
import static org.apache.flink.table.api.DataTypes.TIME;
import static org.apache.flink.table.api.DataTypes.TIMESTAMP;
import static org.apache.flink.table.api.DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
import static org.apache.flink.table.api.DataTypes.TINYINT;
import static org.apache.flink.table.types.utils.TypeConversions.fromLogicalToDataType;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
* Tests for {@link JsonRowDataDeserializationSchema} and {@link JsonRowDataSerializationSchema}.
*/
public class JsonRowDataSerDeSchemaTest {

    @Test
    public void testSerDe() throws Exception {
        byte tinyint = 'c';
        short smallint = 128;
        int intValue = 45536;
        float floatValue = 33.333F;
        long bigint = 1238123899121L;
        String name = "asdlkjasjkdla998y1122";
        byte[] bytes = new byte[1024];
        ThreadLocalRandom.current().nextBytes(bytes);
        BigDecimal decimal = new BigDecimal("123.456789");
        Double[] doubles = new Double[]{1.1, 2.2, 3.3};
        LocalDate date = LocalDate.parse("1990-10-14");
        LocalTime time = LocalTime.parse("12:12:43");
        Timestamp timestamp3 = Timestamp.valueOf("1990-10-14 12:12:43.123");
        Timestamp timestamp9 = Timestamp.valueOf("1990-10-14 12:12:43.123456789");
        Instant timestampWithLocalZone =
            LocalDateTime.of(1990, 10, 14, 12, 12, 43, 123456789).
                atOffset(ZoneOffset.of("Z")).toInstant();

        Map<String, Long> map = new HashMap<>();
        map.put("flink", 123L);

        Map<String, Map<String, Integer>> nestedMap = new HashMap<>();
        Map<String, Integer> innerMap = new HashMap<>();
        innerMap.put("key", 234);
        nestedMap.put("inner_map", innerMap);

        ObjectMapper objectMapper = new ObjectMapper();
        ArrayNode doubleNode = objectMapper.createArrayNode().add(1.1D).add(2.2D).add(3.3D);

        // Root node covering every supported field type.
        ObjectNode root = objectMapper.createObjectNode();
        root.put("bool", true);
        root.put("tinyint", tinyint);
        root.put("smallint", smallint);
        root.put("int", intValue);
        root.put("bigint", bigint);
        root.put("float", floatValue);
        root.put("name", name);
        root.put("bytes", bytes);
        root.put("decimal", decimal);
        root.set("doubles", doubleNode);
        root.put("date", "1990-10-14");
        root.put("time", "12:12:43");
        root.put("timestamp3", "1990-10-14T12:12:43.123");
        root.put("timestamp9", "1990-10-14T12:12:43.123456789");
        root.put("timestampWithLocalZone", "1990-10-14T12:12:43.123456789Z");
        root.putObject("map").put("flink", 123);
        root.putObject("map2map").putObject("inner_map").put("key", 234);
        byte[] serializedJson = objectMapper.writeValueAsBytes(root);

        DataType dataType = ROW(
            FIELD("bool", BOOLEAN()),
            FIELD("tinyint", TINYINT()),
            FIELD("smallint", SMALLINT()),
            FIELD("int", INT()),
            FIELD("bigint", BIGINT()),
            FIELD("float", FLOAT()),
            FIELD("name", STRING()),
            FIELD("bytes", BYTES()),
            FIELD("decimal", DECIMAL(9, 6)),
            FIELD("doubles", ARRAY(DOUBLE())),
            FIELD("date", DATE()),
            FIELD("time", TIME(0)),
            FIELD("timestamp3", TIMESTAMP(3)),
            FIELD("timestamp9", TIMESTAMP(9)),
            FIELD("timestampWithLocalZone", TIMESTAMP_WITH_LOCAL_TIME_ZONE(9)),
            FIELD("map", MAP(STRING(), BIGINT())),
            FIELD("map2map", MAP(STRING(), MAP(STRING(), INT()))));
        RowType schema = (RowType) dataType.getLogicalType();
        TypeInformation<RowData> resultTypeInfo = InternalTypeInfo.of(schema);

        JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema(
            schema, resultTypeInfo, false, false, TimestampFormat.ISO_8601);

        Row expected = new Row(17);
        expected.setField(0, true);
        expected.setField(1, tinyint);
        expected.setField(2, smallint);
        expected.setField(3, intValue);
        expected.setField(4, bigint);
        expected.setField(5, floatValue);
        expected.setField(6, name);
        expected.setField(7, bytes);
        expected.setField(8, decimal);
        expected.setField(9, doubles);
        expected.setField(10, date);
        expected.setField(11, time);
        expected.setField(12, timestamp3.toLocalDateTime());
        expected.setField(13, timestamp9.toLocalDateTime());
        expected.setField(14, timestampWithLocalZone);
        expected.setField(15, map);
        expected.setField(16, nestedMap);

        RowData rowData = deserializationSchema.deserialize(serializedJson);
        Row actual = convertToExternal(rowData, dataType);
        assertEquals(expected, actual);

        // test serialization
        JsonRowDataSerializationSchema serializationSchema = new JsonRowDataSerializationSchema(schema, TimestampFormat.ISO_8601);
        byte[] actualBytes = serializationSchema.serialize(rowData);
        assertEquals(new String(serializedJson), new String(actualBytes));
    }

    /**
     * Tests the deserialization slow path,
     * e.g. convert into string and use {@link Double#parseDouble(String)}.
     */
    @Test
    public void testSlowDeserialization() throws Exception {
        Random random = new Random();
        boolean bool = random.nextBoolean();
        int integer = random.nextInt();
        long bigint = random.nextLong();
        double doubleValue = random.nextDouble();
        float floatValue = random.nextFloat();

        ObjectMapper objectMapper = new ObjectMapper();
        ObjectNode root = objectMapper.createObjectNode();
        root.put("bool", String.valueOf(bool));
        root.put("int", String.valueOf(integer));
        root.put("bigint", String.valueOf(bigint));
        root.put("double1", String.valueOf(doubleValue));
        root.put("double2", new BigDecimal(doubleValue));
        root.put("float1", String.valueOf(floatValue));
        root.put("float2", new BigDecimal(floatValue));
        byte[] serializedJson = objectMapper.writeValueAsBytes(root);

        DataType dataType = ROW(
            FIELD("bool", BOOLEAN()),
            FIELD("int", INT()),
            FIELD("bigint", BIGINT()),
            FIELD("double1", DOUBLE()),
            FIELD("double2", DOUBLE()),
            FIELD("float1", FLOAT()),
            FIELD("float2", FLOAT())
        );
        RowType rowType = (RowType) dataType.getLogicalType();
        JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema(
            rowType, InternalTypeInfo.of(rowType), false, false, TimestampFormat.ISO_8601);

        Row expected = new Row(7);
        expected.setField(0, bool);
        expected.setField(1, integer);
        expected.setField(2, bigint);
        expected.setField(3, doubleValue);
        expected.setField(4, doubleValue);
        expected.setField(5, floatValue);
        expected.setField(6, floatValue);

        RowData rowData = deserializationSchema.deserialize(serializedJson);
        Row actual = convertToExternal(rowData, dataType);
        assertEquals(expected, actual);
    }

    @Test
    public void testSerDeMultiRows() throws Exception {
        RowType rowType = (RowType) ROW(
            FIELD("f1", INT()),
            FIELD("f2", BOOLEAN()),
            FIELD("f3", STRING())
        ).getLogicalType();

        JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema(
            rowType, InternalTypeInfo.of(rowType), false, false, TimestampFormat.ISO_8601);
        JsonRowDataSerializationSchema serializationSchema = new JsonRowDataSerializationSchema(rowType, TimestampFormat.ISO_8601);

        ObjectMapper objectMapper = new ObjectMapper();

        // the first row
        {
            ObjectNode root = objectMapper.createObjectNode();
            root.put("f1", 1);
            root.put("f2", true);
            root.put("f3", "str");
            byte[] serializedJson = objectMapper.writeValueAsBytes(root);
            RowData rowData = deserializationSchema.deserialize(serializedJson);
            byte[] actual = serializationSchema.serialize(rowData);
            assertEquals(new String(serializedJson), new String(actual));
        }

        // the second row
        {
            ObjectNode root = objectMapper.createObjectNode();
            root.put("f1", 10);
            root.put("f2", false);
            root.put("f3", "newStr");
            byte[] serializedJson = objectMapper.writeValueAsBytes(root);
            RowData rowData = deserializationSchema.deserialize(serializedJson);
            byte[] actual = serializationSchema.serialize(rowData);
            assertEquals(new String(serializedJson), new String(actual));
        }
    }

    @Test
    public void testSerDeMultiRowsWithNullValues() throws Exception {
        String[] jsons = new String[] {
            "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"metrics\":{\"k1\":10.01,\"k2\":\"invalid\"}}",
            "{\"svt\":\"2020-02-24T12:58:09.209+0800\", \"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\"}, " +
                "\"ids\":[1, 2, 3]}",
            "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"metrics\":{}}",
        };
        String[] expected = new String[] {
            "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":null,\"ids\":null,\"metrics\":{\"k1\":10.01,\"k2\":null}}",
            "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\"}," +
                "\"ids\":[1,2,3],\"metrics\":null}",
            "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":null,\"ids\":null,\"metrics\":{}}",
        };

        RowType rowType = (RowType) ROW(
            FIELD("svt", STRING()),
            FIELD("ops", ROW(FIELD("id", STRING()))),
            FIELD("ids", ARRAY(INT())),
            FIELD("metrics", MAP(STRING(), DOUBLE()))
        ).getLogicalType();

        // ignoreParseErrors=true: unparsable nested values become null.
        JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema(
            rowType, InternalTypeInfo.of(rowType), false, true, TimestampFormat.ISO_8601);
        JsonRowDataSerializationSchema serializationSchema = new JsonRowDataSerializationSchema(rowType, TimestampFormat.ISO_8601);

        for (int i = 0; i < jsons.length; i++) {
            String json = jsons[i];
            RowData row = deserializationSchema.deserialize(json.getBytes());
            String result = new String(serializationSchema.serialize(row));
            assertEquals(expected[i], result);
        }
    }

    @Test
    public void testDeserializationMissingNode() throws Exception {
        ObjectMapper objectMapper = new ObjectMapper();

        // Root
        ObjectNode root = objectMapper.createObjectNode();
        root.put("id", 123123123);
        byte[] serializedJson = objectMapper.writeValueAsBytes(root);

        DataType dataType = ROW(FIELD("name", STRING()));
        RowType schema = (RowType) dataType.getLogicalType();

        // pass on missing field
        JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema(
            schema, InternalTypeInfo.of(schema), false, false, TimestampFormat.ISO_8601);
        Row expected = new Row(1);
        Row actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType);
        assertEquals(expected, actual);

        // fail on missing field
        // BUGFIX: was a duplicated assignment (deserializationSchema = deserializationSchema = ...).
        deserializationSchema = new JsonRowDataDeserializationSchema(
            schema, InternalTypeInfo.of(schema), true, false, TimestampFormat.ISO_8601);
        String errorMessage = "Failed to deserialize JSON '{\"id\":123123123}'.";
        try {
            deserializationSchema.deserialize(serializedJson);
            fail("expecting exception message: " + errorMessage);
        } catch (Throwable t) {
            assertEquals(errorMessage, t.getMessage());
        }

        // ignore on parse error
        deserializationSchema = new JsonRowDataDeserializationSchema(
            schema, InternalTypeInfo.of(schema), false, true, TimestampFormat.ISO_8601);
        actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType);
        assertEquals(expected, actual);

        errorMessage = "JSON format doesn't support failOnMissingField and ignoreParseErrors are both enabled.";
        try {
            // failOnMissingField and ignoreParseErrors both enabled
            new JsonRowDataDeserializationSchema(
                schema, InternalTypeInfo.of(schema), true, true, TimestampFormat.ISO_8601);
            // Consistency: use the statically imported fail(), as elsewhere in this class.
            fail("expecting exception message: " + errorMessage);
        } catch (Throwable t) {
            assertEquals(errorMessage, t.getMessage());
        }
    }

    @Test
    public void testSerDeSQLTimestampFormat() throws Exception{
        RowType rowType = (RowType) ROW(
            FIELD("timestamp3", TIMESTAMP(3)),
            FIELD("timestamp9", TIMESTAMP(9)),
            FIELD("timestamp_with_local_timezone3", TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)),
            FIELD("timestamp_with_local_timezone9", TIMESTAMP_WITH_LOCAL_TIME_ZONE(9))
        ).getLogicalType();

        JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema(
            rowType, InternalTypeInfo.of(rowType), false, false, TimestampFormat.SQL);
        JsonRowDataSerializationSchema serializationSchema = new JsonRowDataSerializationSchema(rowType, TimestampFormat.SQL);

        ObjectMapper objectMapper = new ObjectMapper();
        ObjectNode root = objectMapper.createObjectNode();
        root.put("timestamp3", "1990-10-14 12:12:43.123");
        root.put("timestamp9", "1990-10-14 12:12:43.123456789");
        root.put("timestamp_with_local_timezone3", "1990-10-14 12:12:43.123Z");
        root.put("timestamp_with_local_timezone9", "1990-10-14 12:12:43.123456789Z");
        byte[] serializedJson = objectMapper.writeValueAsBytes(root);
        RowData rowData = deserializationSchema.deserialize(serializedJson);
        byte[] actual = serializationSchema.serialize(rowData);
        assertEquals(new String(serializedJson), new String(actual));
    }

    @Test
    public void testJsonParse() throws Exception {
        for (TestSpec spec : testData) {
            testIgnoreParseErrors(spec);
            if (spec.errorMessage != null) {
                testParseErrors(spec);
            }
        }
    }

    private void testIgnoreParseErrors(TestSpec spec) throws Exception {
        // the parsing field should be null and no exception is thrown
        JsonRowDataDeserializationSchema ignoreErrorsSchema = new JsonRowDataDeserializationSchema(
            spec.rowType, InternalTypeInfo.of(spec.rowType), false, true,
            spec.timestampFormat);
        Row expected;
        if (spec.expected != null) {
            expected = spec.expected;
        } else {
            expected = new Row(1);
        }
        RowData rowData = ignoreErrorsSchema.deserialize(spec.json.getBytes());
        Row actual = convertToExternal(rowData, fromLogicalToDataType(spec.rowType));
        assertEquals("Test Ignore Parse Error: " + spec.json,
            expected,
            actual);
    }

    private void testParseErrors(TestSpec spec) throws Exception {
        // expect exception if parse error is not ignored
        JsonRowDataDeserializationSchema failingSchema = new JsonRowDataDeserializationSchema(
            spec.rowType, InternalTypeInfo.of(spec.rowType), false, false,
            spec.timestampFormat);

        try {
            failingSchema.deserialize(spec.json.getBytes());
            fail("expecting exception " + spec.errorMessage);
        } catch (Throwable t) {
            // BUGFIX: arguments were swapped — JUnit expects (expected, actual).
            assertEquals(spec.errorMessage, t.getMessage());
        }
    }

    private static final List<TestSpec> testData = Arrays.asList(
        TestSpec
            .json("{\"id\": \"trueA\"}")
            .rowType(ROW(FIELD("id", BOOLEAN())))
            .expect(Row.of(false)),

        TestSpec
            .json("{\"id\": true}")
            .rowType(ROW(FIELD("id", BOOLEAN())))
            .expect(Row.of(true)),

        TestSpec
            .json("{\"id\":\"abc\"}")
            .rowType(ROW(FIELD("id", INT())))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"abc\"}'."),

        TestSpec
            .json("{\"id\":112.013}")
            .rowType(ROW(FIELD("id", BIGINT())))
            .expect(Row.of(112L)),

        TestSpec
            .json("{\"id\":\"long\"}")
            .rowType(ROW(FIELD("id", BIGINT())))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"long\"}'."),

        TestSpec
            .json("{\"id\":\"112.013.123\"}")
            .rowType(ROW(FIELD("id", FLOAT())))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"112.013.123\"}'."),

        TestSpec
            .json("{\"id\":\"112.013.123\"}")
            .rowType(ROW(FIELD("id", DOUBLE())))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"112.013.123\"}'."),

        TestSpec
            .json("{\"id\":\"18:00:243\"}")
            .rowType(ROW(FIELD("id", TIME())))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"18:00:243\"}'."),

        // NOTE(review): exact duplicate of the previous spec — kept for behavior parity.
        TestSpec
            .json("{\"id\":\"18:00:243\"}")
            .rowType(ROW(FIELD("id", TIME())))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"18:00:243\"}'."),

        TestSpec
            .json("{\"id\":\"20191112\"}")
            .rowType(ROW(FIELD("id", DATE())))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"20191112\"}'."),

        // NOTE(review): exact duplicate of the previous spec — kept for behavior parity.
        TestSpec
            .json("{\"id\":\"20191112\"}")
            .rowType(ROW(FIELD("id", DATE())))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"20191112\"}'."),

        TestSpec
            .json("{\"id\":true}")
            .rowType(ROW(FIELD("id", STRING())))
            .expect(Row.of("true")),

        TestSpec
            .json("{\"id\":123.234}")
            .rowType(ROW(FIELD("id", STRING())))
            .expect(Row.of("123.234")),

        TestSpec
            .json("{\"id\":1234567}")
            .rowType(ROW(FIELD("id", STRING())))
            .expect(Row.of("1234567")),

        TestSpec
            .json("{\"id\":\"string field\"}")
            .rowType(ROW(FIELD("id", STRING())))
            .expect(Row.of("string field")),

        TestSpec
            .json("{\"id\":[\"array data1\",\"array data2\",123,234.345]}")
            .rowType(ROW(FIELD("id", STRING())))
            .expect(Row.of("[\"array data1\",\"array data2\",123,234.345]")),

        TestSpec
            .json("{\"id\":{\"k1\":123,\"k2\":234.234,\"k3\":\"string data\"}}")
            .rowType(ROW(FIELD("id", STRING())))
            .expect(Row.of("{\"k1\":123,\"k2\":234.234,\"k3\":\"string data\"}")),

        TestSpec
            .json("{\"id\":\"2019-11-12 18:00:12\"}")
            .rowType(ROW(FIELD("id", TIMESTAMP(0))))
            .timestampFormat(TimestampFormat.ISO_8601)
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12 18:00:12\"}'."),

        TestSpec
            .json("{\"id\":\"2019-11-12T18:00:12\"}")
            .rowType(ROW(FIELD("id", TIMESTAMP(0))))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12\"}'."),

        TestSpec
            .json("{\"id\":\"2019-11-12T18:00:12Z\"}")
            .rowType(ROW(FIELD("id", TIMESTAMP(0))))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12Z\"}'."),

        TestSpec
            .json("{\"id\":\"2019-11-12T18:00:12Z\"}")
            .rowType(ROW(FIELD("id", TIMESTAMP(0))))
            .timestampFormat(TimestampFormat.ISO_8601)
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12Z\"}'."),

        TestSpec
            .json("{\"id\":\"abc\"}")
            .rowType(ROW(FIELD("id", DECIMAL(10, 3))))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"abc\"}'."),

        TestSpec
            .json("{\"row\":{\"id\":\"abc\"}}")
            .rowType(ROW(FIELD("row", ROW(FIELD("id", BOOLEAN())))))
            .expect(Row.of(Row.of(false))),

        TestSpec
            .json("{\"array\":[123, \"abc\"]}")
            .rowType(ROW(FIELD("array", ARRAY(INT()))))
            .expect(Row.of((Object) new Integer[]{123, null}))
            .expectErrorMessage("Failed to deserialize JSON '{\"array\":[123, \"abc\"]}'."),

        TestSpec
            .json("{\"map\":{\"key1\":\"123\", \"key2\":\"abc\"}}")
            .rowType(ROW(FIELD("map", MAP(STRING(), INT()))))
            .expect(Row.of(createHashMap("key1", 123, "key2", null)))
            .expectErrorMessage("Failed to deserialize JSON '{\"map\":{\"key1\":\"123\", \"key2\":\"abc\"}}'."),

        TestSpec
            .json("{\"id\":\"2019-11-12T18:00:12\"}")
            .rowType(ROW(FIELD("id", TIMESTAMP_WITH_LOCAL_TIME_ZONE(0))))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12\"}'."),

        TestSpec
            .json("{\"id\":\"2019-11-12T18:00:12+0800\"}")
            .rowType(ROW(FIELD("id", TIMESTAMP_WITH_LOCAL_TIME_ZONE(0))))
            .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12+0800\"}'.")
    );

    /** Builds a two-entry map; used to express expected map results with null values. */
    private static Map<String, Integer> createHashMap(String k1, Integer v1, String k2, Integer v2) {
        Map<String, Integer> map = new HashMap<>();
        map.put(k1, v1);
        map.put(k2, v2);
        return map;
    }

    /** Converts internal {@link RowData} to an external {@link Row} for assertion. */
    @SuppressWarnings("unchecked")
    private static Row convertToExternal(RowData rowData, DataType dataType) {
        return (Row) DataFormatConverters.getConverterForDataType(dataType).toExternal(rowData);
    }

    /**
     * One parse test case: a JSON input, its row type, and either an expected
     * row (ignore-errors path) or an expected error message (failing path).
     */
    private static class TestSpec {
        private final String json;
        private RowType rowType;
        private TimestampFormat timestampFormat = TimestampFormat.SQL;
        private Row expected;
        private String errorMessage;

        private TestSpec(String json) {
            this.json = json;
        }

        public static TestSpec json(String json) {
            return new TestSpec(json);
        }

        TestSpec expect(Row row) {
            this.expected = row;
            return this;
        }

        TestSpec rowType(DataType rowType) {
            this.rowType = (RowType) rowType.getLogicalType();
            return this;
        }

        TestSpec expectErrorMessage(String errorMessage) {
            this.errorMessage = errorMessage;
            return this;
        }

        TestSpec timestampFormat(TimestampFormat timestampFormat){
            this.timestampFormat = timestampFormat;
            return this;
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.spatial.search.aggregations.metrics;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.geo.GeoEncodingUtils;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.geo.GeometryTestUtils;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.geometry.MultiPoint;
import org.elasticsearch.geometry.Point;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.InternalGeoBounds;
import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin;
import org.elasticsearch.xpack.spatial.index.fielddata.CentroidCalculator;
import org.elasticsearch.xpack.spatial.index.mapper.BinaryGeoShapeDocValuesField;
import org.elasticsearch.xpack.spatial.index.mapper.GeoShapeWithDocValuesFieldMapper;
import org.elasticsearch.xpack.spatial.search.aggregations.support.GeoShapeValuesSourceType;
import org.elasticsearch.xpack.spatial.util.GeoTestUtils;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.startsWith;
/**
 * Tests for the {@code geo_bounds} aggregation over {@code geo_shape} doc values:
 * covers empty indices, unmapped fields, the {@code missing} option (valid and
 * invalid), and bound accumulation over randomly generated multi-point shapes.
 */
public class GeoShapeBoundsAggregatorTests extends AggregatorTestCase {
    // Tolerance when comparing bounds derived from encoded coordinates
    // against the raw double coordinates used to generate the data.
    static final double GEOHASH_TOLERANCE = 1E-5D;

    @Override
    protected List<SearchPlugin> getSearchPlugins() {
        // Register the spatial plugin so the geo_shape values source resolves.
        return List.of(new LocalStateSpatialPlugin());
    }

    /** No documents at all: every bound stays infinite and the agg reports no value. */
    public void testEmpty() throws Exception {
        try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
            GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg")
                .field("field")
                .wrapLongitude(false);
            MappedFieldType fieldType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType();
            fieldType.setHasDocValues(true);
            fieldType.setName("field");
            try (IndexReader reader = w.getReader()) {
                IndexSearcher searcher = new IndexSearcher(reader);
                InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                // With no values observed, all six bound accumulators remain at
                // their infinite sentinels.
                assertTrue(Double.isInfinite(bounds.top));
                assertTrue(Double.isInfinite(bounds.bottom));
                assertTrue(Double.isInfinite(bounds.posLeft));
                assertTrue(Double.isInfinite(bounds.posRight));
                assertTrue(Double.isInfinite(bounds.negLeft));
                assertTrue(Double.isInfinite(bounds.negRight));
                assertFalse(AggregationInspectionHelper.hasValue(bounds));
            }
        }
    }

    /**
     * Aggregating on a field name that no document maps ("non_existent") must
     * behave like the empty case, even if unrelated documents exist.
     */
    public void testUnmappedFieldWithDocs() throws Exception {
        try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
            if (randomBoolean()) {
                // Optionally index a doc on a different field; it must not
                // contribute to the aggregation either way.
                Document doc = new Document();
                doc.add(new LatLonDocValuesField("field", 0.0, 0.0));
                w.addDocument(doc);
            }
            GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg")
                .field("non_existent")
                .wrapLongitude(false);
            MappedFieldType fieldType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType();
            fieldType.setHasDocValues(true);
            fieldType.setName("field");
            try (IndexReader reader = w.getReader()) {
                IndexSearcher searcher = new IndexSearcher(reader);
                InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                assertTrue(Double.isInfinite(bounds.top));
                assertTrue(Double.isInfinite(bounds.bottom));
                assertTrue(Double.isInfinite(bounds.posLeft));
                assertTrue(Double.isInfinite(bounds.posRight));
                assertTrue(Double.isInfinite(bounds.negLeft));
                assertTrue(Double.isInfinite(bounds.negRight));
                assertFalse(AggregationInspectionHelper.hasValue(bounds));
            }
        }
    }

    /**
     * A document without the target field picks up the {@code missing} WKT
     * point; the resulting bounds collapse to that single point, with the
     * opposite-hemisphere accumulators left at their infinite sentinels.
     */
    public void testMissing() throws Exception {
        try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
            Document doc = new Document();
            doc.add(new NumericDocValuesField("not_field", 1000L));
            w.addDocument(doc);
            MappedFieldType fieldType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType();
            fieldType.setHasDocValues(true);
            fieldType.setName("field");
            Point point = GeometryTestUtils.randomPoint(false);
            // Round-trip through the lucene encoding so equality assertions
            // below compare against exactly representable coordinates.
            double lon = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(point.getX()));
            double lat = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(point.getY()));
            Object missingVal = "POINT(" + lon + " " + lat + ")";
            GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg")
                .field("field")
                .missing(missingVal)
                .wrapLongitude(false);
            try (IndexReader reader = w.getReader()) {
                IndexSearcher searcher = new IndexSearcher(reader);
                InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                assertThat(bounds.top, equalTo(lat));
                assertThat(bounds.bottom, equalTo(lat));
                // Only the hemisphere containing lon is populated; the other
                // stays at its initial infinity.
                assertThat(bounds.posLeft, equalTo(lon >= 0 ? lon : Double.POSITIVE_INFINITY));
                assertThat(bounds.posRight, equalTo(lon >= 0 ? lon : Double.NEGATIVE_INFINITY));
                assertThat(bounds.negLeft, equalTo(lon >= 0 ? Double.POSITIVE_INFINITY : lon));
                assertThat(bounds.negRight, equalTo(lon >= 0 ? Double.NEGATIVE_INFINITY : lon));
            }
        }
    }

    /** A non-parsable {@code missing} value must fail fast at search time. */
    public void testInvalidMissing() throws Exception {
        try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
            Document doc = new Document();
            doc.add(new NumericDocValuesField("not_field", 1000L));
            w.addDocument(doc);
            MappedFieldType fieldType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType();
            fieldType.setHasDocValues(true);
            fieldType.setName("field");
            GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg")
                .field("field")
                .missing("invalid")
                .wrapLongitude(false);
            try (IndexReader reader = w.getReader()) {
                IndexSearcher searcher = new IndexSearcher(reader);
                IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
                    () -> search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType));
                assertThat(exception.getMessage(), startsWith("Unknown geometry type"));
            }
        }
    }

    /**
     * Indexes random multi-point shapes while tracking the expected bounds by
     * hand, then checks the aggregation agrees within {@link #GEOHASH_TOLERANCE}.
     */
    public void testRandomShapes() throws Exception {
        // Expected bounds, accumulated alongside the indexed points.
        double top = Double.NEGATIVE_INFINITY;
        double bottom = Double.POSITIVE_INFINITY;
        double posLeft = Double.POSITIVE_INFINITY;
        double posRight = Double.NEGATIVE_INFINITY;
        double negLeft = Double.POSITIVE_INFINITY;
        double negRight = Double.NEGATIVE_INFINITY;
        int numDocs = randomIntBetween(50, 100);
        try (Directory dir = newDirectory();
                RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
            for (int i = 0; i < numDocs; i++) {
                Document doc = new Document();
                int numValues = randomIntBetween(1, 5);
                List<Point> points = new ArrayList<>();
                for (int j = 0; j < numValues; j++) {
                    Point point = GeometryTestUtils.randomPoint(false);
                    points.add(point);
                    if (point.getLat() > top) {
                        top = point.getLat();
                    }
                    if (point.getLat() < bottom) {
                        bottom = point.getLat();
                    }
                    // Longitudes are tracked per hemisphere (wrapLongitude=false).
                    if (point.getLon() >= 0 && point.getLon() < posLeft) {
                        posLeft = point.getLon();
                    }
                    if (point.getLon() >= 0 && point.getLon() > posRight) {
                        posRight = point.getLon();
                    }
                    if (point.getLon() < 0 && point.getLon() < negLeft) {
                        negLeft = point.getLon();
                    }
                    if (point.getLon() < 0 && point.getLon() > negRight) {
                        negRight = point.getLon();
                    }
                }
                Geometry geometry = new MultiPoint(points);
                doc.add(new BinaryGeoShapeDocValuesField("field", GeoTestUtils.toDecodedTriangles(geometry),
                    new CentroidCalculator(geometry)));
                w.addDocument(doc);
            }
            GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg")
                .field("field")
                .wrapLongitude(false);
            MappedFieldType fieldType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType();
            fieldType.setHasDocValues(true);
            fieldType.setName("field");
            try (IndexReader reader = w.getReader()) {
                IndexSearcher searcher = new IndexSearcher(reader);
                InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
                assertThat(bounds.top, closeTo(top, GEOHASH_TOLERANCE));
                assertThat(bounds.bottom, closeTo(bottom, GEOHASH_TOLERANCE));
                assertThat(bounds.posLeft, closeTo(posLeft, GEOHASH_TOLERANCE));
                assertThat(bounds.posRight, closeTo(posRight, GEOHASH_TOLERANCE));
                assertThat(bounds.negRight, closeTo(negRight, GEOHASH_TOLERANCE));
                assertThat(bounds.negLeft, closeTo(negLeft, GEOHASH_TOLERANCE));
                assertTrue(AggregationInspectionHelper.hasValue(bounds));
            }
        }
    }

    @Override
    protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
        return new GeoBoundsAggregationBuilder("foo").field(fieldName);
    }

    @Override
    protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
        // geo_bounds supports both geo_point and geo_shape values sources.
        return List.of(CoreValuesSourceType.GEOPOINT, GeoShapeValuesSourceType.instance());
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.io.orc;
import java.io.IOException;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.orc.FileMetadata;
import org.apache.orc.impl.MemoryManager;
import org.apache.orc.TypeDescription;
import org.apache.orc.impl.OrcTail;
/**
* Contains factory methods to read or write ORC files.
*/
/**
 * Contains factory methods to read or write ORC files.
 *
 * <p>This is Hive's facade over {@code org.apache.orc.OrcFile}: the nested
 * option classes delegate every setter to the ORC superclass but narrow the
 * return type so fluent chains keep the Hive-specific subtype.
 */
public final class OrcFile extends org.apache.orc.OrcFile {

  // unused
  protected OrcFile() {}

  /**
   * Create an ORC file reader.
   * @param fs file system
   * @param path file name to read from
   * @return a new ORC file reader.
   * @throws IOException
   */
  public static Reader createReader(FileSystem fs,
                                    Path path) throws IOException {
    ReaderOptions opts = new ReaderOptions(new Configuration());
    opts.filesystem(fs);
    return new ReaderImpl(path, opts);
  }

  /**
   * Reader options; each setter delegates to the ORC superclass and returns
   * this Hive subtype so calls can be chained.
   */
  public static class ReaderOptions extends org.apache.orc.OrcFile.ReaderOptions {
    public ReaderOptions(Configuration conf) {
      super(conf);
    }

    /** Sets the filesystem to read from. */
    public ReaderOptions filesystem(FileSystem fs) {
      super.filesystem(fs);
      return this;
    }

    /** Sets the maximum number of bytes of the file to process. */
    public ReaderOptions maxLength(long val) {
      super.maxLength(val);
      return this;
    }

    /** Supplies already-parsed file metadata, avoiding a re-read. */
    public ReaderOptions fileMetadata(FileMetadata metadata) {
      super.fileMetadata(metadata);
      return this;
    }

    /** Supplies a cached file tail, avoiding a footer read. */
    public ReaderOptions orcTail(OrcTail orcTail) {
      super.orcTail(orcTail);
      return this;
    }
  }

  /** Creates reader options backed by the given configuration. */
  public static ReaderOptions readerOptions(Configuration conf) {
    return new ReaderOptions(conf);
  }

  /** Creates an ORC reader for the given path with explicit options. */
  public static Reader createReader(Path path,
                                    ReaderOptions options) throws IOException {
    return new ReaderImpl(path, options);
  }

  /**
   * Options for creating ORC file writers.
   */
  public static class WriterOptions extends org.apache.orc.OrcFile.WriterOptions {
    // True once setSchema() was called explicitly; inspector() then must not
    // overwrite the schema derived from the object inspector.
    private boolean explicitSchema = false;
    private ObjectInspector inspector = null;
    // Setting the default batch size to 1000 makes the memory check at 5000
    // rows work the same as the row by row writer. (If it was the default 1024,
    // the smallest stripe size would be 5120 rows, which changes the output
    // of some of the tests.)
    private int batchSize = 1000;

    WriterOptions(Properties tableProperties, Configuration conf) {
      super(tableProperties, conf);
    }

    /**
     * A required option that sets the object inspector for the rows. If
     * setSchema is not called, it also defines the schema.
     */
    public WriterOptions inspector(ObjectInspector value) {
      this.inspector = value;
      if (!explicitSchema) {
        super.setSchema(OrcInputFormat.convertTypeInfo(
            TypeInfoUtils.getTypeInfoFromObjectInspector(value)));
      }
      return this;
    }

    /**
     * Set the schema for the file. This is a required parameter.
     * @param schema the schema for the file.
     * @return this
     */
    public WriterOptions setSchema(TypeDescription schema) {
      this.explicitSchema = true;
      super.setSchema(schema);
      return this;
    }

    /**
     * Provide the filesystem for the path, if the client has it available.
     * If it is not provided, it will be found from the path.
     */
    public WriterOptions fileSystem(FileSystem value) {
      super.fileSystem(value);
      return this;
    }

    /**
     * Set the stripe size for the file. The writer stores the contents of the
     * stripe in memory until this memory limit is reached and the stripe
     * is flushed to the HDFS file and the next stripe started.
     */
    public WriterOptions stripeSize(long value) {
      super.stripeSize(value);
      return this;
    }

    /**
     * Set the file system block size for the file. For optimal performance,
     * set the block size to be multiple factors of stripe size.
     */
    public WriterOptions blockSize(long value) {
      super.blockSize(value);
      return this;
    }

    /**
     * Set the distance between entries in the row index. The minimum value is
     * 1000 to prevent the index from overwhelming the data. If the stride is
     * set to 0, no indexes will be included in the file.
     */
    public WriterOptions rowIndexStride(int value) {
      super.rowIndexStride(value);
      return this;
    }

    /**
     * The size of the memory buffers used for compressing and storing the
     * stripe in memory.
     */
    public WriterOptions bufferSize(int value) {
      super.bufferSize(value);
      return this;
    }

    /**
     * Sets whether the HDFS blocks are padded to prevent stripes from
     * straddling blocks. Padding improves locality and thus the speed of
     * reading, but costs space.
     */
    public WriterOptions blockPadding(boolean value) {
      super.blockPadding(value);
      return this;
    }

    /**
     * Sets the encoding strategy that is used to encode the data.
     */
    public WriterOptions encodingStrategy(EncodingStrategy strategy) {
      super.encodingStrategy(strategy);
      return this;
    }

    /**
     * Sets the tolerance for block padding as a percentage of stripe size.
     */
    public WriterOptions paddingTolerance(double value) {
      super.paddingTolerance(value);
      return this;
    }

    /**
     * Comma separated values of column names for which bloom filter is to be created.
     */
    public WriterOptions bloomFilterColumns(String columns) {
      super.bloomFilterColumns(columns);
      return this;
    }

    /**
     * Specify the false positive probability for bloom filter.
     * @param fpp - false positive probability
     * @return this
     */
    public WriterOptions bloomFilterFpp(double fpp) {
      super.bloomFilterFpp(fpp);
      return this;
    }

    /**
     * Sets the generic compression that is used to compress the data, given
     * as the Hive-level {@code CompressionKind} (unwrapped before delegating).
     */
    public WriterOptions compress(CompressionKind value) {
      super.compress(value.getUnderlying());
      return this;
    }

    /**
     * Sets the generic compression that is used to compress the data.
     */
    public WriterOptions compress(org.apache.orc.CompressionKind value) {
      super.compress(value);
      return this;
    }

    /**
     * Sets the version of the file that will be written.
     */
    public WriterOptions version(Version value) {
      super.version(value);
      return this;
    }

    /**
     * Add a listener for when the stripe and file are about to be closed.
     * @param callback the object to be called when the stripe is closed
     * @return this
     */
    public WriterOptions callback(WriterCallback callback) {
      super.callback(callback);
      return this;
    }

    /**
     * A package local option to set the memory manager.
     */
    protected WriterOptions memory(MemoryManager value) {
      super.memory(value);
      return this;
    }

    /** Package-local: overrides the vectorized batch size (see field note). */
    protected WriterOptions batchSize(int maxSize) {
      batchSize = maxSize;
      return this;
    }

    /** Returns the object inspector set via {@link #inspector}, if any. */
    ObjectInspector getInspector() {
      return inspector;
    }

    /** Returns the configured vectorized batch size. */
    int getBatchSize() {
      return batchSize;
    }
  }

  /**
   * Create a set of writer options based on a configuration.
   * @param conf the configuration to use for values
   * @return A WriterOptions object that can be modified
   */
  public static WriterOptions writerOptions(Configuration conf) {
    return new WriterOptions(null, conf);
  }

  /**
   * Create a set of write options based on a set of table properties and
   * configuration.
   * @param tableProperties the properties of the table
   * @param conf the configuration of the query
   * @return a WriterOptions object that can be modified
   */
  public static WriterOptions writerOptions(Properties tableProperties,
                                            Configuration conf) {
    return new WriterOptions(tableProperties, conf);
  }

  /**
   * Create an ORC file writer. This is the public interface for creating
   * writers going forward and new options will only be added to this method.
   * @param path filename to write to
   * @param opts the options
   * @return a new ORC file writer
   * @throws IOException
   */
  public static Writer createWriter(Path path,
                                    WriterOptions opts
                                    ) throws IOException {
    // Resolve the filesystem from the path unless the caller supplied one.
    FileSystem fs = opts.getFileSystem() == null ?
        path.getFileSystem(opts.getConfiguration()) : opts.getFileSystem();
    return new WriterImpl(fs, path, opts);
  }

  /**
   * Create an ORC file writer. This method is provided for API backward
   * compatibility with Hive 0.11.
   * @param fs file system
   * @param path filename to write to
   * @param inspector the ObjectInspector that inspects the rows
   * @param stripeSize the number of bytes in a stripe
   * @param compress how to compress the file
   * @param bufferSize the number of bytes to compress at once
   * @param rowIndexStride the number of rows between row index entries or
   *                       0 to suppress all indexes
   * @return a new ORC file writer
   * @throws IOException
   */
  public static Writer createWriter(FileSystem fs,
                                    Path path,
                                    Configuration conf,
                                    ObjectInspector inspector,
                                    long stripeSize,
                                    CompressionKind compress,
                                    int bufferSize,
                                    int rowIndexStride) throws IOException {
    return createWriter(path, writerOptions(conf)
        .inspector(inspector)
        .fileSystem(fs)
        .stripeSize(stripeSize)
        .compress(compress)
        .bufferSize(bufferSize)
        .rowIndexStride(rowIndexStride));
  }
}
| |
package de.julian.baehr.commandLine3;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import de.jreflect4.ClassReflector;
import de.jreflect4.IInstanceReflector;
import de.jreflect4.SecurityModelImpl;
import de.julian.baehr.commandLine3.check.IFloatCheck;
import de.julian.baehr.commandLine3.check.ICheck;
import de.julian.baehr.commandLine3.check.IIntegerCheck;
import lombok.Getter;
import lombok.NonNull;
import lombok.val;
/**
 * Reflection-driven command line parser: options are declared as fields of the
 * target class {@code T} (annotated with {@code @ShortName}, {@code @LongName},
 * {@code @Description}, {@code @Default}), and {@link #parse(String...)} fills
 * a fresh instance from the argument array.
 *
 * <p>The arrays {@code allowedTypes}, {@code typeChecks} and
 * {@code conversionFunctions} are index-parallel: position {@code i} in each
 * refers to the same boxed type. Keep them in sync when editing.
 */
public class CommandLineInterface<T> {

	// String -> Object converter used to turn an argument token into a field value.
	static interface ConversionFunction extends Function<String, Object>{}

	private IInstanceReflector<T> reflector;
	// Per-option list of validation checks; the first entry is always the type check.
	private Map<Option, List<ICheck>> optionChecks = new HashMap<>();
	// Positional (non-option) arguments collected during the last parse.
	private ArrayList<String> argumentsList = new ArrayList<>();
	private List<StateValidator> stateValidators = new LinkedList<>();
	// Key of the state that validated during the last parse (lombok generates the getter).
	@Getter private String stateKey;

	// Index constants into the parallel arrays below; only BOOL is needed in code.
	private final int BOOL = 0;//, CHAR = 1, BYTE = 2, SHORT = 3, INT = 4, LONG = 5, FLOAT = 6, DOUBLE = 7, STRING = 8;
	private final Class<?>[] allowedTypes = {Boolean.class, Character.class, Byte.class, Short.class, Integer.class, Long.class, Float.class, Double.class, String.class};

	// Per-type checks verifying that a raw token is convertible (parallel to allowedTypes).
	private final ICheck[] typeChecks = {
			b -> true,
			c -> c.length() == 1,
			b -> {try{Byte.parseByte(b); return true;}catch(NumberFormatException e){return false;}},
			s -> {try{Short.parseShort(s); return true;}catch(NumberFormatException e){return false;}},
			i -> {try{Integer.parseInt(i); return true;}catch(NumberFormatException e){return false;}},
			l -> {try{Long.parseLong(l); return true;}catch(NumberFormatException e){return false;}},
			f -> {try{Float.parseFloat(f); return true;}catch(NumberFormatException e){return false;}},
			d -> {try{Double.parseDouble(d); return true;}catch(NumberFormatException e){return false;}},
			s -> true
	};

	// Per-type conversion functions (parallel to allowedTypes).
	private final ConversionFunction[] conversionFunctions = {
			b -> Boolean.parseBoolean(b),
			c -> c.charAt(0),
			b -> Byte.parseByte(b),
			s -> Short.parseShort(s),
			i -> Integer.parseInt(i),
			l -> Long.parseLong(l),
			f -> Float.parseFloat(f),
			d -> Double.parseDouble(d),
			s -> s
	};

	/**
	 * Scans {@code clazz} for option fields and registers one {@link Option}
	 * per field, with its type check pre-installed.
	 *
	 * @throws IllegalArgumentException if the class has no empty constructor
	 * @throws IllegalFieldTypeException if a field has a non-supported type
	 * @throws OptionNameConflictException if two fields share an option name
	 */
	public CommandLineInterface(@NonNull Class<T> clazz) {
		ClassReflector<T> clsReflector = new ClassReflector<>(clazz, SecurityModelImpl.allowAll());
		//check if empty public constructor exists
		try {
			reflector = clsReflector.create();
		} catch (SecurityException e) {
			throw new IllegalArgumentException("'" + clazz.getName() + "' does not have an empty constructor!");
		}
		Field[] fields = reflector.getFields();
		for(val field : fields){
			Class<?> type = field.getType();
			if(!isOfAllowedType(type))
				throw new IllegalFieldTypeException(type);
			int typeIndex = getAllowedTypeIndex(type);
			//get names: short name only from annotation, long name defaults to the field name
			String shortName = null;
			if(field.isAnnotationPresent(ShortName.class))
				shortName = field.getAnnotation(ShortName.class).value();
			String longName = field.getName();
			if(field.isAnnotationPresent(LongName.class))
				longName = field.getAnnotation(LongName.class).value();
			//check if names are available
			checkOptionNameAvailablility(shortName, longName);
			//get description
			String description = null;
			if(field.isAnnotationPresent(Description.class))
				description = field.getAnnotation(Description.class).value();
			//boolean fields are flags: present/absent, no value token follows them
			boolean flag = typeIndex == BOOL;
			String defaultValue = null;
			if(field.isAnnotationPresent(Default.class))
				defaultValue = field.getAnnotation(Default.class).value();
			val option = new Option(shortName, longName, flag, description, field, defaultValue);
			optionChecks.put(option, new LinkedList<>());
			//add type check as the first check for this option
			optionChecks.get(option).add(typeChecks[typeIndex]);
		}
	}

	/**
	 * Rejects short/long names already taken by a registered option.
	 * (Name kept as-is for compatibility; note the "Availablility" typo.)
	 */
	private void checkOptionNameAvailablility(String shortName, String longName){
		for(val option : optionChecks.keySet()){
			if(shortName != null)
				if(option.hasShortName())
					if(option.getShortName().equals(shortName))
						throw new OptionNameConflictException(true, shortName);
			if(longName != null)
				if(option.hasLongName())
					if(option.getLongName().equals(longName))
						throw new OptionNameConflictException(false, longName);
		}
	}

	/** True if the field type is one of the supported boxed types. */
	private boolean isOfAllowedType(Class<?> fieldType){
		return getAllowedTypeIndex(fieldType) != -1;
	}

	/** Index of the type in the parallel arrays, or -1 if unsupported. */
	private int getAllowedTypeIndex(Class<?> fieldType){
		for(int i = 0; i < allowedTypes.length; i++){
			if(allowedTypes[i].equals(fieldType))
				return i;
		}
		return -1;
	}

	/**
	 * Registers an additional validation check for the option backed by
	 * {@code fieldName}.
	 *
	 * @throws IllegalArgumentException if the target class has no such field
	 */
	public void addCheck(String fieldName, ICheck check){
		if(!reflector.containsField(fieldName))
			throw new IllegalArgumentException("No field '" + fieldName + "' in '" + reflector.getReflectingClass().getName() + "'!");
		optionChecks.get(findOptionByFieldName(fieldName)).add(check);
	}

	/** Convenience overload for integer-typed checks. */
	public void addIntCheck(String fieldName, IIntegerCheck check){
		addCheck(fieldName, check);
	}

	/** Convenience overload for float-typed checks. */
	public void addFloatCheck(String fieldName, IFloatCheck check){
		addCheck(fieldName, check);
	}

	/** Looks up the option whose backing field has the given name. */
	Option findOptionByFieldName(String fieldName){
		for(val option : optionChecks.keySet())
			if(option.getField().getName().equals(fieldName))
				return option;
		//this should never happen; if it does, it is a serious bug
		throw new NullPointerException("There is no option for the field '" + fieldName + "'!");
	}

	/** True if the token names a registered option (uses the lookup's NPE as the miss signal). */
	private boolean isOption(String potentialOptionName){
		try{
			findOptionByName(potentialOptionName);
			return true;
		}catch(NullPointerException e){
			return false;
		}
	}

	/**
	 * Resolves an option by its (possibly dash-prefixed) short or long name.
	 *
	 * @throws NullPointerException if no option matches
	 */
	private Option findOptionByName(String optionName) {
		String orgName = optionName;
		// A single character after stripping dashes is treated as a short name.
		if(optionName.replaceFirst("-*", "").length() == 1){
			//short name
			optionName = optionName.replaceFirst("-*", "");
		}else{
			//long name
			optionName = Option.fixLongName(optionName);
		}
		for(val option : optionChecks.keySet()){
			if(option.hasShortName())
				if(option.getShortName().equals(optionName))
					return option;
			if(option.hasLongName())
				if(option.getLongName().equals(optionName))
					return option;
		}
		throw new NullPointerException("There is no option with the name '" + orgName + "'!");
	}

	/**
	 * Registers a new named state; exactly one state must validate per parse.
	 *
	 * @throws IllegalArgumentException if the key is already in use
	 */
	public IStateValidator defineState(String stateKey){
		//check state key uniqueness
		for(val validator : stateValidators)
			if(validator.getKey().equals(stateKey))
				throw new IllegalArgumentException("State key '" + stateKey + "' is already taken!");
		StateValidator validator = new StateValidator(reflector, stateKey);
		stateValidators.add(validator);
		return validator;
	}

	/**
	 * Parses the arguments into a populated instance of {@code T}.
	 *
	 * <p>Order of operations: help handling (may exit the JVM), option/flag
	 * assignment, collection of trailing positional arguments, state
	 * validation, then default/flag back-filling.
	 *
	 * @throws CommandLineParsingException if an option is missing its value
	 * @throws OptionCheckException if a value fails one of its checks
	 * @throws InvalidStateException unless exactly one state validates
	 */
	public T parse(String...args){
		argumentsList.clear();
		stateKey = null;
		if(args != null && args.length > 0){
			//check for ?, h and help as the first argument
			String first = args[0].replaceFirst("-*", "").toUpperCase();
			if(first.equals("H") || first.equals("HELP") || first.equals("?")){
				//print help page
				System.out.println(getHelpMessage());
				//end program
				System.exit(0);
				//The program should not continue to run!!!
			}
		}
		for(int i = 0; args != null && i < args.length; i++){
			//get rid of starting - signs (gnu and posix styles allowed)
			String optionName = args[i].replaceFirst("-*", "");
			if(isOption(optionName)){
				Option option = findOptionByName(optionName);
				if(option.isFlag()){
					reflector.setValue(option.getField().getName(), true);
				}else{
					if(i + 1 >= args.length)
						throw new CommandLineParsingException(optionName);
					// consume the next token as the option's value (i advances past it)
					String valueString = args[i++ +1];
					//run checks for the option argument
					for(val check : optionChecks.get(option))
						if(!check.check(valueString))
							throw new OptionCheckException(optionName, valueString);
					int typeIndex = getAllowedTypeIndex(option.getField().getType());
					reflector.setValue(option.getField().getName(), conversionFunctions[typeIndex].apply(valueString));
				}
			}else{
				//first non-option token: everything from here on is a positional argument
				for(int n = i; n < args.length; n++, i++){
					argumentsList.add(args[n]);
				}
			}
		}
		//check state validity
		//if no validators exist it is automatically a valid state
		int validStates = 0;
		if(stateValidators.size() != 0){
			for(val validator : stateValidators)
				if(validator.validate(reflector)){
					stateKey = validator.getKey();
					validStates++;
				}
		}else{
			//no validators exist -> valid
			validStates = 1;
		}
		if(validStates != 1){
			System.out.println("Use -h, -help or ? for help.");
			throw new InvalidStateException(validStates);
		}
		//back-fill @Default values and set all missing flags to false for convenience
		Field[] fields = reflector.getFields();
		for(val field : fields)
			if(field.isAnnotationPresent(Default.class)){
				//set to auto converted default value
				//only type checks run here:
				//it is assumed that the default value fits any specified checks
				String value = field.getAnnotation(Default.class).value();
				int typeIndex = getAllowedTypeIndex(field.getType());
				if(!typeChecks[typeIndex].check(value))
					throw new IllegalArgumentException("Default value '" + value + "' for field '" + field.getName() + "' in '" + reflector.getClassName() + "' cannot be converted to '" + field.getType().getName() + "'!");
				reflector.setValue(field.getName(), conversionFunctions[typeIndex].apply(value));
			}else if(field.getType().equals(Boolean.class))
				if(reflector.getValue(field.getName()) == null)
					reflector.setValue(field.getName(), false);
		return reflector.getObject();
	}

	/** Builds the help text: one synopsis line per state, then one line per option. */
	private String getHelpMessage(){
		StringBuilder sb = new StringBuilder();
		for(val validator : stateValidators)
			sb.append(validator.makeSynopsisString(this)).append("\n");
		sb.append("\n");
		Field[] fields = reflector.getFields();
		for(val field : fields){
			sb.append("-");
			val option = findOptionByFieldName(field.getName());
			if(option.hasLongName())
				sb.append(option.getLongName());
			if(option.hasLongName() && option.hasShortName())
				sb.append("/-");
			if(option.hasShortName())
				sb.append(option.getShortName());
			if(option.hasDescription()){
				//multi-line descriptions are indented under the option line
				String[] descLines = option.getDescription().split("\n");
				sb.append(" : ").append(descLines[0]);
				for(int i = 1; i < descLines.length; i++){
					sb.append("\n\t").append(descLines[i]);
				}
			}
			sb.append("\n");
		}
		return sb.toString();
	}

	/** Returns a defensive copy of the positional arguments from the last parse. */
	@SuppressWarnings("unchecked")
	public ArrayList<String> getArguments(){
		return (ArrayList<String>) argumentsList.clone();
	}
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow;
import java.io.IOException;
import java.net.SocketAddress;
import java.nio.channels.ClosedChannelException;
import io.undertow.predicate.PredicateBuilder;
import io.undertow.protocols.http2.HpackException;
import io.undertow.server.handlers.builder.HandlerBuilder;
import org.jboss.logging.Messages;
import org.jboss.logging.annotations.Cause;
import org.jboss.logging.annotations.Message;
import org.jboss.logging.annotations.MessageBundle;
import javax.net.ssl.SSLPeerUnverifiedException;
/**
* @author Stuart Douglas
*/
@MessageBundle(projectCode = "UT")
public interface UndertowMessages {
UndertowMessages MESSAGES = Messages.getBundle(UndertowMessages.class);
@Message(id = 1, value = "Maximum concurrent requests must be larger than zero.")
IllegalArgumentException maximumConcurrentRequestsMustBeLargerThanZero();
@Message(id = 2, value = "The response has already been started")
IllegalStateException responseAlreadyStarted();
// id = 3
@Message(id = 4, value = "getResponseChannel() has already been called")
IllegalStateException responseChannelAlreadyProvided();
@Message(id = 5, value = "getRequestChannel() has already been called")
IllegalStateException requestChannelAlreadyProvided();
// id = 6
// id = 7
@Message(id = 8, value = "Handler cannot be null")
IllegalArgumentException handlerCannotBeNull();
@Message(id = 9, value = "Path must be specified")
IllegalArgumentException pathMustBeSpecified();
@Message(id = 10, value = "Session not found %s")
IllegalStateException sessionNotFound(final String session);
@Message(id = 11, value = "Session manager must not be null")
IllegalStateException sessionManagerMustNotBeNull();
@Message(id = 12, value = "Session manager was not attached to the request. Make sure that the SessionAttachmentHandler is installed in the handler chain")
IllegalStateException sessionManagerNotFound();
@Message(id = 13, value = "Argument %s cannot be null")
IllegalArgumentException argumentCannotBeNull(final String argument);
@Message(id = 14, value = "close() called with data still to be flushed. Please call shutdownWrites() and then call flush() until it returns true before calling close()")
IOException closeCalledWithDataStillToBeFlushed();
@Message(id = 16, value = "Could not add cookie as cookie handler was not present in the handler chain")
IllegalStateException cookieHandlerNotPresent();
@Message(id = 17, value = "Form value is a file, use getFile() instead")
IllegalStateException formValueIsAFile();
@Message(id = 18, value = "Form value is a String, use getValue() instead")
IllegalStateException formValueIsAString();
@Message(id = 19, value = "Connection from %s terminated as request entity was larger than %s")
IOException requestEntityWasTooLarge(SocketAddress address, long size);
@Message(id = 20, value = "Connection terminated as request was larger than %s")
IOException requestEntityWasTooLarge(long size);
@Message(id = 21, value = "Session already invalidated")
IllegalStateException sessionAlreadyInvalidated();
@Message(id = 22, value = "The specified hash algorithm '%s' can not be found.")
IllegalArgumentException hashAlgorithmNotFound(String algorithmName);
@Message(id = 23, value = "An invalid Base64 token has been received.")
IllegalArgumentException invalidBase64Token(@Cause final IOException cause);
@Message(id = 24, value = "An invalidly formatted nonce has been received.")
IllegalArgumentException invalidNonceReceived();
@Message(id = 25, value = "Unexpected token '%s' within header.")
IllegalArgumentException unexpectedTokenInHeader(final String name);
@Message(id = 26, value = "Invalid header received.")
IllegalArgumentException invalidHeader();
@Message(id = 27, value = "Could not find session cookie config in the request")
IllegalStateException couldNotFindSessionCookieConfig();
@Message(id = 28, value = "Session %s already exists")
IllegalStateException sessionAlreadyExists(final String id);
@Message(id = 29, value = "Channel was closed mid chunk, if you have attempted to write chunked data you cannot shutdown the channel until after it has all been written.")
IOException chunkedChannelClosedMidChunk();
@Message(id = 30, value = "User %s successfully authenticated.")
String userAuthenticated(final String userName);
@Message(id = 31, value = "User %s has logged out.")
String userLoggedOut(final String userName);
@Message(id = 33, value = "Authentication type %s cannot be combined with %s")
IllegalStateException authTypeCannotBeCombined(String type, String existing);
@Message(id = 34, value = "Stream is closed")
IOException streamIsClosed();
@Message(id = 35, value = "Cannot get stream as startBlocking has not been invoked")
IllegalStateException startBlockingHasNotBeenCalled();
@Message(id = 36, value = "Connection terminated parsing multipart data")
IOException connectionTerminatedReadingMultiPartData();
@Message(id = 37, value = "Failed to parse path in HTTP request")
RuntimeException failedToParsePath();
@Message(id = 38, value = "Authentication failed, requested user name '%s'")
String authenticationFailed(final String userName);
@Message(id = 39, value = "To many query parameters, cannot have more than %s query parameters")
RuntimeException tooManyQueryParameters(int noParams);
@Message(id = 40, value = "To many headers, cannot have more than %s header")
RuntimeException tooManyHeaders(int noParams);
@Message(id = 41, value = "Channel is closed")
ClosedChannelException channelIsClosed();
@Message(id = 42, value = "Could not decode trailers in HTTP request")
IOException couldNotDecodeTrailers();
@Message(id = 43, value = "Data is already being sent. You must wait for the completion callback to be be invoked before calling send() again")
IllegalStateException dataAlreadyQueued();
@Message(id = 44, value = "More than one predicate with name %s. Builder class %s and %s")
IllegalStateException moreThanOnePredicateWithName(String name, Class<? extends PredicateBuilder> aClass, Class<? extends PredicateBuilder> existing);
@Message(id = 45, value = "Error parsing predicate string %s:%n%s")
IllegalArgumentException errorParsingPredicateString(String reason, String s);
@Message(id = 46, value = "The number of cookies sent exceeded the maximum of %s")
IllegalStateException tooManyCookies(int maxCookies);
@Message(id = 47, value = "The number of parameters exceeded the maximum of %s")
IllegalStateException tooManyParameters(int maxValues);
@Message(id = 48, value = "No request is currently active")
IllegalStateException noRequestActive();
@Message(id = 50, value = "AuthenticationMechanism Outcome is null")
IllegalStateException authMechanismOutcomeNull();
@Message(id = 51, value = "Not a valid IP pattern %s")
IllegalArgumentException notAValidIpPattern(String peer);
@Message(id = 52, value = "Session data requested when non session based authentication in use")
IllegalStateException noSessionData();
@Message(id = 53, value = "Listener %s already registered")
IllegalArgumentException listenerAlreadyRegistered(String name);
@Message(id = 54, value = "The maximum size %s for an individual file in a multipart request was exceeded")
IOException maxFileSizeExceeded(long maxIndividualFileSize);
@Message(id = 55, value = "Could not set attribute %s to %s as it is read only")
String couldNotSetAttribute(String attributeName, String newValue);
@Message(id = 56, value = "Could not parse URI template %s, exception at char %s")
RuntimeException couldNotParseUriTemplate(String path, int i);
@Message(id = 57, value = "Mismatched braces in attribute string %s")
RuntimeException mismatchedBraces(String valueString);
@Message(id = 58, value = "More than one handler with name %s. Builder class %s and %s")
IllegalStateException moreThanOneHandlerWithName(String name, Class<? extends HandlerBuilder> aClass, Class<? extends HandlerBuilder> existing);
@Message(id = 59, value = "Invalid syntax %s")
IllegalArgumentException invalidSyntax(String line);
@Message(id = 60, value = "Error parsing handler string %s:%n%s")
IllegalArgumentException errorParsingHandlerString(String reason, String s);
@Message(id = 61, value = "Out of band responses only allowed for 100-continue requests")
IllegalArgumentException outOfBandResponseOnlyAllowedFor100Continue();
@Message(id = 62, value = "AJP does not support HTTP upgrade")
IllegalStateException ajpDoesNotSupportHTTPUpgrade();
@Message(id = 63, value = "File system watcher already started")
IllegalStateException fileSystemWatcherAlreadyStarted();
@Message(id = 64, value = "File system watcher not started")
IllegalStateException fileSystemWatcherNotStarted();
@Message(id = 65, value = "SSL must be specified to connect to a https URL")
IOException sslWasNull();
@Message(id = 66, value = "Incorrect magic number for AJP packet header")
IOException wrongMagicNumber();
@Message(id = 67, value = "No client cert was provided")
SSLPeerUnverifiedException peerUnverified();
@Message(id = 68, value = "Servlet path match failed")
IllegalArgumentException servletPathMatchFailed();
@Message(id = 69, value = "Could not parse set cookie header %s")
IllegalArgumentException couldNotParseCookie(String headerValue);
@Message(id = 70, value = "method can only be called by IO thread")
IllegalStateException canOnlyBeCalledByIoThread();
@Message(id = 71, value = "Cannot add path template %s, matcher already contains an equivalent pattern %s")
IllegalStateException matcherAlreadyContainsTemplate(String templateString, String templateString1);
@Message(id = 72, value = "Failed to decode url %s to charset %s")
IllegalArgumentException failedToDecodeURL(String s, String enc);
@Message(id = 73, value = "Resource change listeners are not supported")
IllegalArgumentException resourceChangeListenerNotSupported();
@Message(id = 74, value = "Could not renegotiate SSL connection to require client certificate, as client had sent more data")
IllegalStateException couldNotRenegotiate();
@Message(id = 75, value = "Object was freed")
IllegalStateException objectWasFreed();
@Message(id = 76, value = "Handler not shutdown")
IllegalStateException handlerNotShutdown();
@Message(id = 77, value = "The underlying transport does not support HTTP upgrade")
IllegalStateException upgradeNotSupported();
@Message(id = 78, value = "Renegotiation not supported")
IOException renegotiationNotSupported();
@Message(id = 79, value = "Not a valid user agent pattern %s")
IllegalArgumentException notAValidUserAgentPattern(String userAgent);
@Message(id = 80, value = "Not a valid regular expression pattern %s")
IllegalArgumentException notAValidRegularExpressionPattern(String pattern);
@Message(id = 81, value = "Bad request")
RuntimeException badRequest();
@Message(id = 82, value = "Host %s already registered")
RuntimeException hostAlreadyRegistered(Object host);
@Message(id = 83, value = "Host %s has not been registered")
RuntimeException hostHasNotBeenRegistered(Object host);
@Message(id = 84, value = "Attempted to write additional data after the last chunk")
IOException extraDataWrittenAfterChunkEnd();
@Message(id = 85, value = "Could not generate unique session id")
RuntimeException couldNotGenerateUniqueSessionId();
@Message(id = 86, value = "SPDY needs to be provided with a heap buffer pool, for use in compressing and decompressing headers.")
IllegalArgumentException mustProvideHeapBuffer();
@Message(id = 87, value = "Unexpected SPDY frame type %s")
IOException unexpectedFrameType(int type);
@Message(id = 88, value = "SPDY control frames cannot have body content")
IOException controlFrameCannotHaveBodyContent();
@Message(id = 89, value = "SPDY not supported")
IOException spdyNotSupported();
@Message(id = 90, value = "Jetty NPN not available")
IOException jettyNPNNotAvailable();
@Message(id = 91, value = "Buffer has already been freed")
IllegalStateException bufferAlreadyFreed();
@Message(id = 92, value = "A SPDY header was too large to fit in a response buffer, if you want to support larger headers please increase the buffer size")
IllegalStateException headersTooLargeToFitInHeapBuffer();
@Message(id = 93, value = "A SPDY stream was reset by the remote endpoint")
IOException spdyStreamWasReset();
@Message(id = 94, value = "Blocking await method called from IO thread. Blocking IO must be dispatched to a worker thread or deadlocks will result.")
IOException awaitCalledFromIoThread();
@Message(id = 95, value = "Recursive call to flushSenders()")
RuntimeException recursiveCallToFlushingSenders();
@Message(id = 96, value = "More data was written to the channel than specified in the content-length")
IllegalStateException fixedLengthOverflow();
@Message(id = 97, value = "AJP request already in progress")
IllegalStateException ajpRequestAlreadyInProgress();
@Message(id = 98, value = "HTTP ping data must be 8 bytes in length")
String httpPingDataMustBeLength8();
@Message(id = 99, value = "Received a ping of size other than 8")
String invalidPingSize();
@Message(id = 100, value = "stream id must be zero for frame type %s")
String streamIdMustBeZeroForFrameType(int frameType);
@Message(id = 101, value = "stream id must not be zero for frame type %s")
String streamIdMustNotBeZeroForFrameType(int frameType);
@Message(id = 102, value = "RST_STREAM received for idle stream")
String rstStreamReceivedForIdleStream();
@Message(id = 103, value = "Http2 stream was reset")
IOException http2StreamWasReset();
@Message(id = 104, value = "Incorrect HTTP2 preface")
IOException incorrectHttp2Preface();
@Message(id = 105, value = "HTTP2 frame to large")
IOException http2FrameTooLarge();
@Message(id = 106, value = "HTTP2 continuation frame received without a corresponding headers or push promise frame")
IOException http2ContinuationFrameNotExpected();
@Message(id = 107, value = "Huffman encoded value in HPACK headers did not end with EOS padding")
HpackException huffmanEncodedHpackValueDidNotEndWithEOS();
@Message(id = 108, value = "HPACK variable length integer encoded over too many octects, max is %s")
HpackException integerEncodedOverTooManyOctets(int maxIntegerOctets);
@Message(id = 109, value = "Zero is not a valid header table index")
HpackException zeroNotValidHeaderTableIndex();
@Message(id = 110, value = "Cannot send 100-Continue, getResponseChannel() has already been called")
IOException cannotSendContinueResponse();
@Message(id = 111, value = "Parser did not make progress")
IOException parserDidNotMakeProgress();
@Message(id = 112, value = "Only client side can call createStream, if you wish to send a PUSH_PROMISE frame use createPushPromiseStream instead")
IOException headersStreamCanOnlyBeCreatedByClient();
@Message(id = 113, value = "Only the server side can send a push promise stream")
IOException pushPromiseCanOnlyBeCreatedByServer();
@Message(id = 114, value = "Invalid IP access control rule %s. Format is: [ip-match] allow|deny")
IllegalArgumentException invalidAclRule(String rule);
@Message(id = 115, value = "Server received PUSH_PROMISE frame from client")
IOException serverReceivedPushPromise();
@Message(id = 116, value = "CONNECT not supported by this connector")
IllegalStateException connectNotSupported();
@Message(id = 117, value = "Request was not a CONNECT request")
IllegalStateException notAConnectRequest();
@Message(id = 118, value = "Cannot reset buffer, response has already been commited")
IllegalStateException cannotResetBuffer();
@Message(id = 119, value = "HTTP2 via prior knowledge failed")
IOException http2PriRequestFailed();
@Message(id = 120, value = "Out of band responses are not allowed for this connector")
IllegalStateException outOfBandResponseNotSupported();
@Message(id = 121, value = "Session was rejected as the maximum number of sessions (%s) has been hit")
IllegalStateException tooManySessions(int maxSessions);
@Message(id = 122, value = "CONNECT attempt failed as target proxy returned %s")
IOException proxyConnectionFailed(int responseCode);
}
| |
/**
* The MIT License (MIT)
*
* Copyright (c) 2011-2016 Incapture Technologies LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package rapture.dp.invocable.notification.steps;
import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import javax.mail.MessagingException;
import javax.mail.internet.AddressException;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import com.google.common.net.MediaType;
import rapture.common.CallingContext;
import rapture.common.RaptureURI;
import rapture.common.api.AdminApi;
import rapture.common.api.DecisionApi;
import rapture.common.dp.Worker;
import rapture.common.dp.WorkerStorage;
import rapture.common.exception.ExceptionToString;
import rapture.common.impl.jackson.JacksonUtil;
import rapture.dp.AbstractStep;
import rapture.dp.InvocableUtils;
import rapture.kernel.Kernel;
import rapture.kernel.dp.ExecutionContextUtil;
import rapture.mail.EmailTemplate;
import rapture.mail.Mailer;
// Should maybe subclass NotificationStep to provide other notification mechanisms?
// A step to notify the user
// * By email
// * By instant message app - Slack/WhatsApp/Pidgin
// * text message?
public class NotificationStep extends AbstractStep {
private static Logger log = Logger.getLogger(NotificationStep.class);
DecisionApi decision;
public NotificationStep(String workerUri, String stepName) {
super(workerUri, stepName);
decision = Kernel.getDecision();
}
private String previousStepName = "UNDEFINED";
@Override
public void preInvoke(CallingContext ctx) {
String psn = StringUtils.stripToNull(decision.getContextValue(ctx, getWorkerURI(), "STEPNAME"));
if (psn != null) previousStepName = psn;
}
@Override
public String invoke(CallingContext ctx) {
// Don't set STEPNAME here because we want the name of the preceding step
// Can read config from a documemnt or pass as args
AdminApi admin = Kernel.getAdmin();
String types = StringUtils.stripToNull(decision.getContextValue(ctx, getWorkerURI(), "NOTIFY_TYPE"));
if (types == null) {
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), "Problem in " + previousStepName + ": parameter NOTIFY_TYPE is not set", true);
return getErrorTransition();
}
StringBuffer error = new StringBuffer();
String retval = getNextTransition();
for (String type : types.split("[, ]+")) {
try {
if (type.equalsIgnoreCase("SLACK")) {
if (!sendSlack(ctx)) {
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), "Slack notification failed", true);
retval = getErrorTransition();
}
} else if (type.equalsIgnoreCase("EMAIL")) {
if (!sendEmail(ctx)) {
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), "Email notification failed", true);
retval = getErrorTransition();
}
} else if (type.equalsIgnoreCase("WORKFLOW")) {
if (!sendEmail(ctx) && !sendSlack(ctx)) {
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), "Notification failed", true);
retval = getErrorTransition();
}
} else {
String unsupported = "Unsupported notification type: " + type;
error.append(unsupported).append("\n");
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), unsupported, true);
retval = getErrorTransition();
}
} catch (Exception e) {
Throwable cause = ExceptionToString.getRootCause(e);
error.append("Cannot send ").append(type).append(" notification : ").append(cause.getLocalizedMessage()).append("\n");
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), "Problem in NotificationStep " + previousStepName + ": notification failed", true);
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), ExceptionToString.summary(cause), true);
log.error(ExceptionToString.format(ExceptionToString.getRootCause(e)));
retval = getErrorTransition();
}
}
String errMsg = error.toString();
if (!StringUtils.isEmpty(errMsg)) {
log.error(errMsg);
decision.setContextLiteral(ctx, getWorkerURI(), previousStepName, "Notification failure");
decision.setContextLiteral(ctx, getWorkerURI(), getErrName(), errMsg);
}
return retval;
}
private int doPost(URL url, byte[] body) throws IOException {
HttpURLConnection http = (HttpURLConnection) url.openConnection();
http.setFixedLengthStreamingMode(body.length);
http.setRequestProperty("Content-Type", MediaType.JSON_UTF_8.toString());
http.setRequestMethod("POST");
http.setDoOutput(true);
http.connect();
try (OutputStream stream = http.getOutputStream()) {
stream.write(body);
}
int response = http.getResponseCode();
http.disconnect();
return response;
}
public String renderTemplate(CallingContext ctx, String template) {
RaptureURI workUri = new RaptureURI(getWorkerURI());
String workOrder = workUri.toShortString();
Worker worker = WorkerStorage.readByFields(workOrder, workUri.getElement());
return ExecutionContextUtil.evalTemplateECF(ctx, workOrder, template, InvocableUtils.getLocalViewOverlay(worker));
}
private boolean sendSlack(CallingContext ctx) throws IOException {
String message = StringUtils.stripToNull(decision.getContextValue(ctx, getWorkerURI(), "MESSAGE_BODY"));
// Legacy: use template if values are not set
String templateName = StringUtils.stripToNull(decision.getContextValue(ctx, getWorkerURI(), "MESSAGE_TEMPLATE"));
String webhook = StringUtils.stripToNull(decision.getContextValue(ctx, getWorkerURI(), "SLACK_WEBHOOK"));
if (webhook == null) {
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), "Problem in " + previousStepName + ": No webhook specified", true);
return false;
}
if (message == null) {
if (templateName == null) {
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), "Problem in " + previousStepName + ": No message specified", true);
return false;
}
EmailTemplate template = Mailer.getEmailTemplate(ctx, templateName);
message = template.getMsgBody();
}
URL url = new URL(webhook);
Map<String, String> slackNotification = new HashMap<>();
slackNotification.put("text", renderTemplate(ctx, message));
int response = doPost(url, JacksonUtil.bytesJsonFromObject(slackNotification));
if (response == 200) {
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), previousStepName + ": slack notification sent successfully", false);
return true;
} else {
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(),
"Problem in " + previousStepName + ": slack notification failed with HTTP error code " + response, true);
return false;
}
}
private boolean sendEmail(CallingContext ctx) throws AddressException, MessagingException {
String message = StringUtils.stripToNull(decision.getContextValue(ctx, getWorkerURI(), "MESSAGE_BODY"));
String subject = StringUtils.stripToNull(decision.getContextValue(ctx, getWorkerURI(), "MESSAGE_SUBJECT"));
String recipientList = StringUtils.stripToNull(decision.getContextValue(ctx, getWorkerURI(), "EMAIL_RECIPIENTS"));
// Legacy: use template if values are not set
String templateName = StringUtils.stripToNull(decision.getContextValue(ctx, getWorkerURI(), "MESSAGE_TEMPLATE"));
if (templateName != null) {
EmailTemplate template = Mailer.getEmailTemplate(ctx, templateName);
if (template != null) {
if (message == null) message = template.getMsgBody();
if (subject == null) subject = template.getSubject();
if (recipientList == null) recipientList = template.getEmailTo();
}
}
if (subject == null) subject = "No Subject Given";
if (message == null) {
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), "Problem in " + previousStepName + ": No message specified", true);
return false;
}
if (recipientList == null) {
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), "Problem in " + previousStepName + ": No recipient specified", true);
return false;
}
try {
Mailer.email(renderTemplate(ctx, recipientList).split("[, ]+"), renderTemplate(ctx, subject), renderTemplate(ctx, message));
decision.writeWorkflowAuditEntry(ctx, getWorkerURI(), previousStepName + ": email notification sent successfully", false);
return true;
} catch (MessagingException e) {
log.warn("Unable to send email", e);
return false;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.geronimo.javamail.transport.nntp;
import java.io.BufferedReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javax.mail.MessagingException;
/**
 * Util class to represent a reply from a NNTP server: a 3-digit status code
 * plus the trailing message text, with optional multi-line data.
 *
 * @version $Rev$ $Date$
 */
public class NNTPReply {
    // general server responses
    public static final int POSTING_ALLOWED = 200;
    public static final int NO_POSTING_ALLOWED = 201;
    public static final int EXTENSIONS_SUPPORTED = 202;
    public static final int SERVICE_DISCONTINUED = 400;
    public static final int COMMAND_NOT_RECOGNIZED = 500;
    public static final int COMMAND_SYNTAX_ERROR = 501;
    public static final int PERMISSION_DENIED = 502;
    public static final int PROGRAM_FAULT = 503;

    // article responses
    public static final int ARTICLE_FOLLOWS = 220;
    public static final int HEAD_FOLLOWS = 221;
    public static final int BODY_FOLLOWS = 222;
    public static final int REQUEST_TEXT_SEPARATELY = 223;
    public static final int OVERVIEW_FOLLOWS = 224;
    public static final int NEW_ARTICLES_FOLLOWS = 230;
    public static final int NEW_GROUPS_FOLLOWS = 231;
    public static final int ARTICLE_TRANSFERRED = 235;
    public static final int NO_NEWSGROUP_SELECTED = 412;
    public static final int NO_ARTICLE_SELECTED = 420;
    public static final int NO_ARTICLE_NUMBER = 423;
    public static final int NO_ARTICLE_FOUND = 430;

    // group responses
    public static final int GROUP_SELECTED = 211;
    public static final int NO_SUCH_NEWSGROUP = 411;

    // post responses
    public static final int POSTED_OK = 240;
    public static final int SEND_ARTICLE = 340;
    public static final int POSTING_NOT_ALLOWED = 440;
    public static final int POSTING_FAILED = 441;

    // quit responses
    public static final int CLOSING_CONNECTION = 205;

    // authentication responses
    public static final int AUTHINFO_ACCEPTED = 250;
    public static final int AUTHINFO_ACCEPTED_FINAL = 251;
    // CONTINUE and CHALLENGE deliberately share code 350 (two names for the same state).
    public static final int AUTHINFO_CONTINUE = 350;
    public static final int AUTHINFO_CHALLENGE = 350;
    public static final int AUTHINFO_SIMPLE_REJECTED = 402;
    public static final int AUTHENTICATION_ACCEPTED = 281;
    public static final int MORE_AUTHENTICATION_REQUIRED = 381;
    public static final int AUTHINFO_REQUIRED = 480;
    public static final int AUTHINFO_SIMPLE_REQUIRED = 450;
    public static final int AUTHENTICATION_REJECTED = 482;

    // list active responses
    public static final int LIST_FOLLOWS = 215;

    // The original reply string
    private final String reply;

    // returned message code
    private final int code;

    // the returned message text
    private final String message;

    // data associated with a long response command.
    private List<String> data;

    /**
     * Parses a raw reply line into its numeric code and message text.
     *
     * @param s The raw reply line; may be null or shorter than 3 characters
     *          (e.g. the frequently-empty response to QUIT), in which case the
     *          code defaults to -1 and the whole text is kept as the message.
     * @exception MessagingException if the first three characters are not a
     *            parseable numeric code.
     */
    NNTPReply(String s) throws MessagingException {
        // save the reply
        reply = s;
        // In a normal response, the first 3 must be the return code. However,
        // the response back from a QUIT command is frequently a null string.
        // Therefore, if the result is too short, just default the code to -1
        // and use the entire text for the message.
        if (s == null || s.length() < 3) {
            code = -1;
            message = s;
            return;
        }
        try {
            code = Integer.parseInt(s.substring(0, 3));
            // message should be separated by a space OR a continuation
            // character if this is a multi-line response.
            if (s.length() > 4) {
                message = s.substring(4);
            } else {
                message = "";
            }
        } catch (NumberFormatException e) {
            throw new MessagingException("error in parsing reply code", e);
        }
    }

    /**
     * Retrieve data associated with a multi-line response from a server stream.
     *
     * @param in The reader that's the source of the additional lines.
     *
     * @exception MessagingException wrapping any IOException raised while
     *            reading from the stream.
     */
    public void retrieveData(BufferedReader in) throws MessagingException {
        try {
            data = new ArrayList<String>();
            String line = in.readLine();
            // read until the end of file or until we see the end of data
            // marker.
            while (line != null && !line.equals(".")) {
                // this line is not the terminator, but it may have been byte
                // stuffed. If it starts with '.', throw away the leading one.
                if (line.startsWith(".")) {
                    line = line.substring(1);
                }
                // just add the line to the list
                data.add(line);
                line = in.readLine();
            }
        } catch (IOException e) {
            throw new MessagingException("Error reading message reply", e);
        }
    }

    /**
     * Retrieve the long-command data from this response.
     *
     * @return The data list. Returns null if there is no associated data.
     */
    public List<String> getData() {
        return data;
    }

    /**
     * Return the code value associated with the reply.
     *
     * @return The integer code associated with the reply.
     */
    public int getCode() {
        return this.code;
    }

    /**
     * Get the message text associated with the reply.
     *
     * @return The string value of the message from the reply.
     */
    public String getMessage() {
        return this.message;
    }

    /**
     * Retrieve the raw reply string for the response.
     *
     * @return The original reply string from the server.
     */
    public String getReply() {
        return reply;
    }

    /**
     * Indicates if reply is an error condition
     */
    boolean isError() {
        // error codes are all above 400
        return code >= 400;
    }

    @Override
    public String toString() {
        return "CODE = " + getCode() + " : MSG = " + getMessage();
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.search.SearchPhaseResult;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.internal.InternalScrollSearchRequest;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.internal.SearchContextId;
import org.elasticsearch.transport.RemoteClusterService;
import org.elasticsearch.transport.Transport;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiFunction;
import java.util.function.Supplier;
import static org.elasticsearch.action.search.TransportSearchHelper.internalScrollSearchRequest;
/**
* Abstract base class for scroll execution modes. This class encapsulates the basic logic to
* fan out to nodes and execute the query part of the scroll request. Subclasses can for instance
* run separate fetch phases etc.
*/
abstract class SearchScrollAsyncAction<T extends SearchPhaseResult> implements Runnable {
/*
* Some random TODO:
* Today we still have a dedicated executing mode for scrolls while we could simplify this by implementing
* scroll like functionality (mainly syntactic sugar) as an ordinary search with search_after. We could even go further and
* make the scroll entirely stateless and encode the state per shard in the scroll ID.
*
* Today we also hold a context per shard but maybe
* we want the context per coordinating node such that we route the scroll to the same coordinator all the time and hold the context
* here? This would have the advantage that if we lose that node the entire scroll is dead, not just one shard.
*
* Additionally there is the possibility to associate the scroll with a seq. id. such that we can talk to any replica as long as
* the shards engine hasn't advanced that seq. id yet. Such a resume is possible and best effort, it could be even a safety net since
* if you rely on indices being read-only things can change in-between without notification or it's hard to detect if there were any
* changes while scrolling. These are all options to improve the current situation which we can look into down the road
*/
protected final Logger logger;
protected final ActionListener<SearchResponse> listener;
protected final ParsedScrollId scrollId;
protected final DiscoveryNodes nodes;
protected final SearchPhaseController searchPhaseController;
protected final SearchScrollRequest request;
protected final SearchTransportService searchTransportService;
private final long startTime;
private final List<ShardSearchFailure> shardFailures = new ArrayList<>();
private final AtomicInteger successfulOps;
protected SearchScrollAsyncAction(ParsedScrollId scrollId, Logger logger, DiscoveryNodes nodes,
ActionListener<SearchResponse> listener, SearchPhaseController searchPhaseController,
SearchScrollRequest request,
SearchTransportService searchTransportService) {
this.startTime = System.currentTimeMillis();
this.scrollId = scrollId;
this.successfulOps = new AtomicInteger(scrollId.getContext().length);
this.logger = logger;
this.listener = listener;
this.nodes = nodes;
this.searchPhaseController = searchPhaseController;
this.request = request;
this.searchTransportService = searchTransportService;
}
/**
* Builds how long it took to execute the search.
*/
private long buildTookInMillis() {
// protect ourselves against time going backwards
// negative values don't make sense and we want to be able to serialize that thing as a vLong
return Math.max(1, System.currentTimeMillis() - startTime);
}
public final void run() {
final ScrollIdForNode[] context = scrollId.getContext();
if (context.length == 0) {
listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", ShardSearchFailure.EMPTY_ARRAY));
} else {
collectNodesAndRun(Arrays.asList(context), nodes, searchTransportService, ActionListener.wrap(lookup -> run(lookup, context),
listener::onFailure));
}
}
/**
* This method collects nodes from the remote clusters asynchronously if any of the scroll IDs references a remote cluster.
* Otherwise the action listener will be invoked immediately with a function based on the given discovery nodes.
*/
static void collectNodesAndRun(final Iterable<ScrollIdForNode> scrollIds, DiscoveryNodes nodes,
SearchTransportService searchTransportService,
ActionListener<BiFunction<String, String, DiscoveryNode>> listener) {
Set<String> clusters = new HashSet<>();
for (ScrollIdForNode target : scrollIds) {
if (target.getClusterAlias() != null) {
clusters.add(target.getClusterAlias());
}
}
if (clusters.isEmpty()) { // no remote clusters
listener.onResponse((cluster, node) -> nodes.get(node));
} else {
RemoteClusterService remoteClusterService = searchTransportService.getRemoteClusterService();
remoteClusterService.collectNodes(clusters, ActionListener.map(listener,
nodeFunction -> (clusterAlias, node) -> clusterAlias == null ? nodes.get(node) : nodeFunction.apply(clusterAlias, node)));
}
}
private void run(BiFunction<String, String, DiscoveryNode> clusterNodeLookup, final ScrollIdForNode[] context) {
final CountDown counter = new CountDown(scrollId.getContext().length);
for (int i = 0; i < context.length; i++) {
ScrollIdForNode target = context[i];
final int shardIndex = i;
final Transport.Connection connection;
try {
DiscoveryNode node = clusterNodeLookup.apply(target.getClusterAlias(), target.getNode());
if (node == null) {
throw new IllegalStateException("node [" + target.getNode() + "] is not available");
}
connection = getConnection(target.getClusterAlias(), node);
} catch (Exception ex) {
onShardFailure("query", counter, target.getContextId(),
ex, null, () -> SearchScrollAsyncAction.this.moveToNextPhase(clusterNodeLookup));
continue;
}
final InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(target.getContextId(), request);
// we can't create a SearchShardTarget here since we don't know the index and shard ID we are talking to
// we only know the node and the search context ID. Yet, the response will contain the SearchShardTarget
// from the target node instead...that's why we pass null here
SearchActionListener<T> searchActionListener = new SearchActionListener<T>(null, shardIndex) {
@Override
protected void setSearchShardTarget(T response) {
// don't do this - it's part of the response...
assert response.getSearchShardTarget() != null : "search shard target must not be null";
if (target.getClusterAlias() != null) {
// re-create the search target and add the cluster alias if there is any,
// we need this down the road for subseq. phases
SearchShardTarget searchShardTarget = response.getSearchShardTarget();
response.setSearchShardTarget(new SearchShardTarget(searchShardTarget.getNodeId(), searchShardTarget.getShardId(),
target.getClusterAlias(), null));
}
}
@Override
protected void innerOnResponse(T result) {
assert shardIndex == result.getShardIndex() : "shard index mismatch: " + shardIndex + " but got: "
+ result.getShardIndex();
onFirstPhaseResult(shardIndex, result);
if (counter.countDown()) {
SearchPhase phase = moveToNextPhase(clusterNodeLookup);
try {
phase.run();
} catch (Exception e) {
// we need to fail the entire request here - the entire phase just blew up
// don't call onShardFailure or onFailure here since otherwise we'd countDown the counter
// again which would result in an exception
listener.onFailure(new SearchPhaseExecutionException(phase.getName(), "Phase failed", e,
ShardSearchFailure.EMPTY_ARRAY));
}
}
}
@Override
public void onFailure(Exception t) {
onShardFailure("query", counter, target.getContextId(), t, null,
() -> SearchScrollAsyncAction.this.moveToNextPhase(clusterNodeLookup));
}
};
executeInitialPhase(connection, internalRequest, searchActionListener);
}
}
synchronized ShardSearchFailure[] buildShardFailures() { // pkg private for testing
if (shardFailures.isEmpty()) {
return ShardSearchFailure.EMPTY_ARRAY;
}
return shardFailures.toArray(new ShardSearchFailure[shardFailures.size()]);
}
// we do our best to return the shard failures, but its ok if its not fully concurrently safe
// we simply try and return as much as possible
private synchronized void addShardFailure(ShardSearchFailure failure) {
shardFailures.add(failure);
}
protected abstract void executeInitialPhase(Transport.Connection connection, InternalScrollSearchRequest internalRequest,
SearchActionListener<T> searchActionListener);
protected abstract SearchPhase moveToNextPhase(BiFunction<String, String, DiscoveryNode> clusterNodeLookup);
protected abstract void onFirstPhaseResult(int shardId, T result);
protected SearchPhase sendResponsePhase(SearchPhaseController.ReducedQueryPhase queryPhase,
final AtomicArray<? extends SearchPhaseResult> fetchResults) {
return new SearchPhase("fetch") {
@Override
public void run() throws IOException {
sendResponse(queryPhase, fetchResults);
}
};
}
protected final void sendResponse(SearchPhaseController.ReducedQueryPhase queryPhase,
final AtomicArray<? extends SearchPhaseResult> fetchResults) {
try {
final InternalSearchResponse internalResponse = searchPhaseController.merge(true, queryPhase, fetchResults.asList(),
fetchResults::get);
// the scroll ID never changes we always return the same ID. This ID contains all the shards and their context ids
// such that we can talk to them again in the next roundtrip.
String scrollId = null;
if (request.scroll() != null) {
scrollId = request.scrollId();
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, this.scrollId.getContext().length, successfulOps.get(),
0, buildTookInMillis(), buildShardFailures(), SearchResponse.Clusters.EMPTY));
} catch (Exception e) {
listener.onFailure(new ReduceSearchPhaseException("fetch", "inner finish failed", e, buildShardFailures()));
}
}
protected void onShardFailure(String phaseName, final CountDown counter, final SearchContextId searchId, Exception failure,
@Nullable SearchShardTarget searchShardTarget,
Supplier<SearchPhase> nextPhaseSupplier) {
if (logger.isDebugEnabled()) {
logger.debug(new ParameterizedMessage("[{}] Failed to execute {} phase", searchId, phaseName), failure);
}
addShardFailure(new ShardSearchFailure(failure, searchShardTarget));
int successfulOperations = successfulOps.decrementAndGet();
assert successfulOperations >= 0 : "successfulOperations must be >= 0 but was: " + successfulOperations;
if (counter.countDown()) {
if (successfulOps.get() == 0) {
listener.onFailure(new SearchPhaseExecutionException(phaseName, "all shards failed", failure, buildShardFailures()));
} else {
SearchPhase phase = nextPhaseSupplier.get();
try {
phase.run();
} catch (Exception e) {
e.addSuppressed(failure);
listener.onFailure(new SearchPhaseExecutionException(phase.getName(), "Phase failed", e,
ShardSearchFailure.EMPTY_ARRAY));
}
}
}
}
protected Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) {
return searchTransportService.getConnection(clusterAlias, node);
}
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.iosrobovm;
import java.io.File;
import org.robovm.apple.coregraphics.CGSize;
import org.robovm.apple.foundation.NSDictionary;
import org.robovm.apple.foundation.NSMutableDictionary;
import org.robovm.apple.foundation.NSObject;
import org.robovm.apple.foundation.NSString;
import org.robovm.apple.uikit.UIApplication;
import org.robovm.apple.uikit.UIApplicationDelegateAdapter;
import org.robovm.apple.uikit.UIDevice;
import org.robovm.apple.uikit.UIInterfaceOrientation;
import org.robovm.apple.uikit.UIPasteboard;
import org.robovm.apple.uikit.UIScreen;
import org.robovm.apple.uikit.UIUserInterfaceIdiom;
import org.robovm.apple.uikit.UIViewController;
import org.robovm.apple.uikit.UIWindow;
import com.badlogic.gdx.Application;
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Audio;
import com.badlogic.gdx.Files;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Graphics;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.LifecycleListener;
import com.badlogic.gdx.Net;
import com.badlogic.gdx.Preferences;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.Clipboard;
public class IOSApplication implements Application {

    /**
     * UIApplicationDelegate adapter that creates the {@link IOSApplication} on launch and forwards the
     * iOS application lifecycle callbacks to it.
     */
    public static abstract class Delegate extends UIApplicationDelegateAdapter {
        private IOSApplication app;

        /** Implemented by the concrete delegate to build the application instance. */
        protected abstract IOSApplication createApplication ();

        @Override
        public boolean didFinishLaunching (UIApplication application, NSDictionary<NSString, ?> launchOptions) {
            application.addStrongRef(this); // Prevent this from being GCed until the ObjC UIApplication is deallocated
            this.app = createApplication();
            return app.didFinishLaunching(application, launchOptions);
        }

        @Override
        public void didBecomeActive (UIApplication application) {
            app.didBecomeActive(application);
        }

        @Override
        public void willResignActive (UIApplication application) {
            app.willResignActive(application);
        }

        @Override
        public void willTerminate (UIApplication application) {
            app.willTerminate(application);
        }
    }

    UIApplication uiApp;
    UIWindow uiWindow;
    ApplicationListener listener;
    IOSApplicationConfiguration config;
    IOSGraphics graphics;
    IOSAudio audio;
    IOSFiles files;
    IOSInput input;
    IOSNet net;
    int logLevel = Application.LOG_DEBUG;

    /** The display scale factor (1.0f for normal; 2.0f to use retina coordinates/dimensions). */
    float displayScaleFactor;

    // runnables posted via postRunnable(); drained into executedRunnables each frame by processRunnables()
    Array<Runnable> runnables = new Array<Runnable>();
    Array<Runnable> executedRunnables = new Array<Runnable>();
    Array<LifecycleListener> lifecycleListeners = new Array<LifecycleListener>();

    public IOSApplication (ApplicationListener listener, IOSApplicationConfiguration config) {
        this.listener = listener;
        this.config = config;
    }

    /**
     * Performs the actual startup: wires up all Gdx.* modules, computes the display scale factor and
     * creates the UIWindow with the GL view controller. Invoked from {@link Delegate#didFinishLaunching}.
     */
    final boolean didFinishLaunching (UIApplication uiApp, NSDictionary<?, ?> options) {
        Gdx.app = this;
        this.uiApp = uiApp;

        // enable or disable screen dimming
        UIApplication.getSharedApplication().setIdleTimerDisabled(config.preventScreenDimming);

        // fix the scale factor if we have a retina device (NOTE: iOS screen sizes are in "points" not pixels by default!)
        if (UIScreen.getMainScreen().getScale() == 2.0f) {
            // we have a retina device!
            if (UIDevice.getCurrentDevice().getUserInterfaceIdiom() == UIUserInterfaceIdiom.Pad) {
                // it's an iPad!
                displayScaleFactor = config.displayScaleLargeScreenIfRetina * 2.0f;
            } else {
                // it's an iPod or iPhone
                displayScaleFactor = config.displayScaleSmallScreenIfRetina * 2.0f;
            }
        } else {
            // no retina screen: no scaling!
            if (UIDevice.getCurrentDevice().getUserInterfaceIdiom() == UIUserInterfaceIdiom.Pad) {
                // it's an iPad!
                displayScaleFactor = config.displayScaleLargeScreenIfNonRetina;
            } else {
                // it's an iPod or iPhone
                displayScaleFactor = config.displayScaleSmallScreenIfNonRetina;
            }
        }

        GL20 gl20 = new IOSGLES20();
        Gdx.gl = gl20;
        Gdx.gl20 = gl20;

        // setup libgdx
        this.input = new IOSInput(this);
        this.graphics = new IOSGraphics(getBounds(null), this, config, input, gl20);
        this.files = new IOSFiles();
        this.audio = new IOSAudio(config);
        this.net = new IOSNet(this);

        Gdx.files = this.files;
        Gdx.graphics = this.graphics;
        Gdx.audio = this.audio;
        Gdx.input = this.input;
        Gdx.net = this.net;

        this.input.setupPeripherals();

        this.uiWindow = new UIWindow(UIScreen.getMainScreen().getBounds());
        this.uiWindow.setRootViewController(this.graphics.viewController);
        this.uiWindow.makeKeyAndVisible();
        Gdx.app.debug("IOSApplication", "created");
        return true;
    }

    /** Return the UI view controller of IOSApplication
     * @return the view controller of IOSApplication */
    public UIViewController getUIViewController () {
        return graphics.viewController;
    }

    /** Return the UI Window of IOSApplication
     * @return the window */
    public UIWindow getUIWindow () {
        return uiWindow;
    }

    /** Returns our real display dimension based on screen orientation.
     *
     * @param viewController The view controller (may be null; orientation is then derived from the config/status bar).
     * @return Our real display dimension. */
    CGSize getBounds (UIViewController viewController) {
        // or screen size (always portrait)
        // CGSize bounds = UIScreen.getMainScreen().getBounds().size();
        CGSize bounds = UIScreen.getMainScreen().getApplicationFrame().size();

        // determine orientation and resulting width + height
        UIInterfaceOrientation orientation;
        if (viewController != null) {
            orientation = viewController.getInterfaceOrientation();
        } else if (config.orientationLandscape == config.orientationPortrait) {
            /*
             * if the app has orientation in any side then we can only check status bar orientation
             */
            orientation = uiApp.getStatusBarOrientation();
        } else if (config.orientationLandscape) {// is landscape true and portrait false
            orientation = UIInterfaceOrientation.LandscapeRight;
        } else {// is portrait true and landscape false
            orientation = UIInterfaceOrientation.Portrait;
        }
        int width;
        int height;
        switch (orientation) {
        case LandscapeLeft:
        case LandscapeRight:
            // landscape: swap the portrait-oriented application frame dimensions
            height = (int)bounds.width();
            width = (int)bounds.height();
            break;
        default:
            // assume portrait
            width = (int)bounds.width();
            height = (int)bounds.height();
        }

        // update width/height depending on display scaling selected
        width *= displayScaleFactor;
        height *= displayScaleFactor;

        // log screen dimensions
        Gdx.app.debug("IOSApplication", "View: " + orientation.toString() + " " + width + "x" + height);

        // return resulting view size (based on orientation)
        return new CGSize(width, height);
    }

    final void didBecomeActive (UIApplication uiApp) {
        Gdx.app.debug("IOSApplication", "resumed");
        // workaround for ObjectAL crash problem
        // see: https://groups.google.com/forum/?fromgroups=#!topic/objectal-for-iphone/ubRWltp_i1Q
        // OALAudioSession.sharedInstance().forceEndInterrupt();
        graphics.makeCurrent();
        graphics.resume();
    }

    final void willResignActive (UIApplication uiApp) {
        Gdx.app.debug("IOSApplication", "paused");
        graphics.makeCurrent();
        graphics.pause();
        Gdx.gl.glFlush();
    }

    final void willTerminate (UIApplication uiApp) {
        Gdx.app.debug("IOSApplication", "disposed");
        graphics.makeCurrent();
        Array<LifecycleListener> listeners = lifecycleListeners;
        synchronized (listeners) {
            // NOTE(review): lifecycle listeners are only paused here, never disposed — confirm this is intended
            for (LifecycleListener listener : listeners) {
                listener.pause();
            }
        }
        listener.dispose();
        Gdx.gl.glFlush();
    }

    @Override
    public ApplicationListener getApplicationListener () {
        return listener;
    }

    @Override
    public Graphics getGraphics () {
        return graphics;
    }

    @Override
    public Audio getAudio () {
        return audio;
    }

    @Override
    public Input getInput () {
        return input;
    }

    @Override
    public Files getFiles () {
        return files;
    }

    @Override
    public Net getNet () {
        return net;
    }

    @Override
    public void log (String tag, String message) {
        if (logLevel > LOG_NONE) {
            System.out.println("[info] " + tag + ": " + message);
        }
    }

    @Override
    public void log (String tag, String message, Throwable exception) {
        if (logLevel > LOG_NONE) {
            System.out.println("[info] " + tag + ": " + message);
            exception.printStackTrace();
        }
    }

    @Override
    public void error (String tag, String message) {
        if (logLevel >= LOG_ERROR) {
            System.out.println("[error] " + tag + ": " + message);
        }
    }

    @Override
    public void error (String tag, String message, Throwable exception) {
        if (logLevel >= LOG_ERROR) {
            System.out.println("[error] " + tag + ": " + message);
            exception.printStackTrace();
        }
    }

    @Override
    public void debug (String tag, String message) {
        if (logLevel >= LOG_DEBUG) {
            System.out.println("[debug] " + tag + ": " + message);
        }
    }

    @Override
    public void debug (String tag, String message, Throwable exception) {
        if (logLevel >= LOG_DEBUG) {
            // FIX: this previously printed the "[error]" prefix; use "[debug]" to match debug(String, String)
            System.out.println("[debug] " + tag + ": " + message);
            exception.printStackTrace();
        }
    }

    @Override
    public void setLogLevel (int logLevel) {
        this.logLevel = logLevel;
    }

    @Override
    public int getLogLevel () {
        return logLevel;
    }

    @Override
    public ApplicationType getType () {
        return ApplicationType.iOS;
    }

    @Override
    public int getVersion () {
        // major iOS version, e.g. "7.0.4" -> 7
        return Integer.parseInt(UIDevice.getCurrentDevice().getSystemVersion().split("\\.")[0]);
    }

    @Override
    public long getJavaHeap () {
        return Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
    }

    @Override
    public long getNativeHeap () {
        // no native heap measurement available on iOS; report the Java heap instead
        return getJavaHeap();
    }

    @Override
    public Preferences getPreferences (String name) {
        // preferences are stored as a plist in the app's Library directory
        File libraryPath = new File(System.getenv("HOME"), "Library");
        File finalPath = new File(libraryPath, name + ".plist");

        Gdx.app.debug("IOSApplication", "Loading NSDictionary from file " + finalPath);
        @SuppressWarnings("unchecked")
        NSMutableDictionary<NSString, NSObject> nsDictionary = (NSMutableDictionary<NSString, NSObject>)NSMutableDictionary
            .read(finalPath);

        // if it fails to get an existing dictionary, create a new one.
        if (nsDictionary == null) {
            Gdx.app.debug("IOSApplication", "NSDictionary not found, creating a new one");
            nsDictionary = new NSMutableDictionary<NSString, NSObject>();
            boolean fileWritten = nsDictionary.write(finalPath, false);
            if (fileWritten)
                Gdx.app.debug("IOSApplication", "NSDictionary file written");
            else
                Gdx.app.debug("IOSApplication", "Failed to write NSDictionary to file " + finalPath);
        }
        return new IOSPreferences(nsDictionary, finalPath.getAbsolutePath());
    }

    @Override
    public void postRunnable (Runnable runnable) {
        synchronized (runnables) {
            runnables.add(runnable);
        }
    }

    /** Runs all runnables posted via {@link #postRunnable(Runnable)} since the last call. Exceptions from
     * individual runnables are logged and do not abort the remaining ones. */
    public void processRunnables () {
        synchronized (runnables) {
            executedRunnables.clear();
            executedRunnables.addAll(runnables);
            runnables.clear();
        }
        for (int i = 0; i < executedRunnables.size; i++) {
            try {
                executedRunnables.get(i).run();
            } catch (Throwable t) {
                t.printStackTrace();
            }
        }
    }

    @Override
    public void exit () {
        System.exit(0);
    }

    @Override
    public Clipboard getClipboard () {
        return new Clipboard() {
            @Override
            public void setContents (String content) {
                UIPasteboard.getGeneral().setString(content);
            }

            @Override
            public String getContents () {
                return UIPasteboard.getGeneral().getString();
            }
        };
    }

    @Override
    public void addLifecycleListener (LifecycleListener listener) {
        synchronized (lifecycleListeners) {
            lifecycleListeners.add(listener);
        }
    }

    @Override
    public void removeLifecycleListener (LifecycleListener listener) {
        synchronized (lifecycleListeners) {
            lifecycleListeners.removeValue(listener, true);
        }
    }
}
| |
/*
* Copyright (C) 1997-2001 Id Software, Inc.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation; either version 2 of the License, or (at your option) any later
* version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE.
*
* See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc., 59 Temple
* Place - Suite 330, Boston, MA 02111-1307, USA.
*
*/
// Created on 13.11.2003 by RST.
// $Id: M_Flash.java,v 1.2 2005/02/06 18:48:32 salomo Exp $
package org.free.jake2.game.monsters;
/**
 * Muzzle-flash offset table ported from Quake2's m_flash.c. The table is indexed by the MZ2_* muzzle
 * flash constants (index 0 is unused); each entry is a three-component offset relative to the monster's
 * origin (presumably {forward, right, up} in model space, as in the original C code — TODO confirm).
 * Shared by the game logic (shot source locations) and the client (muzzle flash positions).
 */
public class M_Flash {
    // m_flash.c

    // this file is included in both the game dll and quake2,
    // the game needs it to source shot locations, the client
    // needs it to position muzzle flashes
    public static float monster_flash_offset[][] = {
        // flash 0 is not used
        {0.0f, 0.0f, 0.0f},
        // MZ2_TANK_BLASTER_1 1
        {20.7f, -18.5f, 28.7f},
        // MZ2_TANK_BLASTER_2 2
        {16.6f, -21.5f, 30.1f},
        // MZ2_TANK_BLASTER_3 3
        {11.8f, -23.9f, 32.1f},
        // MZ2_TANK_MACHINEGUN_1 4
        {22.9f, -0.7f, 25.3f},
        // MZ2_TANK_MACHINEGUN_2 5
        {22.2f, 6.2f, 22.3f},
        // MZ2_TANK_MACHINEGUN_3 6
        {19.4f, 13.1f, 18.6f},
        // MZ2_TANK_MACHINEGUN_4 7
        {19.4f, 18.8f, 18.6f},
        // MZ2_TANK_MACHINEGUN_5 8
        {17.9f, 25.0f, 18.6f},
        // MZ2_TANK_MACHINEGUN_6 9
        {14.1f, 30.5f, 20.6f},
        // MZ2_TANK_MACHINEGUN_7 10
        {9.3f, 35.3f, 22.1f},
        // MZ2_TANK_MACHINEGUN_8 11
        {4.7f, 38.4f, 22.1f},
        // MZ2_TANK_MACHINEGUN_9 12
        {-1.1f, 40.4f, 24.1f},
        // MZ2_TANK_MACHINEGUN_10 13
        {-6.5f, 41.2f, 24.1f},
        // MZ2_TANK_MACHINEGUN_11 14
        {3.2f, 40.1f, 24.7f},
        // MZ2_TANK_MACHINEGUN_12 15
        {11.7f, 36.7f, 26.0f},
        // MZ2_TANK_MACHINEGUN_13 16
        {18.9f, 31.3f, 26.0f},
        // MZ2_TANK_MACHINEGUN_14 17
        {24.4f, 24.4f, 26.4f},
        // MZ2_TANK_MACHINEGUN_15 18
        {27.1f, 17.1f, 27.2f},
        // MZ2_TANK_MACHINEGUN_16 19
        {28.5f, 9.1f, 28.0f},
        // MZ2_TANK_MACHINEGUN_17 20
        {27.1f, 2.2f, 28.0f},
        // MZ2_TANK_MACHINEGUN_18 21
        {24.9f, -2.8f, 28.0f},
        // MZ2_TANK_MACHINEGUN_19 22
        {21.6f, -7.0f, 26.4f},
        // MZ2_TANK_ROCKET_1 23
        {6.2f, 29.1f, 49.1f},
        // MZ2_TANK_ROCKET_2 24
        {6.9f, 23.8f, 49.1f},
        // MZ2_TANK_ROCKET_3 25
        {8.3f, 17.8f, 49.5f},
        // MZ2_INFANTRY_MACHINEGUN_1 26
        {26.6f, 7.1f, 13.1f},
        // MZ2_INFANTRY_MACHINEGUN_2 27
        {18.2f, 7.5f, 15.4f},
        // MZ2_INFANTRY_MACHINEGUN_3 28
        {17.2f, 10.3f, 17.9f},
        // MZ2_INFANTRY_MACHINEGUN_4 29
        {17.0f, 12.8f, 20.1f},
        // MZ2_INFANTRY_MACHINEGUN_5 30
        {15.1f, 14.1f, 21.8f},
        // MZ2_INFANTRY_MACHINEGUN_6 31
        {11.8f, 17.2f, 23.1f},
        // MZ2_INFANTRY_MACHINEGUN_7 32
        {11.4f, 20.2f, 21.0f},
        // MZ2_INFANTRY_MACHINEGUN_8 33
        {9.0f, 23.0f, 18.9f},
        // MZ2_INFANTRY_MACHINEGUN_9 34
        {13.9f, 18.6f, 17.7f},
        // MZ2_INFANTRY_MACHINEGUN_10 35
        {15.4f, 15.6f, 15.8f},
        // MZ2_INFANTRY_MACHINEGUN_11 36
        {10.2f, 15.2f, 25.1f},
        // MZ2_INFANTRY_MACHINEGUN_12 37
        {-1.9f, 15.1f, 28.2f},
        // MZ2_INFANTRY_MACHINEGUN_13 38
        {-12.4f, 13.0f, 20.2f},
        // MZ2_SOLDIER_BLASTER_1 39
        {10.6f * 1.2f, 7.7f * 1.2f, 7.8f * 1.2f},
        // MZ2_SOLDIER_BLASTER_2 40
        {21.1f * 1.2f, 3.6f * 1.2f, 19.0f * 1.2f},
        // MZ2_SOLDIER_SHOTGUN_1 41
        {10.6f * 1.2f, 7.7f * 1.2f, 7.8f * 1.2f},
        // MZ2_SOLDIER_SHOTGUN_2 42
        {21.1f * 1.2f, 3.6f * 1.2f, 19.0f * 1.2f},
        // MZ2_SOLDIER_MACHINEGUN_1 43
        {10.6f * 1.2f, 7.7f * 1.2f, 7.8f * 1.2f},
        // MZ2_SOLDIER_MACHINEGUN_2 44
        {21.1f * 1.2f, 3.6f * 1.2f, 19.0f * 1.2f},
        // MZ2_GUNNER_MACHINEGUN_1 45
        {30.1f * 1.15f, 3.9f * 1.15f, 19.6f * 1.15f},
        // MZ2_GUNNER_MACHINEGUN_2 46
        {29.1f * 1.15f, 2.5f * 1.15f, 20.7f * 1.15f},
        // MZ2_GUNNER_MACHINEGUN_3 47
        {28.2f * 1.15f, 2.5f * 1.15f, 22.2f * 1.15f},
        // MZ2_GUNNER_MACHINEGUN_4 48
        {28.2f * 1.15f, 3.6f * 1.15f, 22.0f * 1.15f},
        // MZ2_GUNNER_MACHINEGUN_5 49
        {26.9f * 1.15f, 2.0f * 1.15f, 23.4f * 1.15f},
        // MZ2_GUNNER_MACHINEGUN_6 50
        {26.5f * 1.15f, 0.6f * 1.15f, 20.8f * 1.15f},
        // MZ2_GUNNER_MACHINEGUN_7 51
        {26.9f * 1.15f, 0.5f * 1.15f, 21.5f * 1.15f},
        // MZ2_GUNNER_MACHINEGUN_8 52
        {29.0f * 1.15f, 2.4f * 1.15f, 19.5f * 1.15f},
        // MZ2_GUNNER_GRENADE_1 53
        {4.6f * 1.15f, -16.8f * 1.15f, 7.3f * 1.15f},
        // MZ2_GUNNER_GRENADE_2 54
        {4.6f * 1.15f, -16.8f * 1.15f, 7.3f * 1.15f},
        // MZ2_GUNNER_GRENADE_3 55
        {4.6f * 1.15f, -16.8f * 1.15f, 7.3f * 1.15f},
        // MZ2_GUNNER_GRENADE_4 56
        {4.6f * 1.15f, -16.8f * 1.15f, 7.3f * 1.15f},
        // MZ2_CHICK_ROCKET_1 57
        // -24.8f, -9.0f, 39.0f},
        {24.8f, -9.0f, 39.0f}, // PGM - this was incorrect in Q2
        // MZ2_FLYER_BLASTER_1 58
        {12.1f, 13.4f, -14.5f},
        // MZ2_FLYER_BLASTER_2 59
        {12.1f, -7.4f, -14.5f},
        // MZ2_MEDIC_BLASTER_1 60
        {12.1f, 5.4f, 16.5f},
        // MZ2_GLADIATOR_RAILGUN_1 61
        {30.0f, 18.0f, 28.0f},
        // MZ2_HOVER_BLASTER_1 62
        {32.5f, -0.8f, 10.0f},
        // MZ2_ACTOR_MACHINEGUN_1 63
        {18.4f, 7.4f, 9.6f},
        // MZ2_SUPERTANK_MACHINEGUN_1 64
        {30.0f, 30.0f, 88.5f},
        // MZ2_SUPERTANK_MACHINEGUN_2 65
        {30.0f, 30.0f, 88.5f},
        // MZ2_SUPERTANK_MACHINEGUN_3 66
        {30.0f, 30.0f, 88.5f},
        // MZ2_SUPERTANK_MACHINEGUN_4 67
        {30.0f, 30.0f, 88.5f},
        // MZ2_SUPERTANK_MACHINEGUN_5 68
        {30.0f, 30.0f, 88.5f},
        // MZ2_SUPERTANK_MACHINEGUN_6 69
        {30.0f, 30.0f, 88.5f},
        // MZ2_SUPERTANK_ROCKET_1 70
        {16.0f, -22.5f, 91.2f},
        // MZ2_SUPERTANK_ROCKET_2 71
        {16.0f, -33.4f, 86.7f},
        // MZ2_SUPERTANK_ROCKET_3 72
        {16.0f, -42.8f, 83.3f},
        // --- Start Xian Stuff ---
        // MZ2_BOSS2_MACHINEGUN_L1 73
        {32f, -40f, 70f},
        // MZ2_BOSS2_MACHINEGUN_L2 74
        {32f, -40f, 70f},
        // MZ2_BOSS2_MACHINEGUN_L3 75
        {32f, -40f, 70f},
        // MZ2_BOSS2_MACHINEGUN_L4 76
        {32f, -40f, 70f},
        // MZ2_BOSS2_MACHINEGUN_L5 77
        {32f, -40f, 70f},
        // --- End Xian Stuff
        // MZ2_BOSS2_ROCKET_1 78
        {22.0f, 16.0f, 10.0f},
        // MZ2_BOSS2_ROCKET_2 79
        {22.0f, 8.0f, 10.0f},
        // MZ2_BOSS2_ROCKET_3 80
        {22.0f, -8.0f, 10.0f},
        // MZ2_BOSS2_ROCKET_4 81
        {22.0f, -16.0f, 10.0f},
        // MZ2_FLOAT_BLASTER_1 82
        {32.5f, -0.8f, 10f},
        // MZ2_SOLDIER_BLASTER_3 83
        {20.8f * 1.2f, 10.1f * 1.2f, -2.7f * 1.2f},
        // MZ2_SOLDIER_SHOTGUN_3 84
        {20.8f * 1.2f, 10.1f * 1.2f, -2.7f * 1.2f},
        // MZ2_SOLDIER_MACHINEGUN_3 85
        {20.8f * 1.2f, 10.1f * 1.2f, -2.7f * 1.2f},
        // MZ2_SOLDIER_BLASTER_4 86
        {7.6f * 1.2f, 9.3f * 1.2f, 0.8f * 1.2f},
        // MZ2_SOLDIER_SHOTGUN_4 87
        {7.6f * 1.2f, 9.3f * 1.2f, 0.8f * 1.2f},
        // MZ2_SOLDIER_MACHINEGUN_4 88
        {7.6f * 1.2f, 9.3f * 1.2f, 0.8f * 1.2f},
        // MZ2_SOLDIER_BLASTER_5 89
        {30.5f * 1.2f, 9.9f * 1.2f, -18.7f * 1.2f},
        // MZ2_SOLDIER_SHOTGUN_5 90
        {30.5f * 1.2f, 9.9f * 1.2f, -18.7f * 1.2f},
        // MZ2_SOLDIER_MACHINEGUN_5 91
        {30.5f * 1.2f, 9.9f * 1.2f, -18.7f * 1.2f},
        // MZ2_SOLDIER_BLASTER_6 92
        {27.6f * 1.2f, 3.4f * 1.2f, -10.4f * 1.2f},
        // MZ2_SOLDIER_SHOTGUN_6 93
        {27.6f * 1.2f, 3.4f * 1.2f, -10.4f * 1.2f},
        // MZ2_SOLDIER_MACHINEGUN_6 94
        {27.6f * 1.2f, 3.4f * 1.2f, -10.4f * 1.2f},
        // MZ2_SOLDIER_BLASTER_7 95
        {28.9f * 1.2f, 4.6f * 1.2f, -8.1f * 1.2f},
        // MZ2_SOLDIER_SHOTGUN_7 96
        {28.9f * 1.2f, 4.6f * 1.2f, -8.1f * 1.2f},
        // MZ2_SOLDIER_MACHINEGUN_7 97
        {28.9f * 1.2f, 4.6f * 1.2f, -8.1f * 1.2f},
        // MZ2_SOLDIER_BLASTER_8 98
        // 34.5f * 1.2f, 9.6f * 1.2f, 6.1f * 1.2f},
        {31.5f * 1.2f, 9.6f * 1.2f, 10.1f * 1.2f},
        // MZ2_SOLDIER_SHOTGUN_8 99
        {34.5f * 1.2f, 9.6f * 1.2f, 6.1f * 1.2f},
        // MZ2_SOLDIER_MACHINEGUN_8 100
        {34.5f * 1.2f, 9.6f * 1.2f, 6.1f * 1.2f},
        // --- Xian shit below ---
        // MZ2_MAKRON_BFG 101
        {17f, -19.5f, 62.9f},
        // MZ2_MAKRON_BLASTER_1 102
        {-3.6f, -24.1f, 59.5f},
        // MZ2_MAKRON_BLASTER_2 103
        {-1.6f, -19.3f, 59.5f},
        // MZ2_MAKRON_BLASTER_3 104
        {-0.1f, -14.4f, 59.5f},
        // MZ2_MAKRON_BLASTER_4 105
        {2.0f, -7.6f, 59.5f},
        // MZ2_MAKRON_BLASTER_5 106
        {3.4f, 1.3f, 59.5f},
        // MZ2_MAKRON_BLASTER_6 107
        {3.7f, 11.1f, 59.5f},
        // MZ2_MAKRON_BLASTER_7 108
        {-0.3f, 22.3f, 59.5f},
        // MZ2_MAKRON_BLASTER_8 109
        {-6f, 33f, 59.5f},
        // MZ2_MAKRON_BLASTER_9 110
        {-9.3f, 36.4f, 59.5f},
        // MZ2_MAKRON_BLASTER_10 111
        {-7f, 35f, 59.5f},
        // MZ2_MAKRON_BLASTER_11 112
        {-2.1f, 29f, 59.5f},
        // MZ2_MAKRON_BLASTER_12 113
        {3.9f, 17.3f, 59.5f},
        // MZ2_MAKRON_BLASTER_13 114
        {6.1f, 5.8f, 59.5f},
        // MZ2_MAKRON_BLASTER_14 115
        {5.9f, -4.4f, 59.5f},
        // MZ2_MAKRON_BLASTER_15 116
        {4.2f, -14.1f, 59.5f},
        // MZ2_MAKRON_BLASTER_16 117
        {2.4f, -18.8f, 59.5f},
        // MZ2_MAKRON_BLASTER_17 118
        {-1.8f, -25.5f, 59.5f},
        // MZ2_MAKRON_RAILGUN_1 119
        {-17.3f, 7.8f, 72.4f},
        // MZ2_JORG_MACHINEGUN_L1 120
        {78.5f, -47.1f, 96f},
        // MZ2_JORG_MACHINEGUN_L2 121
        {78.5f, -47.1f, 96f},
        // MZ2_JORG_MACHINEGUN_L3 122
        {78.5f, -47.1f, 96f},
        // MZ2_JORG_MACHINEGUN_L4 123
        {78.5f, -47.1f, 96f},
        // MZ2_JORG_MACHINEGUN_L5 124
        {78.5f, -47.1f, 96f},
        // MZ2_JORG_MACHINEGUN_L6 125
        {78.5f, -47.1f, 96f},
        // MZ2_JORG_MACHINEGUN_R1 126
        {78.5f, 46.7f, 96f},
        // MZ2_JORG_MACHINEGUN_R2 127
        {78.5f, 46.7f, 96f},
        // MZ2_JORG_MACHINEGUN_R3 128
        {78.5f, 46.7f, 96f},
        // MZ2_JORG_MACHINEGUN_R4 129
        {78.5f, 46.7f, 96f},
        // MZ2_JORG_MACHINEGUN_R5 130
        {78.5f, 46.7f, 96f},
        // MZ2_JORG_MACHINEGUN_R6 131
        {78.5f, 46.7f, 96f},
        // MZ2_JORG_BFG_1 132
        {6.3f, -9f, 111.2f},
        // MZ2_BOSS2_MACHINEGUN_R1 73
        {32f, 40f, 70f},
        // MZ2_BOSS2_MACHINEGUN_R2 74
        {32f, 40f, 70f},
        // MZ2_BOSS2_MACHINEGUN_R3 75
        {32f, 40f, 70f},
        // MZ2_BOSS2_MACHINEGUN_R4 76
        {32f, 40f, 70f},
        // MZ2_BOSS2_MACHINEGUN_R5 77
        {32f, 40f, 70f},
        // --- End Xian Shit ---
        // ROGUE
        // note that the above really ends at 137
        // carrier machineguns
        // MZ2_CARRIER_MACHINEGUN_L1
        {56f, -32f, 32f},
        // MZ2_CARRIER_MACHINEGUN_R1
        {56f, 32f, 32f},
        // MZ2_CARRIER_GRENADE
        {42f, 24f, 50f},
        // MZ2_TURRET_MACHINEGUN 141
        {16f, 0f, 0f},
        // MZ2_TURRET_ROCKET 142
        {16f, 0f, 0f},
        // MZ2_TURRET_BLASTER 143
        {16f, 0f, 0f},
        // MZ2_STALKER_BLASTER 144
        {24f, 0f, 6f},
        // MZ2_DAEDALUS_BLASTER 145
        {32.5f, -0.8f, 10.0f},
        // MZ2_MEDIC_BLASTER_2 146
        {12.1f, 5.4f, 16.5f},
        // MZ2_CARRIER_RAILGUN 147
        {32f, 0f, 6f},
        // MZ2_WIDOW_DISRUPTOR 148
        {57.72f, 14.50f, 88.81f},
        // MZ2_WIDOW_BLASTER 149
        {56f, 32f, 32f},
        // MZ2_WIDOW_RAIL 150
        {62f, -20f, 84f},
        // MZ2_WIDOW_PLASMABEAM 151 // PMM - not used!
        {32f, 0f, 6f},
        // MZ2_CARRIER_MACHINEGUN_L2 152
        {61f, -32f, 12f},
        // MZ2_CARRIER_MACHINEGUN_R2 153
        {61f, 32f, 12f},
        // MZ2_WIDOW_RAIL_LEFT 154
        {17f, -62f, 91f},
        // MZ2_WIDOW_RAIL_RIGHT 155
        {68f, 12f, 86f},
        // MZ2_WIDOW_BLASTER_SWEEP1 156 pmm - the sweeps need to be in
        // sequential order
        {47.5f, 56f, 89f},
        // MZ2_WIDOW_BLASTER_SWEEP2 157
        {54f, 52f, 91f},
        // MZ2_WIDOW_BLASTER_SWEEP3 158
        {58f, 40f, 91f},
        // MZ2_WIDOW_BLASTER_SWEEP4 159
        {68f, 30f, 88f},
        // MZ2_WIDOW_BLASTER_SWEEP5 160
        {74f, 20f, 88f},
        // MZ2_WIDOW_BLASTER_SWEEP6 161
        {73f, 11f, 87f},
        // MZ2_WIDOW_BLASTER_SWEEP7 162
        {73f, 3f, 87f},
        // MZ2_WIDOW_BLASTER_SWEEP8 163
        {70f, -12f, 87f},
        // MZ2_WIDOW_BLASTER_SWEEP9 164
        {67f, -20f, 90f},
        // MZ2_WIDOW_BLASTER_100 165
        {-20f, 76f, 90f},
        // MZ2_WIDOW_BLASTER_90 166
        {-8f, 74f, 90f},
        // MZ2_WIDOW_BLASTER_80 167
        {0f, 72f, 90f},
        // MZ2_WIDOW_BLASTER_70 168 d06
        {10f, 71f, 89f},
        // MZ2_WIDOW_BLASTER_60 169 d07
        {23f, 70f, 87f},
        // MZ2_WIDOW_BLASTER_50 170 d08
        {32f, 64f, 85f},
        // MZ2_WIDOW_BLASTER_40 171
        {40f, 58f, 84f},
        // MZ2_WIDOW_BLASTER_30 172 d10
        {48f, 50f, 83f},
        // MZ2_WIDOW_BLASTER_20 173
        {54f, 42f, 82f},
        // MZ2_WIDOW_BLASTER_10 174 d12
        {56f, 34f, 82f},
        // MZ2_WIDOW_BLASTER_0 175
        {58f, 26f, 82f},
        // MZ2_WIDOW_BLASTER_10L 176 d14
        {60f, 16f, 82f},
        // MZ2_WIDOW_BLASTER_20L 177
        {59f, 6f, 81f},
        // MZ2_WIDOW_BLASTER_30L 178 d16
        {58f, -2f, 80f},
        // MZ2_WIDOW_BLASTER_40L 179
        {57f, -10f, 79f},
        // MZ2_WIDOW_BLASTER_50L 180 d18
        {54f, -18f, 78f},
        // MZ2_WIDOW_BLASTER_60L 181
        {42f, -32f, 80f},
        // MZ2_WIDOW_BLASTER_70L 182 d20
        {36f, -40f, 78f},
        // MZ2_WIDOW_RUN_1 183
        {68.4f, 10.88f, 82.08f},
        // MZ2_WIDOW_RUN_2 184
        {68.51f, 8.64f, 85.14f},
        // MZ2_WIDOW_RUN_3 185
        {68.66f, 6.38f, 88.78f},
        // MZ2_WIDOW_RUN_4 186
        {68.73f, 5.1f, 84.47f},
        // MZ2_WIDOW_RUN_5 187
        {68.82f, 4.79f, 80.52f},
        // MZ2_WIDOW_RUN_6 188
        {68.77f, 6.11f, 85.37f},
        // MZ2_WIDOW_RUN_7 189
        {68.67f, 7.99f, 90.24f},
        // MZ2_WIDOW_RUN_8 190
        {68.55f, 9.54f, 87.36f},
        // MZ2_CARRIER_ROCKET_1 191
        {0f, 0f, -5f},
        // MZ2_CARRIER_ROCKET_2 192
        {0f, 0f, -5f},
        // MZ2_CARRIER_ROCKET_3 193
        {0f, 0f, -5f},
        // MZ2_CARRIER_ROCKET_4 194
        {0f, 0f, -5f},
        // MZ2_WIDOW2_BEAMER_1 195
        // 72.13f, -17.63f, 93.77f},
        {69.00f, -17.63f, 93.77f},
        // MZ2_WIDOW2_BEAMER_2 196
        // 71.46f, -17.08f, 89.82f},
        {69.00f, -17.08f, 89.82f},
        // MZ2_WIDOW2_BEAMER_3 197
        // 71.47f, -18.40f, 90.70f},
        {69.00f, -18.40f, 90.70f},
        // MZ2_WIDOW2_BEAMER_4 198
        // 71.96f, -18.34f, 94.32f},
        {69.00f, -18.34f, 94.32f},
        // MZ2_WIDOW2_BEAMER_5 199
        // 72.25f, -18.30f, 97.98f},
        {69.00f, -18.30f, 97.98f},
        // MZ2_WIDOW2_BEAM_SWEEP_1 200
        {45.04f, -59.02f, 92.24f},
        // MZ2_WIDOW2_BEAM_SWEEP_2 201
        {50.68f, -54.70f, 91.96f},
        // MZ2_WIDOW2_BEAM_SWEEP_3 202
        {56.57f, -47.72f, 91.65f},
        // MZ2_WIDOW2_BEAM_SWEEP_4 203
        {61.75f, -38.75f, 91.38f},
        // MZ2_WIDOW2_BEAM_SWEEP_5 204
        {65.55f, -28.76f, 91.24f},
        // MZ2_WIDOW2_BEAM_SWEEP_6 205
        {67.79f, -18.90f, 91.22f},
        // MZ2_WIDOW2_BEAM_SWEEP_7 206
        {68.60f, -9.52f, 91.23f},
        // MZ2_WIDOW2_BEAM_SWEEP_8 207
        {68.08f, 0.18f, 91.32f},
        // MZ2_WIDOW2_BEAM_SWEEP_9 208
        {66.14f, 9.79f, 91.44f},
        // MZ2_WIDOW2_BEAM_SWEEP_10 209
        {62.77f, 18.91f, 91.65f},
        // MZ2_WIDOW2_BEAM_SWEEP_11 210
        {58.29f, 27.11f, 92.00f},
        // end of table
        {0.0f, 0.0f, 0.0f}};
}
| |
#set( $symbol_pound = '#' )
#set( $symbol_dollar = '$' )
#set( $symbol_escape = '\' )
package ${package}.pojo;
import java.util.ArrayList;
import java.util.List;
/**
 * MyBatis-Generator-style "Example" (criteria) object for building dynamic WHERE
 * clauses for the Home table. This file is a Velocity archetype template; the
 * package name is substituted at generation time.
 *
 * Usage contract (standard generated pattern): criteria groups in
 * {@code oredCriteria} are combined with OR, while the individual conditions
 * inside one {@link Criteria} group are combined with AND.
 */
public class HomeExample {
// ORDER BY fragment appended verbatim to the generated query; null means no ordering.
protected String orderByClause;
// When true the generated SELECT uses DISTINCT.
protected boolean distinct;
// Criteria groups; each group is ORed with the others by the SQL mapper.
protected List<Criteria> oredCriteria;
public HomeExample() {
oredCriteria = new ArrayList<Criteria>();
}
public void setOrderByClause(String orderByClause) {
this.orderByClause = orderByClause;
}
public String getOrderByClause() {
return orderByClause;
}
public void setDistinct(boolean distinct) {
this.distinct = distinct;
}
public boolean isDistinct() {
return distinct;
}
public List<Criteria> getOredCriteria() {
return oredCriteria;
}
// Adds an externally-built criteria group, ORed with any existing groups.
public void or(Criteria criteria) {
oredCriteria.add(criteria);
}
// Creates, registers and returns a new criteria group (always added).
public Criteria or() {
Criteria criteria = createCriteriaInternal();
oredCriteria.add(criteria);
return criteria;
}
// Creates a criteria group; registers it only if it is the first one
// (standard generated behavior — subsequent groups must be added via or()).
public Criteria createCriteria() {
Criteria criteria = createCriteriaInternal();
if (oredCriteria.size() == 0) {
oredCriteria.add(criteria);
}
return criteria;
}
protected Criteria createCriteriaInternal() {
Criteria criteria = new Criteria();
return criteria;
}
// Resets the example to its initial state for reuse.
public void clear() {
oredCriteria.clear();
orderByClause = null;
distinct = false;
}
/**
 * Base class holding the AND-combined conditions of a single criteria group.
 * The fluent andXxx methods cast {@code this} to {@link Criteria} so chained
 * calls keep the concrete type.
 */
protected abstract static class GeneratedCriteria {
protected List<Criterion> criteria;
protected GeneratedCriteria() {
super();
criteria = new ArrayList<Criterion>();
}
// A group takes part in SQL generation only when it has at least one condition.
public boolean isValid() {
return criteria.size() > 0;
}
public List<Criterion> getAllCriteria() {
return criteria;
}
public List<Criterion> getCriteria() {
return criteria;
}
// Adds a condition with no bound value (e.g. "id is null").
protected void addCriterion(String condition) {
if (condition == null) {
throw new RuntimeException("Value for condition cannot be null");
}
criteria.add(new Criterion(condition));
}
// Adds a single-value (or list-value) condition; 'property' is used only for the error message.
protected void addCriterion(String condition, Object value, String property) {
if (value == null) {
throw new RuntimeException("Value for " + property + " cannot be null");
}
criteria.add(new Criterion(condition, value));
}
// Adds a BETWEEN condition with two bound values.
protected void addCriterion(String condition, Object value1, Object value2, String property) {
if (value1 == null || value2 == null) {
throw new RuntimeException("Between values for " + property + " cannot be null");
}
criteria.add(new Criterion(condition, value1, value2));
}
// ---- conditions on column: id (Integer) ----
public Criteria andIdIsNull() {
addCriterion("id is null");
return (Criteria) this;
}
public Criteria andIdIsNotNull() {
addCriterion("id is not null");
return (Criteria) this;
}
public Criteria andIdEqualTo(Integer value) {
addCriterion("id =", value, "id");
return (Criteria) this;
}
public Criteria andIdNotEqualTo(Integer value) {
addCriterion("id <>", value, "id");
return (Criteria) this;
}
public Criteria andIdGreaterThan(Integer value) {
addCriterion("id >", value, "id");
return (Criteria) this;
}
public Criteria andIdGreaterThanOrEqualTo(Integer value) {
addCriterion("id >=", value, "id");
return (Criteria) this;
}
public Criteria andIdLessThan(Integer value) {
addCriterion("id <", value, "id");
return (Criteria) this;
}
public Criteria andIdLessThanOrEqualTo(Integer value) {
addCriterion("id <=", value, "id");
return (Criteria) this;
}
public Criteria andIdIn(List<Integer> values) {
addCriterion("id in", values, "id");
return (Criteria) this;
}
public Criteria andIdNotIn(List<Integer> values) {
addCriterion("id not in", values, "id");
return (Criteria) this;
}
public Criteria andIdBetween(Integer value1, Integer value2) {
addCriterion("id between", value1, value2, "id");
return (Criteria) this;
}
public Criteria andIdNotBetween(Integer value1, Integer value2) {
addCriterion("id not between", value1, value2, "id");
return (Criteria) this;
}
// ---- conditions on column: homelink (String) ----
public Criteria andHomelinkIsNull() {
addCriterion("homelink is null");
return (Criteria) this;
}
public Criteria andHomelinkIsNotNull() {
addCriterion("homelink is not null");
return (Criteria) this;
}
public Criteria andHomelinkEqualTo(String value) {
addCriterion("homelink =", value, "homelink");
return (Criteria) this;
}
public Criteria andHomelinkNotEqualTo(String value) {
addCriterion("homelink <>", value, "homelink");
return (Criteria) this;
}
public Criteria andHomelinkGreaterThan(String value) {
addCriterion("homelink >", value, "homelink");
return (Criteria) this;
}
public Criteria andHomelinkGreaterThanOrEqualTo(String value) {
addCriterion("homelink >=", value, "homelink");
return (Criteria) this;
}
public Criteria andHomelinkLessThan(String value) {
addCriterion("homelink <", value, "homelink");
return (Criteria) this;
}
public Criteria andHomelinkLessThanOrEqualTo(String value) {
addCriterion("homelink <=", value, "homelink");
return (Criteria) this;
}
// Caller supplies the SQL wildcards (%/_) in 'value'.
public Criteria andHomelinkLike(String value) {
addCriterion("homelink like", value, "homelink");
return (Criteria) this;
}
public Criteria andHomelinkNotLike(String value) {
addCriterion("homelink not like", value, "homelink");
return (Criteria) this;
}
public Criteria andHomelinkIn(List<String> values) {
addCriterion("homelink in", values, "homelink");
return (Criteria) this;
}
public Criteria andHomelinkNotIn(List<String> values) {
addCriterion("homelink not in", values, "homelink");
return (Criteria) this;
}
public Criteria andHomelinkBetween(String value1, String value2) {
addCriterion("homelink between", value1, value2, "homelink");
return (Criteria) this;
}
public Criteria andHomelinkNotBetween(String value1, String value2) {
addCriterion("homelink not between", value1, value2, "homelink");
return (Criteria) this;
}
// ---- conditions on column: homedesc (String) ----
public Criteria andHomedescIsNull() {
addCriterion("homedesc is null");
return (Criteria) this;
}
public Criteria andHomedescIsNotNull() {
addCriterion("homedesc is not null");
return (Criteria) this;
}
public Criteria andHomedescEqualTo(String value) {
addCriterion("homedesc =", value, "homedesc");
return (Criteria) this;
}
public Criteria andHomedescNotEqualTo(String value) {
addCriterion("homedesc <>", value, "homedesc");
return (Criteria) this;
}
public Criteria andHomedescGreaterThan(String value) {
addCriterion("homedesc >", value, "homedesc");
return (Criteria) this;
}
public Criteria andHomedescGreaterThanOrEqualTo(String value) {
addCriterion("homedesc >=", value, "homedesc");
return (Criteria) this;
}
public Criteria andHomedescLessThan(String value) {
addCriterion("homedesc <", value, "homedesc");
return (Criteria) this;
}
public Criteria andHomedescLessThanOrEqualTo(String value) {
addCriterion("homedesc <=", value, "homedesc");
return (Criteria) this;
}
public Criteria andHomedescLike(String value) {
addCriterion("homedesc like", value, "homedesc");
return (Criteria) this;
}
public Criteria andHomedescNotLike(String value) {
addCriterion("homedesc not like", value, "homedesc");
return (Criteria) this;
}
public Criteria andHomedescIn(List<String> values) {
addCriterion("homedesc in", values, "homedesc");
return (Criteria) this;
}
public Criteria andHomedescNotIn(List<String> values) {
addCriterion("homedesc not in", values, "homedesc");
return (Criteria) this;
}
public Criteria andHomedescBetween(String value1, String value2) {
addCriterion("homedesc between", value1, value2, "homedesc");
return (Criteria) this;
}
public Criteria andHomedescNotBetween(String value1, String value2) {
addCriterion("homedesc not between", value1, value2, "homedesc");
return (Criteria) this;
}
// ---- conditions on column: remark (String) ----
public Criteria andRemarkIsNull() {
addCriterion("remark is null");
return (Criteria) this;
}
public Criteria andRemarkIsNotNull() {
addCriterion("remark is not null");
return (Criteria) this;
}
public Criteria andRemarkEqualTo(String value) {
addCriterion("remark =", value, "remark");
return (Criteria) this;
}
public Criteria andRemarkNotEqualTo(String value) {
addCriterion("remark <>", value, "remark");
return (Criteria) this;
}
public Criteria andRemarkGreaterThan(String value) {
addCriterion("remark >", value, "remark");
return (Criteria) this;
}
public Criteria andRemarkGreaterThanOrEqualTo(String value) {
addCriterion("remark >=", value, "remark");
return (Criteria) this;
}
public Criteria andRemarkLessThan(String value) {
addCriterion("remark <", value, "remark");
return (Criteria) this;
}
public Criteria andRemarkLessThanOrEqualTo(String value) {
addCriterion("remark <=", value, "remark");
return (Criteria) this;
}
public Criteria andRemarkLike(String value) {
addCriterion("remark like", value, "remark");
return (Criteria) this;
}
public Criteria andRemarkNotLike(String value) {
addCriterion("remark not like", value, "remark");
return (Criteria) this;
}
public Criteria andRemarkIn(List<String> values) {
addCriterion("remark in", values, "remark");
return (Criteria) this;
}
public Criteria andRemarkNotIn(List<String> values) {
addCriterion("remark not in", values, "remark");
return (Criteria) this;
}
public Criteria andRemarkBetween(String value1, String value2) {
addCriterion("remark between", value1, value2, "remark");
return (Criteria) this;
}
public Criteria andRemarkNotBetween(String value1, String value2) {
addCriterion("remark not between", value1, value2, "remark");
return (Criteria) this;
}
}
/**
 * Concrete criteria group; exists so generated plugins can extend the base
 * class without breaking the fluent API's return type.
 */
public static class Criteria extends GeneratedCriteria {
protected Criteria() {
super();
}
}
/**
 * One rendered condition. Exactly one of noValue / singleValue / betweenValue /
 * listValue is set, telling the SQL mapper how many placeholders to emit.
 */
public static class Criterion {
private String condition;
private Object value;
private Object secondValue;
private boolean noValue;
private boolean singleValue;
private boolean betweenValue;
private boolean listValue;
// Optional MyBatis type handler name; null means use the default handler.
private String typeHandler;
public String getCondition() {
return condition;
}
public Object getValue() {
return value;
}
public Object getSecondValue() {
return secondValue;
}
public boolean isNoValue() {
return noValue;
}
public boolean isSingleValue() {
return singleValue;
}
public boolean isBetweenValue() {
return betweenValue;
}
public boolean isListValue() {
return listValue;
}
public String getTypeHandler() {
return typeHandler;
}
// Condition with no bound value, e.g. "id is null".
protected Criterion(String condition) {
super();
this.condition = condition;
this.typeHandler = null;
this.noValue = true;
}
// Single- or list-valued condition; a List value switches to IN-style rendering.
protected Criterion(String condition, Object value, String typeHandler) {
super();
this.condition = condition;
this.value = value;
this.typeHandler = typeHandler;
if (value instanceof List<?>) {
this.listValue = true;
} else {
this.singleValue = true;
}
}
protected Criterion(String condition, Object value) {
this(condition, value, null);
}
// BETWEEN condition with two bound values.
protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
super();
this.condition = condition;
this.value = value;
this.secondValue = secondValue;
this.typeHandler = typeHandler;
this.betweenValue = true;
}
protected Criterion(String condition, Object value, Object secondValue) {
this(condition, value, secondValue, null);
}
}
}
| |
/**
* Copyright 2012-2015 TIKAL-TECHNOLOGY
*
 * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.nekorp.workflow.desktop.view.resource.costo;
import java.lang.reflect.InvocationTargetException;
import java.util.LinkedList;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.swing.table.AbstractTableModel;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.reflect.MethodUtils;
import org.nekorp.workflow.desktop.servicio.RegistroCostoFactory;
import org.nekorp.workflow.desktop.servicio.binding.ValidacionBindable;
import org.nekorp.workflow.desktop.servicio.imp.ProxyUtil;
import org.nekorp.workflow.desktop.servicio.validacion.ValidacionBeanFactory;
import org.nekorp.workflow.desktop.servicio.validacion.imp.CampoObligatorioValidacion;
import org.nekorp.workflow.desktop.servicio.validacion.imp.ValidacionRangoInteger;
import org.nekorp.workflow.desktop.servicio.validacion.imp.ValidacionRangoMoneda;
import org.nekorp.workflow.desktop.view.binding.Bindable;
import org.nekorp.workflow.desktop.view.binding.BindingManager;
import org.nekorp.workflow.desktop.view.model.costo.RegistroCostoVB;
import org.nekorp.workflow.desktop.view.model.costo.RegistroOtrosGastosVB;
import org.nekorp.workflow.desktop.view.model.currency.MonedaVB;
import org.nekorp.workflow.desktop.view.model.servicio.GrupoCostoVB;
import org.nekorp.workflow.desktop.view.model.servicio.ServicioVB;
import org.nekorp.workflow.desktop.view.model.validacion.ValidacionRegistroCosto;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
/**
* @author Nekorp
*
*/
@Scope("prototype")
@Component("costoServicioTableModel")
/**
 * Swing table model for the cost rows of a single cost group (GrupoCostoVB).
 * Rows are RegistroCostoVB proxies; cell reads/writes go through reflection
 * (metodosGet for getters, atributos for writable properties) and stay in sync
 * with the shared ServicioVB model through the BindingManager.
 */
public class CostoServicioTableModel extends AbstractTableModel implements Bindable {
// The cost group this table instance displays; rows of other groups are filtered out.
private GrupoCostoVB grupo;
// Column headers; empty entries are derived/read-only columns.
private String[] nombresColumas = new String[]{
"Tipo",
"Concepto",
"Cantidad",
"Precio Unitario",
"",
"IVA",
"Precio Cliente",
"Utilidad",
"Subtotal",
"",
"IVA"
};
@Autowired
private RegistroCostoFactory factory;
@Autowired
private BindingManager<Bindable> bindingManager;
@Autowired
private ProxyUtil proxyUtil;
@Autowired
@Qualifier(value="servicio")
private ServicioVB viewServicioModel;
// Values whose next binding update must be skipped (set via ignoreUpdate).
private LinkedList<Object> ignore;
// Rows currently shown, in table order.
private List<RegistroCostoVB> datos;
// Getter name per column, indexed by column; drives getValueAt/getColumnClass.
private List<String> metodosGet;
// Writable property name per column; "" marks a read-only (derived) column.
private List<String> atributos;
private boolean editable;
@Autowired
private ValidacionRegistroCosto validacionRegistroCosto;
@Autowired
private ValidacionBeanFactory factoryValidacion;
private ValidacionBindable validacionConceptoBinding;
private ValidacionBindable validacionCantidadBinding;
private ValidacionBindable validacionPrecioUnitarioBinding;
public CostoServicioTableModel() {
this.ignore = new LinkedList<>();
this.datos = new LinkedList<>();
metodosGet = new LinkedList<>();
metodosGet.add("getSubtipo");
metodosGet.add("getConcepto");
metodosGet.add("getCantidad");
metodosGet.add("getPrecioUnitario");
metodosGet.add("isPrecioUnitarioConIVA");
metodosGet.add("getIvaPrecioUnitario");
metodosGet.add("getPrecioCliente");
metodosGet.add("getUtilidad");
metodosGet.add("getSubtotal");
metodosGet.add("isSubtotalConIVA");
metodosGet.add("getIvaSubtotal");
atributos = new LinkedList<>();
atributos.add("subtipo");
atributos.add("concepto");
atributos.add("cantidad");
atributos.add("precioUnitario");
atributos.add("precioUnitarioConIVA");
atributos.add("");
atributos.add("precioCliente");
atributos.add("");
atributos.add("");
atributos.add("subtotalConIVA");
atributos.add("");
}
// Builds the validators (required concepto, cantidad >= 0, precioUnitario >= 0)
// that are re-bound to the last row whenever the row set changes (see updateModel).
@PostConstruct
public void inicializa() {
validacionConceptoBinding = new ValidacionBindable();
validacionConceptoBinding.setTarget(this.validacionRegistroCosto);
validacionConceptoBinding.setValidationResult("conceptoOk");
CampoObligatorioValidacion conceptoVld = new CampoObligatorioValidacion();
conceptoVld.setFailMessage("concepto obligatorio");
conceptoVld.setFactory(factoryValidacion);
validacionConceptoBinding.setValidador(conceptoVld);
validacionCantidadBinding = new ValidacionBindable();
validacionCantidadBinding.setTarget(this.validacionRegistroCosto);
validacionCantidadBinding.setValidationResult("cantidadOk");
ValidacionRangoInteger cantidadVld = new ValidacionRangoInteger();
cantidadVld.setMin(0);
cantidadVld.setIncMin(true);
cantidadVld.setOutOfRangeMessage("la cantidad debe ser mayor o igual a 0");
cantidadVld.setFactory(factoryValidacion);
validacionCantidadBinding.setValidador(cantidadVld);
validacionPrecioUnitarioBinding = new ValidacionBindable();
validacionPrecioUnitarioBinding.setTarget(this.validacionRegistroCosto);
validacionPrecioUnitarioBinding.setValidationResult("precioUnitarioOk");
ValidacionRangoMoneda precioUnitarioVld = new ValidacionRangoMoneda();
precioUnitarioVld.setMin(0d);
precioUnitarioVld.setIncMin(true);
precioUnitarioVld.setOutOfRangeMessage("el precio unitario debe ser mayor o igual a 0");
precioUnitarioVld.setFactory(factoryValidacion);
validacionPrecioUnitarioBinding.setValidador(precioUnitarioVld);
}
public void setEditable(boolean editable) {
this.editable = editable;
}
@Override
public String getColumnName(int column) {
return nombresColumas[column];
}
// Resolves the column class from the getter's return type; primitive boolean
// is boxed so JTable picks its checkbox renderer/editor.
@Override
public Class<?> getColumnClass(int columnIndex) {
Class<?> r = MethodUtils.getAccessibleMethod(RegistroCostoVB.class,
metodosGet.get(columnIndex), new Class[]{}).getReturnType();
if (r.isPrimitive() && r.getName().equals("boolean")) {
return Boolean.class;
}
return r;
}
// A cell is editable when the model is editable, the column maps to a writable
// property, the row is not an "otros gastos" record (column 0) and, for the
// "Precio Cliente" column (6), the row is not an Insumo.
@Override
public boolean isCellEditable(int rowIndex, int columnIndex) {
if (!editable) {
return false;
}
if (columnIndex == 0 && this.datos.get(rowIndex) instanceof RegistroOtrosGastosVB) {
return false;
}
if (columnIndex == 6 ) {
if (StringUtils.equals("Insumo", this.datos.get(rowIndex).getSubtipo())) {
return false;
}
}
return !this.atributos.get(columnIndex).equals("");
}
@Override
public int getRowCount() {
return this.datos.size();
}
@Override
public int getColumnCount() {
return nombresColumas.length;
}
// Reads the cell by invoking the column's getter reflectively on the row bean.
@Override
public Object getValueAt(int rowIndex, int columnIndex) {
try {
return MethodUtils.invokeMethod(this.datos.get(rowIndex), this.metodosGet.get(columnIndex), new Object[]{});
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ex) {
throw new IllegalArgumentException("Mal configurado el modelo de la tabla costos", ex);
}
}
@Override
public void setValueAt(Object value, int row, int col) {
// TODO: this is just a patch
// find out why an update is requested for a row that no longer exists
if (row >= this.datos.size()) {
return;
}
try {
if (col > 1) {
RegistroCostoVB dato = this.datos.get(row);
if (col == 2) {
// cantidad: a cleared cell is coerced to 0 instead of null
if(value != null) {
PropertyUtils.setProperty(dato, this.atributos.get(col), value);
} else {
PropertyUtils.setProperty(dato, this.atributos.get(col), Integer.valueOf(0));
}
}
if (col == 3 || col == 6) {
// money columns arrive as text from the editor and are parsed into MonedaVB
PropertyUtils.setProperty(dato, this.atributos.get(col), MonedaVB.valueOf((String)value));
}
if (col == 4 || col == 9) {
// boolean "con IVA" flags are stored as-is
PropertyUtils.setProperty(dato, this.atributos.get(col), value);
}
fireTableCellUpdated(row, col);
// also refresh the derived columns (IVA 5, utilidad 7, subtotal 8, IVA subtotal 10)
// NOTE(review): hard-coded dependent column indexes — fragile if columns change
fireTableCellUpdated(row, 5);
fireTableCellUpdated(row, 7);
fireTableCellUpdated(row, 8);
fireTableCellUpdated(row, 10);
} else {
RegistroCostoVB dato = this.datos.get(row);
PropertyUtils.setProperty(dato, this.atributos.get(col), value);
fireTableCellUpdated(row, col);
}
} catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ex) {
throw new IllegalArgumentException("Mal configurado el modelo de la tabla costos", ex);
}
}
// Creates a new row of the given type, binds its editable properties and pushes
// it into the shared service model; the row-insert table event is expected to
// arrive through the binding round-trip (updateModel), not fired here.
public void addRegistro(String tipo) {
RegistroCostoVB nuevo = factory.getRegistroCosto(tipo);
nuevo.setGrupo(this.grupo);
this.datos.add(nuevo);
for (String property: this.atributos) {
if (!property.equals("")) {
this.bindingManager.registerBind(nuevo, property, this);
}
}
List<RegistroCostoVB> costos = viewServicioModel.getCostos();
costos.add(nuevo);
viewServicioModel.setCostos(costos);
//this.fireTableRowsInserted(this.datos.size(), this.datos.size());
}
// Removes a row, unbinding its properties and removing it from the service model.
public void deleteRegistro(int index) {
RegistroCostoVB old = this.datos.remove(index);
for (String property: this.atributos) {
if (!property.equals("")) {
this.bindingManager.removeBind(old, property, this);
}
}
List<RegistroCostoVB> costos = viewServicioModel.getCostos();
costos.remove(old);
viewServicioModel.setCostos(costos);
this.fireTableRowsDeleted(index, index);
}
// Removes every row of this group from the service model; the table refresh is
// expected to come back through the binding update (updateModel).
public void deleteAll() {
List<RegistroCostoVB> costos = viewServicioModel.getCostos();
for (RegistroCostoVB x: this.datos) {
for (String property: this.atributos) {
if (!property.equals("")) {
this.bindingManager.removeBind(x, property, this);
}
}
costos.remove(x);
}
viewServicioModel.setCostos(costos);
}
// Finds the row index whose proxy wraps the given target bean (identity compare).
private int getIndexProxy(RegistroCostoVB origen) {
for (RegistroCostoVB proxy: this.datos) {
if (proxyUtil.getTarget(proxy) == origen) {
return this.datos.indexOf(proxy);
}
}
return -1;
}
public int getIndexOf(RegistroCostoVB origen) {
return this.getIndexProxy(origen);
}
// Binding callback: either the whole cost list changed (origen is the service)
// or a single row property changed (origen is a cost record).
@Override
public void updateModel(Object origen, String property, Object value) {
if(!ignore.remove(value)){
if (origen instanceof ServicioVB) {
this.bindingManager.clearBindings(this);
// unchecked: the binding contract delivers the costos list here
List<RegistroCostoVB> datosOrigen = (List<RegistroCostoVB>) value;
this.datos = new LinkedList<>();
for (RegistroCostoVB x: datosOrigen) {
if (x.getGrupo().equals(this.grupo)) {
this.datos.add(x);
for (String prp: this.atributos) {
if (!prp.equals("")) {
this.bindingManager.registerBind(x, prp, this);
}
}
}
}
if (datos.size() > 0) {
// validators always track the last row (the one being edited)
RegistroCostoVB ultimo = datos.get(datos.size() - 1);
bindingManager.clearBindings(validacionConceptoBinding);
bindingManager.clearBindings(validacionCantidadBinding);
bindingManager.clearBindings(validacionPrecioUnitarioBinding);
bindingManager.registerBind(ultimo, "concepto", validacionConceptoBinding);
bindingManager.registerBind(ultimo, "cantidad", validacionCantidadBinding);
bindingManager.registerBind(ultimo, "precioUnitario", validacionPrecioUnitarioBinding);
}
this.fireTableDataChanged();
}
if (origen instanceof RegistroCostoVB) {
// update a single element
int row = this.getIndexProxy((RegistroCostoVB)origen);
int col = this.atributos.indexOf(property);
fireTableCellUpdated(row, col);
if (col > 1) {
// refresh derived columns (IVA 5, utilidad 7, subtotal 8, IVA subtotal 10)
// NOTE(review): hard-coded dependent column indexes — fragile if columns change
fireTableCellUpdated(row, 5);
fireTableCellUpdated(row, 7);
fireTableCellUpdated(row, 8);
fireTableCellUpdated(row, 10);
}
}
}
}
@Override
public void ignoreUpdate(Object value) {
this.ignore.add(value);
}
@Override
public Object getModelValue() {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
public void bindListener(Object target, String property) {
// delegated to the table update methods
}
public List<RegistroCostoVB> getDatos() {
return this.datos;
}
public void setGrupo(GrupoCostoVB grupo) {
this.grupo = grupo;
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.impl.source;
import com.intellij.codeInsight.javadoc.JavaDocUtil;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleSettingsFacade;
import com.intellij.psi.filters.*;
import com.intellij.psi.filters.element.ModifierFilter;
import com.intellij.psi.impl.CheckUtil;
import com.intellij.psi.impl.DebugUtil;
import com.intellij.psi.impl.PsiImplUtil;
import com.intellij.psi.impl.source.resolve.ClassResolverProcessor;
import com.intellij.psi.impl.source.resolve.JavaResolveUtil;
import com.intellij.psi.impl.source.resolve.ResolveCache;
import com.intellij.psi.impl.source.resolve.VariableResolverProcessor;
import com.intellij.psi.impl.source.tree.*;
import com.intellij.psi.impl.source.tree.java.PsiReferenceExpressionImpl;
import com.intellij.psi.infos.CandidateInfo;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.scope.ElementClassFilter;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.scope.processor.FilterScopeProcessor;
import com.intellij.psi.scope.util.PsiScopesUtil;
import com.intellij.psi.tree.ChildRoleBase;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ProcessingContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static com.intellij.patterns.PsiJavaPatterns.psiElement;
public class PsiJavaCodeReferenceElementImpl extends CompositePsiElement implements PsiAnnotatedJavaCodeReferenceElement, SourceJavaCodeReference {
private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.PsiJavaCodeReferenceElementImpl");
private volatile String myCachedQName;
private volatile String myCachedNormalizedText;
private volatile Kind myKindWhenDummy = Kind.CLASS_NAME_KIND;
/**
 * What this java code reference can resolve to, derived from its parent element.
 * NOTE: do not reorder — the deprecated int-based {@code getKind} exposes
 * {@code ordinal() + 1} to preserve the historical 1-based constants.
 */
public enum Kind {
CLASS_NAME_KIND,
PACKAGE_NAME_KIND,
CLASS_OR_PACKAGE_NAME_KIND,
CLASS_FQ_NAME_KIND,
CLASS_FQ_OR_PACKAGE_NAME_KIND,
CLASS_IN_QUALIFIED_NEW_KIND,
}
@SuppressWarnings("AssignmentToStaticFieldFromInstanceMethod") private final int myHC = ourHC++;
// Creates the composite PSI node for a java code reference element.
public PsiJavaCodeReferenceElementImpl() {
super(JavaElementType.JAVA_CODE_REFERENCE);
}
// Identity-stable hash assigned at construction (myHC); final so subclasses
// cannot break the equals/hashCode relationship used by the PSI caches.
@Override
public final int hashCode() {
return myHC;
}
/**
 * Navigation offset of this reference: the start of its name identifier when
 * one exists, otherwise the element's own start offset.
 */
@Override
public int getTextOffset() {
  ASTNode nameNode = getReferenceNameNode();
  if (nameNode == null) {
    return super.getTextOffset();
  }
  return nameNode.getStartOffset();
}
// Forces the reference kind for elements parsed inside a dummy holder, where
// the kind cannot be derived from the (detached) parent; asserts the parent
// really is a dummy to catch misuse.
public void setKindWhenDummy(@NotNull Kind kind) {
IElementType type = getTreeParent().getElementType();
LOG.assertTrue(isDummy(type), type);
myKindWhenDummy = kind;
}
/** True when the parent type marks a detached/dummy tree (kind not derivable). */
private static boolean isDummy(final IElementType type) {
  if (type == TokenType.DUMMY_HOLDER) {
    return true;
  }
  return type == JavaElementType.DUMMY_ELEMENT;
}
/**
 * Derives what this reference may resolve to (class, package, both, FQ name,
 * or class in a qualified new-expression) purely from the parent element type.
 * Falls back to CLASS_NAME_KIND with diagnostics for unknown parents.
 */
@NotNull
public Kind getKindEnum(@NotNull PsiFile containingFile) {
if (!containingFile.isValid()) { // optimization to avoid relatively expensive this.isValid check
// but still provide diagnostics for this element and not its containing DummyHolder file
PsiUtilCore.ensureValid(this);
}
CompositeElement treeParent = getTreeParent();
IElementType i = treeParent.getElementType();
// detached tree: use the explicitly-set kind
if (isDummy(i)) {
return myKindWhenDummy;
}
if (i == JavaElementType.TYPE) {
return treeParent.getTreeParent().getPsi() instanceof PsiTypeCodeFragment ? Kind.CLASS_OR_PACKAGE_NAME_KIND : Kind.CLASS_NAME_KIND;
}
// positions that name a class; a qualifier there may still be a package
if (i == JavaElementType.EXTENDS_LIST ||
i == JavaElementType.IMPLEMENTS_LIST ||
i == JavaElementType.EXTENDS_BOUND_LIST ||
i == JavaElementType.THROWS_LIST ||
i == JavaElementType.THIS_EXPRESSION ||
i == JavaElementType.SUPER_EXPRESSION ||
i == JavaDocElementType.DOC_METHOD_OR_FIELD_REF ||
i == JavaDocElementType.DOC_TAG_VALUE_ELEMENT ||
i == JavaElementType.REFERENCE_PARAMETER_LIST ||
i == JavaElementType.ANNOTATION ||
i == JavaElementType.USES_STATEMENT ||
i == JavaElementType.PROVIDES_STATEMENT ||
i == JavaElementType.PROVIDES_WITH_LIST) {
return isQualified() ? Kind.CLASS_OR_PACKAGE_NAME_KIND : Kind.CLASS_NAME_KIND;
}
// qualified new (outer.new Inner()) resolves the class against the qualifier
if (i == JavaElementType.NEW_EXPRESSION) {
final ASTNode qualifier = treeParent.findChildByRole(ChildRole.QUALIFIER);
return qualifier != null ? Kind.CLASS_IN_QUALIFIED_NEW_KIND : Kind.CLASS_NAME_KIND;
}
if (i == JavaElementType.ANONYMOUS_CLASS) {
if (treeParent.getChildRole(this) == ChildRole.BASE_CLASS_REFERENCE) {
CompositeElement granny = treeParent.getTreeParent();
IElementType gType = granny.getElementType();
LOG.assertTrue(gType == JavaElementType.NEW_EXPRESSION, gType);
final ASTNode qualifier = granny.findChildByRole(ChildRole.QUALIFIER);
return qualifier != null ? Kind.CLASS_IN_QUALIFIED_NEW_KIND : Kind.CLASS_NAME_KIND;
}
else {
return Kind.CLASS_OR_PACKAGE_NAME_KIND; // incomplete code
}
}
if (i == JavaElementType.PACKAGE_STATEMENT || i == JavaElementType.EXPORTS_STATEMENT || i == JavaElementType.OPENS_STATEMENT) {
return Kind.PACKAGE_NAME_KIND;
}
// "import a.b.*" may name a package, "import a.b.C" must name a class
if (i == JavaElementType.IMPORT_STATEMENT) {
boolean isOnDemand = SourceTreeToPsiMap.<PsiImportStatement>treeToPsiNotNull(treeParent).isOnDemand();
return isOnDemand ? Kind.CLASS_FQ_OR_PACKAGE_NAME_KIND : Kind.CLASS_FQ_NAME_KIND;
}
if (i == JavaElementType.IMPORT_STATIC_STATEMENT) {
return Kind.CLASS_FQ_OR_PACKAGE_NAME_KIND;
}
// qualifier of another reference: widen CLASS-only kinds to allow packages
if (i == JavaElementType.JAVA_CODE_REFERENCE) {
Kind parentKind = ((PsiJavaCodeReferenceElementImpl)treeParent).getKindEnum(containingFile);
if (parentKind == Kind.CLASS_NAME_KIND) {
return Kind.CLASS_OR_PACKAGE_NAME_KIND;
}
if (parentKind == Kind.CLASS_FQ_NAME_KIND) {
return Kind.CLASS_FQ_OR_PACKAGE_NAME_KIND;
}
return parentKind;
}
if (i == JavaElementType.CLASS || i == JavaElementType.PARAMETER_LIST || i == TokenType.ERROR_ELEMENT) {
return Kind.CLASS_OR_PACKAGE_NAME_KIND;
}
if (i == JavaElementType.IMPORT_STATIC_REFERENCE) {
return Kind.CLASS_FQ_OR_PACKAGE_NAME_KIND;
}
// javadoc references: FQ names inside package-info, short names elsewhere
if (i == JavaDocElementType.DOC_TAG ||
i == JavaDocElementType.DOC_INLINE_TAG ||
i == JavaDocElementType.DOC_REFERENCE_HOLDER ||
i == JavaDocElementType.DOC_TYPE_HOLDER) {
PsiDocComment docComment = PsiTreeUtil.getParentOfType(this, PsiDocComment.class);
if (JavaDocUtil.isInsidePackageInfo(docComment)) {
return Kind.CLASS_FQ_OR_PACKAGE_NAME_KIND;
}
return Kind.CLASS_OR_PACKAGE_NAME_KIND;
}
if (isCodeFragmentType(i)) {
PsiJavaCodeReferenceCodeFragment fragment = (PsiJavaCodeReferenceCodeFragment)treeParent.getPsi();
return fragment.isClassesAccepted() ? Kind.CLASS_FQ_OR_PACKAGE_NAME_KIND : Kind.PACKAGE_NAME_KIND;
}
// unknown parent: log rich diagnostics and fall back to the most common kind
diagnoseUnknownParent();
return Kind.CLASS_NAME_KIND;
}
/**
 * @deprecated Use {@link #getKindEnum(PsiFile)} instead
 */
@Deprecated
public int getKind(@NotNull PsiFile containingFile) {
// +1 keeps the historical 1-based int constants (CLASS_NAME_KIND == 1, ...)
return getKindEnum(containingFile).ordinal() + 1;
}
// Logs an error with the chain of expression ancestors and a tree dump when
// getKindEnum meets a parent element type it does not recognize.
private void diagnoseUnknownParent() {
CompositeElement parent = getTreeParent();
IElementType i = parent.getElementType();
StringBuilder msg = new StringBuilder("Unknown parent for java code reference: '").append(parent).append("'; Type: ").append(i).append(";\n");
// climb out of the expression chain so the dump shows the real context
while (parent != null && parent.getPsi() instanceof PsiExpression) {
parent = parent.getTreeParent();
msg.append(" Parent: '").append(parent).append("'; \n");
}
if (parent != null) {
msg.append(DebugUtil.treeToString(parent, false));
}
LOG.error(msg.toString());
}
/** True when the parent type belongs to a code fragment (generic token or fragment element type). */
private static boolean isCodeFragmentType(IElementType type) {
  if (type instanceof ICodeFragmentElementType) {
    return true;
  }
  return type == TokenType.CODE_FRAGMENT;
}
/**
 * Deletes a child of this reference with structure-preserving fixups:
 * removing the qualifier also removes the dot and re-homes any leading
 * elements (e.g. annotations) onto a neighbour modifier list or the parent;
 * removing the type-argument list replaces it with an empty one.
 */
@Override
public void deleteChildInternal(@NotNull ASTNode child) {
if (getChildRole(child) == ChildRole.QUALIFIER) {
ASTNode dot = findChildByType(JavaTokenType.DOT, child);
assert dot != null : this;
// drop "qualifier." in one range
deleteChildRange(child.getPsi(), dot.getPsi());
ASTNode ref = findChildByRole(ChildRole.REFERENCE_NAME);
assert ref != null : this;
PsiElement lastChild = ref.getPsi().getPrevSibling();
if (lastChild != null) {
// move anything left before the name (type annotations etc.) out of the reference
PsiElement modifierList = PsiImplUtil.findNeighbourModifierList(this);
if (modifierList != null) {
modifierList.addRange(getFirstChild(), lastChild);
}
else {
getParent().addRangeBefore(getFirstChild(), lastChild, this);
}
// during previous operations, formatter support could have altered the children (if they're whitespace),
// so we retrieve and check them again
if (ref != getFirstChild()) {
deleteChildRange(getFirstChild(), ref.getPsi().getPrevSibling());
}
}
}
else if (child.getElementType() == JavaElementType.REFERENCE_PARAMETER_LIST) {
// keep the tree shape valid: the parameter list child must always exist
replaceChildInternal(child, PsiReferenceExpressionImpl.createEmptyRefParameterList(getProject()));
}
else {
super.deleteChildInternal(child);
}
}
/** Locates the child node playing the given (unique) role, or null when absent. */
@Override
public final ASTNode findChildByRole(final int role) {
    LOG.assertTrue(ChildRole.isUnique(role), role);
    if (role == ChildRole.REFERENCE_NAME) {
        // The identifier closest to the end is the reference name.
        return TreeUtil.findChildBackward(this, JavaTokenType.IDENTIFIER);
    }
    if (role == ChildRole.REFERENCE_PARAMETER_LIST) {
        TreeElement last = getLastChildNode();
        return last.getElementType() == JavaElementType.REFERENCE_PARAMETER_LIST ? last : null;
    }
    if (role == ChildRole.QUALIFIER) {
        return findChildByType(JavaElementType.JAVA_CODE_REFERENCE);
    }
    if (role == ChildRole.DOT) {
        return findChildByType(JavaTokenType.DOT);
    }
    return null;
}
/** Maps a direct child of this reference to its structural role. */
@Override
public final int getChildRole(@NotNull final ASTNode child) {
    LOG.assertTrue(child.getTreeParent() == this);
    // The four relevant element types are mutually exclusive, so check order is irrelevant.
    final IElementType type = child.getElementType();
    if (type == JavaTokenType.IDENTIFIER) {
        return ChildRole.REFERENCE_NAME;
    }
    if (type == JavaTokenType.DOT) {
        return ChildRole.DOT;
    }
    if (type == JavaElementType.JAVA_CODE_REFERENCE) {
        return ChildRole.QUALIFIER;
    }
    if (type == JavaElementType.REFERENCE_PARAMETER_LIST) {
        return ChildRole.REFERENCE_PARAMETER_LIST;
    }
    return ChildRoleBase.NONE;
}
@Override
@NotNull
public String getCanonicalText() {
    // Unannotated canonical text, resolved against this element's own file.
    return getCanonicalText(false, null, getContainingFile());
}
@NotNull
@Override
public String getCanonicalText(boolean annotated, @Nullable PsiAnnotation[] annotations) {
    // Delegates to the private overload with this element's containing file as context.
    return getCanonicalText(annotated, annotations, getContainingFile());
}
/**
 * Builds the canonical text of this reference for the given context file.
 * For class-like kinds the reference is resolved and the target's qualified name
 * (optionally with type-use annotations and type arguments) is reconstructed;
 * package-like and fully-qualified kinds just use the normalized source text.
 */
@NotNull
private String getCanonicalText(boolean annotated, @Nullable PsiAnnotation[] annotations, @NotNull PsiFile containingFile) {
    Kind kind = getKindEnum(containingFile);
    switch (kind) {
        case CLASS_NAME_KIND:
        case CLASS_OR_PACKAGE_NAME_KIND:
        case CLASS_IN_QUALIFIED_NEW_KIND:
            JavaResolveResult[] results = PsiImplUtil.multiResolveImpl(containingFile.getProject(), containingFile, this, false, OurGenericsResolver.INSTANCE);
            PsiElement target = results.length == 1 ? results[0].getElement() : null;
            if (target instanceof PsiClass) {
                StringBuilder buffer = new StringBuilder();
                PsiClass aClass = (PsiClass)target;
                PsiElement qualifier = getQualifier();
                String prefix = null;
                if (qualifier instanceof PsiJavaCodeReferenceElementImpl) {
                    // Recurse into the explicit qualifier; it consumes the passed-in annotations.
                    prefix = ((PsiJavaCodeReferenceElementImpl)qualifier).getCanonicalText(annotated, annotations, containingFile);
                    annotations = null;
                }
                else {
                    // No explicit qualifier: derive the prefix from the resolved class's package.
                    String fqn = aClass.getQualifiedName();
                    if (fqn != null) {
                        prefix = StringUtil.getPackageName(fqn);
                    }
                }
                if (!StringUtil.isEmpty(prefix)) {
                    buffer.append(prefix);
                    buffer.append('.');
                }
                if (annotated) {
                    List<PsiAnnotation> list = annotations != null ? Arrays.asList(annotations) : getAnnotations();
                    PsiNameHelper.appendAnnotations(buffer, list, true);
                }
                buffer.append(aClass.getName());
                PsiNameHelper.appendTypeArgs(buffer, getTypeParameters(), true, annotated);
                return buffer.toString();
            }
            else if (target instanceof PsiPackage) {
                return ((PsiPackage)target).getQualifiedName();
            }
            else {
                LOG.assertTrue(target == null, target);
                // Unresolved: fall back to the source text stripped of whitespace/comments.
                return getNormalizedText();
            }
        case PACKAGE_NAME_KIND:
        case CLASS_FQ_NAME_KIND:
        case CLASS_FQ_OR_PACKAGE_NAME_KIND:
            return getNormalizedText();
        default:
            LOG.error(kind);
            // NOTE(review): returns null despite @NotNull after logging an impossible kind — verify.
            return null;
    }
}
@Override
public PsiReference getReference() {
    // This element is its own reference.
    return this;
}
@Override
public final PsiElement resolve() {
    // Single-result resolve; null when resolution fails or is ambiguous.
    return advancedResolve(false).getElement();
}
/**
 * Computes the range of the reference name within {@code refElement}; for an
 * incomplete reference ("Foo.") an empty range right after the dot is returned.
 */
@NotNull
public static TextRange calcRangeInElement(CompositePsiElement refElement) {
    TreeElement name = (TreeElement)refElement.findChildByRole(ChildRole.REFERENCE_NAME);
    if (name != null) {
        return TextRange.from(name.getStartOffsetInParent(), name.getTextLength());
    }
    TreeElement dot = (TreeElement)refElement.findChildByRole(ChildRole.DOT);
    if (dot == null) {
        throw new IllegalStateException(refElement.toString());
    }
    return TextRange.from(dot.getStartOffsetInParent() + dot.getTextLength(), 0);
}
/**
 * Resolver plugged into the resolve cache. Resolves by the reference's kind,
 * then applies fallbacks in order: variables for incomplete code, a package
 * interpretation for unresolved class names, and finally a direct class lookup
 * by the full qualified-name text.
 */
private static final class OurGenericsResolver implements ResolveCache.PolyVariantContextResolver<PsiJavaReference> {
    private static final OurGenericsResolver INSTANCE = new OurGenericsResolver();
    @NotNull
    @Override
    public ResolveResult[] resolve(@NotNull PsiJavaReference ref, @NotNull PsiFile containingFile, boolean incompleteCode) {
        PsiJavaCodeReferenceElementImpl referenceElement = (PsiJavaCodeReferenceElementImpl)ref;
        Kind kind = referenceElement.getKindEnum(containingFile);
        JavaResolveResult[] result = referenceElement.resolve(kind, containingFile);
        if (incompleteCode && result.length == 0 && kind != Kind.CLASS_FQ_NAME_KIND && kind != Kind.CLASS_FQ_OR_PACKAGE_NAME_KIND) {
            // Incomplete code: the name may actually be a variable rather than a type.
            VariableResolverProcessor processor = new VariableResolverProcessor(referenceElement, containingFile);
            PsiScopesUtil.resolveAndWalk(processor, referenceElement, null, true);
            result = processor.getResult();
            if (result.length == 0 && kind == Kind.CLASS_NAME_KIND) {
                result = referenceElement.resolve(Kind.PACKAGE_NAME_KIND, containingFile);
            }
        }
        if (result.length == 0 && (kind == Kind.CLASS_OR_PACKAGE_NAME_KIND || kind == Kind.CLASS_NAME_KIND)) {
            // Last resort: interpret the whole reference text as a fully qualified class name.
            String qualifiedName = referenceElement.getClassNameText();
            if (qualifiedName != null) {
                result = tryClassResult(qualifiedName, referenceElement, result);
            }
        }
        JavaResolveUtil.substituteResults(referenceElement, result);
        return result;
    }
}
/**
 * Attempts a global class lookup for {@code qualifiedName}; returns the original
 * {@code result} when the lookup is not applicable or finds nothing.
 */
public static JavaResolveResult[] tryClassResult(String qualifiedName, PsiElement referenceElement, JavaResolveResult[] result) {
    String packageName = StringUtil.getPackageName(qualifiedName);
    if (StringUtil.isEmptyOrSpaces(packageName)) {
        return result;
    }
    Project project = referenceElement.getProject();
    //class is always preferred to package => when such a class exists, the qualified name can point to inner class only and that check must already have been failed
    PsiClass outerCandidate = PsiResolveHelper.SERVICE.getInstance(project).resolveReferencedClass(packageName, referenceElement);
    if (outerCandidate != null) {
        return result;
    }
    PsiClass found = JavaPsiFacade.getInstance(project).findClass(qualifiedName, referenceElement.getResolveScope());
    if (found == null) {
        return result;
    }
    return new JavaResolveResult[]{new CandidateInfo(found, PsiSubstitutor.EMPTY, referenceElement, false)};
}
/** Returns the single resolve result, or {@link JavaResolveResult#EMPTY} when not exactly one. */
@Override
@NotNull
public JavaResolveResult advancedResolve(final boolean incompleteCode) {
    JavaResolveResult[] candidates = multiResolve(incompleteCode);
    if (candidates.length == 1) {
        return candidates[0];
    }
    return JavaResolveResult.EMPTY;
}
@Override
@NotNull
public JavaResolveResult[] multiResolve(boolean incompleteCode) {
    // Cached multi-resolve driven by OurGenericsResolver.
    return PsiImplUtil.multiResolveImpl(this, incompleteCode, OurGenericsResolver.INSTANCE);
}
/** Substitutor mapping {@code psiClass}'s type parameters to this reference's explicit type arguments. */
@NotNull
private PsiSubstitutor updateSubstitutor(@NotNull PsiClass psiClass) {
    return PsiSubstitutor.EMPTY.putAll(psiClass, getTypeParameters());
}
/**
 * Resolves this reference according to {@code kind}: FQ kinds do a direct
 * global lookup by text, short-name kinds walk the enclosing scopes, and the
 * OR kinds try the class interpretation with a package fallback (reversed
 * inside import statements per the JLS single-type-import shadowing rule).
 */
@NotNull
private JavaResolveResult[] resolve(@NotNull Kind kind, @NotNull PsiFile containingFile) {
    ProgressManager.checkCanceled();
    switch (kind) {
        case CLASS_FQ_NAME_KIND:
            // Fully qualified: direct lookup by the normalized reference text.
            String text = getNormalizedText();
            if (!StringUtil.isEmptyOrSpaces(text)) {
                PsiClass aClass = JavaPsiFacade.getInstance(containingFile.getProject()).findClass(text, getResolveScope());
                if (aClass != null) {
                    return new JavaResolveResult[]{new CandidateInfo(aClass, updateSubstitutor(aClass), this, false)};
                }
            }
            return JavaResolveResult.EMPTY_ARRAY;
        case CLASS_IN_QUALIFIED_NEW_KIND: {
            // "qualifier.new Name()": Name resolves among members of the qualifier's class type.
            PsiElement parent = getParent();
            if (parent instanceof JavaDummyHolder) {
                parent = parent.getContext();
            }
            if (parent instanceof PsiAnonymousClass) {
                parent = parent.getParent();
            }
            final PsiExpression qualifier;
            if (parent instanceof PsiNewExpression) {
                qualifier = ((PsiNewExpression)parent).getQualifier();
                LOG.assertTrue(qualifier != null);
            }
            else if (parent instanceof PsiJavaCodeReferenceElement) {
                return JavaResolveResult.EMPTY_ARRAY;
            }
            else {
                LOG.error("Invalid java reference: "+ parent);
                return JavaResolveResult.EMPTY_ARRAY;
            }
            final PsiType qualifierType = qualifier.getType();
            if (qualifierType == null) return JavaResolveResult.EMPTY_ARRAY;
            if (!(qualifierType instanceof PsiClassType)) return JavaResolveResult.EMPTY_ARRAY;
            final JavaResolveResult result = PsiUtil.resolveGenericsClassInType(qualifierType);
            final PsiElement resultElement = result.getElement();
            if (resultElement == null) return JavaResolveResult.EMPTY_ARRAY;
            final PsiElement classNameElement = getReferenceNameElement();
            if (!(classNameElement instanceof PsiIdentifier)) return JavaResolveResult.EMPTY_ARRAY;
            final String className = classNameElement.getText();
            final ClassResolverProcessor processor = new ClassResolverProcessor(className, this, containingFile);
            resultElement.processDeclarations(processor, ResolveState.initial().put(PsiSubstitutor.KEY, result.getSubstitutor()), this, this);
            return processor.getResult();
        }
        case CLASS_NAME_KIND:
            // Short class name: walk enclosing scopes and imports.
            final PsiElement classNameElement = getReferenceNameElement();
            if (!(classNameElement instanceof PsiIdentifier)) return JavaResolveResult.EMPTY_ARRAY;
            final String className = classNameElement.getText();
            final ClassResolverProcessor processor = new ClassResolverProcessor(className, this, containingFile);
            PsiScopesUtil.resolveAndWalk(processor, this, null);
            return processor.getResult();
        case PACKAGE_NAME_KIND:
            String packageName = getNormalizedText();
            Project project = getManager().getProject();
            PsiPackage aPackage = JavaPsiFacade.getInstance(project).findPackage(packageName);
            if (aPackage != null && aPackage.isValid()) {
                return new JavaResolveResult[]{new CandidateInfo(aPackage, PsiSubstitutor.EMPTY, this, false)};
            }
            else if (JavaPsiFacade.getInstance(project).isPartOfPackagePrefix(packageName)) {
                // Not a real package, but a prefix of a configured package prefix.
                return CandidateInfo.RESOLVE_RESULT_FOR_PACKAGE_PREFIX_PACKAGE;
            }
            else {
                return JavaResolveResult.EMPTY_ARRAY;
            }
        case CLASS_FQ_OR_PACKAGE_NAME_KIND:
        case CLASS_OR_PACKAGE_NAME_KIND:
            Kind classKind = kind == Kind.CLASS_OR_PACKAGE_NAME_KIND ? Kind.CLASS_NAME_KIND : Kind.CLASS_FQ_NAME_KIND;
            JavaResolveResult[] result;
            // A single-type-import declaration D in a compilation unit C of package P
            // that imports a type named N shadows, throughout C, the declarations of
            // ... any top level type named N declared in another compilation unit of P.
            if (PsiTreeUtil.getParentOfType(this, PsiImportStatement.class) != null) {
                result = resolve(Kind.PACKAGE_NAME_KIND, containingFile);
                if (result.length == 0) {
                    result = resolve(classKind, containingFile);
                }
            }
            else {
                result = resolve(classKind, containingFile);
                if (result.length == 1 && !result[0].isAccessible()) {
                    // An inaccessible class loses to a package of the same name.
                    JavaResolveResult[] packageResult = resolve(Kind.PACKAGE_NAME_KIND, containingFile);
                    if (packageResult.length != 0) {
                        result = packageResult;
                    }
                }
                else if (result.length == 0) {
                    result = resolve(Kind.PACKAGE_NAME_KIND, containingFile);
                }
            }
            return result;
    }
    LOG.error(this);
    return JavaResolveResult.EMPTY_ARRAY;
}
/** Renames the reference by replacing its identifier with {@code newElementName}. */
@Override
public final PsiElement handleElementRename(@NotNull final String newElementName) throws IncorrectOperationException {
    PsiElement nameElement = getReferenceNameElement();
    if (nameElement == null) {
        throw new IncorrectOperationException();
    }
    PsiElement newIdentifier = JavaPsiFacade.getInstance(getProject()).getElementFactory().createIdentifier(newElementName);
    nameElement.replace(newIdentifier);
    return this;
}
/**
 * Rebinds this reference to point at {@code element}, rewriting its text as
 * needed. The element must be compatible with this reference's kind (class vs
 * package), otherwise an {@link IncorrectOperationException} is thrown.
 */
@Override
public PsiElement bindToElement(@NotNull final PsiElement element) throws IncorrectOperationException {
    PsiFile containingFile = getContainingFile();
    CheckUtil.checkWritable(containingFile);
    if (isReferenceTo(element)) return this;
    Kind kind = getKindEnum(containingFile);
    switch (kind) {
        case CLASS_NAME_KIND:
        case CLASS_FQ_NAME_KIND:
            if (!(element instanceof PsiClass)) {
                throw cannotBindError(element, kind);
            }
            return bindToClass((PsiClass)element, containingFile);
        case PACKAGE_NAME_KIND:
            if (!(element instanceof PsiPackage)) {
                throw cannotBindError(element, kind);
            }
            return bindToPackage((PsiPackage)element);
        case CLASS_OR_PACKAGE_NAME_KIND:
        case CLASS_FQ_OR_PACKAGE_NAME_KIND:
            if (element instanceof PsiClass) {
                return bindToClass((PsiClass)element, containingFile);
            }
            else if (element instanceof PsiPackage) {
                return bindToPackage((PsiPackage)element);
            }
            else {
                throw cannotBindError(element, kind);
            }
        case CLASS_IN_QUALIFIED_NEW_KIND:
            if (element instanceof PsiClass) {
                // Inside "qualifier.new Name()" only the short class name may appear.
                final PsiClass aClass = (PsiClass)element;
                final String name = aClass.getName();
                if (name == null) {
                    throw new IncorrectOperationException(aClass.toString());
                }
                final PsiJavaParserFacade parserFacade = JavaPsiFacade.getInstance(containingFile.getProject()).getParserFacade();
                final PsiJavaCodeReferenceElement ref = parserFacade.createReferenceFromText(name, getParent());
                getTreeParent().replaceChildInternal(this, (TreeElement)ref.getNode());
                return ref;
            }
            else {
                throw cannotBindError(element, kind);
            }
        default:
            LOG.error(kind);
            // NOTE(review): unreachable default returns null from a non-@Nullable method — verify.
            return null;
    }
}
/** Builds the standard "cannot bind" failure for an element/kind mismatch. */
@NotNull
private static IncorrectOperationException cannotBindError(@NotNull PsiElement element, @NotNull Kind kind) {
    return new IncorrectOperationException("Cannot bind to " + element+" of kind: "+kind);
}
/**
 * Replaces this reference with one pointing at {@code aClass}, preserving any
 * type-argument list, then shortens the result unless fully qualified names
 * must be kept at this position.
 */
private PsiElement bindToClass(@NotNull PsiClass aClass, @NotNull PsiFile containingFile) throws IncorrectOperationException {
    String qName = aClass.getQualifiedName();
    Project project = containingFile.getProject();
    boolean preserveQualification = JavaCodeStyleSettingsFacade.getInstance(project).useFQClassNames() && isFullyQualified(containingFile);
    JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
    if (qName == null) {
        // Local/anonymous class: only bindable when its short name already resolves to it here.
        qName = aClass.getName();
        assert qName != null : aClass;
        PsiClass psiClass = facade.getResolveHelper().resolveReferencedClass(qName, this);
        if (!getManager().areElementsEquivalent(psiClass, aClass)) {
            throw cannotBindError(aClass, getKindEnum(containingFile));
        }
    }
    else if (facade.findClass(qName, getResolveScope()) == null && !preserveQualification) {
        // Target class is outside this reference's resolve scope; leave the text as-is.
        return this;
    }
    else if (facade.getResolveHelper().resolveReferencedClass(qName, this) == null &&
             facade.getResolveHelper().resolveReferencedClass(StringUtil.getPackageName(qName), this) != null) {
        // The FQ name is shadowed but its prefix resolves to a class (inner-class case):
        // fall back to the short name.
        qName = aClass.getName();
        assert qName != null : aClass;
    }
    StringBuilder text = new StringBuilder(qName);
    PsiReferenceParameterList parameterList = getParameterList();
    if (parameterList != null) {
        // Append everything following the name through the type-argument list verbatim.
        PsiElement cur = getReferenceNameElement();
        while (cur != parameterList) {
            assert cur != null : getText();
            cur = cur.getNextSibling();
            text.append(cur.getText());
        }
    }
    PsiJavaCodeReferenceElement ref;
    try {
        ref = facade.getParserFacade().createReferenceFromText(text.toString(), getParent());
    }
    catch (IncorrectOperationException e) {
        throw new IncorrectOperationException(e.getMessage() + " [qname=" + qName + " class=" + aClass + ";" + aClass.getClass().getName() + "]");
    }
    PsiReferenceParameterList refParameterList = ref.getParameterList();
    if (parameterList != null && refParameterList != null) {
        refParameterList.replace(parameterList);
    }
    getTreeParent().replaceChildInternal(this, (TreeElement)ref.getNode());
    if (!preserveQualification) {
        JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(project);
        ref = (PsiJavaCodeReferenceElement)codeStyleManager.shortenClassReferences(ref, JavaCodeStyleManager.INCOMPLETE_CODE);
    }
    return ref;
}
/**
 * Annotations attached directly to this reference; an unqualified reference
 * additionally collects type-use annotations from a neighbouring modifier list.
 */
@NotNull
private List<PsiAnnotation> getAnnotations() {
    List<PsiAnnotation> result = PsiTreeUtil.getChildrenOfTypeAsList(this, PsiAnnotation.class);
    if (isQualified()) {
        return result;
    }
    PsiModifierList modifiers = PsiImplUtil.findNeighbourModifierList(this);
    if (modifiers != null) {
        result = new ArrayList<>(result);
        PsiImplUtil.collectTypeUseAnnotations(modifiers, result);
    }
    return result;
}
/**
 * Returns whether this reference is written in fully qualified form, i.e. its
 * qualifier chain bottoms out at a package rather than an unqualified class.
 */
private boolean isFullyQualified(@NotNull PsiFile containingFile) {
    Kind kind = getKindEnum(containingFile);
    switch (kind) {
        case CLASS_OR_PACKAGE_NAME_KIND:
            if (resolve() instanceof PsiPackage) return true;
            break;
        case CLASS_NAME_KIND:
        case CLASS_IN_QUALIFIED_NEW_KIND:
            break;
        case PACKAGE_NAME_KIND:
        case CLASS_FQ_NAME_KIND:
        case CLASS_FQ_OR_PACKAGE_NAME_KIND:
            // These kinds are qualified by definition.
            return true;
        default:
            LOG.error(kind);
            return true;
    }
    ASTNode qualifier = findChildByRole(ChildRole.QUALIFIER);
    if (qualifier == null) return false;
    IElementType qualifierElementType = qualifier.getElementType();
    LOG.assertTrue(qualifierElementType == JavaElementType.JAVA_CODE_REFERENCE, qualifierElementType);
    PsiElement refElement = SourceTreeToPsiMap.<PsiJavaCodeReferenceElement>treeToPsiNotNull(qualifier).resolve();
    if (refElement instanceof PsiPackage) return true;
    // Otherwise the qualifier is itself a class reference; recurse into it.
    return SourceTreeToPsiMap.<PsiJavaCodeReferenceElementImpl>treeToPsiNotNull(qualifier).isFullyQualified(containingFile);
}
/** Replaces this reference with one spelling out {@code aPackage}'s qualified name. */
private PsiElement bindToPackage(@NotNull PsiPackage aPackage) throws IncorrectOperationException {
    String qualifiedName = aPackage.getQualifiedName();
    if (qualifiedName.isEmpty()) {
        throw new IncorrectOperationException("Cannot bind to default package: " + aPackage);
    }
    PsiJavaParserFacade parser = JavaPsiFacade.getInstance(getProject()).getParserFacade();
    PsiJavaCodeReferenceElement newRef = parser.createReferenceFromText(qualifiedName, getParent());
    getTreeParent().replaceChildInternal(this, (TreeElement)newRef.getNode());
    return newRef;
}
@Override
public boolean isReferenceTo(@NotNull final PsiElement element) {
    // Delegates to the kind-aware overload with this element's own file as context.
    PsiFile containingFile = getContainingFile();
    return isReferenceTo(element, containingFile);
}
/**
 * Kind-aware check whether this reference points at {@code element}.
 * FQ/package kinds compare canonical texts; short-name kinds first compare the
 * identifier text and only then fall back to a full resolve.
 */
private boolean isReferenceTo(PsiElement element, @NotNull PsiFile containingFile) {
    Kind kind = getKindEnum(containingFile);
    switch (kind) {
        case CLASS_NAME_KIND:
        case CLASS_IN_QUALIFIED_NEW_KIND:
            if (!(element instanceof PsiClass)) return false;
            // Falls through to the shared name-then-resolve check below the switch.
            break;
        case CLASS_FQ_NAME_KIND: {
            if (!(element instanceof PsiClass)) return false;
            final String qName = ((PsiClass)element).getQualifiedName();
            return qName != null && qName.equals(getCanonicalText(false, null, containingFile));
        }
        case PACKAGE_NAME_KIND: {
            if (!(element instanceof PsiPackage)) return false;
            final String qName = ((PsiPackage)element).getQualifiedName();
            return qName.equals(getCanonicalText(false, null, containingFile));
        }
        case CLASS_OR_PACKAGE_NAME_KIND:
            if (element instanceof PsiPackage) {
                final String qName = ((PsiPackage)element).getQualifiedName();
                return qName.equals(getCanonicalText(false, null, containingFile));
            }
            if (element instanceof PsiClass) {
                // Cheap text match before the (expensive) resolve-based equivalence check.
                final PsiElement nameElement = getReferenceNameElement();
                if (nameElement == null) return false;
                final String name = ((PsiClass)element).getName();
                if (name == null) return false;
                return nameElement.textMatches(name) && containingFile.getManager().areElementsEquivalent(resolve(), element);
            }
            return false;
        case CLASS_FQ_OR_PACKAGE_NAME_KIND:
            if (element instanceof PsiClass) {
                final String qName = ((PsiClass)element).getQualifiedName();
                if (qName != null && qName.equals(getCanonicalText(false, null, containingFile))) {
                    // Default-package classes cannot be referenced from import statements.
                    return !PsiUtil.isFromDefaultPackage((PsiClass)element) ||
                           PsiTreeUtil.getParentOfType(this, PsiImportStatementBase.class) == null;
                }
            }
            if (element instanceof PsiPackage) {
                final String qName = ((PsiPackage)element).getQualifiedName();
                return qName.equals(getCanonicalText(false, null, containingFile));
            }
            return false;
        default:
            LOG.error(kind);
            return true;
    }
    // Shared tail for short class-name kinds: identifier text match, then resolve equivalence.
    final ASTNode referenceNameElement = getReferenceNameNode();
    if (referenceNameElement == null || referenceNameElement.getElementType() != JavaTokenType.IDENTIFIER) return false;
    final String name = ((PsiClass)element).getName();
    return name != null && referenceNameElement.getText().equals(name) && containingFile.getManager().areElementsEquivalent(resolve(), element);
}
/** Reference text stripped of whitespace and comments; cached until {@link #clearCaches()}. */
private String getNormalizedText() {
    String normalized = myCachedNormalizedText;
    if (normalized == null) {
        normalized = JavaSourceUtil.getReferenceText(this);
        myCachedNormalizedText = normalized;
    }
    return normalized;
}
/** Qualified-name interpretation of the reference text (type arguments dropped); cached. */
@Override
public String getClassNameText() {
    String qName = myCachedQName;
    if (qName == null) {
        qName = PsiNameHelper.getQualifiedClassName(getNormalizedText(), false);
        myCachedQName = qName;
    }
    return qName;
}
/** Rewrites this reference to the fully qualified name of {@code targetClass}; only valid for class-name kinds. */
@Override
public void fullyQualify(@NotNull final PsiClass targetClass) {
    Kind kind = getKindEnum(getContainingFile());
    switch (kind) {
        case CLASS_NAME_KIND:
        case CLASS_OR_PACKAGE_NAME_KIND:
        case CLASS_IN_QUALIFIED_NEW_KIND:
            JavaSourceUtil.fullyQualifyReference(this, targetClass);
            break;
        default:
            LOG.error("Wrong kind " + kind);
    }
}
@Override
public boolean isQualified() {
    // Qualified iff an explicit qualifier child exists.
    return getQualifier() != null;
}
@Override
public PsiElement getQualifier() {
    // The qualifier is the nested java-code-reference child, if any.
    return SourceTreeToPsiMap.treeElementToPsi(findChildByRole(ChildRole.QUALIFIER));
}
@Override
public void clearCaches() {
    super.clearCaches();
    // Drop lazily computed text caches; they are rebuilt on demand.
    myCachedQName = null;
    myCachedNormalizedText = null;
}
/** Completion variants filtered by what this reference's kind may legally name. */
@Override
@NotNull
public Object[] getVariants() {
    ElementFilter filter;
    switch (getKindEnum(getContainingFile())) {
        case CLASS_OR_PACKAGE_NAME_KIND:
            filter = new OrFilter(ElementClassFilter.CLASS, ElementClassFilter.PACKAGE);
            break;
        case CLASS_NAME_KIND:
        case CLASS_IN_QUALIFIED_NEW_KIND:
            filter = ElementClassFilter.CLASS;
            break;
        case PACKAGE_NAME_KIND:
            filter = ElementClassFilter.PACKAGE;
            break;
        case CLASS_FQ_NAME_KIND:
        case CLASS_FQ_OR_PACKAGE_NAME_KIND:
            // An unqualified FQ reference can only start with a package name.
            filter = isQualified() ? new OrFilter(ElementClassFilter.CLASS, ElementClassFilter.PACKAGE) : ElementClassFilter.PACKAGE;
            break;
        default:
            throw new RuntimeException("Unknown reference type");
    }
    return PsiImplUtil.getReferenceVariantsByFilter(this, filter);
}
@Override
public boolean isSoft() {
    // Unresolved java code references are always reported as errors.
    return false;
}
/**
 * Feeds completion variants appropriate for this reference's kind to
 * {@code processor}; in plain code positions, non-constructor methods and
 * variables are offered as well.
 */
@Override
public void processVariants(@NotNull PsiScopeProcessor processor) {
    List<ElementFilter> filters = new ArrayList<>();
    if (isInCode() && !(getParent() instanceof PsiImportStatement) && !(getParent() instanceof PsiReferenceList)) {
        filters.add(new AndFilter(ElementClassFilter.METHOD, new NotFilter(new ConstructorFilter())));
        filters.add(ElementClassFilter.VARIABLE);
    }
    switch (getKindEnum(getContainingFile())) {
        case CLASS_OR_PACKAGE_NAME_KIND:
            filters.add(ElementClassFilter.CLASS);
            filters.add(ElementClassFilter.PACKAGE);
            break;
        case CLASS_NAME_KIND:
            filters.add(ElementClassFilter.CLASS);
            // Packages are valid after a qualifier and inside module declarations.
            if (isQualified() || PsiTreeUtil.getParentOfType(this, PsiJavaModule.class) != null) {
                filters.add(ElementClassFilter.PACKAGE);
            }
            break;
        case PACKAGE_NAME_KIND:
            filters.add(ElementClassFilter.PACKAGE);
            break;
        case CLASS_FQ_NAME_KIND:
        case CLASS_FQ_OR_PACKAGE_NAME_KIND:
            filters.add(ElementClassFilter.PACKAGE);
            if (isQualified()) {
                filters.add(ElementClassFilter.CLASS);
            }
            break;
        case CLASS_IN_QUALIFIED_NEW_KIND:
            // Only non-static inner classes of the qualifier's type are valid here;
            // process them directly and skip the generic scope walk below.
            PsiElement parent = getParent();
            if (parent instanceof PsiNewExpression) {
                PsiExpression qualifier = ((PsiNewExpression)parent).getQualifier();
                assert qualifier != null : parent;
                PsiType type = qualifier.getType();
                PsiClass aClass = PsiUtil.resolveClassInType(type);
                if (aClass != null) {
                    AndFilter filter = new AndFilter(ElementClassFilter.CLASS, new ModifierFilter(PsiModifier.STATIC, false));
                    aClass.processDeclarations(new FilterScopeProcessor(filter, processor), ResolveState.initial(), null, this);
                }
            }
            return;
        default:
            throw new RuntimeException("Unknown reference type");
    }
    OrFilter filter = new OrFilter(filters.toArray(ElementFilter.EMPTY_ARRAY));
    FilterScopeProcessor proc = new FilterScopeProcessor(filter, processor);
    // Offer type parameters of a declaration the user is still typing.
    for (PsiTypeParameter typeParameter : getUnfinishedMethodTypeParameters()) {
        if (!proc.execute(typeParameter, ResolveState.initial())) {
            return;
        }
    }
    PsiScopesUtil.resolveAndWalk(proc, this, null, true);
}
/**
 * Handles the in-progress-typing case where a leading {@code <T>} of an
 * unfinished generic member parses as an error element: when this reference
 * sits in the type right after such a list, those type parameters are returned
 * so completion can still offer them.
 */
private PsiTypeParameter[] getUnfinishedMethodTypeParameters() {
    ProcessingContext context = new ProcessingContext();
    // Pattern: this element is inside a type element that directly follows the '>'
    // of a type-parameter list whose parent is an error element.
    if (psiElement().inside(
            psiElement(PsiTypeElement.class).afterLeaf(
                psiElement().withText(">").withParent(
                    psiElement(PsiTypeParameterList.class).withParent(PsiErrorElement.class).save("typeParameterList")))).accepts(this, context)) {
        PsiTypeParameterList list = (PsiTypeParameterList)context.get("typeParameterList");
        PsiElement current = list.getParent().getParent();
        if (current instanceof PsiField) {
            current = current.getParent();
        }
        // Only meaningful when the broken declaration lives directly in a class body.
        if (current instanceof PsiClass) {
            return list.getTypeParameters();
        }
    }
    return PsiTypeParameter.EMPTY_ARRAY;
}
/**
 * Whether this reference sits in executable code (as opposed to annotations,
 * code fragments, or declaration positions).
 */
private boolean isInCode() {
    if (isCodeFragmentType(getTreeParent().getElementType()) || getParent() instanceof PsiAnnotation) {
        return false;
    }
    // Walk up until an ancestor decides: code blocks / locals mean code,
    // classes / catch sections mean declaration position.
    for (PsiElement ancestor = getParent(); ancestor != null; ancestor = ancestor.getParent()) {
        if (ancestor instanceof PsiCodeBlock || ancestor instanceof PsiLocalVariable) {
            return true;
        }
        if (ancestor instanceof PsiClass || ancestor instanceof PsiCatchSection) {
            return false;
        }
    }
    return false;
}
@Override
public PsiElement getReferenceNameElement() {
    // PSI view of the identifier node, or null for an incomplete reference.
    return SourceTreeToPsiMap.treeElementToPsi(getReferenceNameNode());
}
// AST node of the reference's identifier, or null for an incomplete reference.
@Nullable
private ASTNode getReferenceNameNode() {
    return findChildByRole(ChildRole.REFERENCE_NAME);
}
@Override
public PsiReferenceParameterList getParameterList() {
    // The type-argument list child, or null when this reference carries none.
    return (PsiReferenceParameterList)findChildByRoleAsPsiElement(ChildRole.REFERENCE_PARAMETER_LIST);
}
/**
 * Returns the qualified name of the referenced class or package, falling back
 * to the raw reference text when the target does not resolve.
 */
@Override
public String getQualifiedName() {
    Kind kind = getKindEnum(getContainingFile());
    switch (kind) {
        case CLASS_NAME_KIND:
        case CLASS_OR_PACKAGE_NAME_KIND:
        case CLASS_IN_QUALIFIED_NEW_KIND:
            final PsiElement target = resolve();
            if (target instanceof PsiClass) {
                final PsiClass aClass = (PsiClass)target;
                String name = aClass.getQualifiedName();
                if (name == null) {
                    // Local/anonymous class: only a short name is available.
                    name = aClass.getName(); //?
                }
                return name;
            }
            else if (target instanceof PsiPackage) {
                return ((PsiPackage)target).getQualifiedName();
            }
            else {
                LOG.assertTrue(target == null, target);
                return getClassNameText();
            }
        case PACKAGE_NAME_KIND:
        case CLASS_FQ_NAME_KIND:
        case CLASS_FQ_OR_PACKAGE_NAME_KIND:
            return getNormalizedText(); // there cannot be any <...>
        default:
            LOG.error(kind);
            return null;
    }
}
/** Text of the reference's identifier, or null for an incomplete reference. */
@Override
public String getReferenceName() {
    ASTNode nameNode = getReferenceNameNode();
    return nameNode != null ? nameNode.getText() : null;
}
@NotNull
@Override
public final TextRange getRangeInElement() {
    // Range of the reference name within this element.
    return calcRangeInElement(this);
}
/** Type arguments of this reference; empty when no parameter list is present. */
@Override
@NotNull
public PsiType[] getTypeParameters() {
    PsiReferenceParameterList list = getParameterList();
    return list != null ? list.getTypeArguments() : PsiType.EMPTY_ARRAY;
}
@NotNull
@Override
public final PsiElement getElement() {
    // The reference and its underlying element are one and the same.
    return this;
}
/** Dispatches to the Java-specific visit method when the visitor supports it. */
@Override
public final void accept(@NotNull final PsiElementVisitor visitor) {
    if (!(visitor instanceof JavaElementVisitor)) {
        visitor.visitElement(this);
        return;
    }
    ((JavaElementVisitor)visitor).visitReferenceElement(this);
}
@Override
public final String toString() {
    // Debug representation: element class tag plus the reference's source text.
    return "PsiJavaCodeReferenceElement:" + getText();
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oozie.command.coord;
import java.util.Date;
import java.util.List;
import org.apache.oozie.CoordinatorActionBean;
import org.apache.oozie.CoordinatorJobBean;
import org.apache.oozie.ErrorCode;
import org.apache.oozie.XException;
import org.apache.oozie.client.CoordinatorJob;
import org.apache.oozie.client.Job;
import org.apache.oozie.command.CommandException;
import org.apache.oozie.command.PreconditionException;
import org.apache.oozie.command.SuspendTransitionXCommand;
import org.apache.oozie.command.bundle.BundleStatusUpdateXCommand;
import org.apache.oozie.command.wf.SuspendXCommand;
import org.apache.oozie.executor.jpa.BatchQueryExecutor;
import org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry;
import org.apache.oozie.executor.jpa.CoordActionQueryExecutor.CoordActionQuery;
import org.apache.oozie.executor.jpa.CoordJobGetActionsRunningJPAExecutor;
import org.apache.oozie.executor.jpa.CoordJobQueryExecutor;
import org.apache.oozie.executor.jpa.CoordJobQueryExecutor.CoordJobQuery;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.service.JPAService;
import org.apache.oozie.service.Services;
import org.apache.oozie.util.InstrumentUtils;
import org.apache.oozie.util.LogUtils;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.StatusUtils;
/**
* Suspend coordinator job and actions.
*
*/
public class CoordSuspendXCommand extends SuspendTransitionXCommand {
/** Id of the coordinator job being suspended. */
private final String jobId;
// Loaded in loadState(); the job row this command mutates.
private CoordinatorJobBean coordJob;
private JPAService jpaService;
// Set when suspendChildren() fails with an XException.
// NOTE(review): name is misspelled ("occurred"); private, so safe to rename later.
private boolean exceptionOccured = false;
// Status before suspension; reported to the owning bundle in notifyParent().
private CoordinatorJob.Status prevStatus = null;
/**
 * Creates a suspend command for the given coordinator job.
 *
 * @param id coordinator job id; must be non-empty
 */
public CoordSuspendXCommand(String id) {
    super("coord_suspend", "coord_suspend", 1);
    this.jobId = ParamChecker.notEmpty(id, "id");
}
@Override
public String getEntityKey() {
    // Locking entity is the coordinator job itself.
    return jobId;
}
@Override
public String getKey() {
    // Unique command key: command name plus the job id.
    return getName() + "_" + jobId;
}
@Override
protected boolean isLockRequired() {
    // Suspension mutates job and action state, so the entity lock is required.
    return true;
}
/**
 * Loads the coordinator job bean and remembers its current status.
 *
 * @throws CommandException E0610 when the JPA service is unavailable,
 *         E0603 when the job cannot be read from the store
 */
@Override
protected void loadState() throws CommandException {
    super.eagerLoadState();
    jpaService = Services.get().get(JPAService.class);
    // Check the service first so a missing JPA service surfaces as E0610 instead
    // of being re-wrapped as E0603 by the catch below (the original code threw
    // E0610 inside the try and then swallowed it into a generic E0603).
    if (jpaService == null) {
        throw new CommandException(ErrorCode.E0610);
    }
    try {
        this.coordJob = CoordJobQueryExecutor.getInstance()
                .get(CoordJobQuery.GET_COORD_JOB_SUSPEND_KILL, this.jobId);
        prevStatus = coordJob.getStatus();
    }
    catch (Exception ex) {
        throw new CommandException(ErrorCode.E0603, ex.getMessage(), ex);
    }
    LogUtils.setLogInfo(this.coordJob);
}
/**
 * Rejects the command when the job is already in a terminal state
 * (SUCCEEDED, FAILED, KILLED or IGNORED).
 */
@Override
protected void verifyPrecondition() throws CommandException, PreconditionException {
    super.eagerVerifyPrecondition();
    if (coordJob.getStatus() == CoordinatorJob.Status.SUCCEEDED
            || coordJob.getStatus() == CoordinatorJob.Status.FAILED
            || coordJob.getStatus() == CoordinatorJob.Status.KILLED
            || coordJob.getStatus() == CoordinatorJob.Status.IGNORED) {
        LOG.info("CoordSuspendXCommand is not going to execute because "
                + "job finished or failed or killed, id = " + jobId + ", status = " + coordJob.getStatus());
        throw new PreconditionException(ErrorCode.E0728, jobId, coordJob.getStatus().toString());
    }
}
/**
 * Suspends all RUNNING actions of this coordinator, queueing a workflow
 * {@link SuspendXCommand} for every action that already has a launched workflow.
 * On failure the coordinator job itself is marked FAILED.
 */
@Override
public void suspendChildren() throws CommandException {
    try {
        //Get all running actions of a job to suspend them
        List<CoordinatorActionBean> actionList = jpaService
                .execute(new CoordJobGetActionsRunningJPAExecutor(jobId));
        for (CoordinatorActionBean action : actionList) {
            // queue a SuspendXCommand
            if (action.getExternalId() != null) {
                queue(new SuspendXCommand(action.getExternalId()));
                updateCoordAction(action);
                LOG.debug(
                        "Suspend coord action = [{0}], new status = [{1}], pending = [{2}] and queue SuspendXCommand for [{3}]",
                        action.getId(), action.getStatus(), action.getPending(), action.getExternalId());
            }
            else {
                // No workflow launched yet: only the action row needs updating.
                updateCoordAction(action);
                LOG.debug(
                        "Suspend coord action = [{0}], new status = [{1}], pending = [{2}] and external id is null",
                        action.getId(), action.getStatus(), action.getPending());
            }
        }
        LOG.debug("Suspended coordinator actions for the coordinator=[{0}]", jobId);
    }
    catch (XException ex) {
        exceptionOccured = true;
        throw new CommandException(ex);
    }
    finally {
        // NOTE(review): only XException sets the flag, so an unexpected runtime
        // failure would not mark the job FAILED here — confirm this is intended.
        if (exceptionOccured) {
            coordJob.setStatus(CoordinatorJob.Status.FAILED);
            coordJob.resetPending();
            LOG.debug("Exception happened, fail coordinator job id = " + jobId + ", status = "
                    + coordJob.getStatus());
            updateList.add(new UpdateEntry<CoordJobQuery>(CoordJobQuery.UPDATE_COORD_JOB_STATUS_PENDING_TIME, coordJob));
        }
    }
}
/** Propagates the status change to the owning bundle, when this job belongs to one. */
@Override
public void notifyParent() throws CommandException {
    if (this.coordJob.getBundleId() == null) {
        return;
    }
    // update bundle action
    BundleStatusUpdateXCommand bundleStatusUpdate = new BundleStatusUpdateXCommand(coordJob, prevStatus);
    bundleStatusUpdate.call();
}
@Override
public void updateJob() {
    InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
    // Use a single timestamp so lastModifiedTime and suspendedTime are identical;
    // the original created two Date instances that could differ by milliseconds.
    final Date now = new Date();
    coordJob.setLastModifiedTime(now);
    coordJob.setSuspendedTime(now);
    LOG.debug("Suspend coordinator job id = " + jobId + ", status = " + coordJob.getStatus() + ", pending = "
            + coordJob.isPending());
    // Queue the coordinator job row update; flushed later by performWrites().
    updateList.add(new UpdateEntry<CoordJobQuery>(CoordJobQuery.UPDATE_COORD_JOB_STATUS_PENDING_TIME, coordJob));
}
@Override
public void performWrites() throws CommandException {
    try {
        // Flush all queued job/action updates accumulated in updateList in one batch.
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, null);
    }
    catch (JPAExecutorException jex) {
        // Wrap persistence failures in the command-layer exception type.
        throw new CommandException(jex);
    }
}
/**
 * Marks a single coordinator action SUSPENDED, increments its pending counter,
 * refreshes its last-modified time, and queues the row update for the batch write.
 */
private void updateCoordAction(CoordinatorActionBean action) {
    action.setStatus(CoordinatorActionBean.Status.SUSPENDED);
    action.incrementAndGetPending();
    action.setLastModifiedTime(new Date());
    updateList.add(new UpdateEntry<CoordActionQuery>(CoordActionQuery.UPDATE_COORD_ACTION_STATUS_PENDING_TIME, action));
}
@Override
public Job getJob() {
    // Expose the coordinator job bean through the generic Job interface used by the framework.
    return coordJob;
}
/**
 * Transit job to suspended from running or to prepsuspended from prep.
 *
 * @see org.apache.oozie.command.TransitionXCommand#transitToNext()
 */
@Override
public void transitToNext() {
    if (coordJob == null) {
        coordJob = (CoordinatorJobBean) this.getJob();
    }
    switch (coordJob.getStatus()) {
        case PREP:
            // Set PREPSUSPENDED first, then let StatusUtils map it to a
            // possibly backward-compatible status value.
            coordJob.setStatus(Job.Status.PREPSUSPENDED);
            coordJob.setStatus(StatusUtils.getStatus(coordJob));
            break;
        case RUNNING:
        case PAUSED:
            coordJob.setStatus(Job.Status.SUSPENDED);
            break;
        case RUNNINGWITHERROR:
        case PAUSEDWITHERROR:
            coordJob.setStatus(Job.Status.SUSPENDEDWITHERROR);
            break;
        case PREPPAUSED:
            coordJob.setStatus(Job.Status.PREPSUSPENDED);
            break;
        default:
            // any other status is left unchanged, matching the original if/else chain
            break;
    }
    coordJob.setPending();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.rya.indexing.pcj.fluo.app.query;
import static com.google.common.base.Preconditions.checkNotNull;
import edu.umd.cs.findbugs.annotations.Nullable;
import edu.umd.cs.findbugs.annotations.DefaultAnnotation;
import edu.umd.cs.findbugs.annotations.NonNull;
import net.jcip.annotations.Immutable;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.rya.indexing.pcj.storage.accumulo.VariableOrder;
import com.google.common.base.Objects;
/**
 * Metadata that is specific to Join nodes.
 */
@Immutable
@DefaultAnnotation(NonNull.class)
public class JoinMetadata extends CommonNodeMetadata {

    /**
     * The different types of Join algorithms that this join may perform.
     */
    public enum JoinType {
        NATURAL_JOIN,
        LEFT_OUTER_JOIN;
    }

    private final JoinType joinType;
    private final String parentNodeId;
    private final String leftChildNodeId;
    private final String rightChildNodeId;

    /**
     * Constructs an instance of {@link JoinMetadata}.
     *
     * @param nodeId - The ID the Fluo app uses to reference this node. (not null)
     * @param varOrder - The variable order of binding sets that are emitted by this node. (not null)
     * @param joinType - Defines which join algorithm the join will use. (not null)
     * @param parentNodeId - The node id of this node's parent. (not null)
     * @param leftChildNodeId - One of the nodes whose results are being joined. (not null)
     * @param rightChildNodeId - The other node whose results are being joined. (not null)
     */
    public JoinMetadata(
            final String nodeId,
            final VariableOrder varOrder,
            final JoinType joinType,
            final String parentNodeId,
            final String leftChildNodeId,
            final String rightChildNodeId) {
        super(nodeId, varOrder);
        this.joinType = checkNotNull(joinType);
        this.parentNodeId = checkNotNull(parentNodeId);
        this.leftChildNodeId = checkNotNull(leftChildNodeId);
        this.rightChildNodeId = checkNotNull(rightChildNodeId);
    }

    /**
     * @return Defines which join algorithm the join will use.
     */
    public JoinType getJoinType() {
        return joinType;
    }

    /**
     * @return The node id of this node's parent.
     */
    public String getParentNodeId() {
        return parentNodeId;
    }

    /**
     * @return One of the nodes whose results are being joined.
     */
    public String getLeftChildNodeId() {
        return leftChildNodeId;
    }

    /**
     * @return The other node whose results are being joined.
     */
    public String getRightChildNodeId() {
        return rightChildNodeId;
    }

    @Override
    public int hashCode() {
        // Same field order as the equals comparison below.
        return Objects.hashCode(
                super.getNodeId(),
                super.getVariableOrder(),
                joinType,
                parentNodeId,
                leftChildNodeId,
                rightChildNodeId);
    }

    @Override
    public boolean equals(final Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof JoinMetadata)) {
            return false;
        }
        // Node ID and variable order are compared by the parent class.
        if (!super.equals(o)) {
            return false;
        }
        final JoinMetadata other = (JoinMetadata) o;
        return new EqualsBuilder()
                .append(joinType, other.joinType)
                .append(parentNodeId, other.parentNodeId)
                .append(leftChildNodeId, other.leftChildNodeId)
                .append(rightChildNodeId, other.rightChildNodeId)
                .isEquals();
    }

    @Override
    public String toString() {
        return "Join Metadata {\n"
                + " Node ID: " + super.getNodeId() + "\n"
                + " Variable Order: " + super.getVariableOrder() + "\n"
                + " Join Type: " + joinType + "\n"
                + " Parent Node ID: " + parentNodeId + "\n"
                + " Left Child Node ID: " + leftChildNodeId + "\n"
                + " Right Child Node ID: " + rightChildNodeId + "\n"
                + "}";
    }

    /**
     * Creates a new {@link Builder} for this class.
     *
     * @param nodeId - The ID the Fluo app uses to reference this node. (not null)
     * @return A new {@link Builder} for this class.
     */
    public static Builder builder(final String nodeId) {
        return new Builder(nodeId);
    }

    /**
     * Builds instances of {@link JoinMetadata}.
     */
    @DefaultAnnotation(NonNull.class)
    public static final class Builder {

        private final String nodeId;
        private VariableOrder varOrder;
        private JoinType joinType;
        private String parentNodeId;
        private String leftChildNodeId;
        private String rightChildNodeId;

        /**
         * Constructs an instance of {@link Builder}.
         *
         * @param nodeId - The node ID associated with the Join node this builder makes. (not null)
         */
        public Builder(final String nodeId) {
            this.nodeId = checkNotNull(nodeId);
        }

        /**
         * @return The node ID associated with the Join node this builder makes.
         */
        public String getNodeId() {
            return nodeId;
        }

        /**
         * Sets the variable order of the binding sets that are emitted by this node.
         *
         * @param varOrder - The variable order of the binding sets that are emitted by this node.
         * @return This builder so that method invocation could be chained.
         */
        public Builder setVariableOrder(@Nullable final VariableOrder varOrder) {
            this.varOrder = varOrder;
            return this;
        }

        /**
         * Sets the type of join algorithm that will be used by this join.
         *
         * @param joinType - Defines which join algorithm the join will use.
         * @return This builder so that method invocation could be chained.
         */
        public Builder setJoinType(@Nullable final JoinType joinType) {
            this.joinType = joinType;
            return this;
        }

        /**
         * Sets the node id of this node's parent.
         *
         * @param parentNodeId - The node id of this node's parent.
         * @return This builder so that method invocation could be chained.
         */
        public Builder setParentNodeId(@Nullable final String parentNodeId) {
            this.parentNodeId = parentNodeId;
            return this;
        }

        /**
         * Set one of the nodes whose results are being joined.
         *
         * @param leftChildNodeId - One of the nodes whose results are being joined.
         * @return This builder so that method invocation could be chained.
         */
        public Builder setLeftChildNodeId(@Nullable final String leftChildNodeId) {
            this.leftChildNodeId = leftChildNodeId;
            return this;
        }

        /**
         * Set the other node whose results are being joined.
         *
         * @param rightChildNodeId - The other node whose results are being joined.
         * @return This builder so that method invocation could be chained.
         */
        public Builder setRightChildNodeId(@Nullable final String rightChildNodeId) {
            this.rightChildNodeId = rightChildNodeId;
            return this;
        }

        /**
         * @return An instance of {@link JoinMetadata} built using this builder's values.
         *         The constructor rejects any value that is still null.
         */
        public JoinMetadata build() {
            return new JoinMetadata(
                    nodeId,
                    varOrder,
                    joinType,
                    parentNodeId,
                    leftChildNodeId,
                    rightChildNodeId);
        }
    }
}
| |
/***************************************************************************
* Copyright 2014 greenbird Integration Technology, http://www.greenbird.com/
*
* This file is part of the 'xml-formatter' project available at
* http://greenbird.github.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.greenbird.xml.prettyprinter.plain;
import java.io.EOFException;
import java.io.IOException;
import java.io.Reader;
import com.greenbird.xml.prettyprinter.PrettyPrinter;
/**
 * Base class for {@link PrettyPrinter} implementations.
 * <p>
 * Holds shared configuration (XML declaration flag, max text/CDATA node lengths,
 * indentation character and multiplier), caches of precomputed indentation char
 * arrays, and low-level scanning helpers that advance an offset past XML
 * constructs (elements, comments, CDATA sections, DTDs, processing instructions)
 * inside a char array. The scan helpers throw {@link ArrayIndexOutOfBoundsException}
 * when the expected terminator is not found before the limit.
 */
public abstract class AbstractPrettyPrinter implements PrettyPrinter {

    /** Indentation-depth change produced by a parse step; public for testing. */
    public enum Type {
        /** public for testing */
        INCREMENT(), DECREMENT(), NEITHER();
        private Type() {
        }
    }

    /** enum for flows where characters at end element are not handled in the same way for cdata, comments and text nodes */
    public enum CharactersType {
        /** public for testing */
        NONE(), CDATA(), COMMENT();
        private CharactersType() {
        }
    }

    // Shared, precomputed indentation arrays: index = depth, entry = '\n' followed by the indent chars.
    protected static final char[][] tabIndentations;
    // spaceIndentations[m - 1] holds the arrays for an indent of m spaces per level (m = 1..4).
    protected static final char[][][] spaceIndentations;

    public static final char defaultIndentationCharacter = '\t';
    public static final int defaultIndentationMultiplier = 1;
    public static final int defaultIndentationDepth = 64;

    static {
        tabIndentations = getIndentations(defaultIndentationDepth, defaultIndentationCharacter, defaultIndentationMultiplier);

        spaceIndentations = new char[4][][];
        for(int i = 0; i < spaceIndentations.length; i++) {
            spaceIndentations[i] = getIndentations(defaultIndentationDepth, ' ', i + 1);
        }

        // reduce number of objects somewhat: arrays of identical length are
        // shared between the 1..4 space multipliers, keyed by array length
        char[][] commonSpaceIndentations = new char[defaultIndentationDepth * spaceIndentations.length + 1][];
        for(int i = 0; i < spaceIndentations.length; i++) {
            for(int k = 0; k < spaceIndentations[i].length; k++) {
                if(commonSpaceIndentations[spaceIndentations[i][k].length] != null) {
                    // sanity check: the cached array must have the expected length
                    if(spaceIndentations[i][k].length != commonSpaceIndentations[spaceIndentations[i][k].length].length) {
                        throw new IllegalArgumentException();
                    }
                    spaceIndentations[i][k] = commonSpaceIndentations[spaceIndentations[i][k].length];
                } else {
                    commonSpaceIndentations[spaceIndentations[i][k].length] = spaceIndentations[i][k];
                }
            }
        }
    }

    /**
     * Build indentation arrays: entry i is '\n' followed by (i * multiplier)
     * repetitions of the indentation character.
     *
     * @param size number of depth levels to precompute
     * @param character indentation character
     * @param multiplier number of characters per depth level
     * @return array of indentation character arrays, indexed by depth
     */
    private static char[][] getIndentations(int size, char character, int multiplier) {
        final char[][] indentations = new char[size][];
        for(int i = 0; i < indentations.length; i++) {
            indentations[i] = new char[(i * multiplier) + 1];
            indentations[i][0] = '\n';
            for(int k = 1; k < indentations[i].length; k++) {
                indentations[i][k] = character;
            }
        }
        return indentations;
    }

    // true if the XML declaration is to be kept (see isXmlDeclaration())
    protected final boolean declaration;
    protected final int maxTextNodeLength; // not always in use, if so set to max int
    protected final int maxCDATANodeLength; // not always in use, if so set to max int

    protected final char indentationCharacter;
    protected final int indentationMultiplier;

    // volatile: grown on demand by ensureCharCapacity() and read without locking
    protected volatile char[][] indentations;

    public AbstractPrettyPrinter(boolean declaration) {
        this(declaration, Integer.MAX_VALUE, Integer.MAX_VALUE, defaultIndentationCharacter, defaultIndentationMultiplier);
    }

    public AbstractPrettyPrinter(boolean declaration, char indentationCharacter, int indentationMultiplier) {
        this(declaration, Integer.MAX_VALUE, Integer.MAX_VALUE, indentationCharacter, indentationMultiplier);
    }

    public AbstractPrettyPrinter(boolean declaration, int maxTextNodeLength, int maxCDATANodeLength, char indentationCharacter, int indentationMultiplier) {
        this(indentationCharacter, indentationMultiplier, declaration, maxTextNodeLength, maxCDATANodeLength);
    }

    /**
     * Main constructor.
     *
     * @param indentationCharacter character used for indentation
     * @param indentationMultiplier repetitions of the character per depth level
     * @param declaration whether to keep the XML declaration
     * @param maxTextNodeLength maximum text node length, or -1 for unlimited
     * @param maxCDATANodeLength maximum CDATA node length, or -1 for unlimited
     * @throws IllegalArgumentException if a length is below -1, or if both lengths are -1
     */
    public AbstractPrettyPrinter(char indentationCharacter, int indentationMultiplier, boolean declaration, int maxTextNodeLength, int maxCDATANodeLength) {
        this.indentationCharacter = indentationCharacter;
        this.indentationMultiplier = indentationMultiplier;

        if(indentationCharacter == '\t' && indentationMultiplier == 1) {
            // defaults: reuse the shared tab arrays
            this.indentations = tabIndentations;
        } else if(indentationCharacter == ' ' && indentationMultiplier <= 4 && indentationMultiplier >= 1) {
            // reuse the shared space arrays for 1..4 spaces per level
            this.indentations = spaceIndentations[indentationMultiplier - 1];
        } else {
            this.indentations = getIndentations(defaultIndentationDepth, indentationCharacter, indentationMultiplier);
        }

        this.declaration = declaration;
        if(maxTextNodeLength < -1) {
            throw new IllegalArgumentException();
        }
        if(maxCDATANodeLength < -1) {
            throw new IllegalArgumentException();
        }
        // NOTE(review): passing -1 for BOTH lengths is rejected here, while the
        // shorter constructors pass Integer.MAX_VALUE directly and so bypass this
        // check — presumably callers wanting no limits should use those; confirm.
        if(maxCDATANodeLength == -1 && maxTextNodeLength == -1) {
            throw new IllegalArgumentException("No max node length");
        }
        // -1 means unlimited and is stored internally as Integer.MAX_VALUE
        if(maxTextNodeLength == -1) {
            this.maxTextNodeLength = Integer.MAX_VALUE;
        } else {
            this.maxTextNodeLength = maxTextNodeLength;
        }
        if(maxCDATANodeLength == -1) {
            this.maxCDATANodeLength = Integer.MAX_VALUE;
        } else {
            this.maxCDATANodeLength = maxCDATANodeLength;
        }
    }

    /**
     * Return array of indentations, growing the cached array if needed.
     * The grown array is published back to the volatile field; concurrent callers
     * may race but each produces an equivalent array, so the result is safe.
     *
     * @param size depth of XML tree
     * @return array of indentation character arrays
     */
    public char[][] ensureCharCapacity(int size) {
        final char[][] currentIdentactions = indentations;
        if(size < currentIdentactions.length) {
            return currentIdentactions;
        }

        final char[][] nextIndentations = new char[size][];
        // reuse existing values
        System.arraycopy(currentIdentactions, 0, nextIndentations, 0, currentIdentactions.length);

        // build the missing deeper levels: '\n' + (i * multiplier) indent chars
        for(int i = currentIdentactions.length; i < nextIndentations.length; i++) {
            nextIndentations[i] = new char[(i * indentationMultiplier) + 1];
            nextIndentations[i][0] = '\n';
            for(int k = 1; k < nextIndentations[i].length; k++) {
                nextIndentations[i][k] = indentationCharacter;
            }
        }

        // save in field
        this.indentations = nextIndentations;
        return nextIndentations;
    }

    /** @return true if the XML declaration is kept */
    public boolean isXmlDeclaration() {
        return declaration;
    }

    /** @return the configured max CDATA node length, or -1 if unlimited */
    public int getMaxCDATANodeLength() {
        if(maxCDATANodeLength == Integer.MAX_VALUE) {
            return -1;
        }
        return maxCDATANodeLength;
    }

    /** @return the configured max text node length, or -1 if unlimited */
    public int getMaxTextNodeLength() {
        if(maxTextNodeLength == Integer.MAX_VALUE) {
            return -1;
        }
        return maxTextNodeLength;
    }

    /**
     * Pretty-print an XML string into the output buffer.
     *
     * @param xmlString XML input
     * @param output destination buffer
     * @return true if processing succeeded (delegated to the char[] overload)
     */
    public boolean process(String xmlString, StringBuilder output) {
        char[] chars = xmlString.toCharArray();

        return process(chars, 0, chars.length, output);
    }

    /**
     * Pretty-print exactly {@code length} characters read from a mark-supporting
     * reader. The reader is reset to its starting position before returning.
     *
     * @param reader source reader; must support mark/reset
     * @param length number of characters to read
     * @param output destination buffer
     * @return true if processing succeeded
     * @throws IOException if reading fails, including EOF before {@code length} characters
     * @throws IllegalArgumentException if the reader does not support mark/reset
     */
    public boolean process(Reader reader, int length, StringBuilder output) throws IOException {
        if(!reader.markSupported()) {
            throw new IllegalArgumentException("Reader of class " + reader.getClass().getName() + " does not support mark/reset");
        }
        reader.mark(length);

        char[] chars = new char[length];

        // loop until the requested number of characters has been read
        int offset = 0;
        int read;
        do {
            read = reader.read(chars, offset, length - offset);
            if(read == -1) {
                throw new EOFException("Expected reader with " + length + " characters");
            }

            offset += read;
        } while(offset < length);

        boolean success = process(chars, 0, chars.length, output);

        // restore the reader so the caller can re-read the same content
        reader.reset();

        return success;
    }

    /** @return true if the character is whitespace that indentation handling may skip */
    protected boolean isIndentationWhitespace(char c) {
        return c == ' ' || c == '\t' || c == '\n' || c == '\r';
    }

    /**
     * Scan from end element start to end element end, plus one.
     *
     * @param chars XML data
     * @param offset start offset within XML data
     * @param limit end offset within XML data
     *
     * @return offset one character past the end element
     */
    protected static int scanBeyondEndElement(final char[] chars, int offset, int limit) {
        // scan to end of end tag + 1
        offset += 3; // an end element must be at least 4 chars. </x>

        return scanElementEnd(chars, offset, limit) + 1;
    }

    /**
     * Scan to the next '>'.
     *
     * @throws ArrayIndexOutOfBoundsException if no '>' is found before the limit
     */
    protected static int scanElementEnd(final char[] chars, int offset, int limit) {
        while(offset < limit) {
            if(chars[offset] == '>') {
                return offset;
            }
            offset++;
        }
        throw new ArrayIndexOutOfBoundsException("Unable to find end of end element");
    }

    /**
     *
     * Scan from start element start to start element end, plus one.
     *
     * @param chars XML data
     * @param offset start offset within XML data
     * @param limit end offset within XML data
     *
     * @return offset one character past the start tag
     */
    protected static int scanBeyondStartElement(final char[] chars, int offset, int limit) {
        // scan to end of start tag + 1
        offset += 2; // a start element must be at least 3 chars. <x>

        return scanStartElementEnd(chars, offset, limit) + 1;
    }

    /**
     *
     * Scan start element end, plus one.
     *
     * @param chars XML data
     * @param offset start offset within XML data
     * @param limit end offset within XML data
     *
     * @return offset one character past the start tag
     */
    protected static int scanBeyondStartElementEnd(final char[] chars, int offset, int limit) {
        return scanStartElementEnd(chars, offset, limit) + 1;
    }

    /**
     * Scan to the '>' terminating a start element.
     *
     * @throws ArrayIndexOutOfBoundsException if no '>' is found before the limit
     */
    protected static int scanStartElementEnd(final char[] chars, int offset, int limit) {
        while(offset < limit) {
            if(chars[offset] == '>') {
                return offset;
            }
            offset++;
        }
        throw new ArrayIndexOutOfBoundsException("Unable to find end of start element");
    }

    /** Scan one past the end of a DTD (see {@link #scanDTDEnd}). */
    protected static int scanBeyondDTDEnd(final char[] chars, int offset, int limit) {
        return scanDTDEnd(chars, offset, limit) + 1;
    }

    /**
     * Scan to the '>' closing a DTD, tracking '<'/'>' nesting.
     * Note: this loop is bounded by the array length only, not by {@code limit}.
     */
    protected static int scanDTDEnd(final char[] chars, int offset, int limit) {
        // assume DTD are nested structures
        // simplified scan loop
        int level = 1;

        do {
            offset++;
            if(chars[offset] == '<') {
                level++;
            } else if(chars[offset] == '>') {
                level--;
            }
        } while(level > 0);

        return offset;
    }

    /**
     * Scan from processing instruction start to processing instruction end, plus one.
     *
     * @param chars XML data
     * @param offset start offset within XML data
     * @param limit end offset within XML data
     *
     * @return offset one character past the processing instruction end
     */
    protected static int scanBeyondProcessingInstruction(final char[] chars, int offset, int limit) {
        offset += 3; // a processing instruction must be at least 4 chars. <? >

        return scanProcessingInstructionEnd(chars, offset, limit) + 1;
    }

    /**
     * Scan to the '>' of a "?>" terminator.
     *
     * @throws ArrayIndexOutOfBoundsException if no "?>" is found before the limit
     */
    protected static int scanProcessingInstructionEnd(final char[] chars, int offset, int limit) {
        while(offset < limit) {
            if(chars[offset] == '>' && chars[offset - 1] == '?') {
                return offset;
            }
            offset++;
        }
        throw new ArrayIndexOutOfBoundsException("Unable to find end of processing instruction");
    }

    /**
     * Scan from CDATA start to CDATA end, plus one.
     *
     * @param chars XML data
     * @param offset start offset within XML data
     * @param limit end offset within XML data
     *
     * @return offset one character past the CDATA end
     */
    protected static int scanBeyondCData(final char[] chars, int offset, int limit) {
        offset += 11; // a CDATA node must be at least 12 chars. <![CDATA[]]>

        return scanCDataEnd(chars, offset, limit) + 1;
    }

    /**
     *
     * Scan one past CDATA end
     *
     * @param chars XML data
     * @param offset start offset within XML data
     * @param limit end offset within XML data
     *
     * @return offset one character past the CDATA end
     *
     */
    protected static int scanBeyondCDataEnd(final char[] chars, int offset, int limit) {
        return scanCDataEnd(chars, offset, limit) + 1;
    }

    /**
     * Scan to the '>' of a "]]>" terminator.
     *
     * @throws ArrayIndexOutOfBoundsException if no "]]>" is found before the limit
     */
    protected static int scanCDataEnd(final char[] chars, int offset, int limit) {
        while(offset < limit) {
            if(chars[offset] == '>' && chars[offset - 1] == ']' && chars[offset - 2] == ']') {
                return offset;
            }
            offset++;
        }
        throw new ArrayIndexOutOfBoundsException("Unable to find end of CDATA");
    }

    /**
     * Scan to the '>' of a "-->" terminator.
     *
     * @throws ArrayIndexOutOfBoundsException if no "-->" is found before the limit
     */
    protected static int scanCommentEnd(final char[] chars, int offset, int limit) {
        while(offset < limit) {
            if(chars[offset] == '>' && chars[offset - 1] == '-' && chars[offset - 2] == '-') {
                return offset;
            }
            offset++;
        }
        throw new ArrayIndexOutOfBoundsException("Unable to find end of comment");
    }

    /**
     * Scan from comment start to comment end, plus one.
     *
     * @param chars XML data
     * @param offset start offset within XML data
     * @param limit end offset within XML data
     *
     * @return offset one character past the comment end
     */
    protected static int scanBeyondComment(final char[] chars, int offset, int limit) {
        // scan to end of comment + 1
        offset += 6; // a comment must be at least 7 chars. <!-- -->

        return scanCommentEnd(chars, offset, limit) + 1;
    }

    /** Scan one past a comment's "-->" terminator, starting inside the comment. */
    protected static int scanBeyondCommentEnd(final char[] chars, int offset, int limit) {
        // scan to end of comment + 1
        return scanCommentEnd(chars, offset, limit) + 1;
    }

    /**
     * Skip a whole element subtree: advances past nested elements, comments,
     * CDATA sections and processing instructions until the end tag that closes
     * the level below the starting level, or the limit, is reached.
     *
     * @return offset of the closing end tag's '<', or {@code limit} if not found
     */
    protected static int skipSubtree(final char[] chars, int offset, int limit) {
        int level = 0;

        while(offset < limit) {
            if(chars[offset] == '<') {
                switch(chars[offset + 1]) {
                    case '/' : { // end tag
                        level--;

                        if(level < 0) {
                            // this end tag closes the enclosing element: subtree done
                            return offset;
                        }

                        offset = scanBeyondEndElement(chars, offset, limit);

                        continue;
                    }
                    case '!': {
                        // complete cdata and comments so nodes
                        if(chars[offset + 2] == '-') {
                            // look for -->
                            offset = scanBeyondComment(chars, offset, limit);

                            continue;
                        } else if(chars[offset + 2] == '[') {
                            // look for ]]>
                            offset = scanBeyondCData(chars, offset, limit);

                            continue;
                        } else {
                            // do nothing
                        }
                        break;
                    }
                    case '?' : {
                        // processing instruction
                        offset = scanBeyondProcessingInstruction(chars, offset, limit);

                        continue;
                    }
                    default : {
                        // start element
                        // flush bytes
                        level++;

                        // scan to end of start element to see if empty element
                        offset += 2; // skip <a in <a>
                        while(offset < limit) {
                            if(chars[offset] == '>') {
                                if(chars[offset - 1] == '/') {
                                    // empty element: <a/> opens and closes in one tag
                                    level--;
                                }

                                offset++;

                                break;
                            }

                            offset++;
                        }

                        continue;
                    }
                }
            }
            offset++;
        }

        return offset;
    }

    /**
     * Heuristic check whether chars[sourceStart..offset) looks like escaped XML:
     * starts with "&lt;" and ends with '>' or "&gt;".
     */
    protected static boolean isEscapedXML(final char[] chars, int offset, int sourceStart) {
        // An escaped minimum xml length is <x/> in which encoded size is 4 + x + / + 4 = 10
        // however the end tag is not strictly needed in escaped form, so it is 4 + x + / + > = 7
        return offset - sourceStart >= 7
                // check for start and end tag (escaped)
                && chars[sourceStart] == '&' // start escape start
                && chars[sourceStart+3] == ';' // start escape end
                && chars[sourceStart + 1] == 'l'
                && chars[sourceStart+2] == 't' // start lt
                && (
                        chars[offset - 1] == '>' // unescaped end
                        || ( // or escaped end
                                chars[offset - 4] == '&' // end escape start
                                && chars[offset - 1] == ';' // end escape end
                                && chars[offset - 2] == 't'
                                && chars[offset - 3] == 'g' // end gt
                        )
                );
        // assume method is inlined
    }

    /**
     * Check for an XML declaration "<?xml " at sourceStart; caller is assumed
     * to have already verified the leading "<?".
     */
    protected boolean isXMLDeclaration(final char[] chars, int sourceStart, int sourceEnd) {
        return sourceStart < sourceEnd - 6 && chars[sourceStart + 2] == 'x' && chars[sourceStart + 3] == 'm' && chars[sourceStart + 4] == 'l' && Character.isWhitespace(chars[sourceStart + 5]);
        // assume method is inlined
    }

    /**
     * Decode one of the five predefined XML entities (gt, lt, amp, apos, quot)
     * and append the corresponding character to the buffer.
     *
     * @param entityLength length of the entity name (without '&amp;' and ';')
     * @param chars XML data
     * @param entityIndex offset of the first character of the entity name
     * @param buffer destination buffer
     * @return true if a known entity was decoded and appended, false otherwise
     */
    protected boolean appendEntity(int entityLength, char[] chars, int entityIndex, StringBuilder buffer) {
        switch(entityLength) {
            case 2: {
                if(chars[entityIndex] == 'g' && chars[entityIndex+1] == 't') {
                    buffer.append('>');

                    return true;
                } else if(chars[entityIndex] == 'l' && chars[entityIndex+1] == 't') {
                    buffer.append('<');

                    return true;
                }
                break;
            }
            case 3: {
                if(chars[entityIndex] == 'a' && chars[entityIndex+1] == 'm' && chars[entityIndex+2] == 'p') {
                    buffer.append('&');

                    return true;
                }
                break;
            }
            case 4: {
                if(chars[entityIndex] == 'a' && chars[entityIndex+1] == 'p' && chars[entityIndex+2] == 'o' && chars[entityIndex+3] == 's') {
                    buffer.append('\'');

                    return true;
                } else if(chars[entityIndex] == 'q' && chars[entityIndex+1] == 'u' && chars[entityIndex+2] == 'o' && chars[entityIndex+3] == 't') {
                    buffer.append('"');

                    return true;
                }
                break;
            }
        }
        return false;
    }
}
| |
package org.tdc.extension.mef.export;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tdc.book.Book;
import org.tdc.book.TestCase;
import org.tdc.book.TestDoc;
import org.tdc.book.TestSet;
import org.tdc.config.book.TaskConfig;
import org.tdc.dom.TestDocXMLGenerator;
import org.tdc.filter.Filter;
import org.tdc.result.Message;
import org.tdc.result.Results;
import org.tdc.result.TaskResult;
import org.tdc.shared.util.SharedUtil;
import org.tdc.task.AbstractTask;
import org.tdc.task.Task;
import org.tdc.util.Util;
/**
* MeF (Modernized eFile) {@link Task} for exporting MeF "Submissions".
* Submissions are represented as {@link TestCase}s in a {@link Book}.
* Each of these Submissions will be exported to a directory
* (named with the Submission ID) and will contain a set of one or more
* XML files (corresponding to the {@link TestDoc}s in the {@link TestCase}).
*/
public class MeFExportTask extends AbstractTask {
public static final String EXPORT_ROOT_OVERRIDE = "export-root";
private static final Logger log = LoggerFactory.getLogger(MeFExportTask.class);
private final MeFExportTaskConfig config;
private final Book book;
private final Path exportRoot;
private final TestDocXMLGenerator xmlGenerator;
private final Filter filter;
public MeFExportTask(
MeFExportTaskConfig config, Book book,
Path exportRoot, TestDocXMLGenerator xmlGenerator, Filter filter) {
this.config = config;
this.book = book;
this.exportRoot = exportRoot;
this.xmlGenerator = xmlGenerator;
this.filter = filter;
}
@Override
public MeFExportTaskConfig getConfig() {
return config;
}
@Override
public void process() {
export();
}
public void export() {
Path batchDir = Util.createBatchDir(
exportRoot,
book.getConfig().getBookName());
// if only one set, and that set is the "default" set,
// don't create a sub directory for it
List<TestSet> testSets = book.getTestSets();
int seq = 1;
for (TestSet testSet : testSets) {
if (filter == null || !filter.ignoreTestSet(testSet)) {
if (testSets.size() == 1 && testSet.getSetName().equals("")) {
// if we only have a "default" set, won't need an index
exportTestSet(testSet, batchDir, -1);
}
else {
// if we have multiple sets, use index = 0 for the default,
// and an incrementing sequence for the rest
exportTestSet(testSet, batchDir,
testSet.getSetName().equals("") ? 0 : seq++);
}
}
}
}
private void exportTestSet(TestSet testSet, Path batchDir, int seq) {
log.debug("Exporting TestSet: {}", testSet.getSetName());
boolean success = false;
try {
StringBuilder submissionIDs = new StringBuilder();
Path setDir = createSetDir(batchDir, seq, testSet.getSetName());
List<TestCase> testCases = testSet.getTestCases();
for (TestCase testCase : testCases) {
if (filter == null || !filter.ignoreTestCase(testSet, testCase)) {
String submissionID = exportTestCase(testSet, testCase, setDir);
submissionIDs.append(submissionID).append(System.lineSeparator());
}
}
writeSubmissionIDsToFile(setDir, submissionIDs.toString());
success = true;
}
finally {
logResult(testSet.getResults(), "Test Set", success);
}
}
private void writeSubmissionIDsToFile(Path setDir, String submissionIDs) {
try {
Files.write(setDir.resolve("SubmissionIDs.txt"),
submissionIDs.getBytes(), StandardOpenOption.CREATE_NEW);
}
catch (IOException e) {
throw new RuntimeException("Unable to create 'SubmissionIDs.txt' file in: " +
setDir.toString(), e);
}
}
private String exportTestCase(TestSet testSet, TestCase testCase, Path setDir) {
log.debug("Exporting TestCase: {}", testCase.getCaseNum());
boolean success = false;
try {
String submissionID = getSubmissionID(testCase);
Path caseDir = createCaseDir(setDir, submissionID);
List<TestDoc> testDocs = testCase.getTestDocs();
for (TestDoc testDoc : testDocs) {
if (filter == null || !filter.ignoreTestDoc(testSet, testCase, testDoc)) {
exportTestDoc(testDoc, caseDir, submissionID);
}
}
success = true;
return submissionID;
}
finally {
logResult(testCase.getResults(), "Test Case", success);
}
}
private void exportTestDoc(TestDoc testDoc, Path caseDir, String submissionID) {
log.debug("Exporting TestCase num {}, TestSet name '{}', column {}",
testDoc.getCaseNum(), testDoc.getSetName(), testDoc.getColNum());
boolean success = false;
try {
xmlGenerator.setDocument(testDoc.getDOMDocument());
String docType = testDoc.getPageConfig().getDocTypeConfig().getDocTypeName();
if (docType.equals(config.getStateDocTypeName())) {
Path filePath = caseDir.resolve("xml").resolve(submissionID + ".xml");
exportTestDocAndCheckExistence(docType, filePath);
}
else if (docType.equals(config.getManifestDocTypeName())) {
Path filePath = caseDir.resolve("manifest").resolve("manifest.xml");
exportTestDocAndCheckExistence(docType, filePath);
}
else if (docType.equals(config.getFederalDocTypeName())) {
Path filePath = caseDir.resolve("irs").resolve("xml").resolve("federal.xml");
exportTestDocAndCheckExistence(docType, filePath);
}
else {
throw new RuntimeException("DocType '" + docType +
"' is an unknown for MeF; unable to export");
}
success = true;
}
finally {
logResult(testDoc.getResults(), "Test Doc", success);
}
}
private void exportTestDocAndCheckExistence(String docType, Path filePath) {
if (Files.exists(filePath)) {
throw new RuntimeException("File '" + filePath.toString() +
"' of DocType '" + docType +
"' already exists; only one document of each type is allowed");
}
xmlGenerator.generateXML(filePath);
}
private void logResult(Results results, String type, boolean success) {
String taskID = config.getTaskID();
TaskResult taskResult = new TaskResult(taskID);
String msg = type + (success ? " exported successfully" : " export failed");
Message message = new Message.Builder(Message.MESSAGE_TYPE_INFO, msg).build();
taskResult.addMessage(message);
results.setTaskResult(taskID, taskResult);
}
private Path createSetDir(Path batchDir, int index, String setName) {
Path setDir = batchDir;
if (index != -1) {
// prefix dir with an index value to ensure that there will
// never be a clash if two 'legalized names' end up
// being the same
String suffix = setName.equals("") ? "DefaultSet" : SharedUtil.legalizeName(setName);
setDir = setDir.resolve(index + "_" + suffix);
Util.createDirectory(setDir);
}
return setDir;
}
private String getSubmissionID(TestCase testCase) {
String subIDVar = config.getSubmissionIDVariable();
String subID = testCase.getCaseVariables().getOrDefault(subIDVar, "").trim();
if (subID.length() == 0) {
throw new RuntimeException("A Submission ID must exist for Test Case " +
testCase.getCaseNum() + " in Test Set '" + testCase.getSetName() + "'");
}
return subID;
}
private Path createCaseDir(Path setDir, String submissionID) {
Path caseDir = setDir.resolve(submissionID);
Util.createDirectory(caseDir);
Util.createDirectory(caseDir.resolve("xml"));
Util.createDirectory(caseDir.resolve("manifest"));
Util.createDirectory(caseDir.resolve("irs"));
Util.createDirectory(caseDir.resolve("irs").resolve("xml"));
return caseDir;
}
public static Task build(
TaskConfig taskConfig, Book book, Map<String, String> taskParams, Filter filter) {
if (!(taskConfig instanceof MeFExportTaskConfig)) {
throw new IllegalStateException("TaskConfig '" + taskConfig.getTaskID() +
"' must be an instance of " + MeFExportTaskConfig.class.getName());
}
TestDocXMLGenerator xmlGenerator = new TestDocXMLGenerator();
MeFExportTaskConfig config = (MeFExportTaskConfig)taskConfig;
Path exportRoot = getExportRoot(config, taskParams);
return new MeFExportTask(config, book, exportRoot, xmlGenerator, filter);
}
/**
 * Determines the export root directory: the configured default, unless the
 * task parameters carry an EXPORT_ROOT_OVERRIDE entry, in which case that
 * path is used (and must be an existing directory).
 */
private static Path getExportRoot(
    MeFExportTaskConfig config, Map<String, String> taskParams) {
    if (taskParams == null || !taskParams.containsKey(EXPORT_ROOT_OVERRIDE)) {
        return config.getExportRoot();
    }
    Path overrideRoot = Paths.get(taskParams.get(EXPORT_ROOT_OVERRIDE));
    if (!Files.isDirectory(overrideRoot)) {
        throw new RuntimeException("Task parameter '" + EXPORT_ROOT_OVERRIDE +
            "' does not refer to a valid directory: " + overrideRoot);
    }
    return overrideRoot;
}
}
| |
/*L
* Copyright Washington University in St. Louis
* Copyright SemanticBits
* Copyright Persistent Systems
* Copyright Krishagni
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/catissue-core/LICENSE.txt for details.
*/
/*
* <p>Title: SpecimenArrayBizLogic Class </p> <p>Description:This class performs
* business level logic for Specimen Array</p> Copyright: Copyright (c) year
* 2006 Company: Washington University, School of Medicine, St. Louis.
* @version 1.1 Created on Aug 28,2006
*/
package edu.wustl.catissuecore.bizlogic;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import edu.wustl.catissuecore.domain.CellSpecimen;
import edu.wustl.catissuecore.domain.CollectionProtocolRegistration;
import edu.wustl.catissuecore.domain.ContainerPosition;
import edu.wustl.catissuecore.domain.FluidSpecimen;
import edu.wustl.catissuecore.domain.MolecularSpecimen;
import edu.wustl.catissuecore.domain.NewSpecimenArrayOrderItem;
import edu.wustl.catissuecore.domain.Site;
import edu.wustl.catissuecore.domain.Specimen;
import edu.wustl.catissuecore.domain.SpecimenArray;
import edu.wustl.catissuecore.domain.SpecimenArrayContent;
import edu.wustl.catissuecore.domain.SpecimenArrayType;
import edu.wustl.catissuecore.domain.SpecimenCollectionGroup;
import edu.wustl.catissuecore.domain.SpecimenPosition;
import edu.wustl.catissuecore.domain.StorageContainer;
import edu.wustl.catissuecore.domain.TissueSpecimen;
import edu.wustl.catissuecore.factory.DomainInstanceFactory;
import edu.wustl.catissuecore.factory.InstanceFactory;
import edu.wustl.catissuecore.uiobject.SpecimenArrayUIObject;
import edu.wustl.catissuecore.util.ApiSearchUtil;
import edu.wustl.catissuecore.util.Position;
import edu.wustl.catissuecore.util.StorageContainerUtil;
import edu.wustl.catissuecore.util.global.AppUtility;
import edu.wustl.catissuecore.util.global.Constants;
import edu.wustl.common.beans.NameValueBean;
import edu.wustl.common.beans.SessionDataBean;
import edu.wustl.common.cde.CDEManager;
import edu.wustl.common.exception.ApplicationException;
import edu.wustl.common.exception.BizLogicException;
import edu.wustl.common.factory.AbstractFactoryConfig;
import edu.wustl.common.factory.IFactory;
import edu.wustl.common.util.global.ApplicationProperties;
import edu.wustl.common.util.global.Status;
import edu.wustl.common.util.global.Validator;
import edu.wustl.common.util.logger.Logger;
import edu.wustl.dao.DAO;
import edu.wustl.dao.QueryWhereClause;
import edu.wustl.dao.condition.EqualClause;
import edu.wustl.dao.exception.DAOException;
import edu.wustl.dao.util.HibernateMetaData;
import edu.wustl.security.exception.SMException;
import edu.wustl.security.privilege.PrivilegeCache;
import edu.wustl.security.privilege.PrivilegeManager;
/**
* <p>
* This class initializes the fields of SpecimenArrayBizLogic.java
* </p>
* @author Ashwin Gupta
* @version 1.1
*/
public class SpecimenArrayBizLogic extends CatissueDefaultBizLogic
{
private transient final Logger logger = Logger.getCommonLogger(SpecimenArrayBizLogic.class);
/**
 * Inserts a specimen array when the caller supplies no UI state: delegates
 * to the UI-object-aware overload with a fresh, empty UI object.
 * @param obj : the SpecimenArray to insert
 * @param dao : dao
 * @param sessionDataBean : sessionDataBean
 * @throws BizLogicException : BizLogicException
 */
protected void insert(Object obj, DAO dao, SessionDataBean sessionDataBean)
        throws BizLogicException
{
    insert(obj, new SpecimenArrayUIObject(), dao, sessionDataBean);
}
/**
 * Persists a new specimen array together with its capacity and its content
 * (cell) rows.
 * @see edu.wustl.common.bizlogic.AbstractBizLogic#insert(java.lang.Object,
 *      edu.wustl.common.dao.DAO, edu.wustl.common.beans.SessionDataBean)
 * @param obj : the SpecimenArray to insert
 * @param uiObject : SpecimenArrayUIObject carrying UI flags for the request
 * @param dao : dao
 * @param sessionDataBean : sessionDataBean
 * @throws BizLogicException : BizLogicException
 */
protected void insert(Object obj, Object uiObject, DAO dao, SessionDataBean sessionDataBean)
        throws BizLogicException
{
    try
    {
        final SpecimenArrayUIObject arrayUIObject = (SpecimenArrayUIObject) uiObject;
        final SpecimenArray array = (SpecimenArray) obj;
        // Validate the requested storage slot before touching the database.
        this.checkStorageContainerAvailablePos(array, dao, sessionDataBean);
        // Resolve/validate the content rows; a null "old" array selects insert semantics.
        this.doUpdateSpecimenArrayContents(array, null, dao, sessionDataBean, true, arrayUIObject);
        dao.insert(array.getCapacity());
        dao.insert(array);
        // Persist each content row, wiring the back-reference to its parent array.
        for (final SpecimenArrayContent content : array.getSpecimenArrayContentCollection())
        {
            content.setSpecimenArray(array);
            dao.insert(content);
        }
    }
    catch (final DAOException daoExp)
    {
        this.logger.error(daoExp.getMessage(), daoExp);
        daoExp.printStackTrace();
        throw this.getBizLogicException(daoExp, daoExp.getErrorKeyName(), daoExp.getMsgValues());
    }
}
/**
 * Post-insert hook. The specimen array needs no array-specific post-insert
 * processing, so this simply delegates to the default parent behaviour.
 * @param obj : obj
 * @param dao : dao
 * @param sessionDataBean : sessionDataBean
 * @throws BizLogicException : BizLogicException
 */
@Override
public void postInsert(Object obj, DAO dao, SessionDataBean sessionDataBean)
        throws BizLogicException
{
    super.postInsert(obj, dao, sessionDataBean);
}
/**
 * Post-update hook. The specimen array needs no array-specific post-update
 * processing, so this simply delegates to the default parent behaviour.
 * @param dao : dao
 * @param currentObj : currentObj
 * @param oldObj : oldObj
 * @param sessionDataBean : sessionDataBean
 * @throws BizLogicException : BizLogicException
 */
@Override
public void postUpdate(DAO dao, Object currentObj, Object oldObj,
        SessionDataBean sessionDataBean) throws BizLogicException
{
    super.postUpdate(dao, currentObj, oldObj, sessionDataBean);
}
/**
 * Updates a specimen array when the caller supplies no UI state: delegates
 * to the UI-object-aware overload with a fresh, empty UI object.
 * @param dao : dao
 * @param obj : new state of the SpecimenArray
 * @param oldObj : previously persisted state
 * @param sessionDataBean : sessionDataBean
 * @throws BizLogicException : BizLogicException
 */
protected void update(DAO dao, Object obj, Object oldObj, SessionDataBean sessionDataBean)
        throws BizLogicException
{
    update(dao, obj, oldObj, new SpecimenArrayUIObject(), sessionDataBean);
}
/**
 * Updates a persisted specimen array: re-validates the storage slot when the
 * position changed, refreshes the content collection (inserting new cells and
 * updating existing ones), and frees the container slot when the array is
 * being disabled.
 * @param dao : dao
 * @param obj : new state of the SpecimenArray
 * @param oldObj : previously persisted state of the SpecimenArray
 * @param uiObject : SpecimenArrayUIObject carrying UI flags (e.g. aliquot mode)
 * @param sessionDataBean : sessionDataBean
 * @throws BizLogicException : BizLogicException
 */
@Override
protected void update(DAO dao, Object obj, Object oldObj,Object uiObject, SessionDataBean sessionDataBean)
        throws BizLogicException
{
    try
    {
        SpecimenArrayUIObject spArrayUIObject=(SpecimenArrayUIObject)uiObject;
        final SpecimenArray specimenArray = (SpecimenArray) obj;
        final SpecimenArray oldSpecimenArray = (SpecimenArray) oldObj;
        // Resolve the parent container's id from its name when only the name is set.
        this.retriveScId(dao, specimenArray);
        // flag stays true when the array's container/slot differs from the old
        // state, i.e. the new slot must be re-checked for availability.
        boolean flag = true;
        // NOTE(review): if the new array's getLocatedAtPosition() is null while
        // the old one is set, this condition throws an NPE — the commented-out
        // null check below suggests a known gap; confirm callers always supply
        // a position on update.
        if (oldSpecimenArray.getLocatedAtPosition() != null && specimenArray.getLocatedAtPosition().getParentContainer().getId().longValue() == oldSpecimenArray
                .getLocatedAtPosition().getParentContainer().getId().longValue()
                // && specimenArray.getLocatedAtPosition() != null
                && specimenArray.getLocatedAtPosition().getPositionDimensionOne().longValue() == oldSpecimenArray
                        .getLocatedAtPosition().getPositionDimensionOne().longValue()
                && specimenArray.getLocatedAtPosition().getPositionDimensionTwo().longValue() == oldSpecimenArray
                        .getLocatedAtPosition().getPositionDimensionTwo().longValue())
        {
            flag = false;
        }
        if (flag)
        {
            // Position changed: verify the new (container, x, y) slot is free.
            final IFactory factory = AbstractFactoryConfig.getInstance().getBizLogicFactory();
            final StorageContainerBizLogic storageContainerBizLogic = (StorageContainerBizLogic) factory
                    .getBizLogic(Constants.STORAGE_CONTAINER_FORM_ID);
            final String contId= specimenArray.getLocatedAtPosition()
                    .getParentContainer().getId().toString();
            final String posOne= specimenArray
                    .getLocatedAtPosition().getPositionDimensionOne().toString();
            final String posTwo= specimenArray
                    .getLocatedAtPosition().getPositionDimensionTwo().toString();
            storageContainerBizLogic.checkContainer(dao,StorageContainerUtil.setparameterList
                    (contId, posOne, posTwo, false),sessionDataBean,null);
        }
        // Validate/normalise the content rows against the old state.
        this.doUpdateSpecimenArrayContents(specimenArray, oldSpecimenArray, dao,
                sessionDataBean, false,spArrayUIObject);
        dao.update(specimenArray.getCapacity(),oldSpecimenArray.getCapacity());
        dao.update(specimenArray,oldSpecimenArray);
        SpecimenArrayContent specimenArrayContent = null;
        final Collection<SpecimenArrayContent> oldSpecArrayContents = ((SpecimenArray) oldObj)
                .getSpecimenArrayContentCollection();
        for (final Iterator<SpecimenArrayContent> iter = specimenArray.getSpecimenArrayContentCollection().iterator(); iter
                .hasNext();)
        {
            specimenArrayContent = iter.next();
            specimenArrayContent.setSpecimenArray(specimenArray);
            // increment by 1 because of array index starts from 0.
            if (specimenArrayContent.getPositionDimensionOne() != null)
            {
                // Bug 2365: for aliquot-created arrays the grid location is kept
                // as-is so the parent array's location is not shifted.
                if (spArrayUIObject.isAliquot())
                {
                    specimenArrayContent.setPositionDimensionOne(new Integer(
                            specimenArrayContent.getPositionDimensionOne().intValue()));
                    specimenArrayContent.setPositionDimensionTwo(new Integer(
                            specimenArrayContent.getPositionDimensionTwo().intValue()));
                }
                else
                {
                    specimenArrayContent.setPositionDimensionOne(new Integer(
                            specimenArrayContent.getPositionDimensionOne().intValue() + 1));
                    specimenArrayContent.setPositionDimensionTwo(new Integer(
                            specimenArrayContent.getPositionDimensionTwo().intValue() + 1));
                }
            }
            // Insert brand-new content rows; update those that already existed.
            if (this.checkExistSpecimenArrayContent(specimenArrayContent, oldSpecArrayContents) == null)
            {
                dao.insert(specimenArrayContent);
            }
            else
            {
                // Locate the matching old row so the DAO can diff against it.
                Iterator<SpecimenArrayContent> specimenArrayContentItr = oldSpecArrayContents.iterator();
                SpecimenArrayContent oldSpecArrayContent = null;
                while(specimenArrayContentItr.hasNext())
                {
                    SpecimenArrayContent specimenArrContent = specimenArrayContentItr.next();
                    if(specimenArrContent.getId().equals(specimenArrayContent.getId()))
                    {
                        oldSpecArrayContent = specimenArrContent;
                        break;
                    }
                }
                dao.update(specimenArrayContent,oldSpecArrayContent);
            }
        }
        // A disabled array releases its storage container slot.
        if (Status.ACTIVITY_STATUS_DISABLED.toString()
                .equals(specimenArray.getActivityStatus()))
        {
            final ContainerPosition prevPosition = specimenArray.getLocatedAtPosition();
            specimenArray.setLocatedAtPosition(null);
            dao.update(specimenArray,oldSpecimenArray);
            if (prevPosition != null)
            {
                dao.delete(prevPosition);
            }
        }
    }
    catch (final DAOException daoExp)
    {
        this.logger.error(daoExp.getMessage(), daoExp);
        daoExp.printStackTrace();
        throw this
                .getBizLogicException(daoExp, daoExp.getErrorKeyName(), daoExp.getMsgValues());
    }
}
/**
 * Looks up the entry in the given collection whose persistent id matches that
 * of the supplied content.
 * @param specimenArrayContent content being matched
 * @param specArrayContentCollection candidate (old) contents
 * @return the matching old content, or null when the content is new (it has
 *         no id, or no collection entry shares its id)
 */
private SpecimenArrayContent checkExistSpecimenArrayContent(
        SpecimenArrayContent specimenArrayContent, Collection<SpecimenArrayContent> specArrayContentCollection)
{
    // A content without a persistent id cannot match anything: it is new.
    if (specimenArrayContent.getId() == null)
    {
        return null;
    }
    for (final SpecimenArrayContent candidate : specArrayContentCollection)
    {
        if (candidate.getId() != null
                && candidate.getId().longValue() == specimenArrayContent.getId().longValue())
        {
            return candidate;
        }
    }
    return null;
}
/**
 * Validates and normalises the content (cell) collection of a specimen array
 * prior to insert or update.
 * <p>For every content row that resolves to an existing specimen: checks the
 * specimen's class against the array type, enforces a mandatory quantity for
 * molecular specimens (debiting the specimen's available quantity, except for
 * aliquot-created arrays), and requires both grid coordinates. Only contents
 * that resolve to a specimen survive into the array's final collection; at
 * least one must. On update, specimens removed from the array get their
 * content quantity credited back.</p>
 * @param specimenArray new state of the array whose contents are processed
 * @param oldSpecimenArray previously persisted state (null on insert)
 * @param dao dao used for lookups and the quantity-credit update
 * @param sessionDataBean session of the requesting user (unused here but kept
 *        for signature compatibility)
 * @param isInsertOperation true for insert, false for update
 * @param spArrayUIObject UI flags; {@code isAliquot()} skips quantity debits
 * @throws BizLogicException on any validation failure or DAO error
 */
private void doUpdateSpecimenArrayContents(SpecimenArray specimenArray,
        SpecimenArray oldSpecimenArray, DAO dao, SessionDataBean sessionDataBean,
        boolean isInsertOperation, SpecimenArrayUIObject spArrayUIObject) throws BizLogicException
{
    try
    {
        Collection<SpecimenArrayContent> oldSpecimenArrayContentCollection = null;
        if (oldSpecimenArray != null)
        {
            oldSpecimenArrayContentCollection = oldSpecimenArray
                    .getSpecimenArrayContentCollection();
        }
        final Collection<SpecimenArrayContent> specimenArrayContentCollection = specimenArray
                .getSpecimenArrayContentCollection();
        // Only contents that resolve to an existing specimen survive into this set.
        final Collection<SpecimenArrayContent> updatedSpecArrayContentCollection =
                new HashSet<SpecimenArrayContent>();
        SpecimenArrayContent specimenArrayContent = null;
        Specimen specimen = null;
        if (specimenArrayContentCollection != null && !specimenArrayContentCollection.isEmpty())
        {
            double quantity = 0.0;
            // Fetch the array type so each specimen's class can be checked against it.
            final Object object = dao.retrieveById(SpecimenArrayType.class.getName(),
                    specimenArray.getSpecimenArrayType().getId());
            SpecimenArrayType arrayType = null;
            if (object != null)
            {
                arrayType = (SpecimenArrayType) object;
            }
            for (final Iterator<SpecimenArrayContent> iter = specimenArrayContentCollection
                    .iterator(); iter.hasNext();)
            {
                specimenArrayContent = iter.next();
                specimen = this.getSpecimen(dao, specimenArrayContent);
                if (specimen != null)
                {
                    // Array and specimen must agree on specimen class.
                    if (!this.isArrayAndSpecimenCompatibile(arrayType, specimen))
                    {
                        throw this.getBizLogicException(null, "spec.not.compatible", "");
                    }
                    // NOTE(review): a dead block that re-checked
                    // getInitialQuantity() for null inside its own non-null
                    // guard has been removed — it could never execute.
                    // Molecular specimens must carry a quantity, and the
                    // specimen must actually have that much available.
                    if (specimen instanceof MolecularSpecimen)
                    {
                        if (specimenArrayContent.getInitialQuantity() != null)
                        {
                            quantity = specimenArrayContent.getInitialQuantity().doubleValue();
                            final double tempQuantity = quantity;
                            SpecimenArrayContent oldArrayContent = null;
                            // Arrays created from the aliquot page skip the
                            // available-quantity debit entirely.
                            if (!spArrayUIObject.isAliquot())
                            {
                                // On update, debit only the delta between the
                                // new and old content quantities.
                                if (oldSpecimenArrayContentCollection != null)
                                {
                                    oldArrayContent = this.checkExistSpecimenArrayContent(
                                            specimenArrayContent,
                                            oldSpecimenArrayContentCollection);
                                    if (oldArrayContent != null)
                                    {
                                        quantity = quantity
                                                - oldArrayContent.getInitialQuantity()
                                                        .doubleValue();
                                    }
                                }
                                if (!this.isAvailableQty(specimen, quantity))
                                {
                                    throw this.getBizLogicException(null,
                                            "quantity.more.then.distri.quantity", tempQuantity
                                                    + ":"
                                                    + specimen.getAvailableQuantity()
                                                            .doubleValue() + ":"
                                                    + specimen.getLabel());
                                }
                            }
                        }
                        else
                        {
                            throw this.getBizLogicException(null, "enter.quantity.mol.spec",
                                    specimen.getLabel());
                        }
                    }
                    specimenArrayContent.setSpecimen(specimen);
                    // Both grid coordinates are mandatory for a content entry.
                    if (specimenArrayContent.getPositionDimensionOne() == null
                            || specimenArrayContent.getPositionDimensionTwo() == null)
                    {
                        throw this.getBizLogicException(null, "array.contentPosition.err.msg",
                                "");
                    }
                    updatedSpecArrayContentCollection.add(specimenArrayContent);
                }
            }
        }
        // There should be at least one valid specimen in the array.
        if (updatedSpecArrayContentCollection.isEmpty())
        {
            throw this.getBizLogicException(null, "spec.array.should.contain.atleast.one.spec",
                    "");
        }
        // On update, a specimen removed from the array gets its content quantity
        // credited back to its available quantity.
        if (!isInsertOperation)
        {
            final Iterator<SpecimenArrayContent> itr = oldSpecimenArrayContentCollection.iterator();
            while (itr.hasNext())
            {
                final SpecimenArrayContent oldSpecimenArrayContent = itr.next();
                final SpecimenArrayContent newSpecimenArrayContent = this
                        .checkExistSpecimenArrayContent(oldSpecimenArrayContent,
                                specimenArrayContentCollection);
                if (newSpecimenArrayContent == null
                        || newSpecimenArrayContent.getSpecimen().getLabel() == null
                        || newSpecimenArrayContent.getSpecimen().getLabel().equals(""))
                {
                    final Specimen oldSpecimen = this.getSpecimen(dao, oldSpecimenArrayContent);
                    if (oldSpecimen != null && oldSpecimen instanceof MolecularSpecimen)
                    {
                        final Double oldQuantity = oldSpecimenArrayContent.getInitialQuantity();
                        Double quantity = oldSpecimen.getAvailableQuantity();
                        final double newQuantity = quantity.doubleValue()
                                + oldQuantity.doubleValue();
                        quantity = newQuantity;
                        oldSpecimen.setAvailableQuantity(quantity);
                        dao.update(oldSpecimen);
                    }
                }
            }
        }
        specimenArray.setSpecimenArrayContentCollection(updatedSpecArrayContentCollection);
    }
    catch (final DAOException daoExp)
    {
        this.logger.error(daoExp.getMessage(), daoExp);
        daoExp.printStackTrace();
        throw this
                .getBizLogicException(daoExp, daoExp.getErrorKeyName(), daoExp.getMsgValues());
    }
}
/**
 * Validates that the target storage container (if any) exists, is open, and
 * that the requested slot inside it is available, then wires the resolved
 * container and the array itself into the position object.
 * @param specimenArray : array whose storage position is being claimed
 * @param dao : dao
 * @param sessionDataBean : sessionDataBean
 * @throws BizLogicException : when the container is closed, the slot is
 *         taken, or a lookup fails
 */
private void checkStorageContainerAvailablePos(SpecimenArray specimenArray, DAO dao,
        SessionDataBean sessionDataBean) throws BizLogicException
{
    if (specimenArray.getLocatedAtPosition() != null
            && specimenArray.getLocatedAtPosition().getParentContainer() != null)
    {
        // Resolve the container's id from its name and its name from its id,
        // so the position carries a fully populated container reference.
        this.retriveScId(dao, specimenArray);
        this.retriveScName(specimenArray, dao);
        final StorageContainer storageContainerObj = (StorageContainer) specimenArray
                .getLocatedAtPosition().getParentContainer();
        // check for closed Storage Container
        this.checkStatus(dao, storageContainerObj, "Storage Container");
        final IFactory factory = AbstractFactoryConfig.getInstance().getBizLogicFactory();
        final StorageContainerBizLogic storageContainerBizLogic = (StorageContainerBizLogic) factory
                .getBizLogic(Constants.STORAGE_CONTAINER_FORM_ID);
        final String contId= storageContainerObj.getId().toString();
        final String posOne= specimenArray
                .getLocatedAtPosition().getPositionDimensionOne().toString();
        final String posTwo= specimenArray
                .getLocatedAtPosition().getPositionDimensionTwo().toString();
        // Verify the requested (x, y) slot in the container is actually free.
        storageContainerBizLogic.checkContainer(dao,StorageContainerUtil.setparameterList
                (contId, posOne, posTwo, false),sessionDataBean,null);
        specimenArray.getLocatedAtPosition().setParentContainer(storageContainerObj);
        specimenArray.getLocatedAtPosition().setOccupiedContainer( specimenArray );//bug 15137
    }
}
/**
 * Populates the name of the array's parent storage container by looking it up
 * from the container's id. Leaves the position untouched when the id is null
 * or no matching container row is found.
 * @param specimenArray : specimenArray
 * @param dao : dao
 * @throws BizLogicException : BizLogicException
 */
private void retriveScName(SpecimenArray specimenArray, DAO dao) throws BizLogicException
{
    try
    {
        final Long containerId = specimenArray.getLocatedAtPosition().getParentContainer()
                .getId();
        if (containerId != null)
        {
            // Build a fresh container instance carrying only the known id.
            final InstanceFactory<StorageContainer> scInstFact = DomainInstanceFactory
                    .getInstanceFactory(StorageContainer.class);
            final StorageContainer container = scInstFact.createObject();
            container.setId(containerId);
            // Fetch only the container's name for that id.
            final String sourceObjectName = StorageContainer.class.getName();
            final QueryWhereClause whereClause = new QueryWhereClause(sourceObjectName);
            whereClause.addCondition(new EqualClause("id", containerId));
            final List nameList = dao.retrieve(sourceObjectName, new String[]{"name"},
                    whereClause);
            if (!nameList.isEmpty())
            {
                container.setName((String) nameList.get(0));
                specimenArray.getLocatedAtPosition().setParentContainer(container);
            }
        }
    }
    catch (final DAOException daoExp)
    {
        this.logger.error(daoExp.getMessage(), daoExp);
        daoExp.printStackTrace();
        throw this.getBizLogicException(daoExp, daoExp.getErrorKeyName(), daoExp.getMsgValues());
    }
}
/**
 * Checks whether the requested quantity is available on the given specimen
 * and, if so, debits it from the specimen's available quantity.
 * <p>Only molecular specimens track quantity here; any other specimen type is
 * treated as always available and left untouched.</p>
 * @param specimen specimen whose available quantity is checked/decremented
 * @param quantity quantity requested for the array content
 * @return {@code true} if the quantity was available (or the specimen is not
 *         molecular); {@code false} if the molecular specimen holds less than
 *         the requested amount
 */
private boolean isAvailableQty(Specimen specimen, double quantity)
{
    if (specimen instanceof MolecularSpecimen)
    {
        final MolecularSpecimen molecularSpecimen = (MolecularSpecimen) specimen;
        // Read the quantity directly as a double instead of the original
        // toString()/Double.parseDouble() round-trip, which was wasteful.
        double availableQty = molecularSpecimen.getAvailableQuantity().doubleValue();
        if (quantity > availableQty)
        {
            return false;
        }
        // Debit the consumed amount from the specimen.
        availableQty = availableQty - quantity;
        molecularSpecimen.setAvailableQuantity(Double.valueOf(availableQty));
    }
    return true;
}
/**
 * Resolves the persisted Specimen referenced by an array content entry,
 * looking it up by label (preferred) or barcode.
 * @param dao
 *            dao used for the lookup
 * @param arrayContent : array content whose specimen reference is resolved
 * @return the persisted Specimen, or null when the content carries neither a
 *         label nor a barcode (or no specimen reference at all)
 * @throws BizLogicException : when the specimen does not exist, is not
 *         active, or the DAO call fails
 */
private Specimen getSpecimen(DAO dao, SpecimenArrayContent arrayContent)
        throws BizLogicException
{
    try
    {
        Specimen specimen = arrayContent.getSpecimen();
        if (specimen != null)
        {
            String columnName = null;
            String columnValue = null;
            // Prefer lookup by label; fall back to barcode.
            if ((specimen.getLabel() != null) && (!specimen.getLabel().trim().equals("")))
            {
                columnName = Constants.SPECIMEN_LABEL_COLUMN_NAME;
                columnValue = specimen.getLabel();
            }
            else if ((specimen.getBarcode() != null)
                    && (!specimen.getBarcode().trim().equals("")))
            {
                columnName = Constants.SPECIMEN_BARCODE_COLUMN_NAME;
                columnValue = specimen.getBarcode();
            }
            else
            {
                // Neither identifier supplied: nothing to resolve.
                return null;
            }
            final String sourceObjectName = Specimen.class.getName();
            final String whereColumnName = columnName;
            final String whereColumnValue = columnValue;
            final List list = dao.retrieve(sourceObjectName, whereColumnName, whereColumnValue);
            if (!list.isEmpty())
            {
                specimen = (Specimen) list.get(0);
                // Unwrap the Hibernate proxy to the concrete domain object.
                specimen = (Specimen) HibernateMetaData.getProxyObjectImpl(specimen);
                final String activityStatus = specimen.getActivityStatus();
                // Bug 2872: closed/disabled specimens may not be added to a
                // specimen array.
                if (!activityStatus.equals(Status.ACTIVITY_STATUS_ACTIVE.toString()))
                {
                    throw this.getBizLogicException(null, "spec.array.spec.invalid",
                            columnValue);
                }
            }
            else
            {
                throw this.getBizLogicException(null, "spec.array.spec.does.nt.exists",
                        columnValue);
            }
        }
        return specimen;
    }
    catch (final DAOException daoExp)
    {
        this.logger.error(daoExp.getMessage(), daoExp);
        daoExp.printStackTrace();
        throw this
                .getBizLogicException(daoExp, daoExp.getErrorKeyName(), daoExp.getMsgValues());
    }
}
/**
 * Decides whether a specimen may live in an array of the given type, on the
 * basis of specimen class.
 * @param arrayType array type whose specimen class is the reference
 * @param specimen specimen being placed into the array
 * @return true when the array type's specimen class matches the concrete
 *         class name of the specimen, false otherwise
 */
private boolean isArrayAndSpecimenCompatibile(SpecimenArrayType arrayType, Specimen specimen)
{
    final String arrayClass = arrayType.getSpecimenClass();
    return arrayClass.equals(this.getClassName(specimen));
}
/**
 * This function returns the actual type of the specimen i.e Cell / Fluid /
 * Molecular / Tissue.
 * @param specimen : specimen
 * @return the matching Constants class name, or an empty string when the
 *         specimen is none of the known subtypes (including null)
 */
public final String getClassName(Specimen specimen)
{
    // Checks run in the same order as the original if/else chain; the first
    // matching subtype wins.
    if (specimen instanceof CellSpecimen)
    {
        return Constants.CELL;
    }
    if (specimen instanceof MolecularSpecimen)
    {
        return Constants.MOLECULAR;
    }
    if (specimen instanceof FluidSpecimen)
    {
        return Constants.FLUID;
    }
    if (specimen instanceof TissueSpecimen)
    {
        return Constants.TISSUE;
    }
    return "";
}
/**
 * Validates a SpecimenArray domain object before insert/update: array type,
 * name, storage position, creator, capacity, specimen class/type enumerated
 * values, and the class/type compatibility of every contained specimen.
 * @param obj : the SpecimenArray to validate
 * @param dao : dao used for lookups
 * @param operation : operation
 * @return true when every check passes (failures are reported by exception)
 * @throws BizLogicException : when any validation fails or a DAO call errors
 */
@Override
protected boolean validate(Object obj, DAO dao, String operation) throws BizLogicException
{
    try
    {
        final SpecimenArray specimenArray = (SpecimenArray) obj;
        // API-search objects bypass setAllValues(), so defaults are applied
        // here to avoid null pointer exceptions further down.
        ApiSearchUtil.setSpecimenArrayDefault(specimenArray);
        // NOTE(review): this null check is unreachable — specimenArray was
        // already dereferenced just above; kept as-is.
        if (specimenArray == null)
        {
            throw this.getBizLogicException(null, "domain.object.null.err.msg",
                    "Specimen Array");
        }
        final Validator validator = new Validator();
        if (specimenArray.getActivityStatus() == null)
        {
            specimenArray.setActivityStatus(Status.ACTIVITY_STATUS_ACTIVE.toString());
        }
        String message = "";
        // Array type must be present and refer to a persisted id.
        if (specimenArray.getSpecimenArrayType() == null
                || specimenArray.getSpecimenArrayType().getId() == null
                || specimenArray.getSpecimenArrayType().getId().longValue() == -1)
        {
            message = ApplicationProperties.getValue("array.arrayType");
            throw this.getBizLogicException(null, "errors.item.required", message);
        }
        // fetch array type to check specimen class
        final Object object = dao.retrieveById(SpecimenArrayType.class.getName(), specimenArray
                .getSpecimenArrayType().getId());
        SpecimenArrayType specimenArrayType = null;
        if (object != null)
        {
            specimenArrayType = (SpecimenArrayType) object;
        }
        else
        {
            message = ApplicationProperties.getValue("array.arrayType");
            throw this.getBizLogicException(null, "errors.invalid", message);
        }
        // validate name of array
        if (Validator.isEmpty(specimenArray.getName()))
        {
            message = ApplicationProperties.getValue("array.arrayLabel");
            throw this.getBizLogicException(null, "errors.item.required", message);
        }
        // validate storage position: the parent container must be addressable
        // either by a numeric id or by name.
        if(specimenArray.getLocatedAtPosition()!=null && specimenArray.getLocatedAtPosition().getParentContainer()!=null)
        {
            if ((!validator.isNumeric(String.valueOf(specimenArray.getLocatedAtPosition()
                    .getParentContainer().getId()), 1) && Validator.isEmpty(specimenArray
                    .getLocatedAtPosition().getParentContainer().getName())))
            {
                message = ApplicationProperties.getValue("array.positionInStorageContainer");
                throw this.getBizLogicException(null, "errors.item.format", message);
            }
        }
        if (specimenArray.getLocatedAtPosition() != null
                && specimenArray.getLocatedAtPosition().getParentContainer() != null)
        {
            this.retriveScId(dao, specimenArray);
        }
        Integer xPos = null;
        Integer yPos = null;
        if (specimenArray.getLocatedAtPosition() != null)
        {
            xPos = specimenArray.getLocatedAtPosition().getPositionDimensionOne();
            yPos = specimenArray.getLocatedAtPosition().getPositionDimensionTwo();
        }
        /**
         * Following code is added to set the x and y dimension in case only
         * storage container is given and x and y positions are not given
         */
        if (xPos == null || yPos == null)
        {
            {
                // NOTE(review): if getLocatedAtPosition() is null here this
                // dereference throws an NPE — confirm callers always set a
                // position before reaching this branch.
                if (specimenArray.getLocatedAtPosition().getParentContainer() != null)
                {
                    final Position position = StorageContainerUtil.getFirstAvailablePositionInContainer(
                            specimenArray.getLocatedAtPosition().getParentContainer(), dao);
                    if (position != null)
                    {
                        final ContainerPosition locatedAtPos = specimenArray
                                .getLocatedAtPosition();
                        locatedAtPos.setPositionDimensionOne(position.getXPos());
                        locatedAtPos.setPositionDimensionTwo(position.getYPos());
                    }
                    else
                    {
                        throw this.getBizLogicException(null, "storage.specified.full", "");
                    }
                    xPos = specimenArray.getLocatedAtPosition().getPositionDimensionOne();
                    yPos = specimenArray.getLocatedAtPosition().getPositionDimensionTwo();
                }
            }
        }
        if (xPos == null || yPos == null || xPos.intValue() < 0 || yPos.intValue() < 0)
        {
            throw this.getBizLogicException(null, "errors.item.format", ApplicationProperties
                    .getValue("array.positionInStorageContainer"));
        }
        // The creating user must be present and valid.
        if (specimenArray.getCreatedBy() == null
                || specimenArray.getCreatedBy().getId() == null
                || !validator.isValidOption(String
                        .valueOf(specimenArray.getCreatedBy().getId())))
        {
            message = ApplicationProperties.getValue("array.user");
            throw this.getBizLogicException(null, "errors.item.required", message);
        }
        // validate capacity
        if (specimenArray.getCapacity() == null
                || specimenArray.getCapacity().getOneDimensionCapacity() == null
                || specimenArray.getCapacity().getTwoDimensionCapacity() == null)
        {
            throw this.getBizLogicException(null, "array.capacity.err.msg", "");
        }
        // Specimen class and every specimen type must be known enumerated values.
        final List<NameValueBean> specimenClassList = CDEManager.getCDEManager().getPermissibleValueList(
                Constants.CDE_NAME_SPECIMEN_CLASS, null);
        final String specimenClass = specimenArrayType.getSpecimenClass();
        if (!this.isValidClassName(specimenClass))
        {
            throw this.getBizLogicException(null, "protocol.class.errMsg", "");
        }
        if (!Validator.isEnumeratedValue(specimenClassList, specimenClass))
        {
            throw this.getBizLogicException(null, "protocol.class.errMsg", "");
        }
        final Collection<String> specimenTypes = specimenArrayType.getSpecimenTypeCollection();
        if (specimenTypes == null || specimenTypes.isEmpty())
        {
            throw this.getBizLogicException(null, "protocol.type.errMsg", "");
        }
        else
        {
            final Iterator<String> itr = specimenTypes.iterator();
            while (itr.hasNext())
            {
                final String specimenType = itr.next();
                if (!Validator.isEnumeratedValue(AppUtility.getSpecimenTypes(specimenClass),
                        specimenType))
                {
                    throw this.getBizLogicException(null, "protocol.type.errMsg", "");
                }
            }
        }
        /*
         * Bug no. 7810: every contained specimen must match the array's
         * specimen class and one of its permitted specimen types, otherwise an
         * incompatible specimen could be added to the array.
         */
        final Collection<SpecimenArrayContent> specimenArrayContentCollection = specimenArray
                .getSpecimenArrayContentCollection();
        if (!specimenArrayContentCollection.isEmpty())
        {
            final Iterator<SpecimenArrayContent> iterator = specimenArrayContentCollection.iterator();
            while (iterator.hasNext())
            {
                final SpecimenArrayContent tempSpecimenArrayContent = iterator
                        .next();
                final Specimen tempSpecimen = this.getSpecimen(dao, tempSpecimenArrayContent);
                if (specimenClass != null && tempSpecimen != null
                        && !specimenClass.equalsIgnoreCase(tempSpecimen.getSpecimenClass()))
                {
                    message = this.getMessage(tempSpecimenArrayContent);
                    throw this.getBizLogicException(null, "class.name.different", message);
                }
                if (specimenTypes != null && !specimenTypes.isEmpty() && tempSpecimen != null)
                {
                    if (!specimenTypes.contains(tempSpecimen.getSpecimenType()))
                    {
                        message = this.getMessage(tempSpecimenArrayContent);
                        throw this.getBizLogicException(null, "type.different", message);
                    }
                }
            }
        }
        else
        {
            throw this.getBizLogicException(null, "spec.array.null", "");
        }
        return true;
    }
    catch (final DAOException daoExp)
    {
        this.logger.error(daoExp.getMessage(), daoExp);
        daoExp.printStackTrace();
        throw this
                .getBizLogicException(daoExp, daoExp.getErrorKeyName(), daoExp.getMsgValues());
    }
}
/**
 * Populates the id of the array's parent storage container by looking it up
 * from the container's name. A name that matches no container is reported as
 * an invalid storage position.
 * @param dao : dao
 * @param specimenArray : specimenArray
 * @throws BizLogicException : BizLogicException
 */
private void retriveScId(DAO dao, SpecimenArray specimenArray) throws BizLogicException
{
    try
    {
        if (specimenArray.getLocatedAtPosition() != null
                && specimenArray.getLocatedAtPosition().getParentContainer() != null
                && specimenArray.getLocatedAtPosition().getParentContainer().getName() != null)
        {
            // Unwrap the Hibernate proxy so the concrete container is updated.
            final StorageContainer container = (StorageContainer) HibernateMetaData
                    .getProxyObjectImpl(specimenArray.getLocatedAtPosition()
                            .getParentContainer());
            // Fetch only the container id matching the given name.
            final String sourceObjectName = StorageContainer.class.getName();
            final QueryWhereClause whereClause = new QueryWhereClause(sourceObjectName);
            whereClause.addCondition(new EqualClause("name", specimenArray
                    .getLocatedAtPosition().getParentContainer().getName()));
            final List idList = dao.retrieve(sourceObjectName, new String[]{"id"},
                    whereClause);
            if (idList.isEmpty())
            {
                final String message = ApplicationProperties
                        .getValue("array.positionInStorageContainer");
                throw this.getBizLogicException(null, "errors.invalid", message);
            }
            container.setId((Long) idList.get(0));
            specimenArray.getLocatedAtPosition().setParentContainer(container);
        }
    }
    catch (final DAOException daoExp)
    {
        this.logger.error(daoExp.getMessage(), daoExp);
        daoExp.printStackTrace();
        throw this.getBizLogicException(daoExp, daoExp.getErrorKeyName(), daoExp.getMsgValues());
    }
}
/**
 * Checks whether the given specimen class name is one of the supported
 * specimen classes (Cell, Molecular, Fluid or Tissue).
 * @param className : className the specimen class name to validate; may be null
 * @return boolean true when className matches a supported class, false otherwise
 *         (including when className is null)
 */
private boolean isValidClassName(String className)
{
    // Bug fix: the original condition was
    //   (className != null && isCell) || isMolecular || isFluid || isTissue
    // so the null check only guarded the first comparison and a null
    // className threw a NullPointerException instead of returning false.
    if (className == null)
    {
        return false;
    }
    return className.equalsIgnoreCase(Constants.CELL)
            || className.equalsIgnoreCase(Constants.MOLECULAR)
            || className.equalsIgnoreCase(Constants.FLUID)
            || className.equalsIgnoreCase(Constants.TISSUE);
}
/**
 * get Unique index to be appended to Name.
 * Delegates to {@code AppUtility.getNextUniqueNo} over the CATISSUE_CONTAINER
 * table's max(IDENTIFIER) — presumably returning the next free number; the
 * exact semantics are defined by AppUtility (verify there).
 * @return unique no. to be appended to array name
 * @throws BizLogicException : BizLogicException when the underlying query fails
 */
public int getUniqueIndexForName() throws BizLogicException
{
try
{
final String sourceObjectName = "CATISSUE_CONTAINER";
final String[] selectColumnName = {"max(IDENTIFIER) as MAX_IDENTIFIER"};
return AppUtility.getNextUniqueNo(sourceObjectName, selectColumnName);
}
catch (final ApplicationException exp)
{
this.logger.error(exp.getMessage(), exp);
exp.printStackTrace();
// Wrap and rethrow so callers only deal with BizLogicException.
throw this.getBizLogicException(exp, exp.getErrorKeyName(), exp.getMsgValues());
}
}
/**
 * Builds a human-readable identifier ("label x" or "barcode y") for the
 * specimen held by the given array content, for use in error messages.
 * Falls back to a single space when the specimen is absent or has neither
 * a label nor a barcode.
 * @param tempSpecimenArrayContent : tempSpecimenArrayContent
 * @return the message to be displayed when exception occurs
 */
public String getMessage(SpecimenArrayContent tempSpecimenArrayContent)
{
    String msg = " ";
    final Specimen specimen = tempSpecimenArrayContent.getSpecimen();
    if (specimen == null)
    {
        return msg;
    }
    final String label = specimen.getLabel();
    if (label != null && !label.trim().equals(""))
    {
        // Prefer the label when it carries any non-blank text.
        msg = "label " + label;
    }
    else
    {
        final String barcode = specimen.getBarcode();
        if (barcode != null && !barcode.trim().equals(""))
        {
            msg = "barcode " + barcode;
        }
    }
    return msg;
}
/**
 * Loads the {@link NewSpecimenArrayOrderItem} with the given identifier
 * inside a short-lived DAO session.
 * @param orderItemId : orderItemId identifier of the order item to load
 * @return NewSpecimenArrayOrderItem the loaded order item
 * @throws BizLogicException : BizLogicException when retrieval fails
 */
public NewSpecimenArrayOrderItem getNewSpecimenArrayOrderItem(Long orderItemId)
        throws BizLogicException
{
    DAO dao = null;
    NewSpecimenArrayOrderItem newSpecimenArrayOrderItem = null;
    try
    {
        dao = this.openDAOSession(null);
        newSpecimenArrayOrderItem = (NewSpecimenArrayOrderItem) dao.retrieveById(
                NewSpecimenArrayOrderItem.class.getName(), orderItemId);
    }
    catch (final DAOException daoExp)
    {
        this.logger.error(daoExp.getMessage(), daoExp);
        daoExp.printStackTrace();
        // Bug fix: the exception used to be swallowed here, silently returning
        // null and deferring the failure to a later NullPointerException in the
        // caller. Rethrow as BizLogicException, consistent with the declared
        // throws clause and every sibling method in this class.
        throw this
                .getBizLogicException(daoExp, daoExp.getErrorKeyName(), daoExp.getMsgValues());
    }
    finally
    {
        // Always release the DAO session, on success and failure alike.
        this.closeDAOSession(dao);
    }
    return newSpecimenArrayOrderItem;
}
// END
/**
 * Called from DefaultBizLogic to get ObjectId for authorization check
 * (non-Javadoc).
 * Builds a protection element name of the form
 * "CollectionProtocol_&lt;cpId1&gt;_&lt;cpId2&gt;..." from the collection
 * protocols of every specimen contained in the array.
 * @param dao : dao used to reload the array / specimens when needed
 * @param domainObject : domainObject expected to be a {@link SpecimenArray}
 * @return String the protection element name; any exception is only logged
 *         and whatever was appended so far is returned
 * @see edu.wustl.common.bizlogic.DefaultBizLogic#getObjectId(edu.wustl.common.dao.DAO,
 *      java.lang.Object)
 */
@Override
public String getObjectId(DAO dao, Object domainObject)
{
SpecimenArray specimenArray = null;
Specimen specimen = null;
final StringBuffer stringBuffer = new StringBuffer();
try
{
stringBuffer.append(Constants.COLLECTION_PROTOCOL_CLASS_NAME);
Collection<SpecimenArrayContent> specimenArrayContentCollection = null;
if (domainObject instanceof SpecimenArray)
{
specimenArray = (SpecimenArray) domainObject;
}
// NOTE(review): if domainObject is not a SpecimenArray, specimenArray stays
// null and the call below throws a NullPointerException that is swallowed by
// the catch block — confirm this fallback is intended.
if (specimenArray.getSpecimenArrayContentCollection().isEmpty())
{
// Contents not populated on this instance: reload the array from the DB.
specimenArray = (SpecimenArray) dao.retrieveById(SpecimenArray.class.getName(),
specimenArray.getId());
specimenArrayContentCollection = specimenArray.getSpecimenArrayContentCollection();
}
else
{
specimenArrayContentCollection = specimenArray.getSpecimenArrayContentCollection();
}
// Append the collection protocol id of every contained specimen.
for (final SpecimenArrayContent specimenArrayContent : specimenArrayContentCollection)
{
specimen = this.getSpecimen(dao, specimenArrayContent);
if (specimen != null)
{
final SpecimenCollectionGroup scg = specimen.getSpecimenCollectionGroup();
final CollectionProtocolRegistration cpr = scg
.getCollectionProtocolRegistration();
stringBuffer.append(Constants.UNDERSCORE).append(cpr.getCollectionProtocol().getId());
}
}
}
catch (final Exception e)
{
this.logger.error(e.getMessage(), e);
e.printStackTrace();
}
return stringBuffer.toString();
}
/**
 * To get PrivilegeName for authorization check from
 * 'PermissionMapDetails.xml' (non-Javadoc).
 * @param domainObject : domainObject (unused — the key is constant)
 * @return String the ADD_EDIT_SPECIMEN_ARRAY privilege key
 * @see edu.wustl.common.bizlogic.DefaultBizLogic#getPrivilegeName(java.lang.Object)
 */
@Override
protected String getPrivilegeKey(Object domainObject)
{
    // The privilege key does not depend on the domain object instance.
    final String privilegeKey =
            edu.wustl.catissuecore.util.global.Constants.ADD_EDIT_SPECIMEN_ARRAY;
    return privilegeKey;
}
/**
 * Convenience overload: performs the authorization check with a fresh,
 * empty {@link SpecimenArrayUIObject}.
 * @param dao : dao
 * @param domainObject : domainObject
 * @param sessionDataBean : sessionDataBean
 * @return boolean true when the user is authorized
 * @throws BizLogicException : BizLogicException
 */
@Override
public boolean isAuthorized(DAO dao, Object domainObject, SessionDataBean sessionDataBean)
        throws BizLogicException
{
    // Delegate straight to the four-argument overload.
    return isAuthorized(dao, domainObject, sessionDataBean, new SpecimenArrayUIObject());
}
/**
 * (non-Javadoc)
 * Authorization check for a specimen array: admins are always authorized;
 * otherwise the user must (a) have site access for every physically stored
 * specimen in the array and (b) hold the required privilege on every
 * collection protocol encoded in the protection element name, or on the
 * "all current and future" protection group.
 * @param dao : dao
 * @param domainObject : domainObject expected to be a {@link SpecimenArray}
 * @param sessionDataBean : sessionDataBean the logged-in user's session data
 * @param uiObject : uiObject extra UI state (not read by this implementation)
 * @throws BizLogicException : BizLogicException when the user is not authorized
 *         or a security/application error occurs
 * @return boolean
 * @see edu.wustl.common.bizlogic.DefaultBizLogic#isAuthorized(edu.wustl.common.dao.DAO,
 * java.lang.Object, edu.wustl.common.beans.SessionDataBean)
 */
@Override
public boolean isAuthorized(DAO dao, Object domainObject, SessionDataBean sessionDataBean,Object uiObject)
throws BizLogicException
{
boolean isAuthorized = false;
String protectionElementName = null;
SpecimenArray specimenArray = null;
Specimen specimen = null;
SpecimenPosition specimenPosition = null;
try
{
// Administrators bypass all further checks.
if (sessionDataBean != null && sessionDataBean.isAdmin())
{
return true;
}
// Get the base object id against which authorization will take
// place
protectionElementName = this.getObjectId(dao, domainObject);
Site site = null;
StorageContainer storContainer = null;
// Handle for SERIAL CHECKS, whether user has access to source site
// or not
if (domainObject instanceof SpecimenArray)
{
specimenArray = (SpecimenArray) domainObject;
}
final Collection<SpecimenArrayContent> specimenArrayContentCollection = specimenArray
.getSpecimenArrayContentCollection();
for (final SpecimenArrayContent specimenArrayContent : specimenArrayContentCollection)
{
try
{
specimen = this.getSpecimen(dao, specimenArrayContent);
if (specimen == null)
{
continue;
}
if (specimen.getSpecimenPosition() != null)
{
storContainer = specimen.getSpecimenPosition().getStorageContainer();
}
// Reload the container when its site association is not populated.
if (specimen.getSpecimenPosition() != null
&& specimen.getSpecimenPosition().getStorageContainer().getSite() == null)
{
storContainer = (StorageContainer) dao.retrieveById(StorageContainer.class.getName(),
specimen.getSpecimenPosition().getStorageContainer().getId());
}
specimenPosition = specimen.getSpecimenPosition();
if (specimenPosition != null) // Specimen is NOT Virtually
// Located
{
site = storContainer.getSite();
final Set<Long> siteIdSet = new UserBizLogic()
.getRelatedSiteIds(sessionDataBean.getUserId());
if (!siteIdSet.contains(site.getId()))
{
// bug 11611 and 11659
throw AppUtility.getUserNotAuthorizedException(Constants.Association,
site.getObjectId(), domainObject.getClass().getSimpleName());
}
}
}
catch (final DAOException e)
{
// NOTE(review): DAO failures while checking a single specimen are only
// logged and the specimen is skipped — confirm this is intended.
this.logger.error(e.getMessage(), e);
e.printStackTrace();
}
}
// Get the required privilege name which we would like to check for
// the logged in user.
final String privilegeName = this.getPrivilegeName(domainObject);
final PrivilegeCache privilegeCache = PrivilegeManager.getInstance().getPrivilegeCache(
sessionDataBean.getUserName());
// Checking whether the logged in user has the required privilege on
// the given protection element
// The protection element name is "<base>_<cpId1>_<cpId2>..."; the user
// must hold the privilege on every "<base>_<cpId>" element.
final String[] prArray = protectionElementName.split("_");
final String baseObjectId = prArray[0];
String objId = "";
for (int i = 1; i < prArray.length; i++)
{
objId = baseObjectId + "_" + prArray[i];
isAuthorized = privilegeCache.hasPrivilege(objId, privilegeName);
if (!isAuthorized)
{
break;
}
}
if (isAuthorized)
{
return isAuthorized;
}
else
// Check for ALL CURRENT & FUTURE CASE
{
isAuthorized = AppUtility.checkOnCurrentAndFuture(sessionDataBean,
protectionElementName, privilegeName);
}
if (!isAuthorized)
{
// throw Utility.getUserNotAuthorizedException(privilegeName,
// protectionElementName);
throw AppUtility.getUserNotAuthorizedException(privilegeName,
protectionElementName, domainObject.getClass().getSimpleName());
}
}
catch (final SMException e)
{
this.logger.error(e.getMessage(), e);
e.printStackTrace();
throw this.getBizLogicException(e, e.getErrorKeyName(), e.getMsgValues());
}
catch (final ApplicationException e)
{
this.logger.error(e.getMessage(), e);
e.printStackTrace();
throw new BizLogicException(e.getErrorKey(), e, e.getMsgValues());
}
return isAuthorized;
}
/**
 * To check weather the Container to display can holds the given
 * specimenArrayTypeId.
 * @param specimenArrayTypeId
 *            The Specimen Array Type Id.
 * @param storageContainer
 *            The StorageContainer reference to be displayed on the page.
 * @return true if the given container can hold the specimenArrayType.
 * @throws BizLogicException throws BizLogicException
 */
public boolean canHoldSpecimenArrayType(int specimenArrayTypeId,
        StorageContainer storageContainer) throws BizLogicException
{
    // The container qualifies when it holds either the catch-all
    // "all specimen array types" id or the requested type id itself.
    final Collection specimenArrayTypes = (Collection) this.retrieveAttribute(
            StorageContainer.class.getName(), storageContainer.getId(),
            "elements(holdsSpecimenArrayTypeCollection)");
    for (final Object element : specimenArrayTypes)
    {
        final SpecimenArrayType arrayType = (SpecimenArrayType) element;
        final long arrayTypeId = arrayType.getId().longValue();
        if (arrayTypeId == Constants.ALL_SPECIMEN_ARRAY_TYPE_ID
                || arrayTypeId == specimenArrayTypeId)
        {
            return true;
        }
    }
    return false;
}
}
| |
package twg2.cli;
import java.io.BufferedReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/** A set of {@link ParameterData} instances to parse against a give set of input values
 *
 * @param <T> the parameter's name type
 *
 * @author TeamworkGuy2
 * @since 2014-11-22
 */
public final class ParameterSet<T extends CharSequence> {
	/** maximum number of times {@link #parseInteractive(CharSequence[], int, BufferedReader, Appendable, String)}
	 * re-prompts for a parameter after invalid input before giving up.
	 * Fix: declared {@code final} — it is a constant and was previously mutable. */
	private static final int MAX_PARSE_ATTEMPTS = 10;

	/** a unique set of all the parameters in this parameter set */
	private List<ParameterData<T, ? extends Object>> parameters;
	/** a map of parameter names and aliases to their parameters, the same parameter may appear multiple times associated with different names/aliases */
	private Map<T, ParameterData<T, ? extends Object>> parameterNameMap;
	/** the output stream to print the help message to when it is parsed by {@link ParameterSet#parse()} */
	private Appendable outStream;


	/** Create a parameter set from a list of parameters, without a generated help parameter */
	public ParameterSet(List<? extends ParameterData<T, ? extends Object>> parameters) {
		this(parameters, false, null, null, (String[])null);
	}


	/** Create a parameter set from a list of parameters
	 * @param parameters the list of parameters to store in this parameter set
	 * @param buildHelpParam true to also generate a help parameter
	 * @param helpParamName the name of the generated help parameter (when {@code buildHelpParam} is true)
	 * @param helpMsg the help message printed when the generated help parameter is triggered
	 * @param helpParamAliases additional aliases for the generated help parameter
	 */
	public ParameterSet(List<? extends ParameterData<T, ? extends Object>> parameters,
			boolean buildHelpParam, String helpParamName, String helpMsg, String... helpParamAliases) {
		this.parameters = new ArrayList<>();
		this.parameters.addAll(parameters);
		if(buildHelpParam) {
			this.parameters.add(createHelpParameter(helpParamName, helpMsg, helpParamAliases));
		}
		// index every parameter under its primary name and each of its aliases
		this.parameterNameMap = new HashMap<>();
		for(ParameterData<T, ?> parameter : this.parameters) {
			this.parameterNameMap.put(parameter.getPrimaryName(), parameter);
			for(T name : parameter.getAliases()) {
				this.parameterNameMap.put(name, parameter);
			}
		}
	}


	/** Parse an array of inputs and invoke {@link ParameterData#parse(Object[], int, int)} on
	 * the parameters in this parameter set that match any of the inputs.
	 * @param inputs the array of inputs to parse
	 * @param off the offset into {@code inputs} at which to start parsing
	 * @param output the output stream to write information/help messages to
	 * @return a map of all of this parameter set's parameters mapped to true if the
	 * parameter was found in the {@code inputs}, false if a parameter was not in the {@code inputs}
	 */
	public Map<ParameterData<T, Object>, Boolean> parse(T[] inputs, int off, Appendable output) {
		outStream = output;
		ParameterData<T, Object> param = null;
		int paramStart = -1;
		// find the first input that names a known parameter
		for(int i = off, size = inputs.length; i < size; i++) {
			@SuppressWarnings("unchecked")
			ParameterData<T, Object> paramTemp = (ParameterData<T, Object>)parameterNameMap.get(inputs[i]);
			if(paramTemp != null) {
				param = paramTemp;
				paramStart = i;
				// the parameter name is the very last input: parse it with no arguments.
				// NOTE(review): in this case the parameter is parsed here but never
				// marked true in the completion map below — confirm whether it should
				// also be marked completed.
				if(paramStart+1 >= inputs.length) {
					param.parse(inputs, paramStart, 1);
				}
				break;
			}
		}

		// initialize every parameter as "not yet seen"
		Map<ParameterData<T, Object>, Boolean> parametersCompleted = new LinkedHashMap<>();
		for(ParameterData<T, ? extends Object> parameter : parameters) {
			@SuppressWarnings("unchecked")
			ParameterData<T, Object> prmtr = (ParameterData<T, Object>)parameter;
			parametersCompleted.put(prmtr, false);
		}

		// scan forward: each time another known parameter name (or the end of the
		// inputs) is reached, parse the previous parameter with the inputs between
		// its name and the next parameter name
		int nextParamStart = -1;
		for(int i = paramStart + 1, size = inputs.length; i < size; i++) {
			@SuppressWarnings("unchecked")
			ParameterData<T, Object> paramTemp = (ParameterData<T, Object>)parameterNameMap.get(inputs[i]);
			if(paramTemp != null || (i == size - 1 && param != null)) {
				nextParamStart = i + (paramTemp == null && i == size - 1 ? 1 : 0);
				param.parse(inputs, paramStart, nextParamStart - paramStart);
				parametersCompleted.put(param, true);
				paramStart = nextParamStart;
				param = paramTemp;
				// a new parameter name is the last input: parse it with no arguments
				if(param != null && i == size - 1) {
					param.parse(inputs, paramStart, 1);
					parametersCompleted.put(param, true);
				}
			}
		}
		return parametersCompleted;
	}


	/** Parse an array of inputs and invoke {@link ParameterSet#parseInteractive(CharSequence[], int, BufferedReader, Appendable, String)} on
	 * the parameters in this parameter set that match any of the inputs.
	 * @see #parseInteractive(CharSequence[], int, BufferedReader, Appendable, String)
	 */
	public void parseInteractive(T[] inputs, BufferedReader input, Appendable output,
			String paramHelpIdentifier) {
		parseInteractive(inputs, 0, input, output, paramHelpIdentifier);
	}


	/** Parse an array of inputs and invoke {@link ParameterSet#getParameterInteractive(ParameterData, BufferedReader, Appendable, String)} on
	 * the parameters in this parameter set that match any of the inputs.
	 * This differs from {@link #parse(CharSequence[], int, Appendable)} because missing parameters
	 * are requested using the specified output stream and parsed from the specified input stream
	 * @param inputs the array of inputs to parse
	 * @param off the offset into {@code inputs} at which to start parsing
	 * @param input the input stream to read user input from
	 * @param output the output stream to print user information and prompts to
	 * @param paramHelpIdentifier the name of the command that causes help information
	 * to be printed for a parameter
	 */
	public void parseInteractive(T[] inputs, int off, BufferedReader input, Appendable output,
			String paramHelpIdentifier) {
		Map<ParameterData<T, Object>, Boolean> parametersCompleted = parse(inputs, off, output);
		outStream = output;
		// prompt for every required parameter that was not supplied on the command line
		for(Map.Entry<ParameterData<T, Object>, Boolean> paramComplete : parametersCompleted.entrySet()) {
			if(paramComplete.getValue() == false && paramComplete.getKey().isRequired()) {
				@SuppressWarnings("unchecked")
				ParameterData<String, ?> paramData = (ParameterData<String, ?>) paramComplete.getKey();
				ParameterParserResult parseRes = getParameterInteractive(paramData, input, output, paramHelpIdentifier);
				int i = 0;
				// re-prompt on invalid input, up to MAX_PARSE_ATTEMPTS times
				while(parseRes.isError() && parseRes.getParseError().getParseErrorType() == ParameterParserExceptionType.INVALID_PARSED_INPUT && i < ParameterSet.MAX_PARSE_ATTEMPTS) {
					try {
						output.append(parseRes.getParseError().getMessage())
							.append('\n');
					} catch (IOException e) {
						throw new RuntimeException("error writing parameter info and request to output stream", e);
					}
					parseRes = getParameterInteractive(paramData, input, output, paramHelpIdentifier);
					i++;
				}
				if(parseRes.isError()) {
					throw new RuntimeException(parseRes.getParseError().getMessage(), parseRes.getParseError().getCause());
				}
			}
		}
		outStream = null;
	}


	/** Create a help parameter which prints this parameter set's help information to {@code outStream}
	 * when its setter method is called.
	 * @param helpParamName the name of the help parameter
	 * @param helpMsg the help message to print
	 * @param helpParamAliases additional alias names of the help parameter
	 * @return the created help parameter
	 */
	private ParameterData<T, ?> createHelpParameter(String helpParamName, String helpMsg,
			String... helpParamAliases) {
		@SuppressWarnings("unchecked")
		ParameterData<T, Boolean> helpParam = (ParameterData<T, Boolean>)ParameterBuilder.newFlag()
			.setNameAndAliases(helpParamName, helpParamAliases)
			.setSetter((flag) -> {
				if(flag == false) {
					return;
				}
				try {
					// outStream is set by parse()/parseInteractive() before parsing starts
					outStream.append(helpMsg);
				} catch(Exception e) {
					throw new RuntimeException("writing parameter help message to output stream", e);
				}
			})
			.setHelpMessage("enter '" + helpParamName + "' to receive information about this program: ")
			.build();
		return helpParam;
	}


	/** Prompt for and parse a single parameter from the interactive input stream.
	 * The help identifier may be entered repeatedly to print the parameter's info
	 * before the real value is supplied. */
	private static final ParameterParserResult getParameterInteractive(ParameterData<String, ?> param,
			BufferedReader input, Appendable output, String paramHelpIdentifier) {
		try {
			output.append(param.getRequestParameterMessage());
		} catch (IOException e) {
			throw new RuntimeException("error writing parameter request to output stream", e);
		}
		ParameterParserResult parseRes = null;
		try {
			String line = input.readLine();
			while(paramHelpIdentifier != null && paramHelpIdentifier.equals(line)) {
				try {
					output.append(parameterInfo(param))
						.append('\n')
						.append(param.getRequestParameterMessage());
				} catch (IOException e) {
					throw new RuntimeException("error writing parameter info and request to output stream", e);
				}
				line = input.readLine();
			}
			List<String> inputs = new ArrayList<String>(2);
			inputs.add(param.getPrimaryName());
			// if the parsing is interactive (one line of input per parameter, use the entire line for non-array parameters)
			// this saves users having to quote every string parameter they enter
			if(param.isParameterArrayType()) {
				inputs = ParameterParser.parseParameters(line, '"', true, '\\', inputs);
			}
			else {
				inputs.add(line);
			}
			String[] inputsAry = inputs.toArray(new String[inputs.size()]);
			parseRes = param.parse(inputsAry, 0, inputs.size());
		} catch (IOException e) {
			throw new RuntimeException("error reading user parameter from input stream", e);
		}
		return parseRes;
	}


	/** Create a new parameter set with the given parameters and generate a help parameter
	 * @param parameters the list of parameters
	 * @param generateHelpParam true to generate a help parameter
	 * @param helpParamName the name of the help parameter
	 * @param helpParamAliases aliases for the name of the help parameter
	 * @return a {@link ParameterSet} that contains {@code parameters} and a help parameter containing
	 * information about all of the parameters
	 */
	@SafeVarargs
	public static final ParameterSet<String> newParameterSet(List<? extends ParameterData<String, ? extends Object>> parameters,
			boolean generateHelpParam, String helpParamName, String... helpParamAliases) {
		List<ParameterData<String, ? extends Object>> paramsCopy = new ArrayList<>(parameters);
		ParameterSet<String> paramSet = null;
		if(generateHelpParam == true && helpParamName != null) {
			// build a help message listing the help parameter plus every other parameter
			List<String> aliases = helpParamAliases != null ? Arrays.asList(helpParamAliases) : null;
			StringBuilder sb = new StringBuilder("\t'" + helpParamName + "'" + orParamAliasesToString(",", aliases) +
					" - displays this help message\n");
			for(ParameterData<String, ?> param : parameters) {
				sb.append("\t" + parameterInfo(param) + "\n");
			}
			sb.append("\n");
			final String helpMsg = sb.toString();
			paramSet = new ParameterSet<String>(parameters, true, helpParamName, helpMsg, helpParamAliases);
		}
		else {
			paramSet = new ParameterSet<>(paramsCopy);
		}
		return paramSet;
	}


	/** One help line for a parameter: its type signature, required flag, and help message */
	private static final String parameterInfo(ParameterData<String, ?> param) {
		return parameterTypeToString(param) + " " + (param.isRequired() ? "(required) - " : "- ") +
				param.getHelpMessage();
	}


	/** Converts a list of aliases to a string in the format:
	 * {@code "prefix 'alias_1', 'alias_2', ..., 'alias_n'"}
	 * @param prefix the prefix to add to the beginning of the generated string
	 * @param aliases the list of aliases
	 * @return the prefix and list of aliases or {@code ""} if {@code aliases} is null or empty
	 */
	private static final String orParamAliasesToString(String prefix, List<String> aliases) {
		if(aliases != null && !aliases.isEmpty()) {
			StringBuilder sb = new StringBuilder(prefix != null ? prefix + " " : "");
			int size = aliases.size();
			for(int i = 0, count = size - 1; i < count; i++) {
				sb.append("'" + aliases.get(i) + "', ");
			}
			sb.append("'" + aliases.get(size - 1) + "'");
			return sb.toString();
		}
		return "";
	}


	/** Create a string representation of a parameter in the format:
	 * {@code 'parameter_name type', 'alias_1', ..., 'alias_n' [#IF_ENUM (one of: [enum_const_1, ..., enum_const_n])]}
	 * @param param the parameter to generate a string representation of
	 * @return the string representation of the parameter
	 */
	private static final <T> String parameterTypeToString(ParameterData<String, T> param) {
		String typeName = param.getParameterType() != ParameterType.FLAG ?
				param.getParameterType().name().toLowerCase() : "[false]";
		boolean isArray = param.isParameterArrayType();
		boolean isEnum = param.getParameterType() == ParameterType.ENUM;
		return "'" + param.getPrimaryName() + " " + (isArray ? typeName + " [" + typeName + " ...]" : typeName) +
				"'" + orParamAliasesToString(",", param.getAliases()) +
				(isEnum ? " (one of: " + param.getEnumMap().keySet().toString() + ")" : "");
	}
}
| |
/*
* Created: Apr 15, 2013
*/
package org.xerial.snappy;
import static java.lang.Math.min;
import static org.xerial.snappy.SnappyFramed.COMPRESSED_DATA_FLAG;
import static org.xerial.snappy.SnappyFramed.HEADER_BYTES;
import static org.xerial.snappy.SnappyFramed.STREAM_IDENTIFIER_FLAG;
import static org.xerial.snappy.SnappyFramed.UNCOMPRESSED_DATA_FLAG;
import static org.xerial.snappy.SnappyFramed.readBytes;
import static org.xerial.snappy.SnappyFramedOutputStream.MAX_BLOCK_SIZE;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.util.Arrays;
import java.util.zip.Checksum;
import org.xerial.snappy.pool.BufferPool;
import org.xerial.snappy.pool.DefaultPoolFactory;
/**
* Implements the <a
* href="http://snappy.googlecode.com/svn/trunk/framing_format.txt"
* >x-snappy-framed</a> as an {@link InputStream} and
* {@link ReadableByteChannel}.
*
* @author Brett Okken
* @since 1.1.0
*/
public final class SnappyFramedInputStream
extends InputStream
implements
ReadableByteChannel
{
// checksum instance used to verify frame CRCs when verifyChecksums is enabled
private final Checksum crc32 = SnappyFramed.getCRC32C();
// the underlying source channel
private final ReadableByteChannel rbc;
// scratch buffer for reading the 4-byte frame header
private final ByteBuffer frameHeader;
// whether frame checksums are verified against crc32
private final boolean verifyChecksums;
// pool that supplies (and reclaims) the direct and array buffers below
private final BufferPool bufferPool;

/**
 * A single frame read from the underlying {@link InputStream}.
 */
private ByteBuffer input;

/**
 * The decompressed data from {@link #input}.
 */
private ByteBuffer uncompressedDirect;

/**
 * Indicates if this instance has been closed.
 */
private boolean closed;

/**
 * Indicates if we have reached the EOF on {@link #input}.
 */
private boolean eof;

/**
 * The exclusive end index of valid decompressed data in {@link #buffer};
 * the readable range is {@code [position, valid)}.
 */
private int valid;

/**
 * The next position to read from {@link #buffer}.
 */
private int position;

/**
 * Buffer contains a copy of the uncompressed data for the block.
 */
private byte[] buffer;
/**
 * Creates a Snappy input stream to read data from the specified underlying
 * input stream. Checksum verification is enabled.
 * <p>
 * Uses {@link DefaultPoolFactory} to obtain {@link BufferPool} for buffers.
 * </p>
 *
 * @param in the underlying input stream. Must not be {@code null}.
 * @throws IOException if the stream header cannot be read or is invalid
 */
public SnappyFramedInputStream(InputStream in)
        throws IOException
{
    this(in, true, DefaultPoolFactory.getDefaultPool());
}
/**
 * Creates a Snappy input stream to read data from the specified underlying
 * input stream. Checksum verification is enabled.
 *
 * @param in the underlying input stream. Must not be {@code null}.
 * @param bufferPool Used to obtain buffer instances. Must not be {@code null}.
 * @throws IOException if the stream header cannot be read or is invalid
 */
public SnappyFramedInputStream(InputStream in, BufferPool bufferPool)
        throws IOException
{
    this(in, true, bufferPool);
}
/**
 * Creates a Snappy input stream to read data from the specified underlying
 * input stream.
 * <p>
 * Uses {@link DefaultPoolFactory} to obtain {@link BufferPool} for buffers.
 * </p>
 *
 * @param in the underlying input stream. Must not be {@code null}.
 * @param verifyChecksums if true, checksums in input stream will be verified
 * @throws IOException if the stream header cannot be read or is invalid
 */
public SnappyFramedInputStream(InputStream in, boolean verifyChecksums)
        throws IOException
{
    this(in, verifyChecksums, DefaultPoolFactory.getDefaultPool());
}
/**
 * Creates a Snappy input stream to read data from the specified underlying
 * input stream. The stream is wrapped in a {@link ReadableByteChannel}.
 *
 * @param in the underlying input stream. Must not be {@code null}.
 * @param verifyChecksums if true, checksums in input stream will be verified
 * @param bufferPool Used to obtain buffer instances. Must not be {@code null}.
 * @throws IOException if the stream header cannot be read or is invalid
 */
public SnappyFramedInputStream(InputStream in, boolean verifyChecksums,
        BufferPool bufferPool)
        throws IOException
{
    this(Channels.newChannel(in), verifyChecksums, bufferPool);
}
/**
 * Creates a Snappy input stream to read data from the specified underlying
 * channel. Checksum verification is enabled.
 *
 * @param in the underlying readable channel. Must not be {@code null}.
 * @param bufferPool Used to obtain buffer instances. Must not be {@code null}.
 * @throws IOException if the stream header cannot be read or is invalid
 */
public SnappyFramedInputStream(ReadableByteChannel in, BufferPool bufferPool)
        throws IOException
{
    this(in, true, bufferPool);
}
/**
 * Creates a Snappy input stream to read data from the specified underlying
 * channel. Checksum verification is enabled.
 * <p>
 * Uses {@link DefaultPoolFactory} to obtain {@link BufferPool} for buffers.
 * </p>
 *
 * @param in the underlying readable channel. Must not be {@code null}.
 * @throws IOException if the stream header cannot be read or is invalid
 */
public SnappyFramedInputStream(ReadableByteChannel in)
        throws IOException
{
    this(in, true);
}
/**
 * Creates a Snappy input stream to read data from the specified underlying
 * channel.
 * <p>
 * Uses {@link DefaultPoolFactory} to obtain {@link BufferPool} for buffers.
 * </p>
 *
 * @param in the underlying readable channel. Must not be {@code null}.
 * @param verifyChecksums if true, checksums in input stream will be verified
 * @throws IOException if the stream header cannot be read or is invalid
 */
public SnappyFramedInputStream(ReadableByteChannel in,
        boolean verifyChecksums)
        throws IOException
{
    this(in, verifyChecksums, DefaultPoolFactory.getDefaultPool());
}
/**
 * Creates a Snappy input stream to read data from the specified underlying
 * channel. This is the primary constructor all other constructors delegate to;
 * it validates arguments, allocates the working buffers and consumes/validates
 * the stream identifier header.
 *
 * @param in the underlying readable channel. Must not be {@code null}.
 * @param verifyChecksums if true, checksums in input stream will be verified
 * @param bufferPool Used to obtain buffer instances. Must not be {@code null}.
 * @throws EOFException if the channel ends before a full stream header is read
 * @throws IOException if the stream header does not match {@code HEADER_BYTES}
 */
public SnappyFramedInputStream(ReadableByteChannel in,
        boolean verifyChecksums, BufferPool bufferPool)
        throws IOException
{
    if (in == null) {
        throw new NullPointerException("in is null");
    }

    if (bufferPool == null) {
        throw new NullPointerException("bufferPool is null");
    }

    this.bufferPool = bufferPool;
    this.rbc = in;
    this.verifyChecksums = verifyChecksums;

    // size the working buffers for the largest possible frame
    // (MAX_BLOCK_SIZE + 5 — presumably per-frame overhead; see the framing spec)
    allocateBuffersBasedOnSize(MAX_BLOCK_SIZE + 5);
    this.frameHeader = ByteBuffer.allocate(4);

    // stream must begin with stream header
    final byte[] expectedHeader = HEADER_BYTES;
    final byte[] actualHeader = new byte[expectedHeader.length];
    final ByteBuffer actualBuffer = ByteBuffer.wrap(actualHeader);

    final int read = SnappyFramed.readBytes(in, actualBuffer);
    if (read < expectedHeader.length) {
        throw new EOFException(
                "encountered EOF while reading stream header");
    }
    if (!Arrays.equals(expectedHeader, actualHeader)) {
        throw new IOException("invalid stream header");
    }
}
/**
 * (Re)allocates the three working buffers sized for frames of the given size,
 * first releasing any previously acquired buffers back to {@link #bufferPool}.
 *
 * @param size the maximum frame size the buffers must accommodate
 */
private void allocateBuffersBasedOnSize(int size)
{
    // return previously acquired buffers to the pool before resizing
    if (input != null) {
        bufferPool.releaseDirect(input);
    }

    if (uncompressedDirect != null) {
        bufferPool.releaseDirect(uncompressedDirect);
    }

    if (buffer != null) {
        bufferPool.releaseArray(buffer);
    }

    input = bufferPool.allocateDirect(size);
    // NOTE(review): maxCompressedLength(size) is used to size the buffers that
    // hold *uncompressed* data — presumably as a conservative upper bound; confirm.
    final int maxCompressedLength = Snappy.maxCompressedLength(size);
    uncompressedDirect = bufferPool.allocateDirect(maxCompressedLength);
    buffer = bufferPool.allocateArray(maxCompressedLength);
}
/**
 * Reads a single decompressed byte, or returns {@code -1} at end of stream.
 * NOTE(review): a closed stream also reports EOF ({@code -1}) here, while the
 * other read methods throw {@link java.nio.channels.ClosedChannelException} —
 * confirm this asymmetry is intended.
 */
@Override
public int read()
        throws IOException
{
    // short-circuit: closed streams never touch ensureBuffer()
    if (closed || !ensureBuffer()) {
        return -1;
    }
    return buffer[position++] & 0xFF;
}
/**
 * Reads up to {@code length} decompressed bytes into {@code output} starting
 * at {@code offset}.
 *
 * @param output destination array; must not be {@code null}
 * @param offset start index in {@code output}
 * @param length maximum number of bytes to read
 * @return the number of bytes read, {@code 0} when {@code length == 0},
 *         or {@code -1} at end of stream
 * @throws IllegalArgumentException if {@code output} is null or the
 *         offset/length pair is out of bounds
 * @throws ClosedChannelException if this stream has been closed
 * @throws IOException on read/decompression failure
 */
@Override
public int read(byte[] output, int offset, int length)
        throws IOException
{
    if (output == null) {
        throw new IllegalArgumentException("output is null");
    }

    // Bug fix: the original check used "offset + length > output.length",
    // which can overflow int for large arguments and wrongly pass
    // validation. Rearranged to the overflow-safe form (offset < 0 is
    // already rejected before the subtraction).
    if (offset < 0 || length < 0 || length > output.length - offset) {
        throw new IllegalArgumentException("invalid offset [" + offset
                + "] and length [" + length + ']');
    }

    if (closed) {
        throw new ClosedChannelException();
    }

    if (length == 0) {
        return 0;
    }

    if (!ensureBuffer()) {
        return -1;
    }

    final int size = min(length, available());
    System.arraycopy(buffer, position, output, offset, size);
    position += size;
    return size;
}
/**
 * Returns the number of decompressed bytes remaining in the current block,
 * or {@code 0} once the stream has been closed.
 */
@Override
public int available()
        throws IOException
{
    return closed ? 0 : valid - position;
}
/**
 * {@inheritDoc}
 * Reports whether this channel has not yet been closed.
 */
@Override
public boolean isOpen()
{
    if (closed) {
        return false;
    }
    return true;
}
/**
 * {@inheritDoc}
 * Transfers up to {@code dst.remaining()} decompressed bytes into {@code dst}.
 */
@Override
public int read(ByteBuffer dst)
        throws IOException
{
    // argument and state guards, in the same order as the other read methods
    if (dst == null) {
        throw new IllegalArgumentException("dst is null");
    }
    if (closed) {
        throw new ClosedChannelException();
    }
    final int wanted = dst.remaining();
    if (wanted == 0) {
        return 0;
    }
    if (!ensureBuffer()) {
        // end of stream
        return -1;
    }

    // copy whichever is smaller: requested space or buffered data
    final int count = min(wanted, available());
    dst.put(buffer, position, count);
    position += count;
    return count;
}
/**
 * Transfers the entire content of this {@link InputStream} to <i>os</i>.
 * This potentially limits the amount of buffering required to decompress
 * content.
 * <p>
 * Unlike {@link #read(byte[], int, int)}, this method does not need to be
 * called multiple times. A single call will transfer all available content.
 * Any calls after the source has been exhausted will result in a return
 * value of {@code 0}.
 * </p>
 *
 * @param os The destination to write decompressed content to.
 * @return The number of bytes transferred.
 * @throws IOException on read/write failure, or {@link ClosedChannelException}
 *         if this stream has been closed
 * @since 1.1.1
 */
public long transferTo(OutputStream os)
        throws IOException
{
    if (os == null) {
        throw new IllegalArgumentException("os is null");
    }
    if (closed) {
        throw new ClosedChannelException();
    }

    long total = 0L;
    // drain one decompressed block at a time until end of stream
    while (ensureBuffer()) {
        final int chunk = available();
        os.write(buffer, position, chunk);
        position += chunk;
        total += chunk;
    }
    return total;
}
/**
 * Transfers the entire content of this {@link ReadableByteChannel} to
 * <i>wbc</i>. This potentially limits the amount of buffering required to
 * decompress content.
 * <p>
 * Unlike {@link #read(ByteBuffer)}, this method does not need to be called
 * multiple times. A single call will transfer all available content. Any
 * calls after the source has been exhausted will result in a return value
 * of {@code 0}.
 * </p>
 *
 * @param wbc The destination to write decompressed content to.
 * @return The number of bytes transferred.
 * @throws IOException
 * @since 1.1.1
 */
public long transferTo(WritableByteChannel wbc)
        throws IOException
{
    if (wbc == null) {
        throw new IllegalArgumentException("wbc is null");
    }
    if (closed) {
        throw new ClosedChannelException();
    }
    long totTransfered = 0;
    while (ensureBuffer()) {
        // Wrap inside the loop: ensureBuffer() may replace 'buffer' with a
        // larger array when a frame exceeds the current capacity, so a
        // ByteBuffer created once up front could end up viewing the stale,
        // discarded array and silently write wrong data.
        final ByteBuffer bb = ByteBuffer.wrap(buffer, position, available());
        wbc.write(bb);
        // The channel may perform a partial write; advance by what was
        // actually consumed.
        final int written = bb.position() - position;
        position += written;
        totTransfered += written;
    }
    return totTransfered;
}
@Override
public void close()
        throws IOException
{
    // Close the underlying channel first; even if that throws, the
    // finally-block still returns every pooled buffer exactly once.
    try {
        rbc.close();
    }
    finally {
        if (!closed) {
            closed = true;
            if (buffer != null) {
                bufferPool.releaseArray(buffer);
                buffer = null;
            }
            if (input != null) {
                bufferPool.releaseDirect(input);
                input = null;
            }
            if (uncompressedDirect != null) {
                bufferPool.releaseDirect(uncompressedDirect);
                uncompressedDirect = null;
            }
        }
    }
}
/**
 * Disposition of a frame once its header has been parsed: pass the body
 * through as-is, skip it entirely, or run it through the decompressor.
 */
enum FrameAction
{
    RAW,
    SKIP,
    UNCOMPRESS
}
/**
 * Immutable pair decoded from a frame header: how the frame body should be
 * handled and how many body bytes follow the header.
 */
public static final class FrameMetaData
{
    final int length;
    final FrameAction frameAction;

    /**
     * @param frameAction how the frame body is to be handled.
     * @param length number of body bytes following the header.
     */
    public FrameMetaData(FrameAction frameAction, int length)
    {
        this.frameAction = frameAction;
        this.length = length;
    }
}
/**
 * Immutable descriptor of a frame body: the stored (masked) checksum and
 * the offset at which the payload begins.
 */
public static final class FrameData
{
    final int checkSum;
    final int offset;

    /**
     * @param checkSum masked CRC32C value stored in the frame.
     * @param offset byte offset of the payload within the frame body.
     */
    public FrameData(int checkSum, int offset)
    {
        this.checkSum = checkSum;
        this.offset = offset;
    }
}
/**
 * Ensures that {@code buffer} holds at least one unread decoded byte,
 * reading and decoding the next frame from the source if necessary.
 * Maintains the {@code position}/{@code valid} window invariants.
 *
 * @return {@code true} if decoded data is available, {@code false} at end
 *     of stream.
 * @throws IOException on truncated input or checksum mismatch.
 */
private boolean ensureBuffer()
        throws IOException
{
    // Fast path: unread data is still buffered from a previous frame.
    if (available() > 0) {
        return true;
    }
    if (eof) {
        return false;
    }
    // A clean EOF at a frame boundary terminates the stream.
    if (!readBlockHeader()) {
        eof = true;
        return false;
    }
    // get action based on header
    final FrameMetaData frameMetaData = getFrameMetaData(frameHeader);
    if (FrameAction.SKIP == frameMetaData.frameAction) {
        // Discard the body and recurse to process the next frame.
        SnappyFramed.skip(rbc, frameMetaData.length,
                ByteBuffer.wrap(buffer));
        return ensureBuffer();
    }
    // Grow the working buffers if this frame is larger than any seen so far.
    if (frameMetaData.length > input.capacity()) {
        allocateBuffersBasedOnSize(frameMetaData.length);
    }
    input.clear();
    input.limit(frameMetaData.length);
    final int actualRead = readBytes(rbc, input);
    if (actualRead != frameMetaData.length) {
        throw new EOFException("unexpectd EOF when reading frame");
    }
    input.flip();
    // First bytes of the body carry the checksum; payload starts at offset.
    final FrameData frameData = getFrameData(input);
    if (FrameAction.UNCOMPRESS == frameMetaData.frameAction) {
        input.position(frameData.offset);
        final int uncompressedLength = Snappy.uncompressedLength(input);
        // Decompressed output may exceed the compressed-size-based capacity;
        // swap in larger pooled buffers before decompressing.
        if (uncompressedLength > uncompressedDirect.capacity()) {
            bufferPool.releaseDirect(uncompressedDirect);
            bufferPool.releaseArray(buffer);
            uncompressedDirect = bufferPool.allocateDirect(uncompressedLength);
            buffer = bufferPool.allocateArray(uncompressedLength);
        }
        uncompressedDirect.clear();
        this.valid = Snappy.uncompress(input, uncompressedDirect);
        uncompressedDirect.get(buffer, 0, valid);
        this.position = 0;
    }
    else {
        // we need to start reading at the offset
        input.position(frameData.offset);
        this.position = 0;
        this.valid = input.remaining();
        this.input.get(buffer, 0, input.remaining());
    }
    // Verify the masked CRC32C of the *decoded* payload against the value
    // stored in the frame, when verification is enabled.
    if (verifyChecksums) {
        final int actualCrc32c = SnappyFramed.maskedCrc32c(crc32, buffer,
                position, valid - position);
        if (frameData.checkSum != actualCrc32c) {
            throw new IOException("Corrupt input: invalid checksum");
        }
    }
    return true;
}
/**
 * Attempts to read the next fixed-size frame header into
 * {@code frameHeader}.
 *
 * @return {@code true} when a full header was read, {@code false} on a
 *     clean end-of-stream at a frame boundary.
 * @throws EOFException if the stream ends in the middle of a header.
 */
private boolean readBlockHeader()
        throws IOException
{
    frameHeader.clear();
    final int count = readBytes(rbc, frameHeader);
    if (count == -1) {
        return false;
    }
    if (count < frameHeader.capacity()) {
        throw new EOFException("encountered EOF while reading block header");
    }
    frameHeader.flip();
    return true;
}
/**
 * Decodes a frame header into the action to take and the body length.
 *
 * @param frameHeader buffer backed by an array holding the header bytes.
 * @return the parsed {@link FrameMetaData}.
 * @throws IOException if the header describes an invalid or unsupported
 *     chunk.
 */
private FrameMetaData getFrameMetaData(ByteBuffer frameHeader)
        throws IOException
{
    assert frameHeader.hasArray();
    final byte[] header = frameHeader.array();
    // Bytes 1-3 hold the 24-bit little-endian body length.
    final int length = (header[1] & 0xFF)
            | ((header[2] & 0xFF) << 8)
            | ((header[3] & 0xFF) << 16);
    final int flag = header[0] & 0xFF;
    final FrameAction frameAction;
    final int minLength;
    switch (flag) {
        case COMPRESSED_DATA_FLAG:
            frameAction = FrameAction.UNCOMPRESS;
            minLength = 5;
            break;
        case UNCOMPRESSED_DATA_FLAG:
            frameAction = FrameAction.RAW;
            minLength = 5;
            break;
        case STREAM_IDENTIFIER_FLAG:
            if (length != 6) {
                throw new IOException(
                        "stream identifier chunk with invalid length: "
                                + length);
            }
            frameAction = FrameAction.SKIP;
            minLength = 6;
            break;
        default:
            // Chunk types 0x02-0x7f are reserved and must not be skipped.
            if (flag <= 0x7f) {
                throw new IOException("unsupported unskippable chunk: "
                        + Integer.toHexString(flag));
            }
            // Chunk types 0x80-0xfe are reserved but skippable.
            frameAction = FrameAction.SKIP;
            minLength = 0;
            break;
    }
    if (length < minLength) {
        throw new IOException("invalid length: " + length
                + " for chunk flag: " + Integer.toHexString(flag));
    }
    return new FrameMetaData(frameAction, length);
}
/**
 * Builds the descriptor for a frame body: the stored checksum read from its
 * first four bytes, with the payload beginning at offset 4.
 *
 * @param content frame body, positioned at the checksum.
 * @return the frame's checksum and payload offset.
 * @throws IOException declared for interface symmetry; not thrown here.
 */
private FrameData getFrameData(ByteBuffer content)
        throws IOException
{
    final int storedChecksum = getCrc32c(content);
    return new FrameData(storedChecksum, 4);
}
/**
 * Reads the 32-bit little-endian checksum at the buffer's current position
 * using absolute gets, so the buffer's position is left untouched.
 */
private int getCrc32c(ByteBuffer content)
{
    final int base = content.position();
    int crc = content.get(base) & 0xFF;
    crc |= (content.get(base + 1) & 0xFF) << 8;
    crc |= (content.get(base + 2) & 0xFF) << 16;
    crc |= (content.get(base + 3) & 0xFF) << 24;
    return crc;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.message;
import java.util.Arrays;
import org.apache.logging.log4j.util.StringBuilderFormattable;
/**
* Handles messages that consist of a format string containing '{}' to represent each replaceable token, and
* the parameters.
* <p>
* This class was originally written for <a href="http://lilithapp.com/">Lilith</a> by Joern Huxhorn where it is
* licensed under the LGPL. It has been relicensed here with his permission providing that this attribution remain.
* </p>
*/
public class ParameterizedMessage implements Message, StringBuilderFormattable {

    /**
     * Prefix for recursion.
     */
    public static final String RECURSION_PREFIX = ParameterFormatter.RECURSION_PREFIX;

    /**
     * Suffix for recursion.
     */
    public static final String RECURSION_SUFFIX = ParameterFormatter.RECURSION_SUFFIX;

    /**
     * Prefix for errors.
     */
    public static final String ERROR_PREFIX = ParameterFormatter.ERROR_PREFIX;

    /**
     * Separator for errors.
     */
    public static final String ERROR_SEPARATOR = ParameterFormatter.ERROR_SEPARATOR;

    /**
     * Separator for error messages.
     */
    public static final String ERROR_MSG_SEPARATOR = ParameterFormatter.ERROR_MSG_SEPARATOR;

    /**
     * Suffix for errors.
     */
    public static final String ERROR_SUFFIX = ParameterFormatter.ERROR_SUFFIX;

    private static final long serialVersionUID = -665975803997290697L;

    private static final int HASHVAL = 31;

    // storing JDK classes in ThreadLocals does not cause memory leaks in web apps, so this is okay.
    // Declared final: the reference is never reassigned.
    private static final ThreadLocal<StringBuilder> threadLocalStringBuilder = new ThreadLocal<StringBuilder>();

    private String messagePattern;
    private transient Object[] argArray;

    // Lazily computed by getFormattedMessage(); null until first use.
    private String formattedMessage;
    private transient Throwable throwable;

    // Placeholder start indices within messagePattern, filled by
    // ParameterFormatter.countArgumentPlaceholders2. indices[0] < 0 signals
    // that the fast formatting path cannot be used.
    private int[] indices;
    private int usedCount;

    /**
     * Creates a parameterized message.
     * @param messagePattern The message "format" string. This will be a String containing "{}" placeholders
     * where parameters should be substituted.
     * @param arguments The arguments for substitution.
     * @param throwable A Throwable.
     * @deprecated Use constructor ParameterizedMessage(String, Object[], Throwable) instead
     */
    @Deprecated
    public ParameterizedMessage(final String messagePattern, final String[] arguments, final Throwable throwable) {
        this.argArray = arguments;
        this.throwable = throwable;
        init(messagePattern);
    }

    /**
     * Creates a parameterized message.
     * @param messagePattern The message "format" string. This will be a String containing "{}" placeholders
     * where parameters should be substituted.
     * @param arguments The arguments for substitution.
     * @param throwable A Throwable.
     */
    public ParameterizedMessage(final String messagePattern, final Object[] arguments, final Throwable throwable) {
        this.argArray = arguments;
        this.throwable = throwable;
        init(messagePattern);
    }

    /**
     * Constructs a ParameterizedMessage which contains the arguments converted to String as well as an optional
     * Throwable.
     *
     * <p>If the last argument is a Throwable and is NOT used up by a placeholder in the message pattern it is returned
     * in {@link #getThrowable()} and won't be contained in the created String[].
     * If it is used up {@link #getThrowable()} will return null even if the last argument was a Throwable!</p>
     *
     * @param messagePattern the message pattern that to be checked for placeholders.
     * @param arguments the argument array to be converted.
     */
    public ParameterizedMessage(final String messagePattern, final Object... arguments) {
        this.argArray = arguments;
        init(messagePattern);
    }

    /**
     * Constructor with a pattern and a single parameter.
     * @param messagePattern The message pattern.
     * @param arg The parameter.
     */
    public ParameterizedMessage(final String messagePattern, final Object arg) {
        this(messagePattern, new Object[]{arg});
    }

    /**
     * Constructor with a pattern and two parameters.
     * @param messagePattern The message pattern.
     * @param arg0 The first parameter.
     * @param arg1 The second parameter.
     */
    public ParameterizedMessage(final String messagePattern, final Object arg0, final Object arg1) {
        this(messagePattern, new Object[]{arg0, arg1});
    }

    private void init(final String messagePattern) {
        this.messagePattern = messagePattern;
        // Always allocate at least one slot: formatTo() unconditionally reads
        // indices[0], so a zero-length array (null pattern, or a pattern
        // shorter than 2 characters) would previously cause an
        // ArrayIndexOutOfBoundsException when the message is formatted.
        final int len = Math.max(1, messagePattern == null ? 0 : messagePattern.length() >> 1); // divide by 2
        this.indices = new int[len];
        final int placeholderCount = ParameterFormatter.countArgumentPlaceholders2(messagePattern, indices);
        initThrowable(argArray, placeholderCount);
        this.usedCount = Math.min(placeholderCount, (argArray == null) ? 0 : argArray.length);
    }

    // If there are more arguments than placeholders and the last argument is a
    // Throwable, capture it so it can be attached to the log event.
    private void initThrowable(final Object[] params, final int usedParams) {
        if (params != null) {
            final int argCount = params.length;
            if (usedParams < argCount && this.throwable == null && params[argCount - 1] instanceof Throwable) {
                this.throwable = (Throwable) params[argCount - 1];
            }
        }
    }

    /**
     * Returns the message pattern.
     * @return the message pattern.
     */
    @Override
    public String getFormat() {
        return messagePattern;
    }

    /**
     * Returns the message parameters.
     * @return the message parameters.
     */
    @Override
    public Object[] getParameters() {
        return argArray;
    }

    /**
     * Returns the Throwable that was given as the last argument, if any.
     * It will not survive serialization. The Throwable exists as part of the message
     * primarily so that it can be extracted from the end of the list of parameters
     * and then be added to the LogEvent. As such, the Throwable in the event should
     * not be used once the LogEvent has been constructed.
     *
     * @return the Throwable, if any.
     */
    @Override
    public Throwable getThrowable() {
        return throwable;
    }

    /**
     * Returns the formatted message.
     * @return the formatted message.
     */
    @Override
    public String getFormattedMessage() {
        if (formattedMessage == null) {
            final StringBuilder buffer = getThreadLocalStringBuilder();
            formatTo(buffer);
            formattedMessage = buffer.toString();
        }
        return formattedMessage;
    }

    // Reuses a per-thread StringBuilder to avoid allocating on every format call.
    private static StringBuilder getThreadLocalStringBuilder() {
        StringBuilder buffer = threadLocalStringBuilder.get();
        if (buffer == null) {
            buffer = new StringBuilder(255);
            threadLocalStringBuilder.set(buffer);
        }
        buffer.setLength(0);
        return buffer;
    }

    @Override
    public void formatTo(final StringBuilder buffer) {
        if (formattedMessage != null) {
            buffer.append(formattedMessage);
        } else {
            // indices[0] < 0 means the precomputed placeholder positions are
            // unusable (e.g. escaped placeholders); fall back to the slow path.
            if (indices[0] < 0) {
                ParameterFormatter.formatMessage(buffer, messagePattern, argArray, usedCount);
            } else {
                ParameterFormatter.formatMessage2(buffer, messagePattern, argArray, usedCount, indices);
            }
        }
    }

    /**
     * Replace placeholders in the given messagePattern with arguments.
     *
     * @param messagePattern the message pattern containing placeholders.
     * @param arguments the arguments to be used to replace placeholders.
     * @return the formatted message.
     */
    public static String format(final String messagePattern, final Object[] arguments) {
        return ParameterFormatter.format(messagePattern, arguments);
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        final ParameterizedMessage that = (ParameterizedMessage) o;
        if (messagePattern != null ? !messagePattern.equals(that.messagePattern) : that.messagePattern != null) {
            return false;
        }
        if (!Arrays.equals(this.argArray, that.argArray)) {
            return false;
        }
        // The throwable is deliberately excluded from equality, consistent
        // with hashCode() below.
        return true;
    }

    @Override
    public int hashCode() {
        int result = messagePattern != null ? messagePattern.hashCode() : 0;
        result = HASHVAL * result + (argArray != null ? Arrays.hashCode(argArray) : 0);
        return result;
    }

    /**
     * Counts the number of unescaped placeholders in the given messagePattern.
     *
     * @param messagePattern the message pattern to be analyzed.
     * @return the number of unescaped placeholders.
     */
    public static int countArgumentPlaceholders(final String messagePattern) {
        return ParameterFormatter.countArgumentPlaceholders(messagePattern);
    }

    /**
     * This method performs a deep toString of the given Object.
     * Primitive arrays are converted using their respective Arrays.toString methods while
     * special handling is implemented for "container types", i.e. Object[], Map and Collection because those could
     * contain themselves.
     * <p>
     * It should be noted that neither AbstractMap.toString() nor AbstractCollection.toString() implement such a
     * behavior. They only check if the container is directly contained in itself, but not if a contained container
     * contains the original one. Because of that, Arrays.toString(Object[]) isn't safe either.
     * Confusing? Just read the last paragraph again and check the respective toString() implementation.
     * </p>
     * <p>
     * This means, in effect, that logging would produce a usable output even if an ordinary System.out.println(o)
     * would produce a relatively hard-to-debug StackOverflowError.
     * </p>
     * @param o The object.
     * @return The String representation.
     */
    public static String deepToString(final Object o) {
        return ParameterFormatter.deepToString(o);
    }

    /**
     * This method returns the same as if Object.toString() would not have been
     * overridden in obj.
     * <p>
     * Note that this isn't 100% secure as collisions can always happen with hash codes.
     * </p>
     * <p>
     * Copied from Object.hashCode():
     * </p>
     * <blockquote>
     * As much as is reasonably practical, the hashCode method defined by
     * class {@code Object} does return distinct integers for distinct
     * objects. (This is typically implemented by converting the internal
     * address of the object into an integer, but this implementation
     * technique is not required by the Java&trade; programming language.)
     * </blockquote>
     *
     * @param obj the Object that is to be converted into an identity string.
     * @return the identity string as also defined in Object.toString()
     */
    public static String identityToString(final Object obj) {
        return ParameterFormatter.identityToString(obj);
    }

    @Override
    public String toString() {
        return "ParameterizedMessage[messagePattern=" + messagePattern + ", stringArgs=" +
                Arrays.toString(argArray) + ", throwable=" + throwable + ']';
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.security.authc.service;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.core.security.action.service.TokenInfo;
import org.elasticsearch.xpack.core.security.audit.logfile.CapturingLogger;
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
import org.elasticsearch.xpack.security.authc.service.ServiceAccount.ServiceAccountId;
import org.elasticsearch.xpack.security.support.CacheInvalidatorRegistry;
import org.junit.After;
import org.junit.Before;
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for the file-backed service-account token store: parsing of the
 * {@code service_tokens} data file, logging when the file is absent, automatic
 * reload on file changes, and token lookup by service account.
 */
public class FileServiceAccountTokenStoreTests extends ESTestCase {
private Settings settings;
private Environment env;
private ThreadPool threadPool;
private ClusterService clusterService;
@Before
public void init() {
// Pick a hashing algorithm the JVM supports; bcrypt variants are not
// available in FIPS mode.
final String hashingAlgorithm = inFipsJvm()
? randomFrom("pbkdf2", "pbkdf2_50000", "pbkdf2_stretch")
: randomFrom("bcrypt", "bcrypt10", "pbkdf2", "pbkdf2_50000", "pbkdf2_stretch");
// Short reload interval so the auto-reload test picks up changes quickly.
settings = Settings.builder()
.put("resource.reload.interval.high", "100ms")
.put("path.home", createTempDir())
.put("xpack.security.authc.service_token_hashing.algorithm", hashingAlgorithm)
.build();
env = TestEnvironment.newEnvironment(settings);
threadPool = new TestThreadPool("test");
clusterService = mock(ClusterService.class);
final DiscoveryNode discoveryNode = mock(DiscoveryNode.class);
when(clusterService.localNode()).thenReturn(discoveryNode);
when(discoveryNode.getName()).thenReturn("node");
}
@After
public void shutdown() {
terminate(threadPool);
}
// Parses the checked-in fixture file and verifies each known token hash;
// the "plain" entry is invalid and must be dropped.
public void testParseFile() throws Exception {
Path path = getDataPath("service_tokens");
Map<String, char[]> parsedTokenHashes = FileServiceAccountTokenStore.parseFile(path, null);
assertThat(parsedTokenHashes, notNullValue());
assertThat(parsedTokenHashes.size(), is(5));
assertThat(
new String(parsedTokenHashes.get("elastic/fleet-server/bcrypt")),
equalTo("$2a$10$uuCzGHRrEz/QMB/.bmL8qOKXHhPNt57dYBbWCH/Hbb3SjUyZ.Hf1i")
);
assertThat(
new String(parsedTokenHashes.get("elastic/fleet-server/bcrypt10")),
equalTo("$2a$10$ML0BUUxdzs8ApPNf1ayAwuh61ZhfqlzN/1DgZWZn6vNiUhpu1GKTe")
);
assertThat(
new String(parsedTokenHashes.get("elastic/fleet-server/pbkdf2")),
equalTo("{PBKDF2}10000$0N2h5/AsDS5uO0/A+B6y8AnTCJ3Tqo8nygbzu1gkgpo=$5aTcCtteHf2g2ye7Y3p6jSZBoGhNJ7l6F3tmUhPTwRo=")
);
assertThat(
new String(parsedTokenHashes.get("elastic/fleet-server/pbkdf2_50000")),
equalTo("{PBKDF2}50000$IMzlphNClmrP/du40yxGM3fNjklg8CuACds12+Ry0jM=$KEC1S9a0NOs3OJKM4gEeBboU18EP4+3m/pyIA4MBDGk=")
);
assertThat(
new String(parsedTokenHashes.get("elastic/fleet-server/pbkdf2_stretch")),
equalTo("{PBKDF2_STRETCH}10000$Pa3oNkj8xTD8j2gTgjWnTvnE6jseKApWMFjcNCLxX1U=$84ECweHFZQ2DblHEjHTRWA+fG6h5bVMyTSJUmFvTo1o=")
);
assertThat(parsedTokenHashes.get("elastic/fleet-server/plain"), nullValue());
}
// A missing file yields an empty map and logs a message mentioning the path.
public void testParseFileNotExists() throws IllegalAccessException, IOException {
Logger logger = CapturingLogger.newCapturingLogger(Level.TRACE, null);
final List<String> events = CapturingLogger.output(logger.getName(), Level.TRACE);
events.clear();
final Map<String, char[]> tokenHashes = FileServiceAccountTokenStore.parseFile(
getDataPath("service_tokens").getParent().resolve("does-not-exist"),
logger
);
assertThat(tokenHashes.isEmpty(), is(true));
assertThat(events, hasSize(2));
assertThat(events.get(1), containsString("does not exist"));
}
// Exercises the file-watcher reload path through a sequence of mutations.
// NOTE: the steps below are strictly order-dependent; each one builds on the
// counter value observed after the previous change.
public void testAutoReload() throws Exception {
Path serviceTokensSourceFile = getDataPath("service_tokens");
Path configDir = env.configFile();
Files.createDirectories(configDir);
Path targetFile = configDir.resolve("service_tokens");
Files.copy(serviceTokensSourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING);
final String hashingAlgo = settings.get("xpack.security.authc.service_token_hashing.algorithm");
final Hasher hasher = Hasher.resolve(hashingAlgo);
try (ResourceWatcherService watcherService = new ResourceWatcherService(settings, threadPool)) {
// Counts how many times the store notifies listeners of a reload.
final AtomicInteger counter = new AtomicInteger(0);
FileServiceAccountTokenStore store = new FileServiceAccountTokenStore(
env,
watcherService,
threadPool,
clusterService,
mock(CacheInvalidatorRegistry.class)
);
store.addListener(counter::getAndIncrement);
// Token name shares the hashing algorithm name for convenience
final String qualifiedTokenName = "elastic/fleet-server/" + hashingAlgo;
assertThat(store.getTokenHashes().containsKey(qualifiedTokenName), is(true));
final int oldValue1 = counter.get();
// A blank line should not trigger update
try (BufferedWriter writer = Files.newBufferedWriter(targetFile, StandardCharsets.UTF_8, StandardOpenOption.APPEND)) {
writer.append("\n");
}
watcherService.notifyNow(ResourceWatcherService.Frequency.HIGH);
if (counter.get() != oldValue1) {
fail("Listener should not be called as service tokens are not changed.");
}
assertThat(store.getTokenHashes().containsKey(qualifiedTokenName), is(true));
// Add a new entry
final int oldValue2 = counter.get();
final char[] newTokenHash = hasher.hash(
new SecureString("46ToAwIHZWxhc3RpYwVmbGVldAZ0b2tlbjEWWkYtQ3dlWlVTZldJX3p5Vk9ySnlSQQAAAAAAAAA".toCharArray())
);
try (BufferedWriter writer = Files.newBufferedWriter(targetFile, StandardCharsets.UTF_8, StandardOpenOption.APPEND)) {
writer.newLine();
writer.append("elastic/fleet-server/token1:").append(new String(newTokenHash));
}
assertBusy(() -> {
assertThat("Waited too long for the updated file to be picked up", counter.get(), greaterThan(oldValue2));
assertThat(store.getTokenHashes().containsKey("elastic/fleet-server/token1"), is(true));
}, 5, TimeUnit.SECONDS);
// Remove the new entry
final int oldValue3 = counter.get();
Files.copy(serviceTokensSourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING);
assertBusy(() -> {
assertThat("Waited too long for the updated file to be picked up", counter.get(), greaterThan(oldValue3));
assertThat(store.getTokenHashes().containsKey("elastic/fleet-server/token1"), is(false));
assertThat(store.getTokenHashes().containsKey(qualifiedTokenName), is(true));
}, 5, TimeUnit.SECONDS);
// Write a mal-formatted line; either variant should empty the store.
final int oldValue4 = counter.get();
if (randomBoolean()) {
try (BufferedWriter writer = Files.newBufferedWriter(targetFile, StandardCharsets.UTF_8, StandardOpenOption.APPEND)) {
writer.newLine();
writer.append("elastic/fleet-server/tokenxfoobar");
}
} else {
// writing in utf_16 should cause a parsing error as we try to read the file in utf_8
try (BufferedWriter writer = Files.newBufferedWriter(targetFile, StandardCharsets.UTF_16, StandardOpenOption.APPEND)) {
writer.newLine();
writer.append("elastic/fleet-server/tokenx:").append(new String(newTokenHash));
}
}
assertBusy(() -> {
assertThat("Waited too long for the updated file to be picked up", counter.get(), greaterThan(oldValue4));
assertThat(store.getTokenHashes().isEmpty(), is(true));
}, 5, TimeUnit.SECONDS);
// Restore to original file again
final int oldValue5 = counter.get();
Files.copy(serviceTokensSourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING);
assertBusy(() -> {
assertThat("Waited too long for the updated file to be picked up", counter.get(), greaterThan(oldValue5));
assertThat(store.getTokenHashes().containsKey(qualifiedTokenName), is(true));
}, 5, TimeUnit.SECONDS);
// Duplicate entry: the later line wins and replaces the earlier hash.
final int oldValue6 = counter.get();
try (BufferedWriter writer = Files.newBufferedWriter(targetFile, StandardCharsets.UTF_8, StandardOpenOption.APPEND)) {
writer.newLine();
writer.append(qualifiedTokenName).append(":").append(new String(newTokenHash));
}
assertBusy(() -> {
assertThat("Waited too long for the updated file to be picked up", counter.get(), greaterThan(oldValue6));
assertThat(store.getTokenHashes().get(qualifiedTokenName), equalTo(newTokenHash));
}, 5, TimeUnit.SECONDS);
}
}
// Looks up all tokens belonging to one service account and checks their
// names, source, and node names.
public void testFindTokensFor() throws IOException {
Path serviceTokensSourceFile = getDataPath("service_tokens");
Path configDir = env.configFile();
Files.createDirectories(configDir);
Path targetFile = configDir.resolve("service_tokens");
Files.copy(serviceTokensSourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING);
FileServiceAccountTokenStore store = new FileServiceAccountTokenStore(
env,
mock(ResourceWatcherService.class),
threadPool,
clusterService,
mock(CacheInvalidatorRegistry.class)
);
final ServiceAccountId accountId = new ServiceAccountId("elastic", "fleet-server");
final List<TokenInfo> tokenInfos = store.findTokensFor(accountId);
assertThat(tokenInfos, hasSize(5));
assertThat(
tokenInfos.stream().map(TokenInfo::getName).collect(Collectors.toUnmodifiableSet()),
equalTo(Set.of("pbkdf2", "bcrypt10", "pbkdf2_stretch", "pbkdf2_50000", "bcrypt"))
);
assertThat(
tokenInfos.stream().map(TokenInfo::getSource).collect(Collectors.toUnmodifiableSet()),
equalTo(EnumSet.of(TokenInfo.TokenSource.FILE))
);
assertThat(
tokenInfos.stream().map(TokenInfo::getNodeNames).collect(Collectors.toUnmodifiableSet()),
equalTo(Set.of(List.of("node")))
);
}
}
| |
/*
* $Id$
* This file is a part of the Arakhne Foundation Classes, http://www.arakhne.org/afc
*
* Copyright (c) 2000-2012 Stephane GALLAND.
* Copyright (c) 2005-10, Multiagent Team, Laboratoire Systemes et Transports,
* Universite de Technologie de Belfort-Montbeliard.
* Copyright (c) 2013-2020 The original authors, and other authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.arakhne.maven;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URL;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CodingErrorAction;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.EventListener;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.ResourceBundle;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Pattern;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.ArtifactUtils;
import org.apache.maven.artifact.handler.ArtifactHandler;
import org.apache.maven.artifact.handler.manager.ArtifactHandlerManager;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Contributor;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.Developer;
import org.apache.maven.model.License;
import org.apache.maven.model.Model;
import org.apache.maven.model.Organization;
import org.apache.maven.model.Parent;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.Scm;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.logging.Log;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectBuilder;
import org.apache.maven.project.ProjectBuildingException;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.resolution.ArtifactRequest;
import org.eclipse.aether.resolution.ArtifactResolutionException;
import org.eclipse.aether.resolution.ArtifactResult;
import org.sonatype.plexus.build.incremental.BuildContext;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.wc.SVNClientManager;
import org.tmatesoft.svn.core.wc.SVNInfo;
import org.tmatesoft.svn.core.wc.SVNRevision;
/**
* Abstract implementation for all Arakhnê maven modules. This implementation is thread safe.
*
* @author $Author: sgalland$
* @version $FullVersion$
* @mavengroupid $GroupId$
* @mavenartifactid $ArtifactId$
*
* @component
*/
@SuppressWarnings({"checkstyle:classfanoutcomplexity", "checkstyle:classdataabstractioncoupling",
"checkstyle:methodcount"})
public abstract class AbstractArakhneMojo extends AbstractMojo {
/**
 * Empty string constant.
 */
public static final String EMPTY_STRING = ExtendedArtifact.EMPTY_STRING;
/**
 * Maven tag for artifact id.
 */
public static final String PROP_ARTIFACTID = "artifactId"; //$NON-NLS-1$
/**
 * Maven tag for group id.
 */
public static final String PROP_GROUPID = "groupId"; //$NON-NLS-1$
/**
 * Maven tag for version description.
 */
public static final String PROP_VERSION = "version"; //$NON-NLS-1$
/**
 * Preferred charset for the new MacOS and Linux operating systems.
 */
public static final String PREFERRED_CHARSET_UNIX = "UTF-8"; //$NON-NLS-1$
/**
 * Preferred charset for the Windows operating systems.
 */
public static final String PREFERRED_CHARSET_WINDOWS = "windows-1250"; //$NON-NLS-1$
/**
 * Preferred charset for the old MacOS operating systems.
 */
public static final String PREFERRED_CHARSET_MACOS = "MacRoman"; //$NON-NLS-1$
/**
 * Preferred charset for the Java virtual machine (internal charset).
 */
public static final String PREFERRED_CHARSET_JVM = "UTF-16"; //$NON-NLS-1$
// Internal buffer size, in bytes (4 KiB) — presumably used by I/O helpers
// defined later in this class; confirm against the copy/stream methods.
private static final int FILE_BUFFER = 4096;
/**
 * Invocation date.
 */
protected final Date invocationDate = new Date();
/**
 * Map the directory of pom.xml files to the definition of the corresponding maven module.
 */
private final Map<File, ExtendedArtifact> localArtifactDescriptions = new TreeMap<>();
/**
 * Map the artifact id to the definition of the corresponding maven module.
 */
private final Map<String, ExtendedArtifact> remoteArtifactDescriptions = new TreeMap<>();
/**
 * Manager of the SVN repository.
 */
private SVNClientManager svnManager;
/**
 * Are the preferred charset in the preferred order.
 */
private Charset[] preferredCharsets;
/** Construct.
 * Registers the supported charsets, from the most preferred to the least preferred.
 */
public AbstractArakhneMojo() {
    final List<Charset> charsets = new ArrayList<>();
    // Order matters: first entry is tried first by detectEncoding(File).
    final String[] preferenceOrder = {
        PREFERRED_CHARSET_UNIX,     // new Mac OS and Linux OS
        PREFERRED_CHARSET_WINDOWS,  // Windows OS
        PREFERRED_CHARSET_MACOS,    // old Mac OS
        PREFERRED_CHARSET_JVM,      // Java internal
    };
    for (final String charsetName : preferenceOrder) {
        addCharset(charsets, charsetName);
    }
    this.preferredCharsets = charsets.toArray(new Charset[charsets.size()]);
    charsets.clear();
}
/** Replies the preferred URL for the given contributor.
 *
 * <p>The contributor's homepage is tried first; when it is absent or malformed,
 * a {@code mailto:} URL is built from the contributor's email address.
 *
 * @param contributor the contributor.
 * @param log the log.
 * @return the URL or <code>null</code> if no URL could be built.
 */
protected static URL getContributorURL(Contributor contributor, Log log) {
    if (contributor == null) {
        return null;
    }
    // Try the explicitly declared homepage first.
    final String homepage = contributor.getUrl();
    if (homepage != null && !EMPTY_STRING.equals(homepage)) {
        try {
            return new URL(homepage);
        } catch (Throwable exception) {
            // Malformed homepage: fall back on the email address below.
        }
    }
    // Fall back on a mailto: link built from the email address.
    final String email = contributor.getEmail();
    if (email != null && !EMPTY_STRING.equals(email)) {
        try {
            return new URL("mailto:" + email); //$NON-NLS-1$
        } catch (Throwable exception) {
            // Malformed email address: no URL can be built.
        }
    }
    return null;
}
/**
 * Copy a directory recursively.
 *
 * <p>The directory tree rooted at {@code in} is traversed iteratively and every
 * regular file is copied to the mirrored location under {@code out}.
 *
 * @param in input directory.
 * @param out output directory.
 * @param skipHiddenFiles indicates if the hidden files should be ignored.
 * @throws IOException on error.
 * @since 3.3
 */
public final void dirCopy(File in, File out, boolean skipHiddenFiles) throws IOException {
    assert in != null;
    assert out != null;
    getLog().debug(in.toString() + "->" + out.toString()); //$NON-NLS-1$
    getLog().debug("Ignore hidden files: " + skipHiddenFiles); //$NON-NLS-1$
    out.mkdirs();
    // Iterative breadth-first traversal of the source tree.
    final LinkedList<File> queue = new LinkedList<>();
    queue.add(in);
    while (!queue.isEmpty()) {
        final File current = queue.removeFirst();
        getLog().debug("Scanning: " + current); //$NON-NLS-1$
        if (!current.isDirectory()) {
            // Regular file: copy it to the mirrored place in the target tree.
            final File targetFile = toOutput(in, current, out);
            targetFile.getParentFile().mkdirs();
            fileCopy(current, targetFile);
        } else {
            final File[] children = current.listFiles();
            if (children != null) {
                for (final File child : children) {
                    if (!skipHiddenFiles || !child.isHidden()) {
                        getLog().debug("Discovering: " + child); //$NON-NLS-1$
                        queue.add(child);
                    }
                }
            }
        }
    }
}
/** Compute the location of {@code file} (a descendant of {@code root}) when it is
 * re-rooted under {@code newRoot}.
 *
 * <p>FIX: the previous implementation used {@code String.replaceAll} with an
 * unescaped replacement string; backslashes (Windows paths) and {@code $}
 * characters in {@code newRoot} are treated specially in a regex replacement,
 * corrupting the result or throwing at runtime. Plain prefix substitution has
 * the intended behavior and needs no escaping.
 *
 * @param root the root of the source tree.
 * @param file the source file.
 * @param newRoot the root of the target tree.
 * @return the mirrored location of {@code file} under {@code newRoot}.
 */
private static File toOutput(File root, File file, File newRoot) {
    final String filename = file.getAbsolutePath();
    final String rootPath = root.getAbsolutePath();
    if (filename.startsWith(rootPath)) {
        return new File(newRoot.getAbsolutePath() + filename.substring(rootPath.length()));
    }
    // Not under root: keep the path unchanged (same as a non-matching regex).
    return new File(filename);
}
/**
 * Delete a directory and its content.
 *
 * <p>The traversal is depth-first: children are pushed at the head of the
 * worklist, so a directory is only deleted after it has been emptied.
 *
 * @param dir the directory to remove.
 * @throws IOException on error.
 * @since 3.3
 */
public final void dirRemove(File dir) throws IOException {
if (dir != null) {
getLog().debug("Deleting tree: " + dir.toString()); //$NON-NLS-1$
final LinkedList<File> candidates = new LinkedList<>();
candidates.add(dir);
File[] children;
final BuildContext buildContext = getBuildContext();
while (!candidates.isEmpty()) {
// Peek (do not remove yet): a non-empty directory stays on the list
// until all of its children have been processed.
final File f = candidates.getFirst();
getLog().debug("Scanning: " + f); //$NON-NLS-1$
if (f.isDirectory()) {
children = f.listFiles();
if (children != null && children.length > 0) {
// Non empty directory
for (final File c : children) {
getLog().debug("Discovering: " + c); //$NON-NLS-1$
// Push at the head so children are handled before their parent.
candidates.push(c);
}
} else {
// empty directory
getLog().debug("Deleting: " + f); //$NON-NLS-1$
candidates.removeFirst();
f.delete();
// Let the IDE/build integration see the change.
buildContext.refresh(f.getParentFile());
}
} else {
// not a directory
candidates.removeFirst();
if (f.exists()) {
getLog().debug("Deleting: " + f); //$NON-NLS-1$
f.delete();
buildContext.refresh(f.getParentFile());
}
}
}
getLog().debug("Deletion done"); //$NON-NLS-1$
}
}
/**
 * Copy a file.
 *
 * <p>The copy is performed through NIO channels. Note that
 * {@link java.nio.channels.FileChannel#transferTo(long, long, java.nio.channels.WritableByteChannel)}
 * is allowed to transfer fewer bytes than requested, so the transfer is looped
 * until the whole file has been copied (the previous single call could silently
 * truncate large files on some platforms).
 *
 * @param in input file.
 * @param out output file.
 * @throws IOException on error.
 */
public final void fileCopy(File in, File out) throws IOException {
    assert in != null;
    assert out != null;
    getLog().debug("Copying file: " + in.toString() + " into " + out.toString()); //$NON-NLS-1$ //$NON-NLS-2$
    try (FileInputStream fis = new FileInputStream(in)) {
        try (FileChannel inChannel = fis.getChannel()) {
            try (FileOutputStream fos = new FileOutputStream(out)) {
                try (FileChannel outChannel = fos.getChannel()) {
                    final long size = inChannel.size();
                    long position = 0;
                    while (position < size) {
                        final long transferred = inChannel.transferTo(position, size - position, outChannel);
                        if (transferred <= 0) {
                            // Defensive: avoid an infinite loop on a zero transfer.
                            break;
                        }
                        position += transferred;
                    }
                }
            }
        }
    } finally {
        // Let the IDE/build integration see the new file.
        getBuildContext().refresh(out);
    }
}
/**
 * Copy a file from an URL source.
 *
 * @param in input file.
 * @param out output file.
 * @throws IOException on error.
 */
public final void fileCopy(URL in, File out) throws IOException {
    assert in != null;
    try (InputStream source = in.openStream();
            OutputStream target = new FileOutputStream(out)) {
        // Plain buffered copy loop.
        final byte[] buffer = new byte[FILE_BUFFER];
        for (int count = source.read(buffer); count > 0; count = source.read(buffer)) {
            target.write(buffer, 0, count);
        }
    } finally {
        // Let the IDE/build integration see the new file.
        getBuildContext().refresh(out);
    }
}
/**
 * Read a resource property and replace the parametrized macros by the given parameters.
 *
 * @param source
 * is the source of the properties.
 * @param label
 * is the name of the property.
 * @param params
 * are the parameters to replace.
 * @return the read text.
 */
public static final String getLString(Class<?> source, String label, Object... params) {
final ResourceBundle rb = ResourceBundle.getBundle(source.getCanonicalName());
String text = rb.getString(label);
// NOTE(review): both replacements below are effectively no-ops — "[\n\r]"
// maps CR/LF characters to LF and "\t" maps a tab to a tab. The intent was
// probably to expand the two-character literal sequences "\\n" and "\\t"
// from the property file (patterns "\\\\n" / "\\\\t"); confirm before changing.
text = text.replaceAll("[\\n\\r]", "\n"); //$NON-NLS-1$ //$NON-NLS-2$
text = text.replaceAll("\\t", "\t"); //$NON-NLS-1$ //$NON-NLS-2$
text = MessageFormat.format(text, params);
return text;
}
/**
 * Remove the path prefix from a file.
 *
 * <p>Any leading path separator remaining after the prefix removal is
 * stripped as well, so the result is a relative path.
 *
 * @param prefix path prefix to remove.
 * @param file input filename.
 * @return the {@code file} without the prefix.
 */
public static final String removePathPrefix(File prefix, File file) {
    final String absolutePath = file.getAbsolutePath();
    final String prefixPath = prefix.getAbsolutePath();
    String relative = absolutePath;
    if (absolutePath.startsWith(prefixPath)) {
        relative = absolutePath.substring(prefixPath.length());
    }
    if (relative.startsWith(File.separator)) {
        relative = relative.substring(File.separator.length());
    }
    return relative;
}
/** Append the named charset to the list when it is supported by this JVM
 * and not already present.
 *
 * @param availableCharsets the list to fill.
 * @param csName the canonical or alias name of the charset.
 */
private static void addCharset(List<Charset> availableCharsets, String csName) {
    try {
        final Charset charset = Charset.forName(csName);
        if (availableCharsets.contains(charset)) {
            return;
        }
        availableCharsets.add(charset);
    } catch (Throwable exception) {
        // Charset unsupported on this JVM: silently skip it.
    }
}
/**
 * Replies the preferred charsets in the preferred order of use.
 *
 * <p>NOTE(review): the internal array is returned directly (and the setter
 * stores the caller's array as-is), so callers can mutate the mojo's state
 * through it — confirm whether a defensive copy is wanted.
 *
 * @return the preferred charsets in the preferred order of use.
 */
public final Charset[] getPreferredCharsets() {
return this.preferredCharsets;
}
/**
 * Set the preferred charsets in the preferred order of use.
 *
 * @param charsets
 * are the preferred charsets in the preferred order of use.
 */
public final void setPreferredCharsets(Charset... charsets) {
this.preferredCharsets = charsets;
}
/**
 * Replies the manager of the SVN repository.
 *
 * <p>The manager is created lazily on first use; the method is synchronized
 * so concurrent callers share a single instance.
 *
 * @return the manager of the SVN repository.
 */
public final synchronized SVNClientManager getSVNClientManager() {
if (this.svnManager == null) {
this.svnManager = SVNClientManager.newInstance();
}
return this.svnManager;
}
/**
 * Replies the artifact handler manager.
 *
 * <p>It is an attribute defined as: <pre><code>
 * <span>/</span>* <span>@</span>component
 * <span>*</span>/
 * private ArtifactHandlerManager manager;
 * </code></pre>
 *
 * @return the artifact handler manager.
 */
public abstract ArtifactHandlerManager getArtifactHandlerManager();
/**
 * Replies the output directory of the project. Basically it is <code>getRootDirectory()+"/target"</code>.
 *
 * <p>It is an attribute defined as: <pre><code>
 * <span>/</span>* <span>@</span>parameter expression="${project.build.directory}"
 * <span>*</span>/
 * private File outputDirectory;
 * </code></pre>
 *
 * @return the output directory.
 */
public abstract File getOutputDirectory();
/**
 * Replies the root directory of the project. Basically it is the value stored inside the
 * Maven property named <code>project.basedir</code>.
 *
 * <p>It is an attribute defined as: <pre><code>
 * <span>/</span>* <span>@</span>parameter expression="${project.basedir}"
 * <span>*</span>/
 * private File baseDirectory;
 * </code></pre>
 *
 * @return the root directory.
 */
public abstract File getBaseDirectory();
/** Replies the build context that may be used during Mojo execution.
 * This build context permits to be used inside and outside the
 * Eclipse IDE.
 *
 * @return the build context.
 */
public abstract BuildContext getBuildContext();
/**
 * Replies the current project builder. Basically it is an internal component of Maven.
 *
 * <p>It is an attribute defined as: <pre><code>
 * <span>/</span>* <span>@</span>component role="org.apache.maven.project.MavenProjectBuilder"
 * * <span>@</span>required
 * * <span>@</span>readonly
 * <span>*</span>/
 * private MavenProjectBuilder projectBuilder;
 * </code></pre>
 *
 * @return the project builder
 */
public abstract MavenProjectBuilder getMavenProjectBuilder();
/**
 * Replies the current maven session. Basically it is an internal component of Maven.
 *
 * <p>It is an attribute defined as: <pre><code>
 * <span>/</span>* <span>@</span>parameter expression="${session}"
 * * <span>@</span>required
 * <span>*</span>/
 * private MavenSession mvnSession;
 * </code></pre>
 *
 * @return the maven session
 */
public abstract MavenSession getMavenSession();
/**
 * Search and reply the maven artifact which is corresponding to the given file.
 *
 * <p>The directory hierarchy is walked upward until a directory containing a
 * <code>pom.xml</code> file is found; the module description is then read and
 * cached.
 *
 * @param file
 *            is the file for which the maven artifact should be retreived.
 * @return the maven artifact or <code>null</code> if none.
 */
public final synchronized ExtendedArtifact searchArtifact(File file) {
    final String filename = removePathPrefix(getBaseDirectory(), file);
    getLog().debug("Retreiving module for " + filename); //$NON-NLS-1$
    // Walk up from the file until a directory containing pom.xml is found.
    File pomDirectory = null;
    for (File current = file; current != null && pomDirectory == null; current = current.getParentFile()) {
        if (current.isDirectory() && new File(current, "pom.xml").exists()) { //$NON-NLS-1$
            pomDirectory = current;
        }
    }
    if (pomDirectory == null) {
        // No enclosing maven module: attach a warning to the file.
        final BuildContext buildContext = getBuildContext();
        buildContext.addMessage(file,
                1, 1,
                "The maven module for this file cannot be retreived.", //$NON-NLS-1$
                BuildContext.SEVERITY_WARNING, null);
        return null;
    }
    // Serve the description from the cache when possible.
    ExtendedArtifact description = this.localArtifactDescriptions.get(pomDirectory);
    if (description == null) {
        description = readPom(pomDirectory);
        this.localArtifactDescriptions.put(pomDirectory, description);
        getLog().debug("Found local module description for " //$NON-NLS-1$
                + description.toString());
    }
    return description;
}
/**
 * Replies the project's remote repositories to use for the resolution of plugins and their dependencies.
 *
 * <p>It is an attribute defined as: <pre><code>
 * <span>/</span>* <span>@</span>parameter default-value="${project.remoteProjectRepositories}"
 * <span>*</span>/
 * private List<RemoteRepository> remoteRepos;
 * </code></pre>
 *
 * @return the remote repositories
 */
public abstract List<RemoteRepository> getRemoteRepositoryList();
/**
 * Replies the repository system used by this maven instance. Basically it is an internal component of Maven.
 *
 * <p>It is an attribute defined as: <pre><code>
 * <span>/</span>* <span>@</span>component
 * <span>*</span>/
 * private RepositorySystem repoSystem;
 * </code></pre>
 *
 * @return the repository system
 */
public abstract RepositorySystem getRepositorySystem();
/**
 * Replies the current repository/network configuration of Maven.
 *
 * <p>It is an attribute defined as: <pre><code>
 * <span>/</span>* <span>@</span>parameter default-value="${repositorySystemSession}"
 * <span>@</span>readonly
 * <span>*</span>/
 * private RepositorySystemSession repoSession;
 * </code></pre>
 *
 * @return the repository system session
 */
public abstract RepositorySystemSession getRepositorySystemSession();
/**
 * Retreive the extended artifact definition of the given artifact.
 *
 * <p>The artifact is resolved through the Aether repository system against the
 * project's remote repositories.
 *
 * @param mavenArtifact - the artifact to resolve
 * @return the artifact definition.
 * @throws MojoExecutionException on error.
 */
public final Artifact resolveArtifact(Artifact mavenArtifact) throws MojoExecutionException {
    // Build the Aether resolution request for the given artifact.
    final ArtifactRequest request = new ArtifactRequest();
    request.setArtifact(createArtifact(mavenArtifact));
    request.setRepositories(getRemoteRepositoryList());
    try {
        final ArtifactResult result = getRepositorySystem().resolveArtifact(
                getRepositorySystemSession(), request);
        // Convert the resolved Aether artifact back to a maven artifact.
        return createArtifact(result.getArtifact());
    } catch (ArtifactResolutionException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }
}
/**
 * Retreive the extended artifact definition of the given artifact id.
 *
 * <p>Convenience overload: builds a runtime/jar artifact from the coordinates
 * and delegates to {@link #resolveArtifact(Artifact)}.
 *
 * @param groupId
 * is the identifier of the group.
 * @param artifactId
 * is the identifier of the artifact.
 * @param version
 * is the version of the artifact to retreive.
 * @return the artifact definition.
 * @throws MojoExecutionException on error.
 */
public final Artifact resolveArtifact(String groupId, String artifactId, String version) throws MojoExecutionException {
return resolveArtifact(createArtifact(groupId, artifactId, version));
}
/**
 * Replies a list of files which are found on the file system.
 *
 * <p>Convenience overload: collects the matches of
 * {@link #findFiles(File, FileFilter, Collection)} into a fresh list.
 *
 * @param directory
 * is the directory to search in.
 * @param filter
 * is the file selector
 * @return the list of files.
 */
public final Collection<File> findFiles(File directory, FileFilter filter) {
final Collection<File> files = new ArrayList<>();
findFiles(directory, filter, files);
return files;
}
/**
 * Replies a list of files which are found on the file system.
 *
 * <p>The directory tree is traversed iteratively; files accepted by the
 * filter are appended to {@code fileOut}.
 *
 * @param directory
 *            is the directory to search in.
 * @param filter
 *            is the file selector
 * @param fileOut
 *            is the list of files to fill.
 */
public final synchronized void findFiles(File directory, FileFilter filter, Collection<? super File> fileOut) {
    if (directory == null || filter == null) {
        return;
    }
    final String relativePath = removePathPrefix(getBaseDirectory(), directory);
    getLog().debug("Retreiving " //$NON-NLS-1$
            + filter.toString() + " files from " //$NON-NLS-1$
            + relativePath);
    // Iterative breadth-first traversal of the directory tree.
    final List<File> pending = new ArrayList<>();
    pending.add(directory);
    int foundFiles = 0;
    while (!pending.isEmpty()) {
        final File current = pending.remove(0);
        if (current.isDirectory()) {
            final File[] children = current.listFiles(filter);
            if (children != null) {
                for (final File child : children) {
                    if (child != null && child.isDirectory()) {
                        pending.add(child);
                    } else {
                        fileOut.add(child);
                        ++foundFiles;
                    }
                }
            }
        }
    }
    getLog().debug("Found " //$NON-NLS-1$
            + foundFiles + " file(s)"); //$NON-NLS-1$
}
/**
 * Replies a map of files which are found on the file system. The map has the
 * found files as keys and the search directory as values.
 *
 * <p>Convenience overload: delegates with no rejection listener.
 *
 * @param directory
 * is the directory to search in.
 * @param filter
 * is the file selector
 * @param fileOut
 * is the list of files to fill.
 */
public final synchronized void findFiles(File directory, FileFilter filter, Map<? super File, File> fileOut) {
findFiles(directory, filter, fileOut, null);
}
/**
 * Replies a map of files which are found on the file system. The map has the
 * found files as keys and the search directory as values.
 *
 * <p>Files rejected by the filter are reported to the given listener, when one
 * is provided.
 *
 * @param directory
 *            is the directory to search in.
 * @param filter
 *            is the file selector
 * @param fileOut
 *            is the list of files to fill.
 * @param listener on the files that are not matching the file filter.
 */
public final synchronized void findFiles(File directory, FileFilter filter, Map<? super File, File> fileOut,
        FindFileListener listener) {
    if (directory == null || filter == null) {
        return;
    }
    final String relativePath = removePathPrefix(getBaseDirectory(), directory);
    getLog().debug("Retreiving " //$NON-NLS-1$
            + filter.toString() + " files from " //$NON-NLS-1$
            + relativePath);
    // Iterative breadth-first traversal; note that children are listed
    // unfiltered here so that rejected files can be reported to the listener.
    final List<File> pending = new ArrayList<>();
    pending.add(directory);
    int foundFiles = 0;
    while (!pending.isEmpty()) {
        final File current = pending.remove(0);
        if (current.isDirectory()) {
            final File[] children = current.listFiles();
            if (children != null) {
                for (final File child : children) {
                    if (child != null && child.isDirectory()) {
                        pending.add(child);
                    } else if (filter.accept(child)) {
                        fileOut.put(child, directory);
                        ++foundFiles;
                    } else if (listener != null) {
                        listener.findFile(child, directory);
                    }
                }
            }
        }
    }
    getLog().debug("Found " //$NON-NLS-1$
            + foundFiles + " file(s)"); //$NON-NLS-1$
}
/**
 * Replies the maven artifact which is described by the <code>pom.xml</code> file in the given directory.
 *
 * <p>Convenience overload: delegates to {@link #readPomFile(File)}.
 *
 * @param pomDirectory
 * is the directory where to find the <code>pom.xml</code> file.
 * @return the artifact or <code>null</code>.
 */
public final synchronized ExtendedArtifact readPom(File pomDirectory) {
return readPomFile(new File(pomDirectory, "pom.xml")); //$NON-NLS-1$
}
/**
 * Replies the maven artifact which is described by the given <code>pom.xml</code>.
 *
 * <p>The pom is parsed with the standard Maven model reader; when a parent
 * module is declared, its description is resolved (first from the local module
 * cache, then from the local repository) and its developers/contributors are
 * merged in. The SCM revision is queried from the SVN working copy when
 * available.
 *
 * @param pomFile
 * is the <code>pom.xml</code> file.
 * @return the artifact or <code>null</code>.
 */
@SuppressWarnings({"checkstyle:cyclomaticcomplexity", "checkstyle:npathcomplexity", "checkstyle:nestedifdepth"})
public final synchronized ExtendedArtifact readPomFile(File pomFile) {
String groupId;
final String artifactId;
final String name;
String version;
final String url;
final Organization organization;
final Scm scm;
List<Developer> developers;
List<Contributor> contributors;
List<License> licenses;
final Parent parent;
getLog().debug("Read pom file: " + pomFile.toString()); //$NON-NLS-1$
if (!pomFile.canRead()) {
return null;
}
// Parse the pom with the standard Maven model reader.
final MavenXpp3Reader pomReader = new MavenXpp3Reader();
try (FileReader fr = new FileReader(pomFile)) {
final Model model = pomReader.read(fr);
groupId = model.getGroupId();
artifactId = model.getArtifactId();
name = model.getName();
version = model.getVersion();
url = model.getUrl();
organization = model.getOrganization();
scm = model.getScm();
developers = model.getDevelopers();
contributors = model.getContributors();
licenses = model.getLicenses();
parent = model.getParent();
} catch (IOException | XmlPullParserException e) {
// Unreadable or malformed pom: no artifact description.
return null;
}
// Normalize the lists: replace null by an empty list, otherwise take a
// mutable copy so the parent's entries can be merged in below.
if (developers == null) {
developers = new ArrayList<>();
} else {
final List<Developer> list = new ArrayList<>();
list.addAll(developers);
developers = list;
}
if (contributors == null) {
contributors = new ArrayList<>();
} else {
final List<Contributor> list = new ArrayList<>();
list.addAll(contributors);
contributors = list;
}
if (licenses == null) {
licenses = new ArrayList<>();
} else {
final List<License> list = new ArrayList<>();
list.addAll(licenses);
licenses = list;
}
// Merge the information inherited from the parent module, if any.
if (parent != null) {
final String relPath = parent.getRelativePath();
File parentPomDirectory = new File(pomFile.getParentFile(), relPath);
try {
parentPomDirectory = parentPomDirectory.getCanonicalFile();
if (!parentPomDirectory.isDirectory()) {
parentPomDirectory = parentPomDirectory.getParentFile();
}
ExtendedArtifact parentArtifact = this.localArtifactDescriptions.get(parentPomDirectory);
if (parentArtifact == null) {
// Not cached yet: try the relative path first...
parentArtifact = readPom(parentPomDirectory);
if (parentArtifact != null) {
this.localArtifactDescriptions.put(parentPomDirectory, parentArtifact);
getLog().debug("Add local module description for " //$NON-NLS-1$
+ parentArtifact.toString());
} else {
// ...then fall back on the pom stored in the local repository.
final String key = ArtifactUtils.key(
parent.getGroupId(), parent.getArtifactId(), parent.getVersion());
final Artifact artifact = createArtifact(parent.getGroupId(),
parent.getArtifactId(), parent.getVersion());
final ArtifactRepository repo = getMavenSession().getLocalRepository();
String artifactPath = repo.pathOf(artifact);
artifactPath = artifactPath.replaceFirst("\\.jar$", ".pom"); //$NON-NLS-1$ //$NON-NLS-2$
final File artifactFile = new File(repo.getBasedir(), artifactPath);
getLog().debug("Getting pom file in local repository for " //$NON-NLS-1$
+ key + ": " + artifactFile.getAbsolutePath()); //$NON-NLS-1$
final BuildContext buildContext = getBuildContext();
buildContext.removeMessages(pomFile);
if (artifactFile.canRead()) {
parentArtifact = readPomFile(artifactFile);
if (parentArtifact != null) {
this.remoteArtifactDescriptions.put(key, parentArtifact);
getLog().debug("Add remote module description for " //$NON-NLS-1$
+ parentArtifact.toString());
} else {
buildContext.addMessage(
pomFile,
1, 1,
"Unable to retreive the pom file of " + key, //$NON-NLS-1$
BuildContext.SEVERITY_WARNING, null);
}
} else {
buildContext.addMessage(
pomFile,
1, 1,
"Cannot read the file for '" + key + "': " //$NON-NLS-1$ //$NON-NLS-2$
+ artifactFile.getAbsolutePath(),
BuildContext.SEVERITY_WARNING, null);
}
}
}
if (parentArtifact != null) {
// Inherit the people lists from the parent module.
developers.addAll(parentArtifact.getDevelopers());
contributors.addAll(parentArtifact.getContributors());
}
} catch (IOException e) {
getLog().warn(e);
}
// Be sure that the optional fields version and groupId are correctly set.
if (version == null || version.isEmpty()) {
version = parent.getVersion();
}
if (groupId == null || groupId.isEmpty()) {
groupId = parent.getGroupId();
}
}
// Query the SVN working copy for the revision number, when available.
String scmRevision = null;
try {
final SVNClientManager svnManager = getSVNClientManager();
final SVNInfo svnInfo = svnManager.getWCClient().doInfo(pomFile.getParentFile(), SVNRevision.UNDEFINED);
if (svnInfo != null) {
final SVNRevision revision = svnInfo.getRevision();
if (revision != null) {
scmRevision = Long.toString(revision.getNumber());
}
}
} catch (SVNException exception) {
// Not an SVN working copy: leave the revision unset.
}
final Artifact a = createArtifact(groupId, artifactId, version);
return new ExtendedArtifact(a, name, url, organization, scmRevision, scm, developers, contributors, licenses);
}
/** Create a Jar runtime artifact from the given values.
 *
 * <p>Convenience overload: uses the "runtime" scope and the "jar" type.
 *
 * @param groupId group id.
 * @param artifactId artifact id.
 * @param version version number.
 * @return the artifact
 */
public final Artifact createArtifact(String groupId, String artifactId, String version) {
return createArtifact(groupId, artifactId, version, "runtime", "jar"); //$NON-NLS-1$ //$NON-NLS-2$
}
/** Convert the maven artifact to Aether artifact.
 *
 * @param artifact - the maven artifact.
 * @return the Aether artifact.
 */
protected static final org.eclipse.aether.artifact.Artifact createArtifact(Artifact artifact) {
return new DefaultArtifact(
artifact.getGroupId(),
artifact.getArtifactId(),
artifact.getClassifier(),
artifact.getType(),
artifact.getVersion());
}
/** Convert the Aether artifact to maven artifact.
 *
 * <p>Note: only the coordinates are converted; the resulting artifact has the
 * default runtime scope and jar type.
 *
 * @param artifact - the Aether artifact.
 * @return the maven artifact.
 */
protected final Artifact createArtifact(org.eclipse.aether.artifact.Artifact artifact) {
return createArtifact(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion());
}
/**
 * Create an artifact from the given values.
 *
 * @param groupId group id.
 * @param artifactId artifact id.
 * @param version version number; may be {@code null}, in which case the
 *     artifact has no version range.
 * @param scope artifact scope, used as-is.
 * @param type artifact type; determines the artifact handler.
 * @return the artifact
 */
public final Artifact createArtifact(String groupId, String artifactId, String version, String scope, String type) {
    VersionRange versionRange = null;
    if (version != null) {
        versionRange = VersionRange.createFromVersion(version);
    }
    // Note: a previous revision "normalized" the scope with three branches
    // that each assigned the scope to itself; that dead code has been removed
    // and the scope is passed through unchanged.
    final ArtifactHandler handler = getArtifactHandlerManager().getArtifactHandler(type);
    return new org.apache.maven.artifact.DefaultArtifact(
            groupId, artifactId, versionRange,
            scope, type, null,
            handler, false);
}
/**
 * Check if the values of the attributes of this Mojo are correctly set. This function may
 * be overridden by subclasses to test subclass attributes.
 *
 * <p>Invoked by {@link #execute()} before {@link #executeMojo()}.
 *
 * @throws MojoExecutionException on error.
 */
protected abstract void checkMojoAttributes() throws MojoExecutionException;
/** Reply a short type tag for the given value, used as a prefix in debug logs.
 *
 * <p>FIX: the previous implementation tested {@code obj instanceof Array}; a
 * Java array never is an instance of that type, so the branch could not match
 * an actual array (and {@code getComponentType()} on a non-array class yields
 * {@code null}). The runtime class is now tested with {@code isArray()}.
 *
 * <p>NOTE(review): {@code Short} and {@code CharSequence} both map to "s";
 * confirm whether this collision is intended.
 *
 * @param obj the value to describe; may be {@code null} (tagged "o").
 * @return the type tag.
 */
@SuppressWarnings({"checkstyle:returncount", "checkstyle:cyclomaticcomplexity", "checkstyle:npathcomplexity"})
private static String getLogType(Object obj) {
    if (obj instanceof Boolean || obj instanceof AtomicBoolean) {
        return "B"; //$NON-NLS-1$
    }
    if (obj instanceof Byte) {
        return "b"; //$NON-NLS-1$
    }
    if (obj instanceof Short) {
        return "s"; //$NON-NLS-1$
    }
    if (obj instanceof Integer || obj instanceof AtomicInteger) {
        return "i"; //$NON-NLS-1$
    }
    if (obj instanceof Long || obj instanceof AtomicLong) {
        return "l"; //$NON-NLS-1$
    }
    if (obj instanceof Float) {
        return "f"; //$NON-NLS-1$
    }
    if (obj instanceof Double) {
        return "d"; //$NON-NLS-1$
    }
    if (obj instanceof BigDecimal) {
        return "D"; //$NON-NLS-1$
    }
    if (obj instanceof BigInteger) {
        return "I"; //$NON-NLS-1$
    }
    if (obj instanceof CharSequence) {
        return "s"; //$NON-NLS-1$
    }
    if (obj != null && obj.getClass().isArray()) {
        return obj.getClass().getComponentType().getName() + "[]"; //$NON-NLS-1$
    }
    if (obj instanceof Set<?>) {
        return "set"; //$NON-NLS-1$
    }
    if (obj instanceof Map<?, ?>) {
        return "map"; //$NON-NLS-1$
    }
    if (obj instanceof List<?>) {
        return "list"; //$NON-NLS-1$
    }
    if (obj instanceof Collection<?>) {
        return "col"; //$NON-NLS-1$
    }
    return "o"; //$NON-NLS-1$
}
/**
 * Throw an exception when the given object is null.
 *
 * <p>When debug logging is enabled, the value and a short type tag are logged
 * before the check.
 *
 * @param message
 *            is the message to put in the exception.
 * @param obj the object to test.
 */
protected final void assertNotNull(String message, Object obj) {
    final Log logger = getLog();
    if (logger.isDebugEnabled()) {
        final StringBuilder buffer = new StringBuilder();
        buffer.append("\t("); //$NON-NLS-1$
        buffer.append(getLogType(obj));
        buffer.append(") "); //$NON-NLS-1$
        buffer.append(message);
        buffer.append(" = "); //$NON-NLS-1$
        buffer.append(obj);
        logger.debug(buffer.toString());
    }
    if (obj == null) {
        throw new AssertionError("assertNotNull: " + message); //$NON-NLS-1$
    }
}
@Override
public final void execute() throws MojoExecutionException {
try {
// Validate the configuration before running the concrete mojo.
checkMojoAttributes();
executeMojo();
} finally {
// Always release the cached module descriptions, even on failure.
clearInternalBuffers();
}
}
/**
 * Clear internal buffers.
 *
 * <p>Empties the local and remote module-description caches; invoked after
 * every execution.
 */
protected synchronized void clearInternalBuffers() {
this.localArtifactDescriptions.clear();
this.remoteArtifactDescriptions.clear();
}
/**
 * Invoked when the Mojo should be executed.
 *
 * <p>Called by {@link #execute()} after {@link #checkMojoAttributes()} has
 * validated the configuration.
 *
 * @throws MojoExecutionException on error.
 */
protected abstract void executeMojo() throws MojoExecutionException;
/** Join the values with the given joint.
 *
 * <p>Null and empty values are skipped; the joint is only inserted between
 * two kept values.
 *
 * @param joint the joint.
 * @param values the values.
 * @return the jointed values
 */
public static String join(String joint, String... values) {
    final StringBuilder buffer = new StringBuilder();
    boolean first = true;
    for (final String value : values) {
        if (value == null || value.isEmpty()) {
            continue;
        }
        if (!first) {
            buffer.append(joint);
        }
        buffer.append(value);
        first = false;
    }
    return buffer.toString();
}
/** Read the whole file with the given decoder, failing on the first byte
 * sequence that is invalid for the decoder's charset.
 *
 * @param file the file to read.
 * @param decoder the candidate decoder.
 * @throws IOException on read error.
 * @throws CharacterCodingException when the file is not encoded with the decoder's charset.
 */
private static void detectEncoding(File file, CharsetDecoder decoder) throws IOException, CharacterCodingException {
    // Make the decoder report (rather than replace) any invalid input.
    decoder.onMalformedInput(CodingErrorAction.REPORT);
    decoder.onUnmappableCharacter(CodingErrorAction.REPORT);
    try (FileInputStream fis = new FileInputStream(file);
            ReadableByteChannel channel = Channels.newChannel(fis);
            Reader reader = Channels.newReader(channel, decoder, -1);
            BufferedReader bufferedReader = new BufferedReader(reader)) {
        // Consume the whole file; the content itself is not used.
        while (bufferedReader.readLine() != null) {
            // Nothing to do.
        }
    }
}
/**
 * Try to detect and reply the encoding of the given file. This function uses the
 * charsets replied by {@link #getPreferredCharsets()} to select a charset when many are possible.
 *
 * @param file
 *            is the file to read.
 * @return the encoding charset of the given file or <code>null</code> if the encoding could not be detected.
 * @see #getPreferredCharsets()
 * @see #setPreferredCharsets(Charset...)
 */
@SuppressWarnings("checkstyle:npathcomplexity")
public final Charset detectEncoding(File file) {
    // Collect every installed charset that decodes the file without error.
    final Collection<Charset> fittingCharsets = new TreeSet<>();
    for (final Charset candidate : Charset.availableCharsets().values()) {
        try {
            detectEncoding(file, candidate.newDecoder());
            fittingCharsets.add(candidate);
        } catch (Throwable e) {
            // The file is not encoded with this charset: skip it.
        }
    }
    if (getLog().isDebugEnabled()) {
        getLog().debug("Valid charsets for " + file.getName() + ":\n" //$NON-NLS-1$ //$NON-NLS-2$
                + fittingCharsets.toString());
    }
    // 1) Prefer one of the configured charsets, in order of preference.
    for (final Charset prefCharset : getPreferredCharsets()) {
        if (prefCharset.canEncode() && fittingCharsets.contains(prefCharset)) {
            getLog().debug("Use preferred charset for " + file.getName() //$NON-NLS-1$
                    + ": " + prefCharset.displayName()); //$NON-NLS-1$
            return prefCharset;
        }
    }
    // 2) Fall back on the platform default charset.
    final Charset platformCharset = Charset.defaultCharset();
    if (platformCharset.canEncode() && fittingCharsets.contains(platformCharset)) {
        getLog().debug("Use platform default charset for " + file.getName() + ": " //$NON-NLS-1$ //$NON-NLS-2$
                + platformCharset.displayName());
        return Charset.defaultCharset();
    }
    // 3) Otherwise take the first fitting charset able to encode.
    for (final Charset candidate : fittingCharsets) {
        if (candidate.canEncode()) {
            getLog().debug("Use first valid charset for " + file.getName() + ": " //$NON-NLS-1$ //$NON-NLS-2$
                    + candidate.displayName());
            return candidate;
        }
    }
    return null;
}
/** Replies the dependencies specified in the Maven configuration
 * of the current project.
 *
 * @param isTransitive indicates if the dependencies of dependencies
 * must also be replied by the iterator.
 * @return the iterator.
 * @see #getDependencies(MavenProject, boolean)
 */
public final Iterator<MavenProject> getDependencies(boolean isTransitive) {
return getDependencies(getMavenSession().getCurrentProject(), isTransitive);
}
/** Replies the dependencies specified in the Maven configuration
 * of the given project.
 *
 * @param project is the maven project for which the dependencies must be replied.
 * @param isTransitive indicates if the dependencies of dependencies
 * must also be replied by the iterator.
 * @return the iterator.
 * @see #getDependencies(boolean)
 */
public final Iterator<MavenProject> getDependencies(MavenProject project, boolean isTransitive) {
return new DependencyIterator(project, isTransitive);
}
/** Replies the plugins specified in the Maven configuration
 * of the current project.
 *
 * @param isTransitive indicates if the plugins of dependencies
 * must also be replied by the iterator.
 * @return the iterator.
 * @see #getPlugins(MavenProject, boolean)
 */
public final Iterator<Plugin> getPlugins(boolean isTransitive) {
return getPlugins(getMavenSession().getCurrentProject(), isTransitive);
}
/** Replies the plugins specified in the Maven configuration
 * of the given project.
 *
 * @param project is the maven project for which the plugins must be replied.
 * @param isTransitive indicates if the plugins of dependencies
 * must also be replied by the iterator.
 * @return the iterator.
 * @see #getPlugins(boolean)
 */
public final Iterator<Plugin> getPlugins(MavenProject project, boolean isTransitive) {
return new PluginIterator(project, isTransitive);
}
/** Load the Maven project for the given artifact.
 *
 * <p>The project is built from the current project's remote repositories and
 * the session's local repository. Build failures are logged as warnings and
 * <code>null</code> is replied.
 *
 * @param artifact the artifact.
 * @return the maven project, or <code>null</code> when the project cannot be built.
 */
public MavenProject getMavenProject(Artifact artifact) {
try {
final MavenSession session = getMavenSession();
final MavenProject current = session.getCurrentProject();
final MavenProject prj = getMavenProjectBuilder().buildFromRepository(
artifact,
current.getRemoteArtifactRepositories(),
session.getLocalRepository());
return prj;
} catch (ProjectBuildingException e) {
getLog().warn(e);
}
return null;
}
/** Dependency iterator.
 *
 * <p>Lazily iterates over the Maven projects corresponding to the
 * dependencies of a given project. Dependency artifacts are resolved and
 * their project models built on demand; resolution failures are reported
 * through the build context instead of aborting the iteration.
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 */
private class DependencyIterator implements Iterator<MavenProject> {

	/** Remote repositories used to resolve the dependency artifacts. */
	private final List<ArtifactRepository> remoteRepositories;

	/** Indicates if the dependencies of dependencies are also iterated. */
	private final boolean isTransitive;

	/** Pom file of the root project; build messages are attached to it. */
	private final File projectFile;

	/** Work queue of dependencies that remain to be resolved. */
	private List<Dependency> dependencies = new ArrayList<>();

	/** Coordinates ("groupId:artifactId:version") already processed. */
	private Set<String> treated = new TreeSet<>();

	/** Next element to reply, or {@code null} when iteration is finished. */
	private MavenProject next;

	/** Constructor.
	 * @param project is the project for which the dependencies must
	 * be replied.
	 * @param isTransitive indicates if the dependencies of dependencies must also be replied
	 * by the iterator.
	 */
	DependencyIterator(MavenProject project, boolean isTransitive) {
		this.isTransitive = isTransitive;
		this.remoteRepositories = project.getRemoteArtifactRepositories();
		this.dependencies.addAll(project.getDependencies());
		this.projectFile = project.getFile();
		// Clear the messages from a previous run before reporting new ones.
		getBuildContext().removeMessages(this.projectFile);
		searchNext();
	}

	/** Computes the next element to reply, resolving entries from the work
	 * queue until a project model can be built or the queue is empty.
	 */
	private void searchNext() {
		this.next = null;
		while (this.next == null && !this.dependencies.isEmpty()) {
			final Dependency dependency = this.dependencies.remove(0);
			if (dependency != null) {
				final String artifactId = dependency.getGroupId() + ":" + dependency.getArtifactId() //$NON-NLS-1$
						+ ":" + dependency.getVersion(); //$NON-NLS-1$
				if (!this.treated.contains(artifactId)) {
					boolean isTreated = false;
					try {
						final Artifact dependencyArtifact = createArtifact(
								dependency.getGroupId(),
								dependency.getArtifactId(),
								dependency.getVersion(),
								dependency.getScope(),
								dependency.getType());
						resolveArtifact(dependencyArtifact);
						final MavenProjectBuilder builder = getMavenProjectBuilder();
						final MavenProject dependencyProject = builder.buildFromRepository(
								dependencyArtifact,
								this.remoteRepositories,
								getMavenSession().getLocalRepository());
						if (dependencyProject != null) {
							if (this.isTransitive) {
								// Enqueue the transitive dependencies as well.
								this.dependencies.addAll(dependencyProject.getDependencies());
							}
							this.next = dependencyProject;
							isTreated = true;
						}
					} catch (MojoExecutionException | ProjectBuildingException e) {
						// Report the failure and continue with the next dependency.
						// Fixed message: it previously said "retreive the Maven plugin"
						// although this iterator resolves dependencies.
						getBuildContext().addMessage(
								this.projectFile,
								1, 1,
								"Unable to retrieve the Maven dependency: " + artifactId, //$NON-NLS-1$
								BuildContext.SEVERITY_WARNING,
								e);
						isTreated = true;
					}
					if (isTreated) {
						this.treated.add(artifactId);
					}
				}
			}
		}
	}

	@Override
	public boolean hasNext() {
		return this.next != null;
	}

	@Override
	public MavenProject next() {
		final MavenProject n = this.next;
		if (n == null) {
			throw new NoSuchElementException();
		}
		searchNext();
		return n;
	}
}
/** Plugin iterator.
 *
 * <p>Iterates over the build plugins declared by the dependency projects
 * replied by {@link #getDependencies(MavenProject, boolean)}.
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 */
private class PluginIterator implements Iterator<Plugin> {

	private final Iterator<MavenProject> dependencyIterator;

	private Iterator<org.apache.maven.model.Plugin> pluginIterator;

	private Plugin next;

	/** Constructor.
	 * @param project the project.
	 * @param isTransitive indicates if the dependency is transitive.
	 */
	PluginIterator(MavenProject project, boolean isTransitive) {
		this.dependencyIterator = getDependencies(project, isTransitive);
		searchNext();
	}

	/** Advances to the next available plugin, moving on to the next
	 * dependency project whenever the current plugin list is exhausted.
	 */
	private void searchNext() {
		this.next = null;
		do {
			if (this.pluginIterator != null && this.pluginIterator.hasNext()) {
				this.next = this.pluginIterator.next();
			} else if (this.dependencyIterator.hasNext()) {
				final MavenProject dependencyProject = this.dependencyIterator.next();
				final List<Plugin> plugins = dependencyProject.getBuildPlugins();
				if (plugins != null) {
					this.pluginIterator = plugins.iterator();
				}
			} else {
				return;
			}
		} while (this.next == null);
	}

	@Override
	public boolean hasNext() {
		return this.next != null;
	}

	@Override
	public Plugin next() {
		if (this.next == null) {
			throw new NoSuchElementException();
		}
		final Plugin result = this.next;
		searchNext();
		return result;
	}
}
/**
 * Listener notified when a file that does not match the file filter is
 * encountered during a directory search.
 *
 * <p>NOTE(review): the previous Javadoc ("Abstract implementation for all
 * Arakhnê maven modules... @component") was copy-pasted from an unrelated
 * class and did not describe this interface.
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 */
@FunctionalInterface
public interface FindFileListener extends EventListener {
	/** Invoked when a file which is not matching the file filter was found.
	 *
	 * @param file is the file that is not matching the file filter.
	 * @param rootDirectory is the root directory in which the file was found.
	 */
	void findFile(File file, File rootDirectory);
}
}
| |
package org.sagebionetworks.repo.web.service;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.sagebionetworks.file.services.FileUploadService;
import org.sagebionetworks.repo.manager.AuthenticationManager;
import org.sagebionetworks.repo.manager.file.FileHandleManager;
import org.sagebionetworks.repo.manager.file.LocalFileUploadRequest;
import org.sagebionetworks.repo.manager.trash.EntityInTrashCanException;
import org.sagebionetworks.repo.model.ACCESS_TYPE;
import org.sagebionetworks.repo.model.AccessControlList;
import org.sagebionetworks.repo.model.AuthorizationConstants;
import org.sagebionetworks.repo.model.Entity;
import org.sagebionetworks.repo.model.FileEntity;
import org.sagebionetworks.repo.model.Folder;
import org.sagebionetworks.repo.model.Project;
import org.sagebionetworks.repo.model.ResourceAccess;
import org.sagebionetworks.repo.model.auth.JSONWebTokenHelper;
import org.sagebionetworks.repo.model.auth.LoginResponse;
import org.sagebionetworks.repo.model.auth.NewIntegrationTestUser;
import org.sagebionetworks.repo.model.file.S3FileHandle;
import org.sagebionetworks.repo.model.project.ProjectSetting;
import org.sagebionetworks.repo.model.project.ProjectSettingsType;
import org.sagebionetworks.repo.model.project.S3StorageLocationSetting;
import org.sagebionetworks.repo.model.project.UploadDestinationListSetting;
import org.sagebionetworks.repo.model.util.ModelConstants;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.io.Files;
/**
 * Integration test for the trash service: moving entities to the trash can,
 * restoring them, and the restriction that a trashed entity cannot be restored
 * into an STS-enabled folder other than its original parent.
 */
@ExtendWith(SpringExtension.class)
@ContextConfiguration(locations = { "classpath:test-context.xml" })
public class TrashServiceImplAutowiredTest {

	@Autowired
	private AdministrationService adminService;

	// Bypass Auth Service to sign terms of use.
	@Autowired
	private AuthenticationManager authManager;

	@Autowired
	private CertifiedUserService certifiedUserService;

	@Autowired
	private FileUploadService fileUploadService;

	@Autowired
	private EntityService entityService;

	// Used only to create test file handles.
	@Autowired
	private FileHandleManager fileHandleManager;

	@Autowired
	private ProjectSettingsService projectSettingsService;

	@Autowired
	private TrashService trashService;

	// Resources created during a test; cleaned up in afterEach().
	private List<Entity> entitiesToDelete;
	private List<S3FileHandle> fileHandlesToDelete;
	private List<File> filesToDelete;

	private Long adminUserId;
	private String projectId;
	private Long userId;

	/** Creates a fresh test user and a project owned by that user. */
	@BeforeEach
	public void beforeEach() {
		// Set up lists of entities to delete.
		entitiesToDelete = new ArrayList<>();
		fileHandlesToDelete = new ArrayList<>();
		filesToDelete = new ArrayList<>();

		// Set up test user.
		adminUserId = AuthorizationConstants.BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId();
		userId = createUser();

		// Set up test project.
		Project project = new Project();
		String projectName = "project" + new Random().nextInt();
		project.setName(projectName);
		project = entityService.createEntity(userId, project, null);
		entitiesToDelete.add(project);
		projectId = project.getId();
	}

	/**
	 * Creates a certified integration-test user who has accepted the terms of
	 * use, so the user can create file entities.
	 *
	 * @return the principal ID of the new user.
	 */
	private long createUser() {
		// Create the user.
		NewIntegrationTestUser user = new NewIntegrationTestUser();
		String username = UUID.randomUUID().toString();
		user.setEmail(username + "@test.com");
		user.setUsername(username);
		LoginResponse loginResponse = adminService.createOrGetTestUser(adminUserId, user);
		// The principal ID is the subject of the returned JWT access token.
		String accessTokenSubject = JSONWebTokenHelper.getSubjectFromJWTAccessToken(loginResponse.getAccessToken());
		long createdUserId = Long.valueOf(accessTokenSubject);
		certifiedUserService.setUserCertificationStatus(adminUserId, createdUserId, true);

		// Before we can create file entities, we must agree to terms of use.
		authManager.setTermsOfUseAcceptance(createdUserId, true);
		return createdUserId;
	}

	/** Deletes entities (children before parents), file handles, and local files. */
	@AfterEach
	public void afterEach() {
		// Delete entities.
		for (Entity entity : Lists.reverse(entitiesToDelete)) {
			entityService.deleteEntity(userId, entity.getId());
		}

		// Delete file handles.
		for (S3FileHandle fileHandle : fileHandlesToDelete) {
			fileUploadService.deleteFileHandle(fileHandle.getId(), userId);
		}

		// Delete local files.
		for (File file : filesToDelete) {
			file.delete();
		}
	}

	/**
	 * A user who has DELETE/UPDATE access on the project, but is not the file
	 * handle owner, can still trash and restore a file entity.
	 */
	@Test
	public void fileHandleNonOwnerCanDeleteAndRestoreFileEntity() throws Exception {
		// Create another user and give them access to the project.
		long user2Id = createUser();
		ResourceAccess userAccess = new ResourceAccess();
		userAccess.setPrincipalId(userId);
		userAccess.setAccessType(ModelConstants.ENTITY_ADMIN_ACCESS_PERMISSIONS);
		ResourceAccess user2Access = new ResourceAccess();
		user2Access.setPrincipalId(user2Id);
		user2Access.setAccessType(EnumSet.of(ACCESS_TYPE.CREATE, ACCESS_TYPE.READ, ACCESS_TYPE.UPDATE,
				ACCESS_TYPE.DELETE));
		AccessControlList acl = entityService.getEntityACL(projectId, userId);
		acl.setResourceAccess(ImmutableSet.of(userAccess, user2Access));
		entityService.updateEntityACL(userId, acl);

		// Upload a file to the project (with user 1).
		FileEntity fileEntity = uploadFile(projectId, null);
		String fileEntityId = fileEntity.getId();

		// User 2 can delete and restore the file.
		trashService.moveToTrash(user2Id, fileEntityId, false);
		assertThrows(EntityInTrashCanException.class, () -> entityService.getEntity(user2Id, fileEntityId));

		trashService.restoreFromTrash(user2Id, fileEntityId, null);
		FileEntity restored = entityService.getEntity(user2Id, fileEntityId, FileEntity.class);
		assertNotNull(restored);
	}

	/**
	 * A file can be trashed and restored to its original STS-enabled folder,
	 * but not restored into a *different* STS-enabled folder.
	 */
	@Test
	public void trashAndRestoreStsFile() throws Exception {
		// Create folder A, which is STS-enabled.
		Folder folderA = createFolder(projectId);
		long storageLocationId = createStsStorageLocation();
		applyStorageLocationToFolder(folderA, storageLocationId);

		// Create folder B, with the same STS storage location.
		Folder folderB = createFolder(projectId);
		applyStorageLocationToFolder(folderB, storageLocationId);

		// Upload file to folder A.
		FileEntity fileEntity = uploadFile(folderA.getId(), storageLocationId);
		String fileEntityId = fileEntity.getId();

		// Trash the file entity. Getting the file entity will now throw an EntityInTrashCanException.
		trashService.moveToTrash(userId, fileEntityId, false);
		assertThrows(EntityInTrashCanException.class, () -> entityService.getEntity(userId, fileEntityId));

		// Restore the file entity. It is gettable now.
		trashService.restoreFromTrash(userId, fileEntityId, null);
		FileEntity restored = entityService.getEntity(userId, fileEntityId, FileEntity.class);
		assertNotNull(restored);

		// Trash the file entity again and attempt to restore to folder B. This fails, because you cannot restore a
		// file to an STS-enabled folder, unless it was the original parent.
		trashService.moveToTrash(userId, fileEntityId, false);
		assertThrows(IllegalArgumentException.class, () -> trashService.restoreFromTrash(userId, fileEntityId,
				folderB.getId()));

		// Delete the storage location from folder B. Now restoring to it works because we don't have the restriction
		// on non-STS-enabled folders.
		deleteStorageLocationFromFolder(folderB);
		trashService.restoreFromTrash(userId, fileEntityId, folderB.getId());
		restored = entityService.getEntity(userId, fileEntityId, FileEntity.class);
		assertNotNull(restored);
	}

	/**
	 * Same scenario as {@link #trashAndRestoreStsFile()}, but for a subfolder
	 * instead of a file entity.
	 */
	@Test
	public void trashAndRestoreStsFolder() throws Exception {
		// Create folder A, which is STS-enabled.
		Folder folderA = createFolder(projectId);
		long storageLocationId = createStsStorageLocation();
		applyStorageLocationToFolder(folderA, storageLocationId);

		// Create folder B, with the same STS storage location.
		Folder folderB = createFolder(projectId);
		applyStorageLocationToFolder(folderB, storageLocationId);

		// Create a subfolder in folder A.
		Folder subfolder = createFolder(folderA.getId());
		String subfolderId = subfolder.getId();

		// Trash the subfolder. Getting the subfolder will now throw an EntityInTrashCanException.
		trashService.moveToTrash(userId, subfolderId, false);
		assertThrows(EntityInTrashCanException.class, () -> entityService.getEntity(userId, subfolderId));

		// Restore the subfolder. It is gettable now.
		trashService.restoreFromTrash(userId, subfolderId, null);
		Folder restored = entityService.getEntity(userId, subfolderId, Folder.class);
		assertNotNull(restored);

		// Trash the subfolder again and attempt to restore to folder B. This fails, because you cannot restore a
		// file to an STS-enabled folder, unless it was the original parent.
		trashService.moveToTrash(userId, subfolderId, false);
		assertThrows(IllegalArgumentException.class, () -> trashService.restoreFromTrash(userId, subfolderId,
				folderB.getId()));

		// Delete the storage location from folder B. Now restoring to it works because we don't have the restriction
		// on non-STS-enabled folders.
		deleteStorageLocationFromFolder(folderB);
		trashService.restoreFromTrash(userId, subfolderId, folderB.getId());
		restored = entityService.getEntity(userId, subfolderId, Folder.class);
		assertNotNull(restored);
	}

	/** Creates a folder under the given parent and registers it for cleanup. */
	private Folder createFolder(String parentId) {
		Folder folder = new Folder();
		folder.setParentId(parentId);
		folder = entityService.createEntity(userId, folder, null);
		entitiesToDelete.add(folder);
		return folder;
	}

	/** Creates an STS-enabled S3 storage location and returns its ID. */
	private long createStsStorageLocation() throws Exception {
		S3StorageLocationSetting storageLocationSetting = new S3StorageLocationSetting();
		storageLocationSetting.setStsEnabled(true);
		storageLocationSetting = (S3StorageLocationSetting) projectSettingsService.createStorageLocationSetting(userId,
				storageLocationSetting);
		return storageLocationSetting.getStorageLocationId();
	}

	/** Makes the given storage location the upload destination of the folder. */
	private void applyStorageLocationToFolder(Folder folder, long storageLocationId) {
		UploadDestinationListSetting projectSetting = new UploadDestinationListSetting();
		projectSetting.setLocations(ImmutableList.of(storageLocationId));
		projectSetting.setProjectId(folder.getId());
		projectSetting.setSettingsType(ProjectSettingsType.upload);
		projectSettingsService.createProjectSetting(userId, projectSetting);
	}

	/** Removes the upload-destination project setting from the folder. */
	private void deleteStorageLocationFromFolder(Folder folder) {
		ProjectSetting projectSetting = projectSettingsService.getProjectSettingByProjectAndType(userId,
				folder.getId(), ProjectSettingsType.upload);
		projectSettingsService.deleteProjectSetting(userId, projectSetting.getId());
	}

	/**
	 * Uploads a small temp file as a file entity under the given parent.
	 *
	 * @param parentId the parent entity for the new file entity.
	 * @param storageLocationId the storage location to upload into; may be null.
	 * @return the created file entity (registered for cleanup).
	 */
	private FileEntity uploadFile(String parentId, Long storageLocationId) throws Exception {
		// Create the file.
		File file = File.createTempFile("TrashServiceImplAutowiredTest", ".txt");
		filesToDelete.add(file);
		Files.asCharSink(file, StandardCharsets.UTF_8).write("dummy content");

		// Create file handle.
		LocalFileUploadRequest uploadRequest = new LocalFileUploadRequest().withContentType("text/plain")
				.withFileToUpload(file).withStorageLocationId(storageLocationId).withUserId(userId.toString());
		S3FileHandle fileHandle = fileHandleManager.uploadLocalFile(uploadRequest);
		fileHandlesToDelete.add(fileHandle);

		// Create file entity.
		FileEntity fileEntity = new FileEntity();
		fileEntity.setDataFileHandleId(fileHandle.getId());
		fileEntity.setParentId(parentId);
		fileEntity = entityService.createEntity(userId, fileEntity, null);
		entitiesToDelete.add(fileEntity);
		return fileEntity;
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.olingo2.springboot;
import java.util.Map;
import javax.annotation.Generated;
import org.apache.camel.component.olingo2.internal.Olingo2ApiName;
import org.apache.camel.spring.boot.ComponentConfigurationPropertiesCommon;
import org.apache.camel.support.jsse.SSLContextParameters;
import org.apache.http.HttpHost;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
 * Communicates with OData 2.0 services using Apache Olingo.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
// NOTE(review): this class is generated; any hand edit (including these
// comments) will be lost on the next regeneration of the Spring Boot
// auto-configuration metadata.
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@ConfigurationProperties(prefix = "camel.component.olingo2")
public class Olingo2ComponentConfiguration
        extends
            ComponentConfigurationPropertiesCommon {

    /**
     * Whether to enable auto configuration of the olingo2 component. This is
     * enabled by default.
     */
    private Boolean enabled;
    /**
     * To use the shared configuration
     */
    private Olingo2ConfigurationNestedConfiguration configuration;
    /**
     * Enable usage of global SSL context parameters.
     */
    private Boolean useGlobalSslContextParameters = false;
    /**
     * Whether the component should resolve property placeholders on itself when
     * starting. Only properties which are of String type can use property
     * placeholders.
     */
    private Boolean resolvePropertyPlaceholders = true;

    public Olingo2ConfigurationNestedConfiguration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(
            Olingo2ConfigurationNestedConfiguration configuration) {
        this.configuration = configuration;
    }

    public Boolean getUseGlobalSslContextParameters() {
        return useGlobalSslContextParameters;
    }

    public void setUseGlobalSslContextParameters(
            Boolean useGlobalSslContextParameters) {
        this.useGlobalSslContextParameters = useGlobalSslContextParameters;
    }

    public Boolean getResolvePropertyPlaceholders() {
        return resolvePropertyPlaceholders;
    }

    public void setResolvePropertyPlaceholders(
            Boolean resolvePropertyPlaceholders) {
        this.resolvePropertyPlaceholders = resolvePropertyPlaceholders;
    }

    /**
     * Nested configuration mirroring
     * {@code org.apache.camel.component.olingo2.Olingo2Configuration}.
     */
    public static class Olingo2ConfigurationNestedConfiguration {
        // NOTE(review): raw Class type comes from the generator; the nested
        // class mirrors the fields of the referenced configuration class.
        public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.olingo2.Olingo2Configuration.class;
        /**
         * What kind of operation to perform
         */
        private Olingo2ApiName apiName;
        /**
         * What sub operation to use for the selected operation
         */
        private String methodName;
        /**
         * Target OData service base URI, e.g.
         * http://services.odata.org/OData/OData.svc
         */
        private String serviceUri;
        /**
         * Content-Type header value can be used to specify JSON or XML message
         * format, defaults to application/json;charset=utf-8
         */
        private String contentType = "application/json;charset=utf-8";
        /**
         * Custom HTTP headers to inject into every request, this could include
         * OAuth tokens, etc.
         */
        // NOTE(review): raw Map as emitted by the generator — presumably
        // String keys/values; confirm against Olingo2Configuration.
        private Map httpHeaders;
        /**
         * HTTP connection creation timeout in milliseconds, defaults to 30,000
         * (30 seconds)
         */
        private Integer connectTimeout = 30000;
        /**
         * HTTP request timeout in milliseconds, defaults to 30,000 (30 seconds)
         */
        private Integer socketTimeout = 30000;
        /**
         * HTTP proxy server configuration
         */
        private HttpHost proxy;
        /**
         * To configure security using SSLContextParameters
         */
        private SSLContextParameters sslContextParameters;
        /**
         * Custom HTTP async client builder for more complex HTTP client
         * configuration, overrides connectionTimeout, socketTimeout, proxy and
         * sslContext. Note that a socketTimeout MUST be specified in the
         * builder, otherwise OData requests could block indefinitely
         */
        private HttpAsyncClientBuilder httpAsyncClientBuilder;
        /**
         * Custom HTTP client builder for more complex HTTP client
         * configuration, overrides connectionTimeout, socketTimeout, proxy and
         * sslContext. Note that a socketTimeout MUST be specified in the
         * builder, otherwise OData requests could block indefinitely
         */
        private HttpClientBuilder httpClientBuilder;

        public Olingo2ApiName getApiName() {
            return apiName;
        }

        public void setApiName(Olingo2ApiName apiName) {
            this.apiName = apiName;
        }

        public String getMethodName() {
            return methodName;
        }

        public void setMethodName(String methodName) {
            this.methodName = methodName;
        }

        public String getServiceUri() {
            return serviceUri;
        }

        public void setServiceUri(String serviceUri) {
            this.serviceUri = serviceUri;
        }

        public String getContentType() {
            return contentType;
        }

        public void setContentType(String contentType) {
            this.contentType = contentType;
        }

        public Map getHttpHeaders() {
            return httpHeaders;
        }

        public void setHttpHeaders(Map httpHeaders) {
            this.httpHeaders = httpHeaders;
        }

        public Integer getConnectTimeout() {
            return connectTimeout;
        }

        public void setConnectTimeout(Integer connectTimeout) {
            this.connectTimeout = connectTimeout;
        }

        public Integer getSocketTimeout() {
            return socketTimeout;
        }

        public void setSocketTimeout(Integer socketTimeout) {
            this.socketTimeout = socketTimeout;
        }

        public HttpHost getProxy() {
            return proxy;
        }

        public void setProxy(HttpHost proxy) {
            this.proxy = proxy;
        }

        public SSLContextParameters getSslContextParameters() {
            return sslContextParameters;
        }

        public void setSslContextParameters(
                SSLContextParameters sslContextParameters) {
            this.sslContextParameters = sslContextParameters;
        }

        public HttpAsyncClientBuilder getHttpAsyncClientBuilder() {
            return httpAsyncClientBuilder;
        }

        public void setHttpAsyncClientBuilder(
                HttpAsyncClientBuilder httpAsyncClientBuilder) {
            this.httpAsyncClientBuilder = httpAsyncClientBuilder;
        }

        public HttpClientBuilder getHttpClientBuilder() {
            return httpClientBuilder;
        }

        public void setHttpClientBuilder(HttpClientBuilder httpClientBuilder) {
            this.httpClientBuilder = httpClientBuilder;
        }
    }
}
| |
/**
* Copyright (C) the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ninja.cache;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import java.io.NotSerializableException;
import java.io.Serializable;
import java.util.Map;
import static ninja.utils.TimeUtil.parseDuration;
/**
 * A convenience class to access the underlying cache implementation.
 *
 * Makes getting and setting of objects a lot simpler.
 *
 * This class originates from Play 1.2.5's excellent cache implementation.
 *
 * @author ra
 *
 */
@Singleton
public class NinjaCache {

    /** Expiration constant meaning "store indefinitely". */
    private static final int ETERNITY = Integer.MAX_VALUE;

    private final Cache cache;

    @Inject
    public NinjaCache(Cache cache) {
        this.cache = cache;
    }

    /**
     * Add an element only if it doesn't exist.
     * @param key Element key
     * @param value Element value (must be serializable)
     * @param expiration Ex: 10s, 3mn, 8h
     */
    public void add(String key, Object value, String expiration) {
        checkSerializable(value);
        cache.add(key, value, parseDuration(expiration));
    }

    /**
     * Add an element only if it doesn't exist, and return only when
     * the element is effectively cached.
     * @param key Element key
     * @param value Element value (must be serializable)
     * @param expiration Ex: 10s, 3mn, 8h
     * @return Whether the element has eventually been cached
     */
    public boolean safeAdd(String key, Object value, String expiration) {
        checkSerializable(value);
        return cache.safeAdd(key, value, parseDuration(expiration));
    }

    /**
     * Add an element only if it doesn't exist and store it indefinitely.
     * @param key Element key
     * @param value Element value (must be serializable)
     */
    public void add(String key, Object value) {
        checkSerializable(value);
        cache.add(key, value, ETERNITY);
    }

    /**
     * Set an element.
     * @param key Element key
     * @param value Element value (must be serializable)
     * @param expiration Ex: 10s, 3mn, 8h
     */
    public void set(String key, Object value, String expiration) {
        checkSerializable(value);
        cache.set(key, value, parseDuration(expiration));
    }

    /**
     * Set an element and return only when the element is effectively cached.
     * @param key Element key
     * @param value Element value (must be serializable)
     * @param expiration Ex: 10s, 3mn, 8h
     * @return Whether the element has eventually been cached
     */
    public boolean safeSet(String key, Object value, String expiration) {
        checkSerializable(value);
        return cache.safeSet(key, value, parseDuration(expiration));
    }

    /**
     * Set an element and store it indefinitely.
     * @param key Element key
     * @param value Element value (must be serializable)
     */
    public void set(String key, Object value) {
        checkSerializable(value);
        cache.set(key, value, ETERNITY);
    }

    /**
     * Replace an element only if it already exists.
     * @param key Element key
     * @param value Element value (must be serializable)
     * @param expiration Ex: 10s, 3mn, 8h
     */
    public void replace(String key, Object value, String expiration) {
        checkSerializable(value);
        cache.replace(key, value, parseDuration(expiration));
    }

    /**
     * Replace an element only if it already exists and return only when the
     * element is effectively cached.
     * @param key Element key
     * @param value Element value (must be serializable)
     * @param expiration Ex: 10s, 3mn, 8h
     * @return Whether the element has eventually been cached
     */
    public boolean safeReplace(String key, Object value, String expiration) {
        checkSerializable(value);
        return cache.safeReplace(key, value, parseDuration(expiration));
    }

    /**
     * Replace an element only if it already exists and store it indefinitely.
     * @param key Element key
     * @param value Element value (must be serializable)
     */
    public void replace(String key, Object value) {
        checkSerializable(value);
        cache.replace(key, value, ETERNITY);
    }

    /**
     * Increment the element value (must be a Number).
     * @param key Element key
     * @param by The incr value
     * @return The new value
     */
    public long incr(String key, int by) {
        return cache.incr(key, by);
    }

    /**
     * Increment the element value (must be a Number) by 1.
     * @param key Element key
     * @return The new value
     */
    public long incr(String key) {
        return cache.incr(key, 1);
    }

    /**
     * Decrement the element value (must be a Number).
     * @param key Element key
     * @param by The decr value
     * @return The new value
     */
    public long decr(String key, int by) {
        return cache.decr(key, by);
    }

    /**
     * Decrement the element value (must be a Number) by 1.
     * @param key Element key
     * @return The new value
     */
    public long decr(String key) {
        return cache.decr(key, 1);
    }

    /**
     * Retrieve an object.
     * @param key The element key
     * @return The element value or null
     */
    public Object get(String key) {
        return cache.get(key);
    }

    /**
     * Bulk retrieve.
     * @param key List of keys
     * @return Map of keys & values
     */
    public Map<String, Object> get(String... key) {
        return cache.get(key);
    }

    /**
     * Delete an element from the cache.
     * @param key The element key
     */
    public void delete(String key) {
        cache.delete(key);
    }

    /**
     * Delete an element from the cache and return only when the
     * element is effectively removed.
     * @param key The element key
     * @return Whether the element has eventually been deleted
     */
    public boolean safeDelete(String key) {
        return cache.safeDelete(key);
    }

    /**
     * Clear all data from cache.
     */
    public void clear() {
        cache.clear();
    }

    /**
     * Convenience method to get a value cast to a given class type.
     * @param <T> The needed type
     * @param key The element key
     * @param clazz The type class
     * @return The element value or null
     * @throws ClassCastException if the cached value is not of type {@code clazz}
     */
    public <T> T get(String key, Class<T> clazz) {
        // Dynamic cast: the previous unchecked "(T)" cast ignored clazz entirely
        // and deferred any ClassCastException to the call site. Class.cast()
        // checks the type here, at the cache boundary, and is null-safe.
        return clazz.cast(cache.get(key));
    }

    /**
     * Utility that check that an object is serializable.
     */
    void checkSerializable(Object value) {
        if (value != null && !(value instanceof Serializable)) {
            throw new CacheException(
                    "Cannot cache a non-serializable value of type "
                            + value.getClass().getName(),
                    new NotSerializableException(value.getClass().getName()));
        }
    }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rules.keys;
import com.facebook.buck.hashing.FileHashLoader;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.DependencyAggregation;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.rules.RuleKeyAppendable;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import java.io.IOException;
import java.nio.file.Path;
import javax.annotation.Nonnull;
/**
* A factory for generating input-based {@link RuleKey}s.
*
* @see SupportsInputBasedRuleKey
*/
public final class InputBasedRuleKeyFactory
extends ReflectiveRuleKeyFactory<
InputBasedRuleKeyFactory.Builder,
RuleKey> {
private final FileHashLoader fileHashLoader;
private final SourcePathResolver pathResolver;
private final SourcePathRuleFinder ruleFinder;
private final LoadingCache<RuleKeyAppendable, Result> cache;
private final long inputSizeLimit;
/**
 * Creates a factory with an input size limit.
 *
 * @param seed seed forwarded to the reflective base factory.
 * @param hashLoader used to hash (and, when a limit is set, size) input files.
 * @param pathResolver resolves {@code SourcePath}s to filesystem paths.
 * @param ruleFinder maps {@code SourcePath}s back to the rules producing them.
 * @param inputSizeLimit budget enforced by the builder's {@code SizeLimiter};
 *     {@code Long.MAX_VALUE} disables the size check.
 */
public InputBasedRuleKeyFactory(
    int seed,
    FileHashLoader hashLoader,
    SourcePathResolver pathResolver,
    SourcePathRuleFinder ruleFinder,
    long inputSizeLimit) {
  super(seed);
  this.fileHashLoader = hashLoader;
  this.pathResolver = pathResolver;
  this.ruleFinder = ruleFinder;
  this.inputSizeLimit = inputSizeLimit;
  // Build the cache around the sub-rule-keys and their dep lists.
  // weakKeys(): entries are evicted once the appendable itself is no longer
  // strongly referenced; each miss computes the appendable's sub-key + deps.
  cache = CacheBuilder.newBuilder().weakKeys().build(
      new CacheLoader<RuleKeyAppendable, Result>() {
        @Override
        public Result load(
            @Nonnull RuleKeyAppendable appendable) {
          Builder subKeyBuilder = new Builder();
          appendable.appendToRuleKey(subKeyBuilder);
          return subKeyBuilder.buildResult();
        }
      });
}
/**
 * Creates a factory with no input size limit; equivalent to the full
 * constructor with {@code inputSizeLimit == Long.MAX_VALUE}.
 *
 * @param seed seed forwarded to the reflective base factory.
 * @param hashLoader used to hash the files referenced by a rule's inputs.
 * @param pathResolver resolves {@code SourcePath}s to filesystem paths.
 * @param ruleFinder maps {@code SourcePath}s back to the rules producing them.
 */
public InputBasedRuleKeyFactory(
    int seed,
    FileHashLoader hashLoader,
    SourcePathResolver pathResolver,
    SourcePathRuleFinder ruleFinder) {
  this(seed, hashLoader, pathResolver, ruleFinder, Long.MAX_VALUE);
}
@Override
protected Builder newBuilder(final BuildRule rule) {
  // Aggregation deps of the rule; a dep reachable through one of these
  // counts as an effective direct dependency.
  final Iterable<DependencyAggregation> aggregatedRules =
      Iterables.filter(rule.getDeps(), DependencyAggregation.class);
  return new Builder() {

    /** Returns true if {@code dep} is a dep of one of the rule's aggregation deps. */
    private boolean hasEffectiveDirectDep(BuildRule dep) {
      for (DependencyAggregation aggregator : aggregatedRules) {
        if (aggregator.getDeps().contains(dep)) {
          return true;
        }
      }
      return false;
    }

    // Construct the rule key, verifying that all the deps we saw when constructing it
    // are explicit dependencies of the rule.
    @Override
    public RuleKey build() {
      final Result result = buildResult();
      for (BuildRule usedDep : result.getDeps()) {
        final boolean declared =
            rule.getDeps().contains(usedDep) || hasEffectiveDirectDep(usedDep);
        Preconditions.checkState(
            declared,
            "%s: %s not in deps (%s)",
            rule.getBuildTarget(),
            usedDep.getBuildTarget(),
            rule.getDeps());
      }
      return result.getRuleKey();
    }
  };
}
/* package */ class Builder extends RuleKeyBuilder<RuleKey> {
private final ImmutableList.Builder<Iterable<BuildRule>> deps = ImmutableList.builder();
private final SizeLimiter sizeLimiter = new SizeLimiter(inputSizeLimit);
private Builder() {
super(ruleFinder, pathResolver, fileHashLoader);
}
@Override
protected Builder setAppendableRuleKey(RuleKeyAppendable appendable) {
Result result = cache.getUnchecked(appendable);
deps.add(result.getDeps());
setAppendableRuleKey(result.getRuleKey());
return this;
}
@Override
public Builder setPath(Path absolutePath, Path ideallyRelative) throws IOException {
// TODO(plamenko): this check should not be necessary, but otherwise some tests fail due to
// FileHashLoader throwing NoSuchFileException which doesn't get correctly propagated.
if (inputSizeLimit != Long.MAX_VALUE) {
sizeLimiter.add(fileHashLoader.getSize(absolutePath));
}
super.setPath(absolutePath, ideallyRelative);
return this;
}
// Input-based rule keys are evaluated after all dependencies for a rule are available on
// disk, and so we can always resolve the `Path` packaged in a `SourcePath`. We hash this,
// rather than the rule key from it's `BuildRule`.
@Override
protected Builder setSourcePath(SourcePath sourcePath) throws IOException {
if (sourcePath instanceof BuildTargetSourcePath) {
deps.add(ImmutableSet.of(ruleFinder.getRuleOrThrow((BuildTargetSourcePath) sourcePath)));
// fall through and call setSourcePathDirectly as well
}
super.setSourcePathDirectly(sourcePath);
return this;
}
// Rules supporting input-based rule keys should be described entirely by their `SourcePath`
// inputs. If we see a `BuildRule` when generating the rule key, this is likely a break in
// that contract, so check for that.
@Override
protected Builder setBuildRule(BuildRule rule) {
throw new IllegalStateException(
String.format(
"Input-based rule key builders cannot process build rules. " +
"Was given %s to add to rule key.",
rule));
}
// Build the rule key and the list of deps found from this builder.
final Result buildResult() {
return new Result(buildRuleKey(), Iterables.concat(deps.build()));
}
@Override
public RuleKey build() {
return buildRuleKey();
}
}
private static class Result {
private final RuleKey ruleKey;
private final Iterable<BuildRule> deps;
public Result(
RuleKey ruleKey,
Iterable<BuildRule> deps) {
this.ruleKey = ruleKey;
this.deps = deps;
}
public RuleKey getRuleKey() {
return ruleKey;
}
public Iterable<BuildRule> getDeps() {
return deps;
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.